From 8bc668d43d26715601c9fdcef8aa2c5c7d8c4096 Mon Sep 17 00:00:00 2001 From: Alexander Kolotov Date: Fri, 4 Jul 2025 16:30:55 -0600 Subject: [PATCH 1/4] Add page size config and slice NFT tokens --- .cursor/rules/110-new-mcp-tool.mdc | 15 ++- .env.example | 3 + Dockerfile | 1 + SPEC.md | 4 +- blockscout_mcp_server/config.py | 2 + blockscout_mcp_server/tools/address_tools.py | 38 +++++-- .../test_address_tools_integration.py | 2 +- tests/tools/test_address_tools_2.py | 104 +++++++++++++++++- 8 files changed, 150 insertions(+), 19 deletions(-) diff --git a/.cursor/rules/110-new-mcp-tool.mdc b/.cursor/rules/110-new-mcp-tool.mdc index bc0cf74..826c024 100644 --- a/.cursor/rules/110-new-mcp-tool.mdc +++ b/.cursor/rules/110-new-mcp-tool.mdc @@ -397,11 +397,22 @@ async def paginated_tool_name( base_url = await get_blockscout_base_url(chain_id) response_data = await make_blockscout_request(base_url=base_url, api_path=api_path, params=query_params) - processed_items = process_items(response_data.get("items", [])) + page_size = config.nft_page_size # Use your config setting + items = response_data.get("items", []) + items_to_return = items + next_page_params = None + if len(items) > page_size: + items_to_return = items[:page_size] + last = items[page_size - 1] + next_page_params = { + "some_id": last.get("id"), + "items_count": 50, + } + + processed_items = process_items(items_to_return) # 2. Generate structured pagination pagination = None - next_page_params = response_data.get("next_page_params") if next_page_params: next_cursor = encode_cursor(next_page_params) pagination = PaginationInfo( diff --git a/.env.example b/.env.example index 270eabb..835ebbc 100644 --- a/.env.example +++ b/.env.example @@ -10,5 +10,8 @@ BLOCKSCOUT_CHAINSCOUT_TIMEOUT=15.0 BLOCKSCOUT_CHAIN_CACHE_TTL_SECONDS=1800 BLOCKSCOUT_PROGRESS_INTERVAL_SECONDS="15.0" +# The number of items to return per page for the nft_tokens_by_address tool. 
+BLOCKSCOUT_NFT_PAGE_SIZE=10 + BLOCKSCOUT_METADATA_URL="https://metadata.services.blockscout.com" BLOCKSCOUT_METADATA_TIMEOUT="30.0" diff --git a/Dockerfile b/Dockerfile index 5ca30f7..be948c2 100644 --- a/Dockerfile +++ b/Dockerfile @@ -24,5 +24,6 @@ ENV BLOCKSCOUT_CHAINSCOUT_URL="https://chains.blockscout.com" ENV BLOCKSCOUT_CHAINSCOUT_TIMEOUT="15.0" ENV BLOCKSCOUT_CHAIN_CACHE_TTL_SECONDS="1800" ENV BLOCKSCOUT_PROGRESS_INTERVAL_SECONDS="15.0" +ENV BLOCKSCOUT_NFT_PAGE_SIZE="10" CMD ["python", "-m", "blockscout_mcp_server"] diff --git a/SPEC.md b/SPEC.md index 872d86e..038c66e 100644 --- a/SPEC.md +++ b/SPEC.md @@ -202,8 +202,8 @@ sequenceDiagram - **Improved Robustness**: It treats pagination as an atomic unit, preventing the AI from incorrectly constructing or omitting parameters for the next request. - **Simplified Tool Signatures**: Tool functions only need one optional `cursor: str` argument for pagination, keeping their schemas clean. - **Mechanism:** - When the Blockscout API returns a `next_page_params` dictionary, the server serializes this dictionary into a compact JSON string, which is then Base64URL-encoded. This creates a single, opaque, and URL-safe string that serves as the cursor for the next page. + **Mechanism:** + The server fetches a full page of results from Blockscout but returns only a small slice to the client (default 10 items). Pagination cursors are generated from the last item of that slice. When the Blockscout API returns a `next_page_params` dictionary, the server serializes it into a compact JSON string and Base64URL-encodes it, producing an opaque cursor for the next page. 
**Example:** diff --git a/blockscout_mcp_server/config.py b/blockscout_mcp_server/config.py index 25e7f2c..7cde7e7 100644 --- a/blockscout_mcp_server/config.py +++ b/blockscout_mcp_server/config.py @@ -21,5 +21,7 @@ class ServerConfig(BaseSettings): chain_cache_ttl_seconds: int = 1800 # Default 30 minutes progress_interval_seconds: float = 15.0 # Default interval for periodic progress updates + nft_page_size: int = 10 + config = ServerConfig() diff --git a/blockscout_mcp_server/tools/address_tools.py b/blockscout_mcp_server/tools/address_tools.py index bab7a6c..ea0e28e 100644 --- a/blockscout_mcp_server/tools/address_tools.py +++ b/blockscout_mcp_server/tools/address_tools.py @@ -4,6 +4,7 @@ from mcp.server.fastmcp import Context from pydantic import Field +from blockscout_mcp_server.config import config from blockscout_mcp_server.models import ( AddressInfoData, AddressLogItem, @@ -199,10 +200,25 @@ async def nft_tokens_by_address( await report_and_log_progress(ctx, progress=2.0, total=2.0, message="Successfully fetched NFT data.") - items_data = response_data.get("items", []) + page_size = config.nft_page_size + original_items = response_data.get("items", []) + + items_to_return = original_items + next_page_params = None + + if len(original_items) > page_size: + items_to_return = original_items[:page_size] + last_item_for_cursor = original_items[page_size - 1] + token_info = last_item_for_cursor.get("token", {}) + next_page_params = { + "token_contract_address_hash": token_info.get("address_hash"), + "token_type": token_info.get("type"), + "items_count": 50, + } + nft_holdings: list[NftCollectionHolding] = [] - for item in items_data: + for item in items_to_return: token = item.get("token", {}) token_instances: list[NftTokenInstance] = [] @@ -240,19 +256,17 @@ async def nft_tokens_by_address( ) ) - # Since there could be more than one page of collections for the same address, - # the pagination information is extracted from API response and added explicitly - # 
to the tool response pagination = None - next_page_params = response_data.get("next_page_params") if next_page_params: - next_cursor = encode_cursor(next_page_params) - pagination = PaginationInfo( - next_call=NextCallInfo( - tool_name="nft_tokens_by_address", - params={"chain_id": chain_id, "address": address, "cursor": next_cursor}, + filtered_next_page_params = {k: v for k, v in next_page_params.items() if v is not None} + if filtered_next_page_params: + next_cursor = encode_cursor(filtered_next_page_params) + pagination = PaginationInfo( + next_call=NextCallInfo( + tool_name="nft_tokens_by_address", + params={"chain_id": chain_id, "address": address, "cursor": next_cursor}, + ) ) - ) return build_tool_response(data=nft_holdings, pagination=pagination) diff --git a/tests/integration/test_address_tools_integration.py b/tests/integration/test_address_tools_integration.py index d89a9e0..de79cd4 100644 --- a/tests/integration/test_address_tools_integration.py +++ b/tests/integration/test_address_tools_integration.py @@ -24,7 +24,7 @@ async def test_nft_tokens_by_address_integration(mock_ctx): assert isinstance(result, ToolResponse) assert isinstance(result.data, list) - assert len(result.data) > 0 + assert 0 < len(result.data) <= 10 assert result.pagination is not None first_holding = result.data[0] diff --git a/tests/tools/test_address_tools_2.py b/tests/tools/test_address_tools_2.py index 5a5683c..8ec6e57 100644 --- a/tests/tools/test_address_tools_2.py +++ b/tests/tools/test_address_tools_2.py @@ -4,6 +4,7 @@ import httpx import pytest +from blockscout_mcp_server.config import config from blockscout_mcp_server.models import ( NftCollectionHolding, PaginationInfo, @@ -318,7 +319,15 @@ async def test_nft_tokens_by_address_with_pagination(mock_ctx): address = "0x123abc" mock_base_url = "https://eth.blockscout.com" - mock_api_response = {"items": [], "next_page_params": {"block_number": 123, "cursor": "foo"}} + items = [ + { + "token": {"address_hash": f"0xhash{i}", 
"type": "ERC-721"}, + "amount": "1", + "token_instances": [], + } + for i in range(11) + ] + mock_api_response = {"items": items} fake_cursor = "ENCODED_CURSOR" with ( @@ -336,7 +345,13 @@ async def test_nft_tokens_by_address_with_pagination(mock_ctx): result = await nft_tokens_by_address(chain_id=chain_id, address=address, ctx=mock_ctx) - mock_encode_cursor.assert_called_once_with(mock_api_response["next_page_params"]) + mock_encode_cursor.assert_called_once_with( + { + "token_contract_address_hash": items[9]["token"]["address_hash"], + "token_type": items[9]["token"]["type"], + "items_count": 50, + } + ) assert isinstance(result, ToolResponse) assert isinstance(result.pagination, PaginationInfo) assert result.pagination.next_call.tool_name == "nft_tokens_by_address" @@ -388,3 +403,88 @@ async def test_nft_tokens_by_address_invalid_cursor(mock_ctx): ): with pytest.raises(ValueError, match="bad"): await nft_tokens_by_address(chain_id=chain_id, address=address, cursor=invalid_cursor, ctx=mock_ctx) + + +@pytest.mark.asyncio +async def test_nft_tokens_by_address_response_sliced(mock_ctx): + chain_id = "1" + address = "0x123abc" + mock_base_url = "https://eth.blockscout.com" + + items = [ + { + "token": {"address_hash": f"0xhash{i}", "type": "ERC-721"}, + "amount": "1", + "token_instances": [], + } + for i in range(15) + ] + mock_api_response = {"items": items} + + with ( + patch( + "blockscout_mcp_server.tools.address_tools.get_blockscout_base_url", new_callable=AsyncMock + ) as mock_get_url, + patch( + "blockscout_mcp_server.tools.address_tools.make_blockscout_request", new_callable=AsyncMock + ) as mock_request, + patch("blockscout_mcp_server.tools.address_tools.encode_cursor") as mock_encode_cursor, + ): + mock_get_url.return_value = mock_base_url + mock_request.return_value = mock_api_response + mock_encode_cursor.return_value = "CURSOR" + + result = await nft_tokens_by_address(chain_id=chain_id, address=address, ctx=mock_ctx) + + assert len(result.data) == 10 + 
assert result.pagination is not None + mock_encode_cursor.assert_called_once_with( + { + "token_contract_address_hash": items[9]["token"]["address_hash"], + "token_type": items[9]["token"]["type"], + "items_count": 50, + } + ) + + +@pytest.mark.asyncio +async def test_nft_tokens_by_address_custom_page_size(mock_ctx): + chain_id = "1" + address = "0x123abc" + mock_base_url = "https://eth.blockscout.com" + + items = [ + { + "token": {"address_hash": f"0xhash{i}", "type": "ERC-721"}, + "amount": "1", + "token_instances": [], + } + for i in range(10) + ] + mock_api_response = {"items": items} + + with ( + patch( + "blockscout_mcp_server.tools.address_tools.get_blockscout_base_url", new_callable=AsyncMock + ) as mock_get_url, + patch( + "blockscout_mcp_server.tools.address_tools.make_blockscout_request", new_callable=AsyncMock + ) as mock_request, + patch("blockscout_mcp_server.tools.address_tools.encode_cursor") as mock_encode_cursor, + patch.object(config, "nft_page_size", 5), + ): + mock_get_url.return_value = mock_base_url + mock_request.return_value = mock_api_response + mock_encode_cursor.return_value = "CURSOR" + + result = await nft_tokens_by_address(chain_id=chain_id, address=address, ctx=mock_ctx) + + assert len(result.data) == 5 + assert result.pagination is not None + mock_encode_cursor.assert_called_once_with( + { + "token_contract_address_hash": items[4]["token"]["address_hash"], + "token_type": items[4]["token"]["type"], + "items_count": 50, + } + ) From 638ecd979848123f81e555d1253ed09fa86db152 Mon Sep 17 00:00:00 2001 From: Alexander Kolotov Date: Fri, 4 Jul 2025 16:57:11 -0600 Subject: [PATCH 2/4] Clarify pagination docs --- .cursor/rules/110-new-mcp-tool.mdc | 8 ++++---- SPEC.md | 23 ++++++++++++++++------- 2 files changed, 20 insertions(+), 11 deletions(-) diff --git a/.cursor/rules/110-new-mcp-tool.mdc b/.cursor/rules/110-new-mcp-tool.mdc index 826c024..0b5ad2f 100644 --- a/.cursor/rules/110-new-mcp-tool.mdc +++ b/.cursor/rules/110-new-mcp-tool.mdc 
@@ -397,13 +397,13 @@ async def paginated_tool_name( base_url = await get_blockscout_base_url(chain_id) response_data = await make_blockscout_request(base_url=base_url, api_path=api_path, params=query_params) - page_size = config.nft_page_size # Use your config setting + PAGE_SIZE = 10 # This should be a configurable value per-tool items = response_data.get("items", []) items_to_return = items next_page_params = None - if len(items) > page_size: - items_to_return = items[:page_size] - last = items[page_size - 1] + if len(items) > PAGE_SIZE: + items_to_return = items[:PAGE_SIZE] + last = items[PAGE_SIZE - 1] next_page_params = { "some_id": last.get("id"), "items_count": 50, diff --git a/SPEC.md b/SPEC.md index 038c66e..5545de5 100644 --- a/SPEC.md +++ b/SPEC.md @@ -176,8 +176,8 @@ sequenceDiagram } } } - } - ``` + } + ``` 3. **Response Processing and Context Optimization**: @@ -203,7 +203,7 @@ sequenceDiagram - **Simplified Tool Signatures**: Tool functions only need one optional `cursor: str` argument for pagination, keeping their schemas clean. **Mechanism:** - The server fetches a full page of results from Blockscout but returns only a small slice to the client (default 10 items). Pagination cursors are generated from the last item of that slice. When the Blockscout API returns a `next_page_params` dictionary, the server serializes it into a compact JSON string and Base64URL-encodes it, producing an opaque cursor for the next page. + When the Blockscout API returns a `next_page_params` dictionary, the server serializes this dictionary into a compact JSON string, which is then Base64URL-encoded. This creates a single, opaque, and URL-safe string that serves as the cursor for the next page. **Example:** @@ -232,9 +232,18 @@ sequenceDiagram } } } - ``` + ``` + + **c) Response Slicing and Context-Aware Pagination:** + To prevent overwhelming the LLM with long lists of items (e.g., token holdings, transaction logs), the server implements a response slicing strategy. 
This conserves context while ensuring all data remains accessible through robust pagination. + + - **Mechanism**: The server fetches a full page of data from the Blockscout API (typically 50 items) but returns only a smaller, configurable slice to the client (e.g., 10 items). If the original response contained more items than the slice size, pagination is initiated. + - **Cursor Generation**: Instead of using the `next_page_params` directly from the Blockscout API (which would skip most of the fetched items), the server generates a new pagination cursor based on the **last item of the returned slice**. This ensures the next request starts exactly where the previous one left off, providing seamless continuity. + - **Configuration**: The size of the slice returned to the client is configurable via environment variables (e.g., `BLOCKSCOUT_NFT_PAGE_SIZE`), allowing for fine-tuning of context usage. + + This strategy combines the network efficiency of fetching larger data chunks from the backend with the context efficiency of providing smaller, digestible responses to the AI. - **c) Automatic Pagination Instructions for LLM Guidance:** + **d) Automatic Pagination Instructions for LLM Guidance:** To address the common issue of LLMs ignoring structured pagination data, the server implements a multi-layered approach to ensure LLMs actually use pagination when available: - **Enhanced General Rules**: Server instructions include explicit pagination handling rules that LLMs receive upfront - **Automatic Instruction Generation**: When a tool response includes pagination, the server automatically appends motivational instructions to the `instructions` field (e.g., "⚠️ MORE DATA AVAILABLE: Use pagination.next_call to get the next page.") @@ -242,7 +251,7 @@ sequenceDiagram This balanced approach provides both human-readable motivation and machine-readable execution details, significantly improving the likelihood that LLMs will fetch complete datasets for comprehensive analysis. 
- **d) Log Data Field Truncation** + **e) Log Data Field Truncation** To prevent LLM context overflow from excessively large `data` fields in transaction logs, the server implements a smart truncation strategy. @@ -253,7 +262,7 @@ sequenceDiagram This approach maintains a small context footprint by default while providing a reliable "escape hatch" for high-fidelity data retrieval when necessary. - **e) Transaction Input Data Truncation** + **f) Transaction Input Data Truncation** To handle potentially massive transaction input data, the `get_transaction_info` tool employs a multi-faceted truncation strategy. From 157ef085296be98c089f84d44b3efbd369e158fb Mon Sep 17 00:00:00 2001 From: Alexander Kolotov Date: Mon, 7 Jul 2025 22:07:55 -0600 Subject: [PATCH 3/4] Implement pagination page size for log tools (#127) --- .cursor/rules/110-new-mcp-tool.mdc | 107 ++-- .cursor/rules/210-unit-testing-guidelines.mdc | 35 +- .env.example | 7 + AGENTS.md | 21 +- Dockerfile | 2 + SPEC.md | 66 ++- blockscout_mcp_server/config.py | 2 + blockscout_mcp_server/tools/address_tools.py | 34 +- blockscout_mcp_server/tools/common.py | 73 ++- .../tools/transaction_tools.py | 253 +++++---- .../test_address_tools_integration.py | 2 +- .../test_transaction_tools_integration.py | 12 +- tests/tools/test_address_logs.py | 54 +- tests/tools/test_common.py | 217 ++++++++ tests/tools/test_transaction_tools.py | 505 ++++++++++++++---- tests/tools/test_transaction_tools_2.py | 10 +- tests/tools/test_transaction_tools_3.py | 106 +++- tests/tools/test_transaction_tools_helpers.py | 38 ++ .../test_transaction_tools_pagination.py | 293 ++++++++++ 19 files changed, 1502 insertions(+), 335 deletions(-) create mode 100644 tests/tools/test_transaction_tools_pagination.py diff --git a/.cursor/rules/110-new-mcp-tool.mdc b/.cursor/rules/110-new-mcp-tool.mdc index 0b5ad2f..7d7b95f 100644 --- a/.cursor/rules/110-new-mcp-tool.mdc +++ b/.cursor/rules/110-new-mcp-tool.mdc @@ -353,15 +353,50 @@ return 
build_tool_response( #### 5. Handling Pagination with Opaque Cursors (`return_type: ToolResponse[list[dict]]`) -For tools that return paginated data, do not expose individual pagination parameters (like `page`, `offset`, `items_count`) in the tool's signature. Instead, use a single, opaque `cursor` string. This improves robustness and saves LLM context. The implementation involves both handling an incoming cursor and generating the next one. +For tools that return paginated data, do not expose individual pagination parameters (like `page`, `offset`, `items_count`) in the tool's signature. Instead, use a single, opaque `cursor` string. This improves robustness and saves LLM context. + +**Context Conservation Strategy:** +Many blockchain APIs return large datasets (50+ items per page) that would overwhelm LLM context. To balance network efficiency with context conservation, tools should: + +- Fetch larger pages from APIs (typically 50 items) for network efficiency +- Return smaller slices to the LLM (typically 10-20 items) to conserve context +- Generate pagination objects that allow the LLM to request additional pages when needed **A. Handling the Incoming Cursor:** Your tool should accept an optional `cursor` argument. If it's provided, use the `apply_cursor_to_params` helper from `tools/common.py`. This helper centralizes the logic for decoding the cursor and handling potential `InvalidCursorError` exceptions, raising a user-friendly `ValueError` automatically. **B. Generating Structured Pagination:** -In your response, check for `next_page_params` from the API. If they exist, create `PaginationInfo` and `NextCallInfo` objects with the structured parameters for the next call. +**ALWAYS use the `create_items_pagination` helper** from `tools/common.py` instead of manually creating pagination objects. This function implements the response slicing strategy described above, while also ensuring consistency and handling edge cases properly. + +**C. 
Page Size Configuration:** +For each new paginated tool, you must add a dedicated page size configuration variable: -**C. Tool Description Guidelines:** +1. **Add to `blockscout_mcp_server/config.py`**: + + ```python + class ServerConfig(BaseSettings): + # Existing page sizes + nft_page_size: int = 10 + logs_page_size: int = 10 + advanced_filters_page_size: int = 10 + + # Add your new page size + my_tool_page_size: int = 15 # Adjust based on typical item size + ``` + +2. **Add to `.env.example`**: + + ```shell + BLOCKSCOUT_MY_TOOL_PAGE_SIZE=15 + ``` + +3. **Add to `Dockerfile`**: + + ```dockerfile + ENV BLOCKSCOUT_MY_TOOL_PAGE_SIZE="15" + ``` + +**D. Tool Description Guidelines:** For paginated tools, **MUST** include this exact notice in the docstring: `**SUPPORTS PAGINATION**: If response includes 'pagination' field, use the provided next_call to get additional pages.` **Complete Example Pattern:** @@ -372,11 +407,25 @@ from pydantic import Field from blockscout_mcp_server.tools.common import ( make_blockscout_request, get_blockscout_base_url, - encode_cursor, apply_cursor_to_params, - build_tool_response + build_tool_response, + create_items_pagination, ) -from blockscout_mcp_server.models import ToolResponse, PaginationInfo, NextCallInfo +from blockscout_mcp_server.models import ToolResponse +from blockscout_mcp_server.config import config + +def extract_cursor_params(item: dict) -> dict: + """Extract cursor parameters from an item for pagination continuation. + + This function determines which fields from the last item should be used + as cursor parameters for the next page request. The returned dictionary + will be encoded as an opaque cursor string. 
+ """ + return { + "some_id": item.get("id"), # Primary pagination key + "timestamp": item.get("timestamp"), # Secondary sort key if needed + "items_count": 50, # Page size for next request + } async def paginated_tool_name( chain_id: Annotated[str, Field(description="The ID of the blockchain")], @@ -397,37 +446,27 @@ async def paginated_tool_name( base_url = await get_blockscout_base_url(chain_id) response_data = await make_blockscout_request(base_url=base_url, api_path=api_path, params=query_params) - PAGE_SIZE = 10 # This should be a configurable value per-tool + # 2. Process/transform items if needed items = response_data.get("items", []) - items_to_return = items - next_page_params = None - if len(items) > PAGE_SIZE: - items_to_return = items[:PAGE_SIZE] - last = items[PAGE_SIZE - 1] - next_page_params = { - "some_id": last.get("id"), - "items_count": 50, - } - - processed_items = process_items(items_to_return) - - # 2. Generate structured pagination - pagination = None - if next_page_params: - next_cursor = encode_cursor(next_page_params) - pagination = PaginationInfo( - next_call=NextCallInfo( - tool_name="paginated_tool_name", - params={ - "chain_id": chain_id, - "address": address, - "cursor": next_cursor - } - ) - ) + processed_items = process_items(items) # Your transformation logic here + + # 3. 
Use create_items_pagination helper to handle slicing and pagination + sliced_items, pagination = create_items_pagination( + items=processed_items, + page_size=config.my_tool_page_size, # Use the page size you configured above + tool_name="paginated_tool_name", + next_call_base_params={ + "chain_id": chain_id, + "address": address, + # Include other non-cursor parameters that should be preserved + }, + cursor_extractor=extract_cursor_params, + force_pagination=False, # Set to True if you know there are more pages despite few items + ) - return build_tool_response(data=processed_items, pagination=pagination) + return build_tool_response(data=sliced_items, pagination=pagination) ``` + #### 6. Simplifying Address Objects to Save Context (`return_type: ToolResponse[dict]`) **Rationale:** Many Blockscout API endpoints return addresses as complex JSON objects containing the hash, name, tags, etc. To conserve LLM context and encourage compositional tool use, we must simplify these objects into a single address string. If the AI needs more details about an address, it should be guided to use the dedicated `get_address_info` tool. diff --git a/.cursor/rules/210-unit-testing-guidelines.mdc b/.cursor/rules/210-unit-testing-guidelines.mdc index 56891a0..e2d5499 100644 --- a/.cursor/rules/210-unit-testing-guidelines.mdc +++ b/.cursor/rules/210-unit-testing-guidelines.mdc @@ -7,6 +7,37 @@ alwaysApply: false This document provides detailed guidelines for writing effective unit tests for MCP tool functions and related components. +## **HIGH PRIORITY: Keep Unit Tests Simple and Focused** + +**Each unit test must be narrow and specific.** A single test should verify one specific behavior or scenario. If a test attempts to cover multiple scenarios or different groups of input parameters, **split it into separate tests**. 
+ +**Simple tests are:** + +- Easier to understand and maintain +- Faster to debug when they fail +- More reliable and less prone to false positives +- Better at pinpointing the exact cause of failures + +**Example - Split complex tests:** + +```python +# BAD: One test covering multiple scenarios +def test_lookup_token_complex(): + # Tests both success and error cases + # Tests multiple input parameter combinations + # Hard to debug when it fails + +# GOOD: Separate focused tests +def test_lookup_token_success(): + # Tests only the success scenario + +def test_lookup_token_invalid_symbol(): + # Tests only invalid symbol error case + +def test_lookup_token_network_error(): + # Tests only network error handling +``` + ## Key Testing Patterns & Guidelines ### A. Use the `mock_ctx` Fixture @@ -16,6 +47,7 @@ A reusable `pytest` fixture named `mock_ctx` is defined in `tests/conftest.py`. **DO NOT** create a manual `MagicMock` for the context within your test functions. **Correct Usage:** + ```python import pytest @@ -38,8 +70,6 @@ For tools that return a `ToolResponse` object containing structured data, **DO N However, the approach depends on the complexity of the tool. - - ### C. Handling Repetitive Data in Assertions (DAMP vs. DRY) When testing tools that transform a list of items (e.g., `lookup_token_by_symbol`), explicitly writing out the entire `expected_result` can lead to large, repetitive, and hard-to-maintain test code. @@ -47,6 +77,7 @@ When testing tools that transform a list of items (e.g., `lookup_token_by_symbol In these cases, it is better to **programmatically generate the `expected_result`** from the `mock_api_response`. This keeps the test maintainable while still explicitly documenting the transformation logic itself. 
**Correct Usage:** + ```python import copy from blockscout_mcp_server.models import ToolResponse diff --git a/.env.example b/.env.example index 835ebbc..e42fc72 100644 --- a/.env.example +++ b/.env.example @@ -13,5 +13,12 @@ BLOCKSCOUT_PROGRESS_INTERVAL_SECONDS="15.0" # The number of items to return per page for the nft_tokens_by_address tool. BLOCKSCOUT_NFT_PAGE_SIZE=10 +# The number of log items to return per page for get_address_logs and get_transaction_logs. +BLOCKSCOUT_LOGS_PAGE_SIZE=10 + +# The number of items to return per page for tools using the advanced filters endpoint. +BLOCKSCOUT_ADVANCED_FILTERS_PAGE_SIZE=10 + BLOCKSCOUT_METADATA_URL="https://metadata.services.blockscout.com" BLOCKSCOUT_METADATA_TIMEOUT="30.0" + diff --git a/AGENTS.md b/AGENTS.md index 258299b..8882fe9 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -13,7 +13,7 @@ mcp-server/ │ ├── models.py # Defines standardized Pydantic models for all tool responses │ └── tools/ # Sub-package for tool implementations │ ├── __init__.py # Initializes the tools sub-package -│ ├── common.py # Shared utilities for tools (e.g., HTTP client, chain resolution, progress reporting, data processing and truncation helpers) +│ ├── common.py # Shared utilities and common functionality for all tools │ ├── get_instructions.py # Implements the __get_instructions__ tool │ ├── ens_tools.py # Implements ENS-related tools │ ├── search_tools.py # Implements search-related tools (e.g., lookup_token_by_symbol) @@ -108,6 +108,9 @@ mcp-server/ * `BLOCKSCOUT_CHAINSCOUT_TIMEOUT`: Timeout for Chainscout API requests. * `BLOCKSCOUT_CHAIN_CACHE_TTL_SECONDS`: Time-to-live for chain resolution cache. * `BLOCKSCOUT_PROGRESS_INTERVAL_SECONDS`: Interval for periodic progress updates in long-running operations. + * `BLOCKSCOUT_NFT_PAGE_SIZE`: Page size for NFT token queries (default: 10). + * `BLOCKSCOUT_LOGS_PAGE_SIZE`: Page size for address logs queries (default: 10). 
+ * `BLOCKSCOUT_ADVANCED_FILTERS_PAGE_SIZE`: Page size for advanced filter queries (default: 10). 2. **`tests/` (Test Suite)** * This directory contains the complete test suite for the project, divided into two categories: @@ -164,19 +167,9 @@ mcp-server/ * **`tools/` (Sub-package for Tool Implementations)** * **`__init__.py`**: Marks `tools` as a sub-package. May re-export tool functions for easier import into `server.py`. * **`common.py`**: - * Contains shared utility functions for all tool modules, including data processing and truncation helpers. - * Implements chain resolution and caching mechanism with `get_blockscout_base_url` function. - * Implements helper functions (`encode_cursor`, `decode_cursor`) and a custom exception (`InvalidCursorError`) for handling opaque pagination cursors. - * Contains asynchronous HTTP client functions for different API endpoints: - * `make_blockscout_request`: Takes base_url (resolved from chain_id), API path, and parameters for Blockscout API calls. - * `make_bens_request`: For BENS API calls. - * `make_chainscout_request`: For Chainscout API calls. - * `make_metadata_request`: For Blockscout Metadata API calls. - * These functions handle: - * API key inclusion - * Common HTTP error patterns - * URL construction - * Response parsing + * Provides shared utilities and common functionality for all MCP tools. + * Handles API communication, chain resolution, pagination, data processing, and error handling. + * Implements standardized patterns used across the tool ecosystem. * **Individual Tool Modules** (e.g., `ens_tools.py`, `transaction_tools.py`): * Each file will group logically related tools. * Each tool will be implemented as an `async` Python function. 
diff --git a/Dockerfile b/Dockerfile index be948c2..1c92026 100644 --- a/Dockerfile +++ b/Dockerfile @@ -25,5 +25,7 @@ ENV BLOCKSCOUT_CHAINSCOUT_TIMEOUT="15.0" ENV BLOCKSCOUT_CHAIN_CACHE_TTL_SECONDS="1800" ENV BLOCKSCOUT_PROGRESS_INTERVAL_SECONDS="15.0" ENV BLOCKSCOUT_NFT_PAGE_SIZE="10" +ENV BLOCKSCOUT_LOGS_PAGE_SIZE="10" +ENV BLOCKSCOUT_ADVANCED_FILTERS_PAGE_SIZE="10" CMD ["python", "-m", "blockscout_mcp_server"] diff --git a/SPEC.md b/SPEC.md index 5545de5..b5e587e 100644 --- a/SPEC.md +++ b/SPEC.md @@ -190,33 +190,35 @@ sequenceDiagram **Specific Optimizations:** - **a) Address Object Simplification:** - Many Blockscout API endpoints return addresses as complex JSON objects containing hash, name, contract flags, public tags, and other metadata. To conserve LLM context, the server systematically simplifies these objects into single address strings (e.g., `"0x123..."`) before returning responses. This approach: - - **Reduces Context Consumption**: A single address string uses significantly less context than a full address object with multiple fields - - **Encourages Compositional Tool Use**: When detailed address information is needed, the AI is guided to use dedicated tools like `get_address_info` - - **Maintains Essential Functionality**: The core address hash is preserved, which is sufficient for most blockchain operations - - **b) Opaque Cursor Strategy for Pagination:** - For handling large, paginated datasets, the server uses an **opaque cursor** strategy that avoids exposing multiple, complex pagination parameters (e.g., `page`, `offset`, `items_count`) in tool signatures and responses. This approach provides several key benefits: - - **Context Conservation**: A single cursor string consumes significantly less LLM context than a list of individual parameters. - - **Improved Robustness**: It treats pagination as an atomic unit, preventing the AI from incorrectly constructing or omitting parameters for the next request. 
- - **Simplified Tool Signatures**: Tool functions only need one optional `cursor: str` argument for pagination, keeping their schemas clean. + **a) Address Object Simplification:** + Many Blockscout API endpoints return addresses as complex JSON objects containing hash, name, contract flags, public tags, and other metadata. To conserve LLM context, the server systematically simplifies these objects into single address strings (e.g., `"0x123..."`) before returning responses. This approach: + + - **Reduces Context Consumption**: A single address string uses significantly less context than a full address object with multiple fields + - **Encourages Compositional Tool Use**: When detailed address information is needed, the AI is guided to use dedicated tools like `get_address_info` + - **Maintains Essential Functionality**: The core address hash is preserved, which is sufficient for most blockchain operations + + **b) Opaque Cursor Strategy for Pagination:** + For handling large, paginated datasets, the server uses an **opaque cursor** strategy that avoids exposing multiple, complex pagination parameters (e.g., `page`, `offset`, `items_count`) in tool signatures and responses. This approach provides several key benefits: + + - **Context Conservation**: A single cursor string consumes significantly less LLM context than a list of individual parameters. + - **Improved Robustness**: It treats pagination as an atomic unit, preventing the AI from incorrectly constructing or omitting parameters for the next request. + - **Simplified Tool Signatures**: Tool functions only need one optional `cursor: str` argument for pagination, keeping their schemas clean. **Mechanism:** When the Blockscout API returns a `next_page_params` dictionary, the server serializes this dictionary into a compact JSON string, which is then Base64URL-encoded. This creates a single, opaque, and URL-safe string that serves as the cursor for the next page. 
- **Example:** + **Example:** - - **Blockscout API `next_page_params`:** + - **Blockscout API `next_page_params`:** ```json { "block_number": 18999999, "index": 42, "items_count": 50 } ``` - - **Generated Opaque Cursor:** + - **Generated Opaque Cursor:** `eyJibG9ja19udW1iZXIiOjE4OTk5OTk5LCJpbmRleCI6NDIsIml0ZW1zX2NvdW50Ijo1MH0` - - **Final Tool Response (JSON):** + - **Final Tool Response (JSON):** ```json { @@ -243,13 +245,37 @@ sequenceDiagram This strategy combines the network efficiency of fetching larger data chunks from the backend with the context efficiency of providing smaller, digestible responses to the AI. + **c) Response Slicing and Context-Aware Pagination:** + + To prevent overwhelming the LLM with long lists of items (e.g., token holdings, transaction logs), the server implements a response slicing strategy. This conserves context while ensuring all data remains accessible through robust pagination. + + **Basic Slicing Mechanism:** + + - The server fetches a full page of data from the Blockscout API (typically 50 items) but returns only a smaller, configurable slice to the client (e.g., 10 items). If the original response contained more items than the slice size, pagination is initiated. + - **Cursor Generation**: Instead of using the `next_page_params` directly from the Blockscout API (which would skip most of the fetched items), the server generates a new pagination cursor based on the **last item of the returned slice**. This ensures the next request starts exactly where the previous one left off, providing seamless continuity. + - **Configuration**: The size of the slice returned to the client is configurable via environment variables (e.g., `BLOCKSCOUT_*_PAGE_SIZE`), allowing for fine-tuning of context usage. 
+ + **Advanced Multi-Page Fetching with Filtering:** + For tools that apply significant filtering (e.g., `get_transactions_by_address` which excludes token transfers), the server implements a sophisticated multi-page fetching strategy to handle cases where filtering removes most items from each API page: + + - **Smart Pagination Logic**: The server fetches up to 10 consecutive full-size pages from the Blockscout API, filtering and accumulating items until it has enough for a meaningful client response. + - **Sparse Data Detection**: If after fetching 10 pages the last page contained no filtered items and the accumulated results are still insufficient for a full client page, the data is considered "too sparse" and pagination is terminated to avoid infinite loops with minimal results. + - **Pagination Decision**: The server offers pagination to the client only when: + 1. It has accumulated more than the target page size (definitive evidence of more data), OR + 2. It reached the 10-page limit AND the last fetched page contained items AND the API indicates more pages are available (likely more data) + - **Efficiency Balance**: This approach balances network efficiency (fetching larger chunks) with context efficiency (returning smaller slices) while handling the complex reality of heavily filtered blockchain data. + + This strategy combines the network efficiency of fetching larger data chunks from the backend with the context efficiency of providing smaller, digestible responses to the AI. 
+ **d) Automatic Pagination Instructions for LLM Guidance:** - To address the common issue of LLMs ignoring structured pagination data, the server implements a multi-layered approach to ensure LLMs actually use pagination when available: - - **Enhanced General Rules**: Server instructions include explicit pagination handling rules that LLMs receive upfront - - **Automatic Instruction Generation**: When a tool response includes pagination, the server automatically appends motivational instructions to the `instructions` field (e.g., "⚠️ MORE DATA AVAILABLE: Use pagination.next_call to get the next page.") - - **Tool Description Enhancement**: All paginated tools include prominent **"SUPPORTS PAGINATION"** notices in their docstrings - This balanced approach provides both human-readable motivation and machine-readable execution details, significantly improving the likelihood that LLMs will fetch complete datasets for comprehensive analysis. + To address the common issue of LLMs ignoring structured pagination data, the server implements a multi-layered approach to ensure LLMs actually use pagination when available: + + - **Enhanced General Rules**: Server instructions include explicit pagination handling rules that LLMs receive upfront + - **Automatic Instruction Generation**: When a tool response includes pagination, the server automatically appends motivational instructions to the `instructions` field (e.g., "⚠️ MORE DATA AVAILABLE: Use pagination.next_call to get the next page.") + - **Tool Description Enhancement**: All paginated tools include prominent **"SUPPORTS PAGINATION"** notices in their docstrings + + This balanced approach provides both human-readable motivation and machine-readable execution details, significantly improving the likelihood that LLMs will fetch complete datasets for comprehensive analysis. 
**e) Log Data Field Truncation** diff --git a/blockscout_mcp_server/config.py b/blockscout_mcp_server/config.py index 7cde7e7..b57ac49 100644 --- a/blockscout_mcp_server/config.py +++ b/blockscout_mcp_server/config.py @@ -22,6 +22,8 @@ class ServerConfig(BaseSettings): progress_interval_seconds: float = 15.0 # Default interval for periodic progress updates nft_page_size: int = 10 + logs_page_size: int = 10 + advanced_filters_page_size: int = 10 config = ServerConfig() diff --git a/blockscout_mcp_server/tools/address_tools.py b/blockscout_mcp_server/tools/address_tools.py index ea0e28e..7f8b347 100644 --- a/blockscout_mcp_server/tools/address_tools.py +++ b/blockscout_mcp_server/tools/address_tools.py @@ -20,7 +20,9 @@ _process_and_truncate_log_items, apply_cursor_to_params, build_tool_response, + create_items_pagination, encode_cursor, + extract_log_cursor_params, get_blockscout_base_url, make_blockscout_request, make_metadata_request, @@ -313,7 +315,7 @@ async def get_address_logs( original_items, was_truncated = _process_and_truncate_log_items(response_data.get("items", [])) - log_items: list[AddressLogItem] = [] + log_items_dicts: list[dict] = [] # To preserve the LLM context, only specific fields are added to the response for item in original_items: curated_item = { @@ -327,7 +329,7 @@ async def get_address_logs( if item.get("data_truncated"): curated_item["data_truncated"] = True - log_items.append(AddressLogItem(**curated_item)) + log_items_dicts.append(curated_item) data_description = [ "Items Structure:", @@ -361,26 +363,18 @@ async def get_address_logs( f'`curl "{base_url}/api/v2/transactions/{{THE_TRANSACTION_HASH}}/logs"`', ] - # Since there could be more than one page of logs for the same address, - # the pagination information is extracted from API response and added explicitly - # to the tool response - pagination = None - next_page_params = response_data.get("next_page_params") - if next_page_params: - next_cursor = encode_cursor(next_page_params) - 
pagination = PaginationInfo( - next_call=NextCallInfo( - tool_name="get_address_logs", - params={ - "chain_id": chain_id, - "address": address, - "cursor": next_cursor, - }, - ) - ) + sliced_items, pagination = create_items_pagination( + items=log_items_dicts, + page_size=config.logs_page_size, + tool_name="get_address_logs", + next_call_base_params={"chain_id": chain_id, "address": address}, + cursor_extractor=extract_log_cursor_params, + ) + + sliced_log_items = [AddressLogItem(**item) for item in sliced_items] return build_tool_response( - data=log_items, + data=sliced_log_items, data_description=data_description, notes=notes, pagination=pagination, diff --git a/blockscout_mcp_server/tools/common.py b/blockscout_mcp_server/tools/common.py index e4a5a00..438d55d 100644 --- a/blockscout_mcp_server/tools/common.py +++ b/blockscout_mcp_server/tools/common.py @@ -13,7 +13,7 @@ INPUT_DATA_TRUNCATION_LIMIT, LOG_DATA_TRUNCATION_LIMIT, ) -from blockscout_mcp_server.models import PaginationInfo, ToolResponse +from blockscout_mcp_server.models import NextCallInfo, PaginationInfo, ToolResponse class ChainNotFoundError(ValueError): @@ -483,3 +483,74 @@ def apply_cursor_to_params(cursor: str | None, params: dict) -> None: raise ValueError( "Invalid or expired pagination cursor. Please make a new request without the cursor to start over." ) + + +def create_items_pagination( + *, + items: list[dict], + page_size: int, + tool_name: str, + next_call_base_params: dict, + cursor_extractor: Callable[[dict], dict], + force_pagination: bool = False, +) -> tuple[list[dict], PaginationInfo | None]: + """ + Slice items list and generate pagination info if needed. + + Args: + force_pagination: If True, creates pagination even when items <= page_size, + using the last item for cursor generation. Useful when the caller + knows there are more pages available despite having few items. 
+ """ + if len(items) <= page_size and not force_pagination: + return items, None + + # Determine pagination behavior + if len(items) > page_size: + # Normal case: slice items and use item at page_size - 1 for cursor + sliced_items = items[:page_size] + last_item_for_cursor = items[page_size - 1] + else: + # Force pagination case: use all items and last item for cursor + sliced_items = items + last_item_for_cursor = items[-1] if items else None + + # Only create pagination if we have an item to generate cursor from + if not last_item_for_cursor: + return sliced_items, None + + next_page_params = cursor_extractor(last_item_for_cursor) + next_cursor = encode_cursor(next_page_params) + + final_params = next_call_base_params.copy() + final_params["cursor"] = next_cursor + + pagination = PaginationInfo( + next_call=NextCallInfo( + tool_name=tool_name, + params=final_params, + ) + ) + + return sliced_items, pagination + + +def extract_log_cursor_params(item: dict) -> dict: + """Return cursor parameters extracted from a log item.""" + + return { + "block_number": item.get("block_number"), + "index": item.get("index"), + } + + +def extract_advanced_filters_cursor_params(item: dict) -> dict: + """Return cursor parameters extracted from an advanced-filters item.""" + + return { + "block_number": item.get("block_number"), + "transaction_index": item.get("transaction_index"), + "internal_transaction_index": item.get("internal_transaction_index"), + "token_transfer_batch_index": item.get("token_transfer_batch_index"), + "token_transfer_index": item.get("token_transfer_index"), + } diff --git a/blockscout_mcp_server/tools/transaction_tools.py b/blockscout_mcp_server/tools/transaction_tools.py index 8abc8f3..1bc0f46 100644 --- a/blockscout_mcp_server/tools/transaction_tools.py +++ b/blockscout_mcp_server/tools/transaction_tools.py @@ -7,8 +7,6 @@ from blockscout_mcp_server.constants import INPUT_DATA_TRUNCATION_LIMIT from blockscout_mcp_server.models import ( AdvancedFilterItem, 
- NextCallInfo, - PaginationInfo, ToolResponse, TransactionInfoData, TransactionLogItem, @@ -19,7 +17,9 @@ _recursively_truncate_and_flag_long_strings, apply_cursor_to_params, build_tool_response, - encode_cursor, + create_items_pagination, + extract_advanced_filters_cursor_params, + extract_log_cursor_params, get_blockscout_base_url, make_blockscout_request, make_request_with_periodic_progress, @@ -119,6 +119,94 @@ def _transform_transaction_info(data: dict) -> dict: return transformed_data +async def _fetch_filtered_transactions_with_smart_pagination( + base_url: str, + api_path: str, + initial_params: dict, + target_page_size: int, + ctx: Context, + max_pages_to_fetch: int = 10, # Prevent infinite loops + progress_start_step: float = 2.0, + total_steps: float = 12.0, +) -> tuple[list[dict], bool]: + """ + Fetch and accumulate filtered transaction items across multiple pages until we have enough items. + + This function handles the complex case where filtering removes most items from each page, + requiring us to fetch multiple pages to get enough filtered results. + + The key insight: we accumulate items until we have target_page_size + 1 items (so create_items_pagination + can detect if pagination is needed) OR until no more API pages are available. 
+
+    Returns:
+        tuple of (filtered_items, has_more_pages_available)
+        - filtered_items: list of raw API data, not transformed
+        - has_more_pages_available: True if there are likely more pages with data available
+    """
+    accumulated_items = []
+    current_params = initial_params.copy()
+    pages_fetched = 0
+    last_page_had_items = False
+    api_has_more_pages = False
+
+    while pages_fetched < max_pages_to_fetch:
+        current_step = progress_start_step + pages_fetched
+
+        # Fetch the current page using periodic progress reporting so that
+        # the progress of each page fetch can be tracked
+        response_data = await make_request_with_periodic_progress(
+            ctx=ctx,
+            request_function=make_blockscout_request,
+            request_args={"base_url": base_url, "api_path": api_path, "params": current_params},
+            total_duration_hint=config.bs_timeout,
+            progress_interval_seconds=config.progress_interval_seconds,
+            in_progress_message_template=(
+                f"Fetching page {pages_fetched + 1}, accumulated {len(accumulated_items)} items... "
+                f"({{elapsed_seconds:.0f}}s / {{total_hint:.0f}}s hint)"
+            ),
+            tool_overall_total_steps=total_steps,
+            current_step_number=current_step,
+            current_step_message_prefix=f"Fetching page {pages_fetched + 1}",
+        )
+
+        original_items = response_data.get("items", [])
+        next_page_params = response_data.get("next_page_params")
+        pages_fetched += 1
+
+        # Filter items from current page
+        filtered_items = [item for item in original_items if item.get("type") not in EXCLUDED_TX_TYPES]
+
+        # Track if this page had items and if API indicates more pages
+        last_page_had_items = len(filtered_items) > 0
+        api_has_more_pages = next_page_params is not None
+
+        # Add to accumulated items
+        accumulated_items.extend(filtered_items)
+
+        # Check if we have enough items for pagination decision
+        # We need target_page_size + 1 so create_items_pagination can detect if pagination is needed
+        if len(accumulated_items) > target_page_size:
+            # We have more than a page, so there are definitely more items available
+ break + + if not next_page_params: + # No more pages available, return whatever we have + break + + # Prepare for next page + current_params.update(next_page_params) + + # Determine if there are more pages available: + # 1. If we have > target_page_size items, there are definitely more pages + # 2. If we hit the page limit but the last page had items AND the API says there are more pages, + # then there are likely more pages with data + has_more_pages = len(accumulated_items) > target_page_size or ( + pages_fetched >= max_pages_to_fetch and last_page_had_items and api_has_more_pages + ) + + return accumulated_items, has_more_pages + + async def get_transactions_by_address( chain_id: Annotated[str, Field(description="The ID of the blockchain")], address: Annotated[str, Field(description="Address which either sender or receiver of the transaction")], @@ -158,7 +246,9 @@ async def get_transactions_by_address( apply_cursor_to_params(cursor, query_params) - tool_overall_total_steps = 2.0 + # Calculate total steps: + # 1 (URL resolution) + 10 (max iterations in _fetch_filtered_transactions_with_smart_pagination) + 1 (finalization) + tool_overall_total_steps = 12.0 # Report start of operation await report_and_log_progress( @@ -170,7 +260,7 @@ async def get_transactions_by_address( base_url = await get_blockscout_base_url(chain_id) - # Report progress after resolving Blockscout URL + # Report progress after resolving Blockscout URL (step 1 complete) await report_and_log_progress( ctx, progress=1.0, @@ -178,61 +268,56 @@ async def get_transactions_by_address( message="Resolved Blockscout instance URL. 
Now fetching transactions...", ) - # Use the periodic progress wrapper for the potentially long-running API call - response_data = await make_request_with_periodic_progress( + # Use smart pagination that handles filtering across multiple pages (steps 2-11) + # internally, it uses make_request_with_periodic_progress to report progress for each page fetch + filtered_items, has_more_pages = await _fetch_filtered_transactions_with_smart_pagination( + base_url=base_url, + api_path=api_path, + initial_params=query_params, + target_page_size=config.advanced_filters_page_size, ctx=ctx, - request_function=make_blockscout_request, - request_args={ - "base_url": base_url, - "api_path": api_path, - "params": query_params, - }, - total_duration_hint=config.bs_timeout, # Use configured timeout - progress_interval_seconds=config.progress_interval_seconds, # Use configured interval - in_progress_message_template="Query in progress... ({elapsed_seconds:.0f}s / {total_hint:.0f}s hint)", - tool_overall_total_steps=tool_overall_total_steps, - current_step_number=2.0, # This is the 2nd step of the tool - current_step_message_prefix="Fetching transactions", + progress_start_step=2.0, + total_steps=tool_overall_total_steps, ) - # The wrapper make_request_with_periodic_progress handles the final progress report for this step. - # So, no explicit ctx.report_progress(progress=2.0, ...) is needed here. 
- - original_items = response_data.get("items", []) - - filtered_items = [item for item in original_items if item.get("type") not in EXCLUDED_TX_TYPES] + # Report completion after fetching all needed pages (step 12) + await report_and_log_progress( + ctx, + progress=tool_overall_total_steps, + total=tool_overall_total_steps, + message="Successfully fetched transaction data.", + ) + # Transform filtered items (separate responsibility from filtering/pagination) fields_to_remove = [ "total", "token", "token_transfer_batch_index", "token_transfer_index", ] - transformed_items = [_transform_advanced_filter_item(item, fields_to_remove) for item in filtered_items] - # All the fields returned by the API except the ones in `fields_to_remove` are added to the response - result_data = [AdvancedFilterItem.model_validate(item) for item in transformed_items] - - pagination = None - next_page_params = response_data.get("next_page_params") - if next_page_params: - next_cursor = encode_cursor(next_page_params) - pagination = PaginationInfo( - next_call=NextCallInfo( - tool_name="get_transactions_by_address", - params={ - "chain_id": chain_id, - "address": address, - "age_from": age_from, - "age_to": age_to, - "methods": methods, - "cursor": next_cursor, - }, - ) - ) + # Use create_items_pagination to handle slicing and pagination logic + # Force pagination if we know there are more pages available despite having few items + final_items, pagination = create_items_pagination( + items=transformed_items, + page_size=config.advanced_filters_page_size, + tool_name="get_transactions_by_address", + next_call_base_params={ + "chain_id": chain_id, + "address": address, + "age_from": age_from, + "age_to": age_to, + "methods": methods, + }, + cursor_extractor=extract_advanced_filters_cursor_params, + force_pagination=has_more_pages and len(transformed_items) <= config.advanced_filters_page_size, + ) + + # Convert to AdvancedFilterItem objects + validated_items = 
[AdvancedFilterItem.model_validate(item) for item in final_items] - return build_tool_response(data=result_data, pagination=pagination) + return build_tool_response(data=validated_items, pagination=pagination) async def get_token_transfers_by_address( @@ -331,28 +416,23 @@ async def get_token_transfers_by_address( transformed_items = [_transform_advanced_filter_item(item, fields_to_remove) for item in original_items] + sliced_items, pagination = create_items_pagination( + items=transformed_items, + page_size=config.advanced_filters_page_size, + tool_name="get_token_transfers_by_address", + next_call_base_params={ + "chain_id": chain_id, + "address": address, + "age_from": age_from, + "age_to": age_to, + "token": token, + }, + cursor_extractor=extract_advanced_filters_cursor_params, + ) # All the fields returned by the API except the ones in `fields_to_remove` are added to the response - result_data = [AdvancedFilterItem.model_validate(item) for item in transformed_items] - - pagination = None - next_page_params = response_data.get("next_page_params") - if next_page_params: - next_cursor = encode_cursor(next_page_params) - pagination = PaginationInfo( - next_call=NextCallInfo( - tool_name="get_token_transfers_by_address", - params={ - "chain_id": chain_id, - "address": address, - "age_from": age_from, - "age_to": age_to, - "token": token, - "cursor": next_cursor, - }, - ) - ) + sliced_items = [AdvancedFilterItem.model_validate(item) for item in sliced_items] - return build_tool_response(data=result_data, pagination=pagination) + return build_tool_response(data=sliced_items, pagination=pagination) async def transaction_summary( @@ -476,6 +556,7 @@ async def get_transaction_logs( Get comprehensive transaction logs. Unlike standard eth_getLogs, this tool returns enriched logs, primarily focusing on decoded event parameters with their types and values (if event decoding is applicable). 
Essential for analyzing smart contract events, tracking token transfers, monitoring DeFi protocol interactions, debugging event emissions, and understanding complex multi-contract transaction flows. + **SUPPORTS PAGINATION**: If response includes 'pagination' field, use the provided next_call to get additional pages. """ # noqa: E501 api_path = f"/api/v2/transactions/{transaction_hash}/logs" params = {} @@ -501,13 +582,14 @@ async def get_transaction_logs( original_items, was_truncated = _process_and_truncate_log_items(response_data.get("items", [])) + log_items_dicts: list[dict] = [] # To preserve the LLM context, only specific fields are added to the response - log_items: list[TransactionLogItem] = [] for item in original_items: + address_value = ( + item.get("address", {}).get("hash") if isinstance(item.get("address"), dict) else item.get("address") + ) curated_item = { - "address": item.get("address", {}).get("hash") - if isinstance(item.get("address"), dict) - else item.get("address"), + "address": address_value, "block_number": item.get("block_number"), "topics": item.get("topics"), "data": item.get("data"), @@ -516,8 +598,7 @@ async def get_transaction_logs( } if item.get("data_truncated"): curated_item["data_truncated"] = True - - log_items.append(TransactionLogItem(**curated_item)) + log_items_dicts.append(curated_item) data_description = [ "Items Structure:", @@ -552,23 +633,15 @@ async def get_transaction_logs( "You would then need to parse the JSON response and find the specific log by its index.", ] - # Since there could be more than one page of logs for the same transaction, - # the pagination information is extracted from API response and added explicitly - # to the tool response - pagination = None - next_page_params = response_data.get("next_page_params") - if next_page_params: - next_cursor = encode_cursor(next_page_params) - pagination = PaginationInfo( - next_call=NextCallInfo( - tool_name="get_transaction_logs", - params={ - "chain_id": 
chain_id, - "transaction_hash": transaction_hash, - "cursor": next_cursor, - }, - ) - ) + sliced_items, pagination = create_items_pagination( + items=log_items_dicts, + page_size=config.logs_page_size, + tool_name="get_transaction_logs", + next_call_base_params={"chain_id": chain_id, "transaction_hash": transaction_hash}, + cursor_extractor=extract_log_cursor_params, + ) + + log_items = [TransactionLogItem(**item) for item in sliced_items] await report_and_log_progress(ctx, progress=2.0, total=2.0, message="Successfully fetched transaction logs.") diff --git a/tests/integration/test_address_tools_integration.py b/tests/integration/test_address_tools_integration.py index de79cd4..e920e6a 100644 --- a/tests/integration/test_address_tools_integration.py +++ b/tests/integration/test_address_tools_integration.py @@ -55,7 +55,7 @@ async def test_get_address_logs_integration(mock_ctx): assert isinstance(result, ToolResponse) assert result.pagination is not None assert isinstance(result.data, list) - assert len(result.data) > 0 + assert 0 < len(result.data) <= 10 first_log = result.data[0] assert isinstance(first_log, AddressLogItem) diff --git a/tests/integration/test_transaction_tools_integration.py b/tests/integration/test_transaction_tools_integration.py index 18d68b8..955af25 100644 --- a/tests/integration/test_transaction_tools_integration.py +++ b/tests/integration/test_transaction_tools_integration.py @@ -58,7 +58,7 @@ async def test_get_transaction_logs_integration(mock_ctx): # 2. Verify the basic structure assert isinstance(result.data, list) - assert len(result.data) > 0 + assert 0 < len(result.data) <= 10 # 3. Validate the schema of the first transformed log item. 
first_log = result.data[0] @@ -242,6 +242,10 @@ async def test_get_transactions_by_address_integration(mock_ctx): items = result.data assert isinstance(items, list) + assert len(items) <= 10 + if len(items) == 10: + assert result.pagination is not None, "Pagination info should be present when a full page is returned." + if not items: pytest.skip("No non-token transactions found for the given address and time range to verify.") @@ -276,6 +280,10 @@ async def test_get_token_transfers_by_address_integration(mock_ctx): items = result.data assert isinstance(items, list) + assert len(items) <= 10 + if len(items) == 10: + assert result.pagination is not None, "Pagination info should be present when a full page is returned." + if not items: pytest.skip("No token transfers found for the given address and time range.") @@ -359,7 +367,7 @@ async def test_get_transaction_logs_paginated_search_for_truncation(mock_ctx): """ tx_hash = "0xa519e3af3f07190727f490c599baf3e65ee335883d6f420b433f7b83f62cb64d" chain_id = "1" - MAX_PAGES_TO_CHECK = 5 + MAX_PAGES_TO_CHECK = 20 cursor = None found_truncated_log = False diff --git a/tests/tools/test_address_logs.py b/tests/tools/test_address_logs.py index d8d4e5c..a8e60be 100644 --- a/tests/tools/test_address_logs.py +++ b/tests/tools/test_address_logs.py @@ -3,7 +3,13 @@ import httpx import pytest -from blockscout_mcp_server.models import AddressLogItem, ToolResponse +from blockscout_mcp_server.config import config +from blockscout_mcp_server.models import ( + AddressLogItem, + NextCallInfo, + PaginationInfo, + ToolResponse, +) from blockscout_mcp_server.tools.address_tools import get_address_logs from blockscout_mcp_server.tools.common import encode_cursor @@ -98,16 +104,24 @@ async def test_get_address_logs_with_pagination(mock_ctx): new_callable=AsyncMock, ) as mock_request, patch("blockscout_mcp_server.tools.address_tools._process_and_truncate_log_items") as mock_process_logs, - 
patch("blockscout_mcp_server.tools.address_tools.encode_cursor") as mock_encode_cursor, + patch("blockscout_mcp_server.tools.address_tools.create_items_pagination") as mock_create_pagination, ): mock_get_url.return_value = mock_base_url mock_request.return_value = mock_api_response mock_process_logs.return_value = (mock_api_response["items"], False) - mock_encode_cursor.return_value = fake_cursor + mock_create_pagination.return_value = ( + mock_api_response["items"], + PaginationInfo( + next_call=NextCallInfo( + tool_name="get_address_logs", + params={"chain_id": chain_id, "address": address, "cursor": fake_cursor}, + ) + ), + ) result = await get_address_logs(chain_id=chain_id, address=address, ctx=mock_ctx) - mock_encode_cursor.assert_called_once_with(mock_api_response["next_page_params"]) + mock_create_pagination.assert_called_once() assert isinstance(result, ToolResponse) assert isinstance(result.data[0], AddressLogItem) assert result.pagination is not None @@ -123,6 +137,38 @@ async def test_get_address_logs_with_pagination(mock_ctx): assert mock_ctx.info.call_count == 3 +@pytest.mark.asyncio +async def test_get_address_logs_custom_page_size(mock_ctx): + chain_id = "1" + address = "0x123" + mock_base_url = "https://eth.blockscout.com" + + mock_api_response = {"items": [{"block_number": i, "index": i} for i in range(10)]} + + with ( + patch( + "blockscout_mcp_server.tools.address_tools.get_blockscout_base_url", + new_callable=AsyncMock, + ) as mock_get_url, + patch( + "blockscout_mcp_server.tools.address_tools.make_blockscout_request", + new_callable=AsyncMock, + ) as mock_request, + patch("blockscout_mcp_server.tools.address_tools._process_and_truncate_log_items") as mock_process_logs, + patch("blockscout_mcp_server.tools.address_tools.create_items_pagination") as mock_create_pagination, + patch.object(config, "logs_page_size", 5), + ): + mock_get_url.return_value = mock_base_url + mock_request.return_value = mock_api_response + mock_process_logs.return_value 
= (mock_api_response["items"], False) + mock_create_pagination.return_value = (mock_api_response["items"][:5], None) + + await get_address_logs(chain_id=chain_id, address=address, ctx=mock_ctx) + + mock_create_pagination.assert_called_once() + assert mock_create_pagination.call_args.kwargs["page_size"] == 5 + + @pytest.mark.asyncio async def test_get_address_logs_with_optional_params(mock_ctx): chain_id = "1" diff --git a/tests/tools/test_common.py b/tests/tools/test_common.py index 64b24e6..e4ee214 100644 --- a/tests/tools/test_common.py +++ b/tests/tools/test_common.py @@ -14,6 +14,7 @@ _recursively_truncate_and_flag_long_strings, apply_cursor_to_params, build_tool_response, + create_items_pagination, decode_cursor, encode_cursor, ) @@ -329,3 +330,219 @@ def test_apply_cursor_to_params_invalid_cursor_raises_value_error(): mock_decode.side_effect = InvalidCursorError with pytest.raises(ValueError, match="Invalid or expired pagination cursor"): apply_cursor_to_params("invalid", params) + + +def test_create_items_pagination_with_more_items(): + """Verify the helper slices the list and creates a pagination object.""" + items = [{"index": i} for i in range(20)] + page_size = 10 + + sliced, pagination = create_items_pagination( + items=items, + page_size=page_size, + tool_name="test_tool", + next_call_base_params={"chain_id": "1"}, + cursor_extractor=lambda item: {"index": item["index"]}, + ) + + assert len(sliced) == page_size + assert sliced[0]["index"] == 0 + assert sliced[-1]["index"] == page_size - 1 + assert pagination is not None + assert pagination.next_call.tool_name == "test_tool" + decoded_cursor = decode_cursor(pagination.next_call.params["cursor"]) + assert decoded_cursor == {"index": page_size - 1} + + +def test_create_items_pagination_with_fewer_items(): + """Verify the helper does nothing when items are below the page size.""" + items = [{"index": i} for i in range(5)] + page_size = 10 + + sliced, pagination = create_items_pagination( + items=items, + 
page_size=page_size, + tool_name="test_tool", + next_call_base_params={"chain_id": "1"}, + cursor_extractor=lambda item: {"index": item["index"]}, + ) + + assert sliced == items + assert pagination is None + + +def test_create_items_pagination_force_pagination_with_fewer_items(): + """Verify force_pagination=True creates pagination even when items are below page size.""" + items = [{"index": i} for i in range(5)] + page_size = 10 + + sliced, pagination = create_items_pagination( + items=items, + page_size=page_size, + tool_name="test_tool", + next_call_base_params={"chain_id": "1"}, + cursor_extractor=lambda item: {"index": item["index"]}, + force_pagination=True, + ) + + assert sliced == items # All items should be returned + assert pagination is not None # Pagination should be created + assert pagination.next_call.tool_name == "test_tool" + decoded_cursor = decode_cursor(pagination.next_call.params["cursor"]) + assert decoded_cursor == {"index": 4} # Last item index + + +def test_create_items_pagination_force_pagination_with_empty_items(): + """Verify force_pagination=True handles empty items list gracefully.""" + items = [] + page_size = 10 + + sliced, pagination = create_items_pagination( + items=items, + page_size=page_size, + tool_name="test_tool", + next_call_base_params={"chain_id": "1"}, + cursor_extractor=lambda item: {"index": item["index"]}, + force_pagination=True, + ) + + assert sliced == [] + assert pagination is None # No pagination when no items + + +def test_create_items_pagination_force_pagination_with_more_items(): + """Verify force_pagination=True behaves normally when items exceed page size.""" + items = [{"index": i} for i in range(20)] + page_size = 10 + + sliced, pagination = create_items_pagination( + items=items, + page_size=page_size, + tool_name="test_tool", + next_call_base_params={"chain_id": "1"}, + cursor_extractor=lambda item: {"index": item["index"]}, + force_pagination=True, + ) + + assert len(sliced) == page_size + assert 
sliced[0]["index"] == 0 + assert sliced[-1]["index"] == page_size - 1 + assert pagination is not None + assert pagination.next_call.tool_name == "test_tool" + decoded_cursor = decode_cursor(pagination.next_call.params["cursor"]) + assert decoded_cursor == {"index": page_size - 1} # Same behavior as normal case + + +def test_create_items_pagination_force_pagination_cursor_generation(): + """Verify force_pagination=True uses the last item for cursor generation.""" + items = [{"block_number": 100, "index": 1}, {"block_number": 200, "index": 2}] + page_size = 10 + + sliced, pagination = create_items_pagination( + items=items, + page_size=page_size, + tool_name="test_tool", + next_call_base_params={"chain_id": "1"}, + cursor_extractor=lambda item: {"block_number": item["block_number"], "index": item["index"]}, + force_pagination=True, + ) + + assert sliced == items + assert pagination is not None + decoded_cursor = decode_cursor(pagination.next_call.params["cursor"]) + assert decoded_cursor == {"block_number": 200, "index": 2} # Last item's data + + +def test_create_items_pagination_normal_cursor_generation(): + """Verify normal pagination uses the item at page_size-1 for cursor generation.""" + items = [{"block_number": 100 + i, "index": i} for i in range(15)] + page_size = 10 + + sliced, pagination = create_items_pagination( + items=items, + page_size=page_size, + tool_name="test_tool", + next_call_base_params={"chain_id": "1"}, + cursor_extractor=lambda item: {"block_number": item["block_number"], "index": item["index"]}, + ) + + assert len(sliced) == page_size + assert pagination is not None + decoded_cursor = decode_cursor(pagination.next_call.params["cursor"]) + # Should use items[page_size - 1] = items[9] = {"block_number": 109, "index": 9} + assert decoded_cursor == {"block_number": 109, "index": 9} + + +def test_create_items_pagination_preserves_base_params(): + """Verify pagination preserves base parameters and adds cursor.""" + items = [{"index": i} for i in 
range(5)] + page_size = 10 + base_params = {"chain_id": "1", "address": "0x123", "other": "value"} + + sliced, pagination = create_items_pagination( + items=items, + page_size=page_size, + tool_name="test_tool", + next_call_base_params=base_params, + cursor_extractor=lambda item: {"index": item["index"]}, + force_pagination=True, + ) + + assert pagination is not None + params = pagination.next_call.params + assert params["chain_id"] == "1" + assert params["address"] == "0x123" + assert params["other"] == "value" + assert "cursor" in params + decoded_cursor = decode_cursor(params["cursor"]) + assert decoded_cursor == {"index": 4} + + +def test_extract_log_cursor_params(): + """Verify the log cursor extractor works correctly.""" + from blockscout_mcp_server.tools.common import extract_log_cursor_params + + complete_item = {"block_number": 123, "index": 7, "data": "0x"} + assert extract_log_cursor_params(complete_item) == {"block_number": 123, "index": 7} + + missing_fields_item = {"data": "0xdead"} + assert extract_log_cursor_params(missing_fields_item) == {"block_number": None, "index": None} + + assert extract_log_cursor_params({}) == {"block_number": None, "index": None} + + +def test_extract_advanced_filters_cursor_params(): + """Verify the advanced filters cursor extractor works correctly.""" + from blockscout_mcp_server.tools.common import ( + extract_advanced_filters_cursor_params, + ) + + item = { + "block_number": 100, + "transaction_index": 5, + "internal_transaction_index": 2, + "token_transfer_batch_index": None, + "token_transfer_index": 1, + "other_field": "ignore", + } + + expected_params = { + "block_number": 100, + "transaction_index": 5, + "internal_transaction_index": 2, + "token_transfer_batch_index": None, + "token_transfer_index": 1, + } + + assert extract_advanced_filters_cursor_params(item) == expected_params + + item_missing = {"block_number": 200} + expected_missing = { + "block_number": 200, + "transaction_index": None, + 
"internal_transaction_index": None, + "token_transfer_batch_index": None, + "token_transfer_index": None, + } + + assert extract_advanced_filters_cursor_params(item_missing) == expected_missing diff --git a/tests/tools/test_transaction_tools.py b/tests/tools/test_transaction_tools.py index d15662b..32a08f7 100644 --- a/tests/tools/test_transaction_tools.py +++ b/tests/tools/test_transaction_tools.py @@ -4,8 +4,11 @@ import httpx import pytest +from blockscout_mcp_server.config import config from blockscout_mcp_server.models import ( AdvancedFilterItem, + NextCallInfo, + PaginationInfo, ToolResponse, TransactionSummaryData, ) @@ -18,10 +21,10 @@ @pytest.mark.asyncio -async def test_get_transactions_by_address_calls_wrapper_correctly(mock_ctx): +async def test_get_transactions_by_address_calls_smart_pagination_correctly(mock_ctx): """ - Verify get_transactions_by_address calls the periodic progress wrapper with correct arguments. - This tests the integration without testing the wrapper's internal logic. + Verify get_transactions_by_address calls the smart pagination function with correct arguments. + This tests the integration without testing the pagination function's internal logic. 
""" # ARRANGE chain_id = "1" @@ -30,19 +33,21 @@ async def test_get_transactions_by_address_calls_wrapper_correctly(mock_ctx): age_to = "2023-01-02T00:00:00.00Z" methods = "0x304e6ade" mock_base_url = "https://eth.blockscout.com" - mock_api_response = {"items": [], "next_page_params": None} + mock_filtered_items = [] + mock_has_more_pages = False - # We patch the wrapper and the base URL getter + # We patch the smart pagination function and the base URL getter with ( patch( "blockscout_mcp_server.tools.transaction_tools.get_blockscout_base_url", new_callable=AsyncMock ) as mock_get_url, patch( - "blockscout_mcp_server.tools.transaction_tools.make_request_with_periodic_progress", new_callable=AsyncMock - ) as mock_wrapper, + "blockscout_mcp_server.tools.transaction_tools._fetch_filtered_transactions_with_smart_pagination", + new_callable=AsyncMock, + ) as mock_smart_pagination, ): mock_get_url.return_value = mock_base_url - mock_wrapper.return_value = mock_api_response + mock_smart_pagination.return_value = (mock_filtered_items, mock_has_more_pages) # ACT result = await get_transactions_by_address( @@ -60,44 +65,31 @@ async def test_get_transactions_by_address_calls_wrapper_correctly(mock_ctx): assert result.data == [] mock_get_url.assert_called_once_with(chain_id) - # Assert that the wrapper was called once - mock_wrapper.assert_called_once() + # Assert that the smart pagination function was called once + mock_smart_pagination.assert_called_once() - # Assert that the wrapper was called with the correct arguments - # This is the most important part of this test. 
- call_args, call_kwargs = mock_wrapper.call_args + # Assert that the smart pagination function was called with the correct arguments + call_args, call_kwargs = mock_smart_pagination.call_args - # Verify the wrapper was called with correct parameters + # Verify the smart pagination function was called with correct parameters + assert call_kwargs["base_url"] == mock_base_url + assert call_kwargs["api_path"] == "/api/v2/advanced-filters" assert call_kwargs["ctx"] == mock_ctx + assert call_kwargs["progress_start_step"] == 2.0 + assert call_kwargs["total_steps"] == 12.0 - # Import the actual function to compare - from blockscout_mcp_server.tools.common import make_blockscout_request - - assert call_kwargs["request_function"] == make_blockscout_request - - # Check the request_args that should be passed to make_blockscout_request - expected_request_args = { - "base_url": mock_base_url, - "api_path": "/api/v2/advanced-filters", - "params": { - "to_address_hashes_to_include": address, - "from_address_hashes_to_include": address, - "age_from": age_from, - "age_to": age_to, - "methods": methods, - }, + # Check the initial_params that should be passed to the smart pagination function + expected_initial_params = { + "to_address_hashes_to_include": address, + "from_address_hashes_to_include": address, + "age_from": age_from, + "age_to": age_to, + "methods": methods, } - assert call_kwargs["request_args"] == expected_request_args + assert call_kwargs["initial_params"] == expected_initial_params - # Verify other wrapper configuration - assert call_kwargs["tool_overall_total_steps"] == 2.0 - assert call_kwargs["current_step_number"] == 2.0 - assert call_kwargs["current_step_message_prefix"] == "Fetching transactions" - assert "total_duration_hint" in call_kwargs - assert "progress_interval_seconds" in call_kwargs - - # Verify progress was reported correctly before the wrapper call - assert mock_ctx.report_progress.call_count == 2 # Start + after URL resolution + # Verify progress 
was reported correctly before the smart pagination call + assert mock_ctx.report_progress.call_count == 3 # Start + after URL resolution + completion @pytest.mark.asyncio @@ -109,18 +101,20 @@ async def test_get_transactions_by_address_minimal_params(mock_ctx): chain_id = "1" address = "0x123abc" mock_base_url = "https://eth.blockscout.com" - mock_api_response = {"items": [{"hash": "0xabc123"}]} + mock_filtered_items = [{"hash": "0xabc123"}] + mock_has_more_pages = False with ( patch( "blockscout_mcp_server.tools.transaction_tools.get_blockscout_base_url", new_callable=AsyncMock ) as mock_get_url, patch( - "blockscout_mcp_server.tools.transaction_tools.make_request_with_periodic_progress", new_callable=AsyncMock - ) as mock_wrapper, + "blockscout_mcp_server.tools.transaction_tools._fetch_filtered_transactions_with_smart_pagination", + new_callable=AsyncMock, + ) as mock_smart_pagination, ): mock_get_url.return_value = mock_base_url - mock_wrapper.return_value = mock_api_response + mock_smart_pagination.return_value = (mock_filtered_items, mock_has_more_pages) # ACT - Only provide required parameters result = await get_transactions_by_address(chain_id=chain_id, address=address, ctx=mock_ctx) @@ -132,16 +126,16 @@ async def test_get_transactions_by_address_minimal_params(mock_ctx): assert isinstance(result.data[0], AdvancedFilterItem) assert result.data[0].model_dump(by_alias=True)["hash"] == "0xabc123" mock_get_url.assert_called_once_with(chain_id) - mock_wrapper.assert_called_once() + mock_smart_pagination.assert_called_once() - # Check that the request_args only include the required parameters - call_args, call_kwargs = mock_wrapper.call_args + # Check that the initial_params only include the required parameters + call_args, call_kwargs = mock_smart_pagination.call_args expected_params = { "to_address_hashes_to_include": address, "from_address_hashes_to_include": address, # No optional parameters should be included } - assert call_kwargs["request_args"]["params"] 
== expected_params + assert call_kwargs["initial_params"] == expected_params @pytest.mark.asyncio @@ -151,31 +145,24 @@ async def test_get_transactions_by_address_transforms_response(mock_ctx): address = "0x123" mock_base_url = "https://eth.blockscout.com" - mock_api_response = { - "items": [ - { - "type": "call", - "from": {"hash": "0xfrom_hash_1"}, - "to": {"hash": "0xto_hash_1"}, - "value": "kept1", - "token": "should be removed", - "total": "should be removed", - }, - { - "type": "ERC-20", - "from": {"hash": "0xfrom_hash_2"}, - "to": {"hash": "0xto_hash_2"}, - "token": {"symbol": "USDC"}, - }, - { - "type": "creation", - "from": {"hash": "0xfrom_hash_3"}, - "to": None, - "value": "kept2", - }, - ], - "next_page_params": None, - } + # Mock the filtered items returned by smart pagination (ERC-20 transactions already filtered out) + mock_filtered_items = [ + { + "type": "call", + "from": {"hash": "0xfrom_hash_1"}, + "to": {"hash": "0xto_hash_1"}, + "value": "kept1", + "token": "should be removed", + "total": "should be removed", + }, + { + "type": "creation", + "from": {"hash": "0xfrom_hash_3"}, + "to": None, + "value": "kept2", + }, + ] + mock_has_more_pages = False expected_items = [ { @@ -195,11 +182,12 @@ async def test_get_transactions_by_address_transforms_response(mock_ctx): "blockscout_mcp_server.tools.transaction_tools.get_blockscout_base_url", new_callable=AsyncMock ) as mock_get_url, patch( - "blockscout_mcp_server.tools.transaction_tools.make_request_with_periodic_progress", new_callable=AsyncMock - ) as mock_wrapper, + "blockscout_mcp_server.tools.transaction_tools._fetch_filtered_transactions_with_smart_pagination", + new_callable=AsyncMock, + ) as mock_smart_pagination, ): mock_get_url.return_value = mock_base_url - mock_wrapper.return_value = mock_api_response + mock_smart_pagination.return_value = (mock_filtered_items, mock_has_more_pages) result = await get_transactions_by_address(chain_id=chain_id, address=address, ctx=mock_ctx) @@ -224,9 +212,8 
@@ async def test_get_transactions_by_address_with_pagination(mock_ctx): address = "0x123abc" mock_base_url = "https://eth.blockscout.com" - mock_api_response = {"items": [], "next_page_params": {"page": 2}} - - fake_cursor = "ENCODED_CURSOR" + mock_filtered_items = [] + mock_has_more_pages = True # This should trigger force_pagination with ( patch( @@ -234,21 +221,61 @@ async def test_get_transactions_by_address_with_pagination(mock_ctx): new_callable=AsyncMock, ) as mock_get_url, patch( - "blockscout_mcp_server.tools.transaction_tools.make_request_with_periodic_progress", + "blockscout_mcp_server.tools.transaction_tools._fetch_filtered_transactions_with_smart_pagination", new_callable=AsyncMock, - ) as mock_wrapper, - patch("blockscout_mcp_server.tools.transaction_tools.encode_cursor") as mock_encode_cursor, + ) as mock_smart_pagination, + patch("blockscout_mcp_server.tools.transaction_tools.create_items_pagination") as mock_create_pagination, ): mock_get_url.return_value = mock_base_url - mock_wrapper.return_value = mock_api_response - mock_encode_cursor.return_value = fake_cursor + mock_smart_pagination.return_value = (mock_filtered_items, mock_has_more_pages) + mock_create_pagination.return_value = ( + [], + PaginationInfo( + next_call=NextCallInfo( + tool_name="get_transactions_by_address", + params={"cursor": "CUR"}, + ) + ), + ) result = await get_transactions_by_address(chain_id=chain_id, address=address, ctx=mock_ctx) - mock_encode_cursor.assert_called_once_with(mock_api_response["next_page_params"]) - assert result.pagination is not None - assert result.pagination.next_call.tool_name == "get_transactions_by_address" - assert result.pagination.next_call.params["cursor"] == fake_cursor + mock_create_pagination.assert_called_once() + # Verify that force_pagination was set to True due to has_more_pages + assert mock_create_pagination.call_args.kwargs["force_pagination"] is True + assert isinstance(result.pagination, PaginationInfo) + + +@pytest.mark.asyncio 
+async def test_get_transactions_by_address_custom_page_size(mock_ctx): + chain_id = "1" + address = "0x123" + mock_base_url = "https://eth.blockscout.com" + + items = [{"block_number": i} for i in range(10)] + mock_filtered_items = items + mock_has_more_pages = False + + with ( + patch( + "blockscout_mcp_server.tools.transaction_tools.get_blockscout_base_url", + new_callable=AsyncMock, + ) as mock_get_url, + patch( + "blockscout_mcp_server.tools.transaction_tools._fetch_filtered_transactions_with_smart_pagination", + new_callable=AsyncMock, + ) as mock_smart_pagination, + patch("blockscout_mcp_server.tools.transaction_tools.create_items_pagination") as mock_create_pagination, + patch.object(config, "advanced_filters_page_size", 5), + ): + mock_get_url.return_value = mock_base_url + mock_smart_pagination.return_value = (mock_filtered_items, mock_has_more_pages) + mock_create_pagination.return_value = (items[:5], None) + + await get_transactions_by_address(chain_id=chain_id, address=address, ctx=mock_ctx) + + mock_create_pagination.assert_called_once() + assert mock_create_pagination.call_args.kwargs["page_size"] == 5 @pytest.mark.asyncio @@ -259,21 +286,24 @@ async def test_get_transactions_by_address_with_cursor_param(mock_ctx): decoded = {"page": 2} mock_base_url = "https://eth.blockscout.com" + mock_filtered_items = [] + mock_has_more_pages = False + with ( patch( "blockscout_mcp_server.tools.transaction_tools.get_blockscout_base_url", new_callable=AsyncMock, ) as mock_get_url, patch( - "blockscout_mcp_server.tools.transaction_tools.make_request_with_periodic_progress", + "blockscout_mcp_server.tools.transaction_tools._fetch_filtered_transactions_with_smart_pagination", new_callable=AsyncMock, - ) as mock_wrapper, + ) as mock_smart_pagination, patch( "blockscout_mcp_server.tools.transaction_tools.apply_cursor_to_params", ) as mock_apply_cursor, ): mock_get_url.return_value = mock_base_url - mock_wrapper.return_value = {"items": []} + 
mock_smart_pagination.return_value = (mock_filtered_items, mock_has_more_pages) mock_apply_cursor.side_effect = lambda cur, params: params.update(decoded) await get_transactions_by_address( @@ -284,8 +314,8 @@ async def test_get_transactions_by_address_with_cursor_param(mock_ctx): ) mock_apply_cursor.assert_called_once_with(cursor, ANY) - call_args, call_kwargs = mock_wrapper.call_args - params = call_kwargs["request_args"]["params"] + call_args, call_kwargs = mock_smart_pagination.call_args + params = call_kwargs["initial_params"] expected_params = { "to_address_hashes_to_include": address, "from_address_hashes_to_include": address, @@ -463,8 +493,7 @@ async def test_get_token_transfers_by_address_with_pagination(mock_ctx): address = "0x123abc" mock_base_url = "https://eth.blockscout.com" - mock_api_response = {"items": [], "next_page_params": {"page": 2}} - fake_cursor = "ENCODED_CURSOR" + mock_api_response = {"items": []} with ( patch( @@ -475,18 +504,55 @@ async def test_get_token_transfers_by_address_with_pagination(mock_ctx): "blockscout_mcp_server.tools.transaction_tools.make_request_with_periodic_progress", new_callable=AsyncMock, ) as mock_wrapper, - patch("blockscout_mcp_server.tools.transaction_tools.encode_cursor") as mock_encode_cursor, + patch("blockscout_mcp_server.tools.transaction_tools.create_items_pagination") as mock_create_pagination, ): mock_get_url.return_value = mock_base_url mock_wrapper.return_value = mock_api_response - mock_encode_cursor.return_value = fake_cursor + mock_create_pagination.return_value = ( + [], + PaginationInfo( + next_call=NextCallInfo( + tool_name="get_token_transfers_by_address", + params={"cursor": "CUR"}, + ) + ), + ) result = await get_token_transfers_by_address(chain_id=chain_id, address=address, ctx=mock_ctx) - mock_encode_cursor.assert_called_once_with(mock_api_response["next_page_params"]) - assert result.pagination is not None - assert result.pagination.next_call.tool_name == "get_token_transfers_by_address" - 
assert result.pagination.next_call.params["cursor"] == fake_cursor + mock_create_pagination.assert_called_once() + assert isinstance(result.pagination, PaginationInfo) + + +@pytest.mark.asyncio +async def test_get_token_transfers_by_address_custom_page_size(mock_ctx): + chain_id = "1" + address = "0x123" + mock_base_url = "https://eth.blockscout.com" + + items = [{"block_number": i} for i in range(10)] + mock_api_response = {"items": items} + + with ( + patch( + "blockscout_mcp_server.tools.transaction_tools.get_blockscout_base_url", + new_callable=AsyncMock, + ) as mock_get_url, + patch( + "blockscout_mcp_server.tools.transaction_tools.make_request_with_periodic_progress", + new_callable=AsyncMock, + ) as mock_wrapper, + patch("blockscout_mcp_server.tools.transaction_tools.create_items_pagination") as mock_create_pagination, + patch.object(config, "advanced_filters_page_size", 5), + ): + mock_get_url.return_value = mock_base_url + mock_wrapper.return_value = mock_api_response + mock_create_pagination.return_value = (items[:5], None) + + await get_token_transfers_by_address(chain_id=chain_id, address=address, ctx=mock_ctx) + + mock_create_pagination.assert_called_once() + assert mock_create_pagination.call_args.kwargs["page_size"] == 5 @pytest.mark.asyncio @@ -534,17 +600,17 @@ async def test_get_token_transfers_by_address_with_cursor_param(mock_ctx): @pytest.mark.asyncio -async def test_get_transactions_by_address_wrapper_error(mock_ctx): +async def test_get_transactions_by_address_smart_pagination_error(mock_ctx): """ - Verify that errors from the periodic progress wrapper are properly propagated. + Verify that errors from the smart pagination function are properly propagated. 
""" # ARRANGE chain_id = "1" address = "0x123abc" mock_base_url = "https://eth.blockscout.com" - # Simulate an error from the wrapper (which could be an API error or timeout) - wrapper_error = httpx.HTTPStatusError( + # Simulate an error from the smart pagination function + smart_pagination_error = httpx.HTTPStatusError( "Service Unavailable", request=MagicMock(), response=MagicMock(status_code=503) ) @@ -553,25 +619,230 @@ async def test_get_transactions_by_address_wrapper_error(mock_ctx): "blockscout_mcp_server.tools.transaction_tools.get_blockscout_base_url", new_callable=AsyncMock ) as mock_get_url, patch( - "blockscout_mcp_server.tools.transaction_tools.make_request_with_periodic_progress", new_callable=AsyncMock - ) as mock_wrapper, + "blockscout_mcp_server.tools.transaction_tools._fetch_filtered_transactions_with_smart_pagination", + new_callable=AsyncMock, + ) as mock_smart_pagination, ): mock_get_url.return_value = mock_base_url - mock_wrapper.side_effect = wrapper_error + mock_smart_pagination.side_effect = smart_pagination_error # ACT & ASSERT - with pytest.raises(httpx.HTTPStatusError): - await get_transactions_by_address(chain_id=chain_id, address=address, ctx=mock_ctx) + with pytest.raises(httpx.HTTPStatusError) as exc_info: + await get_transactions_by_address( + chain_id=chain_id, + address=address, + ctx=mock_ctx, + ) - # Verify the chain lookup succeeded - mock_get_url.assert_called_once_with(chain_id) + assert exc_info.value == smart_pagination_error + mock_smart_pagination.assert_called_once() - # Verify the wrapper was called and failed - mock_wrapper.assert_called_once() - # Progress should have been reported twice (start + after URL resolution) before the wrapper error - assert mock_ctx.report_progress.call_count == 2 - assert mock_ctx.info.call_count == 2 +@pytest.mark.asyncio +async def test_get_transactions_by_address_sparse_data_scenario(mock_ctx): + """ + Test handling of scenarios where most transactions are filtered out, requiring 
multiple page fetches. + + This test simulates a realistic sparse data scenario where the API returns many pages + of mostly filtered transactions (ERC-20, ERC-721, etc.) but only a few valid transactions + per page, requiring the smart pagination to accumulate results across multiple pages. + """ + # ARRANGE + chain_id = "1" + address = "0x123abc" + mock_base_url = "https://eth.blockscout.com" + + # Create a scenario where we have many filtered transactions but few valid ones + # This simulates a real-world scenario where an address has many token transfers + # but few direct contract calls + + # Each page has 20 transactions but only 1-2 are valid (not filtered) + page_responses = [] + valid_transactions = [] + + for page_num in range(1, 6): # 5 pages total + page_items = [] + + # Add 1-2 valid transactions per page + for i in range(1, 3): # 2 valid transactions per page + valid_tx = { + "type": "call", + "hash": f"0x{page_num}_{i}", + "block_number": 1000 - (page_num * 10) - i, + "from": "0xfrom", + "to": "0xto", + "value": "1000000000000000000", + } + page_items.append(valid_tx) + valid_transactions.append(valid_tx) + + # Add many filtered transactions to simulate sparse data + for i in range(18): # 18 filtered transactions per page + filtered_tx = { + "type": "ERC-20", # Will be filtered out + "hash": f"0xfiltered_{page_num}_{i}", + "block_number": 1000 - (page_num * 10) - i - 10, + "from": "0xfrom", + "to": "0xto", + "token": {"symbol": "USDC"}, + "total": "1000000", + } + page_items.append(filtered_tx) + + page_responses.append( + {"items": page_items, "next_page_params": {"page": page_num + 1} if page_num < 5 else None} + ) + + # Mock the smart pagination function to return accumulated results + # In real implementation, this would be handled by _fetch_filtered_transactions_with_smart_pagination + accumulated_valid_transactions = valid_transactions[:10] # First 10 valid transactions + has_more_pages = True + + with ( + patch( + 
"blockscout_mcp_server.tools.transaction_tools.get_blockscout_base_url", new_callable=AsyncMock + ) as mock_get_url, + patch( + "blockscout_mcp_server.tools.transaction_tools._fetch_filtered_transactions_with_smart_pagination", + new_callable=AsyncMock, + ) as mock_smart_pagination, + patch.object(config, "advanced_filters_page_size", 10), + ): + mock_get_url.return_value = mock_base_url + mock_smart_pagination.return_value = (accumulated_valid_transactions, has_more_pages) + + # ACT + result = await get_transactions_by_address( + chain_id=chain_id, + address=address, + ctx=mock_ctx, + ) + + # ASSERT + # Should have called smart pagination function + mock_smart_pagination.assert_called_once() + + # Verify the call arguments to smart pagination + call_args = mock_smart_pagination.call_args + assert call_args[1]["base_url"] == mock_base_url + assert call_args[1]["api_path"] == "/api/v2/advanced-filters" + assert call_args[1]["target_page_size"] == 10 + assert call_args[1]["ctx"] == mock_ctx + + # Should return exactly 10 transactions (page size) + assert len(result.data) == 10 + + # Should have pagination since has_more_pages is True + assert result.pagination is not None + assert result.pagination.next_call.tool_name == "get_transactions_by_address" + + # All returned transactions should be valid (not filtered) + assert all(item.type == "call" for item in result.data) + + # Verify transactions are properly transformed + assert result.data[0].hash == "0x1_1" # First valid transaction + assert result.data[1].hash == "0x1_2" # Second valid transaction + assert result.data[2].hash == "0x2_1" # Third valid transaction + + # Verify no filtered transactions made it through + assert all(hasattr(item, "token") is False for item in result.data) # token field should be removed + + +@pytest.mark.asyncio +async def test_get_transactions_by_address_multi_page_progress_reporting(mock_ctx): + """ + Test that progress is correctly reported during multi-page fetching operations. 
+ + This test verifies that the enhanced progress reporting system correctly tracks + and reports progress through all phases of the multi-page smart pagination: + 1. Initial operation start (step 0) + 2. URL resolution (step 1) + 3. Multi-page fetching (steps 2-11, handled by smart pagination) + 4. Final completion (step 12) + """ + # ARRANGE + chain_id = "1" + address = "0x123abc" + mock_base_url = "https://eth.blockscout.com" + + # Mock data that would be returned by smart pagination + mock_transactions = [ + {"type": "call", "hash": f"0x{i}", "block_number": 1000 - i, "from": "0xfrom", "to": "0xto"} for i in range(5) + ] + has_more_pages = False + + with ( + patch( + "blockscout_mcp_server.tools.transaction_tools.get_blockscout_base_url", new_callable=AsyncMock + ) as mock_get_url, + patch( + "blockscout_mcp_server.tools.transaction_tools._fetch_filtered_transactions_with_smart_pagination", + new_callable=AsyncMock, + ) as mock_smart_pagination, + patch( + "blockscout_mcp_server.tools.transaction_tools.report_and_log_progress", new_callable=AsyncMock + ) as mock_progress, + patch.object(config, "advanced_filters_page_size", 10), + ): + mock_get_url.return_value = mock_base_url + mock_smart_pagination.return_value = (mock_transactions, has_more_pages) + + # ACT + result = await get_transactions_by_address( + chain_id=chain_id, + address=address, + ctx=mock_ctx, + ) + + # ASSERT + # Verify the result is correct + assert len(result.data) == 5 + assert result.pagination is None # No pagination since has_more_pages is False + + # Verify progress reporting was called correctly + progress_calls = mock_progress.call_args_list + + # Should have exactly 3 progress reports from get_transactions_by_address: + # 1. Initial start (progress=0.0, total=12.0) + # 2. After URL resolution (progress=1.0, total=12.0) + # 3. 
Final completion (progress=12.0, total=12.0) + assert len(progress_calls) == 3 + + # Each call structure: call(ctx, progress=X, total=Y, message=Z) + # args are in call_args_list[i][0] tuple (just ctx) + # kwargs are in call_args_list[i][1] dict (progress, total, message) + + # Verify initial progress report (step 0) + initial_call_args, initial_call_kwargs = progress_calls[0] + assert initial_call_args[0] == mock_ctx # ctx + assert initial_call_kwargs["progress"] == 0.0 + assert initial_call_kwargs["total"] == 12.0 + assert "Starting to fetch transactions" in initial_call_kwargs["message"] + assert address in initial_call_kwargs["message"] + assert chain_id in initial_call_kwargs["message"] + + # Verify URL resolution progress (step 1) + url_resolution_call_args, url_resolution_call_kwargs = progress_calls[1] + assert url_resolution_call_args[0] == mock_ctx # ctx + assert url_resolution_call_kwargs["progress"] == 1.0 + assert url_resolution_call_kwargs["total"] == 12.0 + assert "Resolved Blockscout instance URL" in url_resolution_call_kwargs["message"] + + # Verify final completion progress (step 12) + completion_call_args, completion_call_kwargs = progress_calls[2] + assert completion_call_args[0] == mock_ctx # ctx + assert completion_call_kwargs["progress"] == 12.0 + assert completion_call_kwargs["total"] == 12.0 + assert "Successfully fetched transaction data" in completion_call_kwargs["message"] + + # Verify smart pagination was called with correct progress parameters + smart_pagination_call_args = mock_smart_pagination.call_args[1] + assert smart_pagination_call_args["progress_start_step"] == 2.0 + assert smart_pagination_call_args["total_steps"] == 12.0 + assert smart_pagination_call_args["ctx"] == mock_ctx + + # Note: The smart pagination function internally handles progress reporting + # for steps 2-11 using make_request_with_periodic_progress for each page fetch @pytest.mark.asyncio diff --git a/tests/tools/test_transaction_tools_2.py 
b/tests/tools/test_transaction_tools_2.py index b247161..2d396e1 100644 --- a/tests/tools/test_transaction_tools_2.py +++ b/tests/tools/test_transaction_tools_2.py @@ -383,7 +383,7 @@ async def test_get_transaction_logs_success(mock_ctx): """ # ARRANGE chain_id = "1" - hash = "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef" + tx_hash = "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef" mock_base_url = "https://eth.blockscout.com" mock_api_response = { @@ -393,7 +393,7 @@ async def test_get_transaction_logs_success(mock_ctx): "topics": ["0xtopic1...", "0xtopic2..."], "data": "0xdata123...", "log_index": "0", - "transaction_hash": hash, + "transaction_hash": tx_hash, "block_number": 19000000, "block_hash": "0xblockhash1...", "decoded": {"name": "EventA"}, @@ -404,7 +404,7 @@ async def test_get_transaction_logs_success(mock_ctx): "topics": ["0xtopic3..."], "data": "0xdata456...", "log_index": "1", - "transaction_hash": hash, + "transaction_hash": tx_hash, "block_number": 19000000, "block_hash": "0xblockhash2...", "decoded": {"name": "EventB"}, @@ -447,12 +447,12 @@ async def test_get_transaction_logs_success(mock_ctx): mock_request.return_value = mock_api_response # ACT - result = await get_transaction_logs(chain_id=chain_id, transaction_hash=hash, ctx=mock_ctx) + result = await get_transaction_logs(chain_id=chain_id, transaction_hash=tx_hash, ctx=mock_ctx) # ASSERT mock_get_url.assert_called_once_with(chain_id) mock_request.assert_called_once_with( - base_url=mock_base_url, api_path=f"/api/v2/transactions/{hash}/logs", params={} + base_url=mock_base_url, api_path=f"/api/v2/transactions/{tx_hash}/logs", params={} ) assert isinstance(result, ToolResponse) diff --git a/tests/tools/test_transaction_tools_3.py b/tests/tools/test_transaction_tools_3.py index 71902bf..6782fcd 100644 --- a/tests/tools/test_transaction_tools_3.py +++ b/tests/tools/test_transaction_tools_3.py @@ -4,7 +4,13 @@ import httpx import pytest -from 
blockscout_mcp_server.models import ToolResponse, TransactionLogItem +from blockscout_mcp_server.config import config +from blockscout_mcp_server.models import ( + NextCallInfo, + PaginationInfo, + ToolResponse, + TransactionLogItem, +) from blockscout_mcp_server.tools.common import encode_cursor from blockscout_mcp_server.tools.transaction_tools import get_transaction_logs @@ -16,7 +22,7 @@ async def test_get_transaction_logs_empty_logs(mock_ctx): """ # ARRANGE chain_id = "1" - hash = "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef" + tx_hash = "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef" mock_base_url = "https://eth.blockscout.com" mock_api_response = {"items": []} @@ -37,12 +43,12 @@ async def test_get_transaction_logs_empty_logs(mock_ctx): mock_process_logs.return_value = (mock_api_response["items"], False) # ACT - result = await get_transaction_logs(chain_id=chain_id, transaction_hash=hash, ctx=mock_ctx) + result = await get_transaction_logs(chain_id=chain_id, transaction_hash=tx_hash, ctx=mock_ctx) # ASSERT mock_get_url.assert_called_once_with(chain_id) mock_request.assert_called_once_with( - base_url=mock_base_url, api_path=f"/api/v2/transactions/{hash}/logs", params={} + base_url=mock_base_url, api_path=f"/api/v2/transactions/{tx_hash}/logs", params={} ) mock_process_logs.assert_called_once_with(mock_api_response["items"]) assert isinstance(result, ToolResponse) @@ -60,7 +66,7 @@ async def test_get_transaction_logs_api_error(mock_ctx): """ # ARRANGE chain_id = "1" - hash = "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef" + tx_hash = "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef" mock_base_url = "https://eth.blockscout.com" api_error = httpx.HTTPStatusError("Internal Server Error", request=MagicMock(), response=MagicMock(status_code=500)) @@ -78,11 +84,11 @@ async def test_get_transaction_logs_api_error(mock_ctx): # ACT & ASSERT with pytest.raises(httpx.HTTPStatusError): - 
await get_transaction_logs(chain_id=chain_id, transaction_hash=hash, ctx=mock_ctx) + await get_transaction_logs(chain_id=chain_id, transaction_hash=tx_hash, ctx=mock_ctx) mock_get_url.assert_called_once_with(chain_id) mock_request.assert_called_once_with( - base_url=mock_base_url, api_path=f"/api/v2/transactions/{hash}/logs", params={} + base_url=mock_base_url, api_path=f"/api/v2/transactions/{tx_hash}/logs", params={} ) @@ -93,7 +99,7 @@ async def test_get_transaction_logs_complex_logs(mock_ctx): """ # ARRANGE chain_id = "1" - hash = "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef" + tx_hash = "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef" mock_base_url = "https://eth.blockscout.com" mock_api_response = { @@ -107,7 +113,7 @@ async def test_get_transaction_logs_complex_logs(mock_ctx): ], "data": "0x0000000000000000000000000000000000000000000000000de0b6b3a7640000", "log_index": "42", - "transaction_hash": hash, + "transaction_hash": tx_hash, "block_number": 19000000, "block_hash": "0xblock123...", "transaction_index": 10, @@ -145,12 +151,12 @@ async def test_get_transaction_logs_complex_logs(mock_ctx): mock_request.return_value = mock_api_response # ACT - result = await get_transaction_logs(chain_id=chain_id, transaction_hash=hash, ctx=mock_ctx) + result = await get_transaction_logs(chain_id=chain_id, transaction_hash=tx_hash, ctx=mock_ctx) # ASSERT mock_get_url.assert_called_once_with(chain_id) mock_request.assert_called_once_with( - base_url=mock_base_url, api_path=f"/api/v2/transactions/{hash}/logs", params={} + base_url=mock_base_url, api_path=f"/api/v2/transactions/{tx_hash}/logs", params={} ) assert isinstance(result, ToolResponse) assert result.pagination is None @@ -171,7 +177,7 @@ async def test_get_transaction_logs_complex_logs(mock_ctx): async def test_get_transaction_logs_with_pagination(mock_ctx): """Verify pagination hint is included when next_page_params present.""" chain_id = "1" - hash = "0xabc123" + tx_hash 
= "0xabc123" mock_base_url = "https://eth.blockscout.com" mock_api_response = { @@ -181,7 +187,7 @@ async def test_get_transaction_logs_with_pagination(mock_ctx): "topics": [], "data": "0x", "log_index": "0", - "transaction_hash": hash, + "transaction_hash": tx_hash, "block_number": 1, "decoded": None, "index": 0, @@ -213,16 +219,34 @@ async def test_get_transaction_logs_with_pagination(mock_ctx): new_callable=AsyncMock, ) as mock_request, patch("blockscout_mcp_server.tools.transaction_tools._process_and_truncate_log_items") as mock_process_logs, - patch("blockscout_mcp_server.tools.transaction_tools.encode_cursor") as mock_encode_cursor, + patch("blockscout_mcp_server.tools.transaction_tools.create_items_pagination") as mock_create_pagination, ): mock_get_url.return_value = mock_base_url mock_request.return_value = mock_api_response mock_process_logs.return_value = (mock_api_response["items"], False) - mock_encode_cursor.return_value = fake_cursor + curated_dicts = [ + { + "address": "0xcontract1", + "block_number": 1, + "topics": [], + "data": "0x", + "decoded": None, + "index": 0, + } + ] + mock_create_pagination.return_value = ( + curated_dicts, + PaginationInfo( + next_call=NextCallInfo( + tool_name="get_transaction_logs", + params={"chain_id": chain_id, "transaction_hash": tx_hash, "cursor": fake_cursor}, + ) + ), + ) - result = await get_transaction_logs(chain_id=chain_id, transaction_hash=hash, ctx=mock_ctx) + result = await get_transaction_logs(chain_id=chain_id, transaction_hash=tx_hash, ctx=mock_ctx) - mock_encode_cursor.assert_called_once_with(mock_api_response["next_page_params"]) + mock_create_pagination.assert_called_once() assert isinstance(result, ToolResponse) actual = result.data[0] expected = expected_log_items[0] @@ -237,7 +261,7 @@ async def test_get_transaction_logs_with_pagination(mock_ctx): mock_get_url.assert_called_once_with(chain_id) mock_request.assert_called_once_with( base_url=mock_base_url, - 
api_path=f"/api/v2/transactions/{hash}/logs", + api_path=f"/api/v2/transactions/{tx_hash}/logs", params={}, ) mock_process_logs.assert_called_once_with(mock_api_response["items"]) @@ -249,7 +273,7 @@ async def test_get_transaction_logs_with_pagination(mock_ctx): async def test_get_transaction_logs_with_cursor(mock_ctx): """Verify provided cursor is decoded and used in request.""" chain_id = "1" - hash = "0xabc123" + tx_hash = "0xabc123" mock_base_url = "https://eth.blockscout.com" decoded_params = {"block_number": 42, "index": 1, "items_count": 25} @@ -272,12 +296,12 @@ async def test_get_transaction_logs_with_cursor(mock_ctx): mock_request.return_value = mock_api_response mock_process_logs.return_value = (mock_api_response["items"], False) - result = await get_transaction_logs(chain_id=chain_id, transaction_hash=hash, cursor=cursor, ctx=mock_ctx) + result = await get_transaction_logs(chain_id=chain_id, transaction_hash=tx_hash, cursor=cursor, ctx=mock_ctx) mock_get_url.assert_called_once_with(chain_id) mock_request.assert_called_once_with( base_url=mock_base_url, - api_path=f"/api/v2/transactions/{hash}/logs", + api_path=f"/api/v2/transactions/{tx_hash}/logs", params=decoded_params, ) mock_process_logs.assert_called_once_with(mock_api_response["items"]) @@ -309,7 +333,7 @@ async def test_get_transaction_logs_with_truncation_note(mock_ctx): """Verify the truncation note is added when the helper indicates truncation.""" # ARRANGE chain_id = "1" - hash = "0xabc123" + tx_hash = "0xabc123" mock_base_url = "https://eth.blockscout.com" truncated_item = {"data": "0xlong...", "data_truncated": True} mock_api_response = {"items": [truncated_item]} @@ -328,7 +352,7 @@ async def test_get_transaction_logs_with_truncation_note(mock_ctx): mock_process_logs.return_value = ([truncated_item], True) # ACT - result = await get_transaction_logs(chain_id=chain_id, transaction_hash=hash, ctx=mock_ctx) + result = await get_transaction_logs(chain_id=chain_id, transaction_hash=tx_hash, 
ctx=mock_ctx) # ASSERT expected_log_items = [ @@ -361,7 +385,7 @@ async def test_get_transaction_logs_with_truncation_note(mock_ctx): async def test_get_transaction_logs_with_decoded_truncation_note(mock_ctx): """Verify truncation note appears when decoded data is truncated.""" chain_id = "1" - hash = "0xabc123" + tx_hash = "0xabc123" mock_base_url = "https://eth.blockscout.com" truncated_item = { @@ -392,7 +416,7 @@ async def test_get_transaction_logs_with_decoded_truncation_note(mock_ctx): mock_request.return_value = mock_api_response mock_process_logs.return_value = ([truncated_item], True) - result = await get_transaction_logs(chain_id=chain_id, transaction_hash=hash, ctx=mock_ctx) + result = await get_transaction_logs(chain_id=chain_id, transaction_hash=tx_hash, ctx=mock_ctx) expected_log_items = [ TransactionLogItem( @@ -417,3 +441,35 @@ async def test_get_transaction_logs_with_decoded_truncation_note(mock_ctx): assert result.notes is not None assert "One or more log items" in result.notes[0] assert actual.model_extra.get("data_truncated") is None + + +@pytest.mark.asyncio +async def test_get_transaction_logs_custom_page_size(mock_ctx): + chain_id = "1" + tx_hash = "0xabc" + mock_base_url = "https://eth.blockscout.com" + + mock_api_response = {"items": [{"block_number": i, "index": i} for i in range(10)]} + + with ( + patch( + "blockscout_mcp_server.tools.transaction_tools.get_blockscout_base_url", + new_callable=AsyncMock, + ) as mock_get_url, + patch( + "blockscout_mcp_server.tools.transaction_tools.make_blockscout_request", + new_callable=AsyncMock, + ) as mock_request, + patch("blockscout_mcp_server.tools.transaction_tools._process_and_truncate_log_items") as mock_process_logs, + patch("blockscout_mcp_server.tools.transaction_tools.create_items_pagination") as mock_create_pagination, + patch.object(config, "logs_page_size", 5), + ): + mock_get_url.return_value = mock_base_url + mock_request.return_value = mock_api_response + mock_process_logs.return_value 
= (mock_api_response["items"], False) + mock_create_pagination.return_value = (mock_api_response["items"][:5], None) + + await get_transaction_logs(chain_id=chain_id, transaction_hash=tx_hash, ctx=mock_ctx) + + mock_create_pagination.assert_called_once() + assert mock_create_pagination.call_args.kwargs["page_size"] == 5 diff --git a/tests/tools/test_transaction_tools_helpers.py b/tests/tools/test_transaction_tools_helpers.py index 95cde93..0e253a0 100644 --- a/tests/tools/test_transaction_tools_helpers.py +++ b/tests/tools/test_transaction_tools_helpers.py @@ -152,3 +152,41 @@ def test_transform_item_with_missing_keys(): } assert _transform_advanced_filter_item(raw_item, fields_to_remove) == expected + + +def test_transformation_preserves_unknown_fields(): + """ + Test that transformation preserves fields not in the removal list. + """ + # ARRANGE + item_with_extra_fields = { + "type": "call", + "hash": "0x1", + "from": {"hash": "0xfrom"}, + "to": {"hash": "0xto"}, + "value": "1000", + "gas_used": "21000", + "custom_field": "should_be_kept", + "token": "should_be_removed", + "timestamp": "2024-01-01T00:00:00Z", + } + fields_to_remove = ["token"] + + # ACT + transformed = _transform_advanced_filter_item(item_with_extra_fields, fields_to_remove) + + # ASSERT + # Standard transformations applied + assert transformed["from"] == "0xfrom" + assert transformed["to"] == "0xto" + + # Specified field removed + assert "token" not in transformed + + # Other fields preserved + assert transformed["type"] == "call" + assert transformed["hash"] == "0x1" + assert transformed["value"] == "1000" + assert transformed["gas_used"] == "21000" + assert transformed["custom_field"] == "should_be_kept" + assert transformed["timestamp"] == "2024-01-01T00:00:00Z" diff --git a/tests/tools/test_transaction_tools_pagination.py b/tests/tools/test_transaction_tools_pagination.py new file mode 100644 index 0000000..e9e2a86 --- /dev/null +++ b/tests/tools/test_transaction_tools_pagination.py @@ -0,0 
+1,293 @@ +""" +Tests for enhanced pagination functionality in transaction tools. + +This module tests the multi-page fetching strategy introduced in issue-130 +where the server fetches up to 10 full-size pages when filtering results. +""" + +from unittest.mock import AsyncMock, patch + +import pytest + +from blockscout_mcp_server.config import config +from blockscout_mcp_server.tools.transaction_tools import get_transactions_by_address + + +@pytest.mark.asyncio +async def test_get_transactions_by_address_multi_page_fetching(mock_ctx): + """ + Test that get_transactions_by_address fetches multiple pages when initial results are sparse due to filtering. + + This test simulates the scenario where each page contains mostly filtered-out transactions + (ERC-20, ERC-721, etc.) but some valid transactions, requiring multiple pages to accumulate + enough results for pagination decision. + """ + # ARRANGE + chain_id = "1" + address = "0x123abc" + mock_base_url = "https://eth.blockscout.com" + + # Mock page 1: mostly filtered transactions with 2 valid ones + page1_items = [ + {"type": "call", "hash": "0x1", "block_number": 100}, + {"type": "ERC-20", "hash": "0x2", "block_number": 99}, # will be filtered out + {"type": "call", "hash": "0x3", "block_number": 98}, + {"type": "ERC-721", "hash": "0x4", "block_number": 97}, # will be filtered out + {"type": "ERC-1155", "hash": "0x5", "block_number": 96}, # will be filtered out + ] + + # Mock page 2: mostly filtered transactions with 2 valid ones + page2_items = [ + {"type": "call", "hash": "0x6", "block_number": 95}, + {"type": "ERC-20", "hash": "0x7", "block_number": 94}, # will be filtered out + {"type": "call", "hash": "0x8", "block_number": 93}, + {"type": "ERC-404", "hash": "0x9", "block_number": 92}, # will be filtered out + ] + + # Mock page 3: enough valid transactions to trigger pagination + page3_items = [ + {"type": "call", "hash": "0x10", "block_number": 91}, + {"type": "call", "hash": "0x11", "block_number": 90}, + 
{"type": "call", "hash": "0x12", "block_number": 89}, + {"type": "call", "hash": "0x13", "block_number": 88}, + {"type": "call", "hash": "0x14", "block_number": 87}, + {"type": "call", "hash": "0x15", "block_number": 86}, + {"type": "call", "hash": "0x16", "block_number": 85}, + {"type": "call", "hash": "0x17", "block_number": 84}, + {"type": "call", "hash": "0x18", "block_number": 83}, + {"type": "call", "hash": "0x19", "block_number": 82}, + ] + + # Expected API responses for each page + api_responses = [ + {"items": page1_items, "next_page_params": {"page": 2}}, + {"items": page2_items, "next_page_params": {"page": 3}}, + {"items": page3_items, "next_page_params": {"page": 4}}, + ] + + with ( + patch( + "blockscout_mcp_server.tools.transaction_tools.get_blockscout_base_url", new_callable=AsyncMock + ) as mock_get_url, + patch( + "blockscout_mcp_server.tools.transaction_tools.make_request_with_periodic_progress", new_callable=AsyncMock + ) as mock_request, + patch.object(config, "advanced_filters_page_size", 10), + ): + mock_get_url.return_value = mock_base_url + mock_request.side_effect = api_responses + + # ACT + result = await get_transactions_by_address( + chain_id=chain_id, + address=address, + ctx=mock_ctx, + ) + + # ASSERT + # Should have called make_request_with_periodic_progress 3 times (3 pages) + assert mock_request.call_count == 3 + + # Should have accumulated filtered transactions from all 3 pages + # Page 1: 2 valid transactions (types "call") + # Page 2: 2 valid transactions (types "call") + # Page 3: 10 valid transactions (types "call") + # Total: 14 valid transactions + assert len(result.data) == 10 # Should be sliced to page_size + + # Should have pagination since we have more than page_size valid transactions + assert result.pagination is not None + + # Verify the transactions are properly transformed and ordered + assert all(item.type == "call" for item in result.data) + assert result.data[0].hash == "0x1" # First transaction from page 1 + 
assert result.data[1].hash == "0x3" # Second valid transaction from page 1 + + +@pytest.mark.asyncio +async def test_get_transactions_by_address_stops_at_10_pages(mock_ctx): + """ + Test that get_transactions_by_address stops fetching at 10 pages maximum. + + This test ensures that even if there are more pages available, the function + will stop at 10 pages to prevent infinite loops or excessive API calls. + """ + # ARRANGE + chain_id = "1" + address = "0x123abc" + mock_base_url = "https://eth.blockscout.com" + + # Create 10 pages of sparse results (1 valid transaction per page) + api_responses = [] + for i in range(10): + page_items = [ + {"type": "call", "hash": f"0x{i + 1}", "block_number": 100 - i}, + {"type": "ERC-20", "hash": f"0x{i + 100}", "block_number": 99 - i}, # filtered out + {"type": "ERC-721", "hash": f"0x{i + 200}", "block_number": 98 - i}, # filtered out + ] + api_responses.append( + { + "items": page_items, + "next_page_params": {"page": i + 2}, # Always indicate more pages + } + ) + + with ( + patch( + "blockscout_mcp_server.tools.transaction_tools.get_blockscout_base_url", new_callable=AsyncMock + ) as mock_get_url, + patch( + "blockscout_mcp_server.tools.transaction_tools.make_request_with_periodic_progress", new_callable=AsyncMock + ) as mock_request, + patch.object( + config, "advanced_filters_page_size", 20 + ), # Large page size to ensure we don't hit pagination limit + ): + mock_get_url.return_value = mock_base_url + mock_request.side_effect = api_responses + + # ACT + result = await get_transactions_by_address( + chain_id=chain_id, + address=address, + ctx=mock_ctx, + ) + + # ASSERT + # Should have called make_request_with_periodic_progress exactly 10 times (max pages) + assert mock_request.call_count == 10 + + # Should have accumulated 10 valid transactions (1 per page) + assert len(result.data) == 10 + + # Should still have pagination since we stopped at max pages and had next_page_params + assert result.pagination is not None + + # 
Verify all transactions are valid (not filtered out) + assert all(item.type == "call" for item in result.data) + assert result.data[0].hash == "0x1" + assert result.data[9].hash == "0x10" + + +@pytest.mark.asyncio +async def test_get_transactions_by_address_single_page_sufficient(mock_ctx): + """ + Test that get_transactions_by_address works correctly when a single page has sufficient results. + + This test ensures that when the first page contains enough valid transactions + after filtering, no additional pages are fetched. + """ + # ARRANGE + chain_id = "1" + address = "0x123abc" + mock_base_url = "https://eth.blockscout.com" + + # Create a single page with enough valid transactions + page_items = [{"type": "call", "hash": f"0x{i + 1}", "block_number": 100 - i} for i in range(15)] + + # Add some filtered transactions to ensure filtering works + page_items.extend( + [ + {"type": "ERC-20", "hash": "0x100", "block_number": 85}, + {"type": "ERC-721", "hash": "0x101", "block_number": 84}, + ] + ) + + api_response = { + "items": page_items, + "next_page_params": {"page": 2}, # Indicate more pages available + } + + with ( + patch( + "blockscout_mcp_server.tools.transaction_tools.get_blockscout_base_url", new_callable=AsyncMock + ) as mock_get_url, + patch( + "blockscout_mcp_server.tools.transaction_tools.make_request_with_periodic_progress", new_callable=AsyncMock + ) as mock_request, + patch.object(config, "advanced_filters_page_size", 10), + ): + mock_get_url.return_value = mock_base_url + mock_request.return_value = api_response + + # ACT + result = await get_transactions_by_address( + chain_id=chain_id, + address=address, + ctx=mock_ctx, + ) + + # ASSERT + # Should have called make_request_with_periodic_progress only once + assert mock_request.call_count == 1 + + # Should have 10 transactions (page size limit) + assert len(result.data) == 10 + + # Should have pagination since we have more than page_size valid transactions + assert result.pagination is not None + + # 
Verify all returned transactions are valid (not filtered out) + assert all(item.type == "call" for item in result.data) + + +@pytest.mark.asyncio +async def test_get_transactions_by_address_no_more_pages_available(mock_ctx): + """ + Test that get_transactions_by_address correctly handles the case when no more pages are available. + + This test ensures that when the API indicates no more pages (next_page_params is None), + the function returns all available results without pagination. + """ + # ARRANGE + chain_id = "1" + address = "0x123abc" + mock_base_url = "https://eth.blockscout.com" + + # Create a single page with few valid transactions and no next page + page_items = [ + {"type": "call", "hash": "0x1", "block_number": 100}, + {"type": "call", "hash": "0x2", "block_number": 99}, + {"type": "ERC-20", "hash": "0x3", "block_number": 98}, # filtered out + {"type": "call", "hash": "0x4", "block_number": 97}, + ] + + api_response = { + "items": page_items, + "next_page_params": None, # No more pages available + } + + with ( + patch( + "blockscout_mcp_server.tools.transaction_tools.get_blockscout_base_url", new_callable=AsyncMock + ) as mock_get_url, + patch( + "blockscout_mcp_server.tools.transaction_tools.make_request_with_periodic_progress", new_callable=AsyncMock + ) as mock_request, + patch.object(config, "advanced_filters_page_size", 10), + ): + mock_get_url.return_value = mock_base_url + mock_request.return_value = api_response + + # ACT + result = await get_transactions_by_address( + chain_id=chain_id, + address=address, + ctx=mock_ctx, + ) + + # ASSERT + # Should have called make_request_with_periodic_progress only once + assert mock_request.call_count == 1 + + # Should have 3 valid transactions (filtered out 1 ERC-20) + assert len(result.data) == 3 + + # Should NOT have pagination since we have fewer than page_size and no more pages + assert result.pagination is None + + # Verify all returned transactions are valid (not filtered out) + assert all(item.type 
== "call" for item in result.data) + assert result.data[0].hash == "0x1" + assert result.data[1].hash == "0x2" + assert result.data[2].hash == "0x4" From 1c13bb139a9397fbb2968472ef61282f7477df99 Mon Sep 17 00:00:00 2001 From: Alexander Kolotov Date: Tue, 8 Jul 2025 04:27:23 +0000 Subject: [PATCH 4/4] `nft_tokens_by_address` refactored to use `create_items_pagination` --- blockscout_mcp_server/tools/address_tools.py | 109 ++++++++------- tests/tools/test_address_tools_2.py | 136 +++++++++++++++---- 2 files changed, 171 insertions(+), 74 deletions(-) diff --git a/blockscout_mcp_server/tools/address_tools.py b/blockscout_mcp_server/tools/address_tools.py index 7f8b347..61bbee8 100644 --- a/blockscout_mcp_server/tools/address_tools.py +++ b/blockscout_mcp_server/tools/address_tools.py @@ -167,6 +167,21 @@ async def get_tokens_by_address( return build_tool_response(data=token_holdings, pagination=pagination) +def extract_nft_cursor_params(item: dict) -> dict: + """Extract cursor parameters from an NFT collection item for pagination continuation. + + This function determines which fields from the last item should be used + as cursor parameters for the next page request. The returned dictionary + will be encoded as an opaque cursor string. 
+ """ + token_info = item.get("token", {}) + return { + "token_contract_address_hash": token_info.get("address_hash"), + "token_type": token_info.get("type"), + "items_count": 50, + } + + async def nft_tokens_by_address( chain_id: Annotated[str, Field(description="The ID of the blockchain")], address: Annotated[str, Field(description="NFT owner address")], @@ -202,74 +217,74 @@ async def nft_tokens_by_address( await report_and_log_progress(ctx, progress=2.0, total=2.0, message="Successfully fetched NFT data.") - page_size = config.nft_page_size + # Process all items first to prepare for pagination original_items = response_data.get("items", []) + processed_items = [] - items_to_return = original_items - next_page_params = None - - if len(original_items) > page_size: - items_to_return = original_items[:page_size] - last_item_for_cursor = original_items[page_size - 1] - token_info = last_item_for_cursor.get("token", {}) - next_page_params = { - "token_contract_address_hash": token_info.get("address_hash"), - "token_type": token_info.get("type"), - "items_count": 50, - } - - nft_holdings: list[NftCollectionHolding] = [] - - for item in items_to_return: + for item in original_items: token = item.get("token", {}) - token_instances: list[NftTokenInstance] = [] + token_instances = [] for instance in item.get("token_instances", []): # To preserve the LLM context, only specific fields for NFT instances are # added to the response metadata = instance.get("metadata", {}) or {} token_instances.append( - NftTokenInstance( - id=instance.get("id", ""), - name=metadata.get("name"), - description=metadata.get("description"), - image_url=metadata.get("image_url"), - external_app_url=metadata.get("external_url"), - metadata_attributes=metadata.get("attributes"), - ) + { + "id": instance.get("id", ""), + "name": metadata.get("name"), + "description": metadata.get("description"), + "image_url": metadata.get("image_url"), + "external_app_url": metadata.get("external_url"), + 
"metadata_attributes": metadata.get("attributes"), + } ) # To preserve the LLM context, only specific fields for NFT collections are # added to the response - collection_info = NftCollectionInfo( - type=token.get("type", ""), - address=token.get("address_hash", ""), - name=token.get("name"), - symbol=token.get("symbol"), - holders_count=token.get("holders_count") or 0, - total_supply=token.get("total_supply") or 0, - ) + collection_info = { + "type": token.get("type", ""), + "address": token.get("address_hash", ""), + "name": token.get("name"), + "symbol": token.get("symbol"), + "holders_count": token.get("holders_count") or 0, + "total_supply": token.get("total_supply") or 0, + } + processed_item = { + "token": token, # Keep original token info for cursor extraction + "amount": item.get("amount", ""), + "token_instances": token_instances, + "collection_info": collection_info, + } + processed_items.append(processed_item) + + # Use create_items_pagination helper to handle slicing and pagination + sliced_items, pagination = create_items_pagination( + items=processed_items, + page_size=config.nft_page_size, + tool_name="nft_tokens_by_address", + next_call_base_params={ + "chain_id": chain_id, + "address": address, + }, + cursor_extractor=extract_nft_cursor_params, + force_pagination=False, + ) + + # Convert sliced items to NftCollectionHolding objects + nft_holdings: list[NftCollectionHolding] = [] + for item in sliced_items: + collection_info = NftCollectionInfo(**item["collection_info"]) + token_instances = [NftTokenInstance(**instance) for instance in item["token_instances"]] nft_holdings.append( NftCollectionHolding( collection=collection_info, - amount=item.get("amount", ""), + amount=item["amount"], token_instances=token_instances, ) ) - pagination = None - if next_page_params: - filtered_next_page_params = {k: v for k, v in next_page_params.items() if v is not None} - if filtered_next_page_params: - next_cursor = encode_cursor(filtered_next_page_params) - 
pagination = PaginationInfo( - next_call=NextCallInfo( - tool_name="nft_tokens_by_address", - params={"chain_id": chain_id, "address": address, "cursor": next_cursor}, - ) - ) - return build_tool_response(data=nft_holdings, pagination=pagination) diff --git a/tests/tools/test_address_tools_2.py b/tests/tools/test_address_tools_2.py index 8ec6e57..2ad3e3a 100644 --- a/tests/tools/test_address_tools_2.py +++ b/tests/tools/test_address_tools_2.py @@ -6,6 +6,7 @@ from blockscout_mcp_server.config import config from blockscout_mcp_server.models import ( + NextCallInfo, NftCollectionHolding, PaginationInfo, ToolResponse, @@ -330,6 +331,13 @@ async def test_nft_tokens_by_address_with_pagination(mock_ctx): mock_api_response = {"items": items} fake_cursor = "ENCODED_CURSOR" + mock_pagination = PaginationInfo( + next_call=NextCallInfo( + tool_name="nft_tokens_by_address", + params={"chain_id": chain_id, "address": address, "cursor": fake_cursor}, + ) + ) + with ( patch( "blockscout_mcp_server.tools.address_tools.get_blockscout_base_url", new_callable=AsyncMock @@ -337,21 +345,44 @@ async def test_nft_tokens_by_address_with_pagination(mock_ctx): patch( "blockscout_mcp_server.tools.address_tools.make_blockscout_request", new_callable=AsyncMock ) as mock_request, - patch("blockscout_mcp_server.tools.address_tools.encode_cursor") as mock_encode_cursor, + patch("blockscout_mcp_server.tools.address_tools.create_items_pagination") as mock_create_pagination, ): mock_get_url.return_value = mock_base_url mock_request.return_value = mock_api_response - mock_encode_cursor.return_value = fake_cursor + + # Create processed items format that the function expects + processed_items = [] + for item in items[:10]: + token = item.get("token", {}) + processed_item = { + "token": token, + "amount": item.get("amount", ""), + "token_instances": [], + "collection_info": { + "type": token.get("type", ""), + "address": token.get("address_hash", ""), + "name": token.get("name"), + "symbol": 
token.get("symbol"), + "holders_count": token.get("holders_count") or 0, + "total_supply": token.get("total_supply") or 0, + }, + } + processed_items.append(processed_item) + + # Return processed items and pagination info + mock_create_pagination.return_value = (processed_items, mock_pagination) result = await nft_tokens_by_address(chain_id=chain_id, address=address, ctx=mock_ctx) - mock_encode_cursor.assert_called_once_with( - { - "token_contract_address_hash": items[9]["token"]["address_hash"], - "token_type": items[9]["token"]["type"], - "items_count": 50, - } - ) + # Verify create_items_pagination was called with correct parameters + mock_create_pagination.assert_called_once() + call_args = mock_create_pagination.call_args + assert call_args[1]["page_size"] == 10 # default nft_page_size + assert call_args[1]["tool_name"] == "nft_tokens_by_address" + assert call_args[1]["next_call_base_params"] == {"chain_id": chain_id, "address": address} + assert callable(call_args[1]["cursor_extractor"]) + assert call_args[1]["force_pagination"] is False + assert isinstance(result, ToolResponse) assert isinstance(result.pagination, PaginationInfo) assert result.pagination.next_call.tool_name == "nft_tokens_by_address" @@ -421,6 +452,13 @@ async def test_nft_tokens_by_address_response_sliced(mock_ctx): ] mock_api_response = {"items": items} + mock_pagination = PaginationInfo( + next_call=NextCallInfo( + tool_name="nft_tokens_by_address", + params={"chain_id": chain_id, "address": address, "cursor": "CURSOR"}, + ) + ) + with ( patch( "blockscout_mcp_server.tools.address_tools.get_blockscout_base_url", new_callable=AsyncMock @@ -428,23 +466,42 @@ async def test_nft_tokens_by_address_response_sliced(mock_ctx): patch( "blockscout_mcp_server.tools.address_tools.make_blockscout_request", new_callable=AsyncMock ) as mock_request, - patch("blockscout_mcp_server.tools.address_tools.encode_cursor") as mock_encode_cursor, + 
patch("blockscout_mcp_server.tools.address_tools.create_items_pagination") as mock_create_pagination, ): mock_get_url.return_value = mock_base_url mock_request.return_value = mock_api_response - mock_encode_cursor.return_value = "CURSOR" + + # Create processed items format that the function expects + processed_items = [] + for item in items[:10]: + token = item.get("token", {}) + processed_item = { + "token": token, + "amount": item.get("amount", ""), + "token_instances": [], + "collection_info": { + "type": token.get("type", ""), + "address": token.get("address_hash", ""), + "name": token.get("name"), + "symbol": token.get("symbol"), + "holders_count": token.get("holders_count") or 0, + "total_supply": token.get("total_supply") or 0, + }, + } + processed_items.append(processed_item) + + # Return processed items and pagination info + mock_create_pagination.return_value = (processed_items, mock_pagination) result = await nft_tokens_by_address(chain_id=chain_id, address=address, ctx=mock_ctx) assert len(result.data) == 10 assert result.pagination is not None - mock_encode_cursor.assert_called_once_with( - { - "token_contract_address_hash": items[9]["token"]["address_hash"], - "token_type": items[9]["token"]["type"], - "items_count": 50, - } - ) + # Verify create_items_pagination was called with correct parameters + mock_create_pagination.assert_called_once() + call_args = mock_create_pagination.call_args + assert call_args[1]["page_size"] == 10 # default nft_page_size + assert call_args[1]["tool_name"] == "nft_tokens_by_address" @pytest.mark.asyncio @@ -463,6 +520,13 @@ async def test_nft_tokens_by_address_custom_page_size(mock_ctx): ] mock_api_response = {"items": items} + mock_pagination = PaginationInfo( + next_call=NextCallInfo( + tool_name="nft_tokens_by_address", + params={"chain_id": chain_id, "address": address, "cursor": "CURSOR"}, + ) + ) + with ( patch( "blockscout_mcp_server.tools.address_tools.get_blockscout_base_url", new_callable=AsyncMock @@ -470,21 
+534,39 @@ async def test_nft_tokens_by_address_custom_page_size(mock_ctx): patch( "blockscout_mcp_server.tools.address_tools.make_blockscout_request", new_callable=AsyncMock ) as mock_request, - patch("blockscout_mcp_server.tools.address_tools.encode_cursor") as mock_encode_cursor, + patch("blockscout_mcp_server.tools.address_tools.create_items_pagination") as mock_create_pagination, patch.object(config, "nft_page_size", 5), ): mock_get_url.return_value = mock_base_url mock_request.return_value = mock_api_response - mock_encode_cursor.return_value = "CURSOR" + + # Create processed items format that the function expects + processed_items = [] + for item in items[:5]: + token = item.get("token", {}) + processed_item = { + "token": token, + "amount": item.get("amount", ""), + "token_instances": [], + "collection_info": { + "type": token.get("type", ""), + "address": token.get("address_hash", ""), + "name": token.get("name"), + "symbol": token.get("symbol"), + "holders_count": token.get("holders_count") or 0, + "total_supply": token.get("total_supply") or 0, + }, + } + processed_items.append(processed_item) + + # Return processed items and pagination info + mock_create_pagination.return_value = (processed_items, mock_pagination) result = await nft_tokens_by_address(chain_id=chain_id, address=address, ctx=mock_ctx) assert len(result.data) == 5 assert result.pagination is not None - mock_encode_cursor.assert_called_once_with( - { - "token_contract_address_hash": items[4]["token"]["address_hash"], - "token_type": items[4]["token"]["type"], - "items_count": 50, - } - ) + # Verify create_items_pagination was called with custom page size + mock_create_pagination.assert_called_once() + call_args = mock_create_pagination.call_args + assert call_args[1]["page_size"] == 5 # custom nft_page_size