3 changes: 3 additions & 0 deletions .env.example
@@ -13,5 +13,8 @@ BLOCKSCOUT_PROGRESS_INTERVAL_SECONDS="15.0"
# The number of items to return per page for the nft_tokens_by_address tool.
BLOCKSCOUT_NFT_PAGE_SIZE=10

# The number of log items to return per page for get_address_logs and get_transaction_logs.
BLOCKSCOUT_LOGS_PAGE_SIZE=10

BLOCKSCOUT_METADATA_URL="https://metadata.services.blockscout.com"
BLOCKSCOUT_METADATA_TIMEOUT="30.0"
1 change: 1 addition & 0 deletions Dockerfile
@@ -25,5 +25,6 @@ ENV BLOCKSCOUT_CHAINSCOUT_TIMEOUT="15.0"
ENV BLOCKSCOUT_CHAIN_CACHE_TTL_SECONDS="1800"
ENV BLOCKSCOUT_PROGRESS_INTERVAL_SECONDS="15.0"
ENV BLOCKSCOUT_NFT_PAGE_SIZE="10"
ENV BLOCKSCOUT_LOGS_PAGE_SIZE="10"

CMD ["python", "-m", "blockscout_mcp_server"]
1 change: 1 addition & 0 deletions blockscout_mcp_server/config.py
@@ -22,6 +22,7 @@ class ServerConfig(BaseSettings):
progress_interval_seconds: float = 15.0 # Default interval for periodic progress updates

nft_page_size: int = 10
logs_page_size: int = 10


config = ServerConfig()
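
For reference, a minimal sketch of how the new setting is expected to be picked up, assuming ServerConfig reads environment variables with the BLOCKSCOUT_ prefix (as the .env.example and Dockerfile entries above suggest). This snippet is illustrative only and is not part of the PR:

import os

# Assumed mapping: BLOCKSCOUT_LOGS_PAGE_SIZE -> ServerConfig.logs_page_size.
# Set the override before the config module is first imported.
os.environ["BLOCKSCOUT_LOGS_PAGE_SIZE"] = "25"

from blockscout_mcp_server.config import config

# Falls back to the default of 10 when the variable is unset.
assert config.logs_page_size == 25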
35 changes: 14 additions & 21 deletions blockscout_mcp_server/tools/address_tools.py
@@ -20,7 +20,9 @@
_process_and_truncate_log_items,
apply_cursor_to_params,
build_tool_response,
create_items_pagination,
encode_cursor,
extract_log_cursor_params,
get_blockscout_base_url,
make_blockscout_request,
make_metadata_request,
@@ -313,8 +315,7 @@ async def get_address_logs(

original_items, was_truncated = _process_and_truncate_log_items(response_data.get("items", []))

log_items: list[AddressLogItem] = []
# To preserve the LLM context, only specific fields are added to the response
log_items_dicts: list[dict] = []
for item in original_items:
curated_item = {
"block_number": item.get("block_number"),
@@ -327,7 +328,7 @@
if item.get("data_truncated"):
curated_item["data_truncated"] = True

log_items.append(AddressLogItem(**curated_item))
log_items_dicts.append(curated_item)

data_description = [
"Items Structure:",
@@ -361,26 +362,18 @@
f'`curl "{base_url}/api/v2/transactions/{{THE_TRANSACTION_HASH}}/logs"`',
]

# Since there could be more than one page of logs for the same address,
# the pagination information is extracted from API response and added explicitly
# to the tool response
pagination = None
next_page_params = response_data.get("next_page_params")
if next_page_params:
next_cursor = encode_cursor(next_page_params)
pagination = PaginationInfo(
next_call=NextCallInfo(
tool_name="get_address_logs",
params={
"chain_id": chain_id,
"address": address,
"cursor": next_cursor,
},
)
)
sliced_items, pagination = create_items_pagination(
items=log_items_dicts,
page_size=config.logs_page_size,
tool_name="get_address_logs",
next_call_base_params={"chain_id": chain_id, "address": address},
cursor_extractor=extract_log_cursor_params,
)

sliced_log_items = [AddressLogItem(**item) for item in sliced_items]

return build_tool_response(
data=log_items,
data=sliced_log_items,
data_description=data_description,
notes=notes,
pagination=pagination,
41 changes: 40 additions & 1 deletion blockscout_mcp_server/tools/common.py
@@ -13,7 +13,7 @@
INPUT_DATA_TRUNCATION_LIMIT,
LOG_DATA_TRUNCATION_LIMIT,
)
from blockscout_mcp_server.models import PaginationInfo, ToolResponse
from blockscout_mcp_server.models import NextCallInfo, PaginationInfo, ToolResponse


class ChainNotFoundError(ValueError):
@@ -483,3 +483,42 @@ def apply_cursor_to_params(cursor: str | None, params: dict) -> None:
raise ValueError(
"Invalid or expired pagination cursor. Please make a new request without the cursor to start over."
)


def create_items_pagination(
*,
items: list[dict],
page_size: int,
tool_name: str,
next_call_base_params: dict,
cursor_extractor: Callable[[dict], dict],
) -> tuple[list[dict], PaginationInfo | None]:
"""Slice items list and generate pagination info if needed."""
if len(items) <= page_size:
return items, None

sliced_items = items[:page_size]
last_item_for_cursor = items[page_size - 1]
next_page_params = cursor_extractor(last_item_for_cursor)
next_cursor = encode_cursor(next_page_params)

final_params = next_call_base_params.copy()
final_params["cursor"] = next_cursor

pagination = PaginationInfo(
next_call=NextCallInfo(
tool_name=tool_name,
params=final_params,
)
)

return sliced_items, pagination


def extract_log_cursor_params(item: dict) -> dict:
"""Return cursor parameters extracted from a log item."""

return {
"block_number": item.get("block_number"),
"index": item.get("index"),
}
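
To show how the new helper is meant to be driven, here is a standalone usage sketch (illustrative only; the item values and the chain/address parameters are made up, not taken from this PR):

from blockscout_mcp_server.config import config
from blockscout_mcp_server.tools.common import (
    create_items_pagination,
    extract_log_cursor_params,
)

# Curated log dicts, as produced by get_address_logs/get_transaction_logs above.
items = [{"block_number": 100 + i, "index": i, "data": "0x"} for i in range(25)]

sliced, pagination = create_items_pagination(
    items=items,
    page_size=config.logs_page_size,  # 10 by default
    tool_name="get_address_logs",
    next_call_base_params={"chain_id": "1", "address": "0xabc"},
    cursor_extractor=extract_log_cursor_params,
)

# Only the first page is returned; the pagination object carries an opaque
# cursor encoding the {"block_number": ..., "index": ...} of the last item shown.
assert len(sliced) == config.logs_page_size
assert pagination is not None and "cursor" in pagination.next_call.params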
59 changes: 27 additions & 32 deletions blockscout_mcp_server/tools/transaction_tools.py
@@ -19,7 +19,9 @@
_recursively_truncate_and_flag_long_strings,
apply_cursor_to_params,
build_tool_response,
create_items_pagination,
encode_cursor,
extract_log_cursor_params,
get_blockscout_base_url,
make_blockscout_request,
make_request_with_periodic_progress,
@@ -502,22 +504,6 @@ async def get_transaction_logs(
original_items, was_truncated = _process_and_truncate_log_items(response_data.get("items", []))

# To preserve the LLM context, only specific fields are added to the response
log_items: list[TransactionLogItem] = []
for item in original_items:
curated_item = {
"address": item.get("address", {}).get("hash")
if isinstance(item.get("address"), dict)
else item.get("address"),
"block_number": item.get("block_number"),
"topics": item.get("topics"),
"data": item.get("data"),
"decoded": item.get("decoded"),
"index": item.get("index"),
}
if item.get("data_truncated"):
curated_item["data_truncated"] = True

log_items.append(TransactionLogItem(**curated_item))

data_description = [
"Items Structure:",
@@ -552,23 +538,32 @@
"You would then need to parse the JSON response and find the specific log by its index.",
]

# Since there could be more than one page of logs for the same transaction,
# the pagination information is extracted from API response and added explicitly
# to the tool response
pagination = None
next_page_params = response_data.get("next_page_params")
if next_page_params:
next_cursor = encode_cursor(next_page_params)
pagination = PaginationInfo(
next_call=NextCallInfo(
tool_name="get_transaction_logs",
params={
"chain_id": chain_id,
"transaction_hash": transaction_hash,
"cursor": next_cursor,
},
)
log_items_dicts: list[dict] = []
for item in original_items:
address_value = (
item.get("address", {}).get("hash") if isinstance(item.get("address"), dict) else item.get("address")
)
curated_item = {
"address": address_value,
"block_number": item.get("block_number"),
"topics": item.get("topics"),
"data": item.get("data"),
"decoded": item.get("decoded"),
"index": item.get("index"),
}
if item.get("data_truncated"):
curated_item["data_truncated"] = True
log_items_dicts.append(curated_item)

sliced_items, pagination = create_items_pagination(
items=log_items_dicts,
page_size=config.logs_page_size,
tool_name="get_transaction_logs",
next_call_base_params={"chain_id": chain_id, "transaction_hash": transaction_hash},
cursor_extractor=extract_log_cursor_params,
)

log_items = [TransactionLogItem(**item) for item in sliced_items]

await report_and_log_progress(ctx, progress=2.0, total=2.0, message="Successfully fetched transaction logs.")

2 changes: 1 addition & 1 deletion tests/integration/test_address_tools_integration.py
@@ -55,7 +55,7 @@ async def test_get_address_logs_integration(mock_ctx):
assert isinstance(result, ToolResponse)
assert result.pagination is not None
assert isinstance(result.data, list)
assert len(result.data) > 0
assert 0 < len(result.data) <= 10

first_log = result.data[0]
assert isinstance(first_log, AddressLogItem)
2 changes: 1 addition & 1 deletion tests/integration/test_transaction_tools_integration.py
@@ -58,7 +58,7 @@ async def test_get_transaction_logs_integration(mock_ctx):

# 2. Verify the basic structure
assert isinstance(result.data, list)
assert len(result.data) > 0
assert 0 < len(result.data) <= 10

# 3. Validate the schema of the first transformed log item.
first_log = result.data[0]
54 changes: 50 additions & 4 deletions tests/tools/test_address_logs.py
@@ -3,7 +3,13 @@
import httpx
import pytest

from blockscout_mcp_server.models import AddressLogItem, ToolResponse
from blockscout_mcp_server.config import config
from blockscout_mcp_server.models import (
AddressLogItem,
NextCallInfo,
PaginationInfo,
ToolResponse,
)
from blockscout_mcp_server.tools.address_tools import get_address_logs
from blockscout_mcp_server.tools.common import encode_cursor

@@ -98,16 +104,24 @@ async def test_get_address_logs_with_pagination(mock_ctx):
new_callable=AsyncMock,
) as mock_request,
patch("blockscout_mcp_server.tools.address_tools._process_and_truncate_log_items") as mock_process_logs,
patch("blockscout_mcp_server.tools.address_tools.encode_cursor") as mock_encode_cursor,
patch("blockscout_mcp_server.tools.address_tools.create_items_pagination") as mock_create_pagination,
):
mock_get_url.return_value = mock_base_url
mock_request.return_value = mock_api_response
mock_process_logs.return_value = (mock_api_response["items"], False)
mock_encode_cursor.return_value = fake_cursor
mock_create_pagination.return_value = (
mock_api_response["items"],
PaginationInfo(
next_call=NextCallInfo(
tool_name="get_address_logs",
params={"chain_id": chain_id, "address": address, "cursor": fake_cursor},
)
),
)

result = await get_address_logs(chain_id=chain_id, address=address, ctx=mock_ctx)

mock_encode_cursor.assert_called_once_with(mock_api_response["next_page_params"])
mock_create_pagination.assert_called_once()
assert isinstance(result, ToolResponse)
assert isinstance(result.data[0], AddressLogItem)
assert result.pagination is not None
@@ -123,6 +137,38 @@
assert mock_ctx.info.call_count == 3


@pytest.mark.asyncio
async def test_get_address_logs_custom_page_size(mock_ctx):
chain_id = "1"
address = "0x123"
mock_base_url = "https://eth.blockscout.com"

mock_api_response = {"items": [{"block_number": i, "index": i} for i in range(10)]}

with (
patch(
"blockscout_mcp_server.tools.address_tools.get_blockscout_base_url",
new_callable=AsyncMock,
) as mock_get_url,
patch(
"blockscout_mcp_server.tools.address_tools.make_blockscout_request",
new_callable=AsyncMock,
) as mock_request,
patch("blockscout_mcp_server.tools.address_tools._process_and_truncate_log_items") as mock_process_logs,
patch("blockscout_mcp_server.tools.address_tools.create_items_pagination") as mock_create_pagination,
patch.object(config, "logs_page_size", 5),
):
mock_get_url.return_value = mock_base_url
mock_request.return_value = mock_api_response
mock_process_logs.return_value = (mock_api_response["items"], False)
mock_create_pagination.return_value = (mock_api_response["items"][:5], None)

await get_address_logs(chain_id=chain_id, address=address, ctx=mock_ctx)

mock_create_pagination.assert_called_once()
assert mock_create_pagination.call_args.kwargs["page_size"] == 5


@pytest.mark.asyncio
async def test_get_address_logs_with_optional_params(mock_ctx):
chain_id = "1"
53 changes: 53 additions & 0 deletions tests/tools/test_common.py
@@ -14,6 +14,7 @@
_recursively_truncate_and_flag_long_strings,
apply_cursor_to_params,
build_tool_response,
create_items_pagination,
decode_cursor,
encode_cursor,
)
@@ -329,3 +330,55 @@ def test_apply_cursor_to_params_invalid_cursor_raises_value_error():
mock_decode.side_effect = InvalidCursorError
with pytest.raises(ValueError, match="Invalid or expired pagination cursor"):
apply_cursor_to_params("invalid", params)


def test_create_items_pagination_with_more_items():
"""Verify the helper slices the list and creates a pagination object."""
items = [{"index": i} for i in range(20)]
page_size = 10

sliced, pagination = create_items_pagination(
items=items,
page_size=page_size,
tool_name="test_tool",
next_call_base_params={"chain_id": "1"},
cursor_extractor=lambda item: {"index": item["index"]},
)

assert len(sliced) == page_size
assert sliced[0]["index"] == 0
assert sliced[-1]["index"] == page_size - 1
assert pagination is not None
assert pagination.next_call.tool_name == "test_tool"
decoded_cursor = decode_cursor(pagination.next_call.params["cursor"])
assert decoded_cursor == {"index": page_size - 1}


def test_create_items_pagination_with_fewer_items():
"""Verify the helper does nothing when items are below the page size."""
items = [{"index": i} for i in range(5)]
page_size = 10

sliced, pagination = create_items_pagination(
items=items,
page_size=page_size,
tool_name="test_tool",
next_call_base_params={"chain_id": "1"},
cursor_extractor=lambda item: {"index": item["index"]},
)

assert sliced == items
assert pagination is None


def test_extract_log_cursor_params():
"""Verify the log cursor extractor works correctly."""
from blockscout_mcp_server.tools.common import extract_log_cursor_params

complete_item = {"block_number": 123, "index": 7, "data": "0x"}
assert extract_log_cursor_params(complete_item) == {"block_number": 123, "index": 7}

missing_fields_item = {"data": "0xdead"}
assert extract_log_cursor_params(missing_fields_item) == {"block_number": None, "index": None}

assert extract_log_cursor_params({}) == {"block_number": None, "index": None}