
Update AWS wrapper to not hide proper exceptions #770


Merged: 4 commits, Aug 6, 2025

20 changes: 15 additions & 5 deletions src/instana/instrumentation/aws/s3.py
@@ -57,16 +57,26 @@ def collect_s3_injected_attributes(
         with tracer.start_as_current_span("s3", span_context=parent_context) as span:
             try:
                 span.set_attribute("s3.op", operations[wrapped.__name__])
-                if wrapped.__name__ in ["download_file", "download_fileobj"]:
-                    span.set_attribute("s3.bucket", args[0])
-                else:
-                    span.set_attribute("s3.bucket", args[1])
+                if "Bucket" in kwargs:
+                    span.set_attribute("s3.bucket", kwargs["Bucket"])
+                elif len(args) > 1:
+                    if wrapped.__name__ in ["download_file", "download_fileobj"]:
+                        span.set_attribute("s3.bucket", args[0])
+                    else:
+                        span.set_attribute("s3.bucket", args[1])
+            except Exception:
+                logger.debug(
+                    f"collect_s3_injected_attributes collect error: {wrapped.__name__}", exc_info=True
+                )
+
+            try:
                 return wrapped(*args, **kwargs)
             except Exception as exc:
                 span.record_exception(exc)
                 logger.debug(
-                    "collect_s3_injected_attributes: collect error", exc_info=True
+                    f"collect_s3_injected_attributes error: {wrapped.__name__}", exc_info=True
                 )
+                raise

     for method in [
         "upload_file",
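For context, the shape of the change above: attribute collection and the wrapped S3 call now sit in separate try blocks, so a failure while tagging the span is only debug-logged, while an exception raised by the real call is recorded on the span and re-raised instead of being swallowed. Below is a minimal sketch of that pattern, not the instrumented code itself; the wrapt-style argument layout, the `call_with_span` name, and the OpenTelemetry-like span methods are assumptions for illustration:

```python
import logging

logger = logging.getLogger(__name__)


def call_with_span(span, wrapped, args, kwargs):
    # Best-effort tagging: an unexpected argument shape must not break the S3 call.
    try:
        if "Bucket" in kwargs:
            span.set_attribute("s3.bucket", kwargs["Bucket"])
        elif len(args) > 1:
            span.set_attribute("s3.bucket", args[1])
    except Exception:
        logger.debug("collecting s3 attributes failed", exc_info=True)

    # The real call: record the failure on the span, then propagate it so the
    # caller still sees the original boto3/botocore exception.
    try:
        return wrapped(*args, **kwargs)
    except Exception as exc:
        span.record_exception(exc)
        raise
```
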
127 changes: 81 additions & 46 deletions tests/clients/boto3/test_boto3_s3.py
@@ -2,10 +2,12 @@
 # (c) Copyright Instana Inc. 2020

 import os
+from io import BytesIO
+
 import pytest
+import boto3
 from typing import Generator
 from moto import mock_aws
-import boto3

 from instana.singletons import tracer, agent
 from tests.helpers import get_first_span_by_filter
@@ -18,13 +20,18 @@


 class TestS3:
+    @classmethod
+    def setup_class(cls) -> None:
+        cls.bucket_name = "aws_bucket_name"
+        cls.object_name = "aws_key_name"
+        cls.recorder = tracer.span_processor
+        cls.mock = mock_aws()
+
     @pytest.fixture(autouse=True)
     def _resource(self) -> Generator[None, None, None]:
         """Setup and Teardown"""
         # Clear all spans before a test run
-        self.recorder = tracer.span_processor
         self.recorder.clear_spans()
-        self.mock = mock_aws()
         self.mock.start()
         self.s3 = boto3.client("s3", region_name="us-east-1")
         yield
@@ -33,19 +40,19 @@ def _resource(self) -> Generator[None, None, None]:
         agent.options.allow_exit_as_root = False

     def test_vanilla_create_bucket(self) -> None:
-        self.s3.create_bucket(Bucket="aws_bucket_name")
+        self.s3.create_bucket(Bucket=self.bucket_name)

         result = self.s3.list_buckets()
         assert len(result["Buckets"]) == 1
-        assert result["Buckets"][0]["Name"] == "aws_bucket_name"
+        assert result["Buckets"][0]["Name"] == self.bucket_name

     def test_s3_create_bucket(self) -> None:
         with tracer.start_as_current_span("test"):
-            self.s3.create_bucket(Bucket="aws_bucket_name")
+            self.s3.create_bucket(Bucket=self.bucket_name)

         result = self.s3.list_buckets()
         assert len(result["Buckets"]) == 1
-        assert result["Buckets"][0]["Name"] == "aws_bucket_name"
+        assert result["Buckets"][0]["Name"] == self.bucket_name

         spans = self.recorder.queued_spans()
         assert len(spans) == 2
@@ -65,11 +72,11 @@ def test_s3_create_bucket(self) -> None:
         assert not s3_span.ec

         assert s3_span.data["s3"]["op"] == "CreateBucket"
-        assert s3_span.data["s3"]["bucket"] == "aws_bucket_name"
+        assert s3_span.data["s3"]["bucket"] == self.bucket_name

     def test_s3_create_bucket_as_root_exit_span(self) -> None:
         agent.options.allow_exit_as_root = True
-        self.s3.create_bucket(Bucket="aws_bucket_name")
+        self.s3.create_bucket(Bucket=self.bucket_name)

         agent.options.allow_exit_as_root = False
         self.s3.list_buckets()
@@ -83,7 +90,7 @@ def test_s3_create_bucket_as_root_exit_span(self) -> None:
         assert not s3_span.ec

         assert s3_span.data["s3"]["op"] == "CreateBucket"
-        assert s3_span.data["s3"]["bucket"] == "aws_bucket_name"
+        assert s3_span.data["s3"]["bucket"] == self.bucket_name

     def test_s3_list_buckets(self) -> None:
         with tracer.start_as_current_span("test"):
@@ -113,21 +120,15 @@ def test_s3_list_buckets(self) -> None:
         assert not s3_span.data["s3"]["bucket"]

     def test_s3_vanilla_upload_file(self) -> None:
-        object_name = "aws_key_name"
-        bucket_name = "aws_bucket_name"
-
-        self.s3.create_bucket(Bucket=bucket_name)
-        result = self.s3.upload_file(upload_filename, bucket_name, object_name)
+        self.s3.create_bucket(Bucket=self.bucket_name)
+        result = self.s3.upload_file(upload_filename, self.bucket_name, self.object_name)
         assert not result

     def test_s3_upload_file(self) -> None:
-        object_name = "aws_key_name"
-        bucket_name = "aws_bucket_name"
-
-        self.s3.create_bucket(Bucket=bucket_name)
+        self.s3.create_bucket(Bucket=self.bucket_name)

         with tracer.start_as_current_span("test"):
-            self.s3.upload_file(upload_filename, bucket_name, object_name)
+            self.s3.upload_file(upload_filename, self.bucket_name, self.object_name)

         spans = self.recorder.queued_spans()
         assert len(spans) == 2
@@ -147,17 +148,14 @@ def test_s3_upload_file(self) -> None:
         assert not s3_span.ec

         assert s3_span.data["s3"]["op"] == "UploadFile"
-        assert s3_span.data["s3"]["bucket"] == "aws_bucket_name"
+        assert s3_span.data["s3"]["bucket"] == self.bucket_name

     def test_s3_upload_file_obj(self) -> None:
-        object_name = "aws_key_name"
-        bucket_name = "aws_bucket_name"
-
-        self.s3.create_bucket(Bucket=bucket_name)
+        self.s3.create_bucket(Bucket=self.bucket_name)

         with tracer.start_as_current_span("test"):
             with open(upload_filename, "rb") as fd:
-                self.s3.upload_fileobj(fd, bucket_name, object_name)
+                self.s3.upload_fileobj(fd, self.bucket_name, self.object_name)

         spans = self.recorder.queued_spans()
         assert len(spans) == 2
@@ -177,17 +175,14 @@ def test_s3_upload_file_obj(self) -> None:
         assert not s3_span.ec

         assert s3_span.data["s3"]["op"] == "UploadFileObj"
-        assert s3_span.data["s3"]["bucket"] == "aws_bucket_name"
+        assert s3_span.data["s3"]["bucket"] == self.bucket_name

     def test_s3_download_file(self) -> None:
-        object_name = "aws_key_name"
-        bucket_name = "aws_bucket_name"
-
-        self.s3.create_bucket(Bucket=bucket_name)
-        self.s3.upload_file(upload_filename, bucket_name, object_name)
+        self.s3.create_bucket(Bucket=self.bucket_name)
+        self.s3.upload_file(upload_filename, self.bucket_name, self.object_name)

         with tracer.start_as_current_span("test"):
-            self.s3.download_file(bucket_name, object_name, download_target_filename)
+            self.s3.download_file(self.bucket_name, self.object_name, download_target_filename)

         spans = self.recorder.queued_spans()
         assert len(spans) == 2
@@ -207,18 +202,15 @@ def test_s3_download_file(self) -> None:
         assert not s3_span.ec

         assert s3_span.data["s3"]["op"] == "DownloadFile"
-        assert s3_span.data["s3"]["bucket"] == "aws_bucket_name"
+        assert s3_span.data["s3"]["bucket"] == self.bucket_name

     def test_s3_download_file_obj(self) -> None:
-        object_name = "aws_key_name"
-        bucket_name = "aws_bucket_name"
-
-        self.s3.create_bucket(Bucket=bucket_name)
-        self.s3.upload_file(upload_filename, bucket_name, object_name)
+        self.s3.create_bucket(Bucket=self.bucket_name)
+        self.s3.upload_file(upload_filename, self.bucket_name, self.object_name)

         with tracer.start_as_current_span("test"):
             with open(download_target_filename, "wb") as fd:
-                self.s3.download_fileobj(bucket_name, object_name, fd)
+                self.s3.download_fileobj(self.bucket_name, self.object_name, fd)

         spans = self.recorder.queued_spans()
         assert len(spans) == 2
@@ -238,15 +230,13 @@ def test_s3_download_file_obj(self) -> None:
         assert not s3_span.ec

         assert s3_span.data["s3"]["op"] == "DownloadFileObj"
-        assert s3_span.data["s3"]["bucket"] == "aws_bucket_name"
+        assert s3_span.data["s3"]["bucket"] == self.bucket_name

     def test_s3_list_obj(self) -> None:
-        bucket_name = "aws_bucket_name"
-
-        self.s3.create_bucket(Bucket=bucket_name)
+        self.s3.create_bucket(Bucket=self.bucket_name)

         with tracer.start_as_current_span("test"):
-            self.s3.list_objects(Bucket=bucket_name)
+            self.s3.list_objects(Bucket=self.bucket_name)

         spans = self.recorder.queued_spans()
         assert len(spans) == 2
@@ -266,4 +256,49 @@ def test_s3_list_obj(self) -> None:
         assert not s3_span.ec

         assert s3_span.data["s3"]["op"] == "ListObjects"
-        assert s3_span.data["s3"]["bucket"] == "aws_bucket_name"
+        assert s3_span.data["s3"]["bucket"] == self.bucket_name
+
+    def test_s3_resource_bucket_upload_fileobj(self) -> None:
+        """
+        Verify boto3.resource().Bucket().upload_fileobj() works correctly with BytesIO objects
+        """
+        test_data = b"somedata"
+
+        # Create a bucket using the client first
+        self.s3.create_bucket(Bucket=self.bucket_name)
+
+        s3_resource = boto3.resource(
+            "s3",
+            region_name="us-east-1"
+        )
+        bucket = s3_resource.Bucket(name=self.bucket_name)
+
+        with tracer.start_as_current_span("test"):
+            bucket.upload_fileobj(BytesIO(test_data), self.object_name)
+
+        # Verify the upload was successful by retrieving the object
+        response = bucket.Object(self.object_name).get()
+        file_content = response["Body"].read()
+
+        # Assert the content matches what we uploaded
+        assert file_content == test_data
+
+        # Verify the spans were created correctly
+        spans = self.recorder.queued_spans()
+        assert len(spans) >= 2
+
+        filter = lambda span: span.n == "sdk"  # noqa: E731
+        test_span = get_first_span_by_filter(spans, filter)
+        assert test_span
+
+        filter = lambda span: span.n == "s3" and span.data["s3"]["op"] == "UploadFileObj"  # noqa: E731
+        s3_span = get_first_span_by_filter(spans, filter)
+        assert s3_span
+
+        assert s3_span.t == test_span.t
+        assert s3_span.p == test_span.s
+
+        assert not test_span.ec
+        assert not s3_span.ec
+
+        assert s3_span.data["s3"]["bucket"] == self.bucket_name
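
The new test above covers the call shape that motivated the `kwargs` check in the wrapper: when uploading through `boto3.resource("s3").Bucket(...).upload_fileobj(...)`, the bucket name evidently reaches the wrapped call as the `Bucket` keyword rather than positionally, so the old `args[1]` lookup could raise an IndexError that the wrapper then only debug-logged. Below is a small sketch of the lookup order the instrumentation now follows; the `extract_bucket` helper name and the example call shapes are illustrative, not part of the PR:

```python
from typing import Optional


def extract_bucket(op_name: str, args: tuple, kwargs: dict) -> Optional[str]:
    """Prefer the Bucket keyword, then fall back to the positional layout."""
    if "Bucket" in kwargs:
        return kwargs["Bucket"]
    if len(args) > 1:
        # download_file/download_fileobj receive the bucket first,
        # upload_file/upload_fileobj receive it second.
        if op_name in ("download_file", "download_fileobj"):
            return args[0]
        return args[1]
    return None


# Client-style call: the bucket arrives positionally.
assert extract_bucket("upload_file", ("file.txt", "my-bucket", "my-key"), {}) == "my-bucket"
# Resource-style call: the bucket arrives as a keyword argument.
assert extract_bucket("upload_fileobj", (), {"Bucket": "my-bucket", "Key": "my-key"}) == "my-bucket"
```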