Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,9 @@ pip install tusk-drift-python-sdk[flask]

# FastAPI support
pip install tusk-drift-python-sdk[fastapi]

# Django support
pip install tusk-drift-python-sdk[django]
```

## Requirements
Expand Down
6 changes: 3 additions & 3 deletions drift/instrumentation/django/middleware.py
Original file line number Diff line number Diff line change
Expand Up @@ -179,7 +179,7 @@ def _record_request(self, request: HttpRequest, sdk, is_pre_app_start: bool) ->

start_time_ns = time.time_ns()

method = request.method
method = request.method or ""
path = request.path
span_name = f"{method} {path}"

Expand Down Expand Up @@ -397,7 +397,7 @@ def dict_to_schema_merges(merges_dict):
status = SpanStatus(code=StatusCode.OK, message="")

# Django-specific: use route template for span name to avoid cardinality explosion
method = request.method
method = request.method or ""
route_template = getattr(request, "_drift_route_template", None)
if route_template:
# Use route template (e.g., "users/<int:id>/")
Expand Down Expand Up @@ -502,7 +502,7 @@ def dict_to_schema_merges(merges_dict):
duration_seconds = duration_ns // 1_000_000_000
duration_nanos = duration_ns % 1_000_000_000

method = request.method
method = request.method or ""
route_template = getattr(request, "_drift_route_template", None)
span_name = f"{method} {route_template}" if route_template else f"{method} {request.path}"

Expand Down
27 changes: 27 additions & 0 deletions drift/instrumentation/httpx/e2e-tests/.tusk/config.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
version: 1

# Tusk Drift e2e configuration for the HTTPX instrumentation test app.
service:
  id: "httpx-e2e-test-id"
  name: "httpx-e2e-test"
  port: 8000
  start:
    command: "python src/app.py"
  readiness_check:
    # App is considered up once /health responds successfully.
    command: "curl -f http://localhost:8000/health"
    timeout: 45s
    interval: 5s

tusk_api:
  url: "http://localhost:8000"

test_execution:
  concurrent_limit: 10
  batch_size: 10
  timeout: 30s

recording:
  # Record every request during the e2e run.
  sampling_rate: 1.0
  export_spans: false

replay:
  enable_telemetry: false
21 changes: 21 additions & 0 deletions drift/instrumentation/httpx/e2e-tests/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
FROM python-e2e-base:latest

# docker-compose.yml passes TUSK_CLI_VERSION as a build arg; declare it here
# so it is actually received (an undeclared ARG is silently discarded).
ARG TUSK_CLI_VERSION=latest

# Copy SDK source for editable install
COPY . /sdk

# Copy test files
COPY drift/instrumentation/httpx/e2e-tests /app

WORKDIR /app

# Install dependencies (requirements.txt uses -e /sdk for SDK)
RUN pip install -q -r requirements.txt

# Make entrypoint executable
RUN chmod +x entrypoint.py

# Create .tusk directories
RUN mkdir -p /app/.tusk/traces /app/.tusk/logs

# Run entrypoint
ENTRYPOINT ["python", "entrypoint.py"]
17 changes: 17 additions & 0 deletions drift/instrumentation/httpx/e2e-tests/docker-compose.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
services:
  app:
    build:
      # Build from the repo root so both the SDK and the test files are in context.
      context: ../../../..
      dockerfile: drift/instrumentation/httpx/e2e-tests/Dockerfile
      args:
        - TUSK_CLI_VERSION=${TUSK_CLI_VERSION:-latest}
    environment:
      - PORT=8000
      - TUSK_ANALYTICS_DISABLED=1
      - PYTHONUNBUFFERED=1
    working_dir: /app
    volumes:
      # Mount app source for development
      - ./src:/app/src
      # Mount .tusk folder to persist traces
      - ./.tusk:/app/.tusk
34 changes: 34 additions & 0 deletions drift/instrumentation/httpx/e2e-tests/entrypoint.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
#!/usr/bin/env python3
"""
E2E Test Entrypoint for HTTPX Instrumentation

This script orchestrates the full e2e test lifecycle:
1. Setup: Install dependencies
2. Record: Start app in RECORD mode, execute requests
3. Test: Run Tusk CLI tests
4. Teardown: Cleanup and return exit code
"""

import os
import sys
from pathlib import Path

# Add SDK to path for imports
sys.path.insert(0, "/sdk")

from drift.instrumentation.e2e_common.base_runner import E2ETestRunnerBase


class HttpxE2ETestRunner(E2ETestRunnerBase):
    """E2E test runner for HTTPX instrumentation.

    Thin wrapper around :class:`E2ETestRunnerBase` that resolves the app
    port from the ``PORT`` environment variable (set by docker-compose),
    falling back to 8000 for local runs.
    """

    def __init__(self) -> None:
        # ``os`` is imported at module level, matching the file's
        # top-of-file import convention (previously imported in-function).
        super().__init__(app_port=int(os.getenv("PORT", "8000")))


if __name__ == "__main__":
    # Run the full e2e lifecycle and propagate its exit code to the shell.
    sys.exit(HttpxE2ETestRunner().run())
4 changes: 4 additions & 0 deletions drift/instrumentation/httpx/e2e-tests/requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
-e /sdk
Flask>=3.1.2
httpx>=0.28.1
requests>=2.32.5
64 changes: 64 additions & 0 deletions drift/instrumentation/httpx/e2e-tests/run.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
#!/bin/bash
#
# E2E test driver: builds this instrumentation's docker-compose project,
# runs the test container, and exits with the container's exit code.
#
# Usage: ./run.sh [port]    (host port for the app; default 8000)

set -e

APP_PORT=${1:-8000}
export APP_PORT

# Unique compose project name derived from the instrumentation directory
# (the parent of e2e-tests) plus the port, so parallel runs don't collide.
TEST_NAME="$(basename "$(dirname "$(pwd)")")"
PROJECT_NAME="python-${TEST_NAME}-${APP_PORT}"

# ANSI colors for terminal output
GREEN='\033[0;32m'
RED='\033[0;31m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'

echo -e "${BLUE}========================================${NC}"
echo -e "${BLUE}Running Python E2E Test: ${TEST_NAME}${NC}"
echo -e "${BLUE}Port: ${APP_PORT}${NC}"
echo -e "${BLUE}========================================${NC}"
echo ""

# Tear down containers and volumes on every exit path (success or failure).
cleanup() {
    echo ""
    echo -e "${YELLOW}Cleaning up containers...${NC}"
    docker compose -p "$PROJECT_NAME" down -v 2>/dev/null || true
}
trap cleanup EXIT

echo -e "${BLUE}Building containers...${NC}"
docker compose -p "$PROJECT_NAME" build --no-cache

echo -e "${BLUE}Starting test...${NC}"
echo ""

# Run the test container (always port 8000 inside the container) and capture
# its exit code; the `if` guard keeps a failure from tripping `set -e`.
if docker compose -p "$PROJECT_NAME" run --rm app; then
    EXIT_CODE=0
else
    EXIT_CODE=$?
fi

echo ""
if [ $EXIT_CODE -eq 0 ]; then
    echo -e "${GREEN}========================================${NC}"
    echo -e "${GREEN}Test passed!${NC}"
    echo -e "${GREEN}========================================${NC}"
else
    echo -e "${RED}========================================${NC}"
    echo -e "${RED}Test failed with exit code ${EXIT_CODE}${NC}"
    echo -e "${RED}========================================${NC}"
fi

exit $EXIT_CODE
Loading