Skip to content

Commit e90026c

Browse files
feat(metrics): add comprehensive metrics collection system
- GitHub metrics: stars, forks, downloads, traffic, contributors - CI metrics: workflow runs, success rate, duration - Compression benchmarks with algorithm detection - Better error handling and cross-platform support - Timestamped output and aggregate reports
1 parent 2a9e2c4 commit e90026c

File tree

7 files changed

+337
-0
lines changed

7 files changed

+337
-0
lines changed

.gitignore

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -28,3 +28,8 @@ solana-accounts/
2828
# Development scripts (keep install.sh tracked)
2929
scripts/*
3030
!scripts/install.sh
31+
32+
# Metrics scripts are tracked
33+
!scripts/metrics/
34+
!scripts/metrics/*.sh
35+
metrics_out/

scripts/metrics/ci_metrics.sh

Lines changed: 77 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,77 @@
#!/usr/bin/env bash
# filepath: scripts/metrics/ci_metrics.sh
#
# Collect CI metrics for one GitHub Actions workflow and print them as a
# single JSON object on stdout: total runs, success/fail counts, success
# rate, average run duration, and the most recent run's status/time.
#
# Env vars:
#   OWNER         repo owner          (default: owl-sol)
#   REPO          repo name           (default: OWLSOL_CLI)
#   WORKFLOW_FILE workflow file path  (default: .github/workflows/nightly.yml)
#   GITHUB_TOKEN  required; on any failure the script prints '{}' and exits 0
#                 so aggregating callers are never aborted.
set -euo pipefail

OWNER="${OWNER:-owl-sol}"
REPO="${REPO:-OWLSOL_CLI}"
WORKFLOW_FILE="${WORKFLOW_FILE:-.github/workflows/nightly.yml}"

if ! command -v jq >/dev/null 2>&1; then
  echo '{"error":"jq not installed"}' >&2
  echo '{}'
  exit 0
fi

if [ -z "${GITHUB_TOKEN:-}" ]; then
  echo '{"error":"GITHUB_TOKEN not set"}' >&2
  echo '{}'
  exit 0
fi

AUTH="Authorization: token ${GITHUB_TOKEN}"
API="https://api.github.com/repos/${OWNER}/${REPO}"

# Get workflow ID.
# '|| true' matters: under 'set -e' a failing curl would abort the script
# here, before the '{}' fallback below could be emitted.
workflows=$(curl -sSL -H "$AUTH" "${API}/actions/workflows" 2>/dev/null || true)
if [ -z "$workflows" ] || [ "$workflows" = "null" ]; then
  echo '{"error":"Failed to fetch workflows"}' >&2
  echo '{}'
  exit 0
fi

# 'first' guards against several workflows unexpectedly sharing one path
# (the old single-value extraction would have produced a multi-line id).
wf=$(echo "$workflows" | jq -r --arg wf "$WORKFLOW_FILE" \
  '[.workflows[]? | select(.path==$wf) | .id] | first // empty' 2>/dev/null || echo "")

if [ -z "$wf" ] || [ "$wf" = "null" ]; then
  # Build JSON with jq instead of string splicing so a WORKFLOW_FILE value
  # containing quotes cannot produce invalid JSON.
  jq -n --arg wf "$WORKFLOW_FILE" '{error:"Workflow not found",workflow:$wf,total_runs:0}' >&2
  jq -n --arg wf "$WORKFLOW_FILE" '{workflow:$wf,total_runs:0}'
  exit 0
fi

# Get the 100 most recent workflow runs (newest first).
runs=$(curl -sSL -H "$AUTH" "${API}/actions/workflows/${wf}/runs?per_page=100" 2>/dev/null || true)
if [ -z "$runs" ] || [ "$runs" = "null" ]; then
  echo '{"error":"Failed to fetch runs"}' >&2
  echo '{}'
  exit 0
fi

total=$(echo "$runs" | jq '.total_count // 0' 2>/dev/null || echo 0)

if [ "$total" -eq 0 ]; then
  jq -n --arg wf "$WORKFLOW_FILE" \
    '{workflow:$wf,total_runs:0,success:0,fail:0,success_rate:0}'
  exit 0
fi

success_count=$(echo "$runs" | jq '[.workflow_runs[]? | select(.conclusion=="success")] | length' 2>/dev/null || echo 0)
fail_count=$(echo "$runs" | jq '[.workflow_runs[]? | select(.conclusion=="failure" or .conclusion=="cancelled" or .conclusion=="timed_out")] | length' 2>/dev/null || echo 0)

# Success rate as a percentage with two decimals (awk: bash arithmetic is
# integer-only).
success_rate=$(awk -v s="$success_count" -v t="$total" 'BEGIN{if(t>0) printf "%.2f", (s/t)*100; else print 0}')

# Average wall-clock duration in seconds, computed from created_at ->
# updated_at of each run.
avg_duration_sec=$(echo "$runs" | jq '[.workflow_runs[]? | (.updated_at | fromdateiso8601) - (.created_at | fromdateiso8601)] | if length > 0 then (add / length) else 0 end' 2>/dev/null || echo 0)

# Most recent run (the API returns runs newest-first).
last_run_status=$(echo "$runs" | jq -r '.workflow_runs[0]?.conclusion // "unknown"' 2>/dev/null || echo "unknown")
last_run_time=$(echo "$runs" | jq -r '.workflow_runs[0]?.created_at // "unknown"' 2>/dev/null || echo "unknown")

jq -n --arg wf "$WORKFLOW_FILE" \
  --argjson total "$total" \
  --argjson success "$success_count" \
  --argjson fail "$fail_count" \
  --argjson rate "$success_rate" \
  --argjson avg_sec "$avg_duration_sec" \
  --arg last_status "$last_run_status" \
  --arg last_time "$last_run_time" \
  '{
    workflow:$wf,
    total_runs:$total,
    success:$success,
    fail:$fail,
    success_rate:$rate,
    avg_duration_seconds:$avg_sec,
    last_run_status:$last_status,
    last_run_time:$last_time
  }'

scripts/metrics/collect_metrics.sh

Lines changed: 72 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,72 @@
#!/usr/bin/env bash
# filepath: scripts/metrics/collect_metrics.sh
#
# Orchestrate all metric collectors (GitHub, CI, compression benchmarks),
# aggregate their JSON output into a single timestamped report under
# $OUTDIR, and optionally forward the report via telemetry_ping.sh.
#
# Env vars:
#   OUTDIR         output directory (default: metrics_out)
#   TELEMETRY_URL  if set, the aggregate is POSTed by telemetry_ping.sh
set -euo pipefail

OUTDIR="${OUTDIR:-metrics_out}"
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
# Resolve sibling scripts relative to this file so the collector works from
# any working directory, not only the repo root.
SCRIPT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
mkdir -p "$OUTDIR"

echo "🦉 OWLSOL Metrics Collection"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "Output directory: $OUTDIR"
echo "Timestamp: $TIMESTAMP"
echo ""

# GitHub metrics
echo "📊 Collecting GitHub metrics..."
if "$SCRIPT_DIR/github_metrics.sh" > "$OUTDIR/github.json" 2>/dev/null; then
  echo "  ✓ GitHub metrics collected"
else
  echo "  ⚠ GitHub metrics failed (check GITHUB_TOKEN)"
  echo "{}" > "$OUTDIR/github.json"
fi

# CI metrics
echo "🔄 Collecting CI metrics..."
if "$SCRIPT_DIR/ci_metrics.sh" > "$OUTDIR/ci.json" 2>/dev/null; then
  echo "  ✓ CI metrics collected"
else
  echo "  ⚠ CI metrics failed"
  echo "{}" > "$OUTDIR/ci.json"
fi

# Compression benchmarks (if samples exist).
# Clear stale bench files first so a previous run's results cannot leak
# into this run's aggregate.
rm -f "$OUTDIR"/bench_*.json
if [ -d "metrics_samples" ] && [ "$(ls -A metrics_samples 2>/dev/null)" ]; then
  echo "⚡ Running compression benchmarks..."
  bench_count=0
  for sample in metrics_samples/*; do
    if [ -f "$sample" ] && command -v owlsol >/dev/null 2>&1; then
      sample_name=$(basename "$sample")
      if "$SCRIPT_DIR/compression_bench.sh" "$sample" > "$OUTDIR/bench_${sample_name}.json" 2>/dev/null; then
        echo "  ✓ Benchmarked: $sample_name"
        bench_count=$((bench_count + 1))
      fi
    fi
  done
  echo "  Completed $bench_count benchmark(s)"
fi

# Combine all metrics
echo ""
echo "📦 Aggregating results..."
# Merge only this run's collector outputs. A blanket "$OUTDIR"/*.json glob
# would re-merge aggregate_*.json files from earlier runs, and a flat object
# merge would let each bench file clobber the previous one's identical keys,
# so benchmarks are kept as an array under "benchmarks" instead.
if compgen -G "$OUTDIR/bench_*.json" >/dev/null; then
  jq -s '(.[0] * .[1]) + {benchmarks: .[2:]}' \
    "$OUTDIR/github.json" "$OUTDIR/ci.json" "$OUTDIR"/bench_*.json \
    > "$OUTDIR/aggregate_${TIMESTAMP}.json" 2>/dev/null \
    || echo "{}" > "$OUTDIR/aggregate_${TIMESTAMP}.json"
else
  jq -s '.[0] * .[1]' "$OUTDIR/github.json" "$OUTDIR/ci.json" \
    > "$OUTDIR/aggregate_${TIMESTAMP}.json" 2>/dev/null \
    || echo "{}" > "$OUTDIR/aggregate_${TIMESTAMP}.json"
fi

# Point "latest" at this run; fall back to a plain copy on filesystems or
# platforms where symlinks are unavailable.
ln -sf "aggregate_${TIMESTAMP}.json" "$OUTDIR/aggregate.json" 2>/dev/null \
  || cp "$OUTDIR/aggregate_${TIMESTAMP}.json" "$OUTDIR/aggregate.json"

# Optional telemetry
if [ -n "${TELEMETRY_URL:-}" ] && [ -f "$OUTDIR/aggregate.json" ]; then
  echo "📡 Sending telemetry..."
  if TELEMETRY_URL="${TELEMETRY_URL}" "$SCRIPT_DIR/telemetry_ping.sh" "$OUTDIR/aggregate.json" 2>/dev/null; then
    echo "  ✓ Telemetry sent"
  else
    echo "  ⚠ Telemetry send failed"
  fi
fi

echo ""
echo "✅ Metrics collection complete!"
echo "  Latest: $OUTDIR/aggregate.json"
echo "  Timestamped: $OUTDIR/aggregate_${TIMESTAMP}.json"
echo ""
echo "View results: jq . $OUTDIR/aggregate.json"
scripts/metrics/compression_bench.sh

Lines changed: 89 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,89 @@
#!/usr/bin/env bash
# filepath: scripts/metrics/compression_bench.sh
#
# Benchmark 'owlsol compress' on a single input file and print a JSON
# object with original/compressed sizes, compression ratio, space savings,
# detected algorithm, and wall-clock duration.
#
# Usage: compression_bench.sh inputfile
set -euo pipefail

if [ $# -ne 1 ]; then
  echo "Usage: $0 inputfile" >&2
  exit 1
fi

in="$1"
if [ ! -f "$in" ]; then
  echo "Error: Input file not found: $in" >&2
  exit 1
fi

if ! command -v owlsol >/dev/null 2>&1; then
  echo "Error: owlsol not found in PATH" >&2
  exit 1
fi

# File size in bytes, portable across GNU stat (-c%s) and BSD/macOS (-f%z).
file_size() {
  stat -c%s "$1" 2>/dev/null || stat -f%z "$1"
}

# Current time in milliseconds. BSD/macOS date does not support %N and
# prints the literal trailing characters while still exiting 0, so the
# output must be validated as numeric — checking date's exit status
# (as 'date +%s%3N >/dev/null 2>&1' does) never selects the fallback.
now_ms() {
  local t
  t=$(date +%s%3N 2>/dev/null || true)
  if [[ "$t" =~ ^[0-9]+$ ]]; then
    echo "$t"
  else
    echo $(( $(date +%s) * 1000 ))
  fi
}

name=$(basename "$in")
orig_size=$(file_size "$in")

# Guard the ratio/savings division below against a zero-byte input.
if [ "$orig_size" -eq 0 ]; then
  echo "Error: Input file is empty: $in" >&2
  exit 1
fi

out="${in}.owlsol"
meta="${out}.meta.json"

# Remove benchmark artifacts on every exit path, including early failures.
cleanup() { rm -f -- "$out" "$meta"; }
trap cleanup EXIT
cleanup

# Measure compression time. owlsol's own stdout is routed to stderr so it
# cannot corrupt the JSON result this script emits on stdout (callers
# redirect stdout straight into a .json file).
start=$(now_ms)
rv=0
owlsol compress "$in" -o "$out" 1>&2 || rv=$?
end=$(now_ms)
duration_ms=$((end - start))

if [ "$rv" -ne 0 ] || [ ! -f "$out" ]; then
  echo "Error: Compression failed (exit $rv), output file not created" >&2
  exit 1
fi

compressed_size=$(file_size "$out")

# ratio < 1.0 means the output is smaller; savings is the % reduction.
ratio=$(awk -v a="$compressed_size" -v b="$orig_size" 'BEGIN{printf "%.4f", a/b}')
savings=$(awk -v a="$compressed_size" -v b="$orig_size" 'BEGIN{printf "%.2f", ((b-a)/b)*100}')

# Algorithm name from the sidecar metadata file, when owlsol produced one.
algorithm="unknown"
if [ -f "$meta" ]; then
  algorithm=$(jq -r '.algorithm // "unknown"' "$meta" 2>/dev/null || echo "unknown")
fi

jq -n --arg file "$name" \
  --argjson orig "$orig_size" \
  --argjson comp "$compressed_size" \
  --argjson dur "$duration_ms" \
  --argjson ratio "$ratio" \
  --argjson savings "$savings" \
  --arg algo "$algorithm" \
  '{
    file:$file,
    original_bytes:$orig,
    compressed_bytes:$comp,
    compression_ratio:$ratio,
    space_savings_percent:$savings,
    algorithm:$algo,
    duration_ms:$dur
  }'

# Cleanup of "$out" and "$meta" happens via the EXIT trap.

scripts/metrics/github_metrics.sh

Lines changed: 58 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,58 @@
#!/usr/bin/env bash
# filepath: scripts/metrics/github_metrics.sh
#
# Collect repository metrics (stars, forks, watchers, issues, releases,
# download totals, traffic, contributor count) from the GitHub API and
# print them as one JSON object on stdout.
#
# Env vars:
#   OWNER, REPO   repository coordinates (defaults: owl-sol / OWLSOL_CLI)
#   GITHUB_TOKEN  required; on failure the script prints '{}' and exits 0
#                 so aggregating callers keep going.
set -euo pipefail

OWNER="${OWNER:-owl-sol}"
REPO="${REPO:-OWLSOL_CLI}"

if [ -z "${GITHUB_TOKEN:-}" ]; then
  echo '{"error":"GITHUB_TOKEN not set"}' >&2
  echo '{}'
  exit 0
fi

AUTH="Authorization: token ${GITHUB_TOKEN}"
API="https://api.github.com/repos/${OWNER}/${REPO}"

# Fetch repo info. '|| true' is required: under 'set -e' a failing curl
# would abort the script before the fallback below could run.
repo=$(curl -sSL -H "$AUTH" "$API" 2>/dev/null || true)
if [ -z "$repo" ] || [ "$repo" = "null" ]; then
  echo '{"error":"Failed to fetch repo info"}' >&2
  echo '{}'
  exit 0
fi

stars=$(echo "$repo" | jq '.stargazers_count // 0' 2>/dev/null || echo 0)
forks=$(echo "$repo" | jq '.forks_count // 0' 2>/dev/null || echo 0)
watchers=$(echo "$repo" | jq '.subscribers_count // 0' 2>/dev/null || echo 0)
open_issues=$(echo "$repo" | jq '.open_issues_count // 0' 2>/dev/null || echo 0)

# Releases: total asset downloads summed across all releases.
releases=$(curl -sSL -H "$AUTH" "${API}/releases" 2>/dev/null || echo '[]')
download_count=$(echo "$releases" | jq 'if type == "array" then [.[] | .assets[]? | .download_count] | add // 0 else 0 end' 2>/dev/null || echo 0)
release_count=$(echo "$releases" | jq 'if type == "array" then length else 0 end' 2>/dev/null || echo 0)

# Traffic (requires push access). Validate the responses so a non-JSON
# error body can never be spliced into the final --argjson call.
views=$(curl -sSL -H "$AUTH" "${API}/traffic/views" 2>/dev/null || echo '{"count":0,"uniques":0}')
echo "$views" | jq -e . >/dev/null 2>&1 || views='{"count":0,"uniques":0}'
clones=$(curl -sSL -H "$AUTH" "${API}/traffic/clones" 2>/dev/null || echo '{"count":0,"uniques":0}')
echo "$clones" | jq -e . >/dev/null 2>&1 || clones='{"count":0,"uniques":0}'

# Contributor count. With per_page=1, GitHub's Link response header names
# the last page number, which equals the total contributor count; counting
# the returned one-element array would cap the result at 1.
contrib_headers=$(curl -sSI -H "$AUTH" "${API}/contributors?per_page=1" 2>/dev/null || true)
contributor_count=$(printf '%s' "$contrib_headers" | tr -d '\r' \
  | sed -n 's/.*[?&]page=\([0-9][0-9]*\)>; rel="last".*/\1/p' | head -n1)
if [ -z "$contributor_count" ]; then
  # No Link header: the repo has at most one page of one result (0 or 1).
  contributors=$(curl -sSL -H "$AUTH" "${API}/contributors?per_page=1" 2>/dev/null || echo '[]')
  contributor_count=$(echo "$contributors" | jq 'if type == "array" then length else 0 end' 2>/dev/null || echo 0)
fi

jq -n --arg owner "$OWNER" --arg repo "$REPO" \
  --argjson stars "$stars" --argjson forks "$forks" --argjson watchers "$watchers" \
  --argjson issues "$open_issues" --argjson downloads "$download_count" \
  --argjson releases "$release_count" --argjson contributors "$contributor_count" \
  --argjson views "$views" --argjson clones "$clones" \
  '{
    owner:$owner,
    repo:$repo,
    stars:$stars,
    forks:$forks,
    watchers:$watchers,
    open_issues:$issues,
    release_count:$releases,
    release_downloads:$downloads,
    contributors:$contributors,
    views:$views,
    clones:$clones
  }'
scripts/metrics/solana_account_sizes.sh

Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,24 @@
#!/usr/bin/env bash
# Query Solana account data sizes for a list of addresses and print a JSON
# array of {account, bytes_size} objects on stdout.
# Usage: scripts/metrics/solana_account_sizes.sh accounts.txt
#   (one base58 address per line; blank lines are skipped)
# Requires: SOLANA_RPC_URL or default mainnet RPC, jq, curl
set -euo pipefail
RPC="${SOLANA_RPC_URL:-https://api.mainnet-beta.solana.com}"
if [ $# -lt 1 ]; then echo "Usage: $0 accounts.txt"; exit 1; fi
INFILE="$1"
echo "["
first=true
# '|| [ -n "$addr" ]' keeps a final line that lacks a trailing newline.
while IFS= read -r addr || [ -n "$addr" ]; do
  addr=${addr%$'\r'}   # tolerate CRLF input files
  [ -z "$addr" ] && continue
  body=$(jq -n --arg acc "$addr" '{jsonrpc:"2.0",id:1,method:"getAccountInfo",params:[$acc,{encoding:"base64"}]}')
  # '|| echo {}' so one failed RPC call doesn't abort the whole run under
  # 'set -e' (which would truncate the JSON array); '</dev/null' keeps curl
  # from ever consuming the address list on the loop's stdin.
  res=$(curl -sSL -X POST -H "Content-Type: application/json" -d "$body" "$RPC" </dev/null || echo '{}')
  data_b64=$(echo "$res" | jq -r '.result.value.data[0] // empty')
  len=0
  if [ -n "$data_b64" ]; then
    # Decoded byte size from the base64 text: every 4 chars encode 3 bytes,
    # minus one byte per '=' padding char. ('wc -c' on the encoded string
    # would report the base64 length plus a newline, not the account size.)
    pad=${data_b64//[^=]/}
    len=$(( ${#data_b64} * 3 / 4 - ${#pad} ))
  fi
  json=$(jq -n --arg addr "$addr" --argjson size "$len" '{account:$addr,bytes_size:$size}')
  if $first; then first=false; else printf ","; fi
  echo "$json"
done < "$INFILE"
echo "]"

scripts/metrics/telemetry_ping.sh

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
#!/usr/bin/env bash
# Optional opt-in telemetry sender.
# Usage: TELEMETRY_URL=https://collector.example/ingest scripts/metrics/telemetry_ping.sh metrics.json
# Exits 0 when telemetry is disabled (TELEMETRY_URL unset) and non-zero
# when a requested send fails, so callers can report the real outcome.
set -euo pipefail
if [ -z "${TELEMETRY_URL:-}" ]; then echo "TELEMETRY_URL not set; skipping send"; exit 0; fi
if [ $# -ne 1 ]; then echo "Usage: $0 metrics.json" >&2; exit 1; fi
file="$1"
if [ ! -f "$file" ]; then echo "file not found: $file" >&2; exit 1; fi
# Minimal anonymized payload. The original two-stage jq pipeline built
# collector_version and then dropped it in the second stage; one expression
# keeps it alongside the existing repo/metrics keys.
payload=$(jq '{repo: .repo, collector_version: "v1", metrics: .}' "$file")
# 'curl -f' makes HTTP-level errors count as failures; report the real
# result instead of unconditionally printing "telemetry sent".
if curl -fsSL -X POST -H "Content-Type: application/json" -d "$payload" "$TELEMETRY_URL" >/dev/null; then
  echo "telemetry sent"
else
  echo "telemetry send failed" >&2
  exit 1
fi

0 commit comments

Comments
 (0)