
Commit e75b8ea

Merge pull request #164 from naved001/adjustable-metric-frequency
2 parents: ffe21b7 + 7e9f94c

File tree: 3 files changed (+35 -4 lines changed)


openshift_metrics/config.py

Lines changed: 2 additions & 0 deletions
@@ -14,3 +14,5 @@
 S3_SECRET_ACCESS_KEY = os.getenv("S3_OUTPUT_SECRET_ACCESS_KEY")
 S3_INVOICE_BUCKET = os.getenv("S3_INVOICE_BUCKET", "nerc-invoicing")
 S3_METRICS_BUCKET = os.getenv("S3_METRICS_BUCKET", "openshift_metrics")
+PROM_QUERY_INTERVAL_MINUTES = int(os.getenv("PROM_QUERY_INTERVAL_MINUTES", 15))
+assert PROM_QUERY_INTERVAL_MINUTES >= 1, "Query interval must be at least 1 minute"
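
For context, a small standalone sketch of what these two config lines do: the interval defaults to 15 minutes when the PROM_QUERY_INTERVAL_MINUTES environment variable is unset, must parse as an integer, and is rejected if it is below 1. Everything outside the two lines shown in the diff is illustrative only.

import os

# Sketch of the parsing added in openshift_metrics/config.py.
# Unset -> default of 15; "0" or a negative value trips the assertion;
# a non-integer value raises ValueError from int().
interval = int(os.getenv("PROM_QUERY_INTERVAL_MINUTES", 15))
assert interval >= 1, "Query interval must be at least 1 minute"

print(f"Prometheus queries will use a {interval}-minute interval")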

openshift_metrics/merge.py

Lines changed: 27 additions & 2 deletions
@@ -2,6 +2,7 @@
 Merges metrics from files and produces reports by pod and by namespace
 """

+import sys
 import logging
 import argparse
 from datetime import datetime, UTC
@@ -12,7 +13,7 @@

 from openshift_metrics import utils, invoice
 from openshift_metrics.metrics_processor import MetricsProcessor
-from openshift_metrics.config import S3_INVOICE_BUCKET
+from openshift_metrics.config import S3_INVOICE_BUCKET, PROM_QUERY_INTERVAL_MINUTES

 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)
@@ -106,7 +107,31 @@ def main():
     report_start_date = None
     report_end_date = None
     cluster_name = None
-    processor = MetricsProcessor()
+    interval_minutes = None
+
+    for file in files:
+        with open(file, "r") as jsonfile:
+            metrics_from_file = json.load(jsonfile)
+            if interval_minutes is None:
+                interval_minutes = metrics_from_file.get("interval_minutes")
+            else:
+                interval_minutes_from_file = metrics_from_file["interval_minutes"]
+                if interval_minutes != interval_minutes_from_file:
+                    sys.exit(
+                        f"Cannot process files with different intervals {interval_minutes} != {interval_minutes_from_file}"
+                    )
+
+    if interval_minutes is None:
+        logger.info(
+            f"No prometheus query interval minutes found in the given set of files. Using the provided interval: {PROM_QUERY_INTERVAL_MINUTES} minute(s)"
+        )
+        interval_minutes = PROM_QUERY_INTERVAL_MINUTES
+    else:
+        logger.info(
+            f"Prometheus Query interval set to {interval_minutes} minute(s) from file"
+        )
+
+    processor = MetricsProcessor(interval_minutes)

     for file in files:
         with open(file, "r") as jsonfile:
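
A minimal sketch of the consistency check merge.py now performs, using in-memory stand-ins for the parsed JSON files (the sample data is hypothetical): merging aborts if the input files disagree on interval_minutes, and falls back to PROM_QUERY_INTERVAL_MINUTES when none of the files carry the field, e.g. files generated before this change.

import sys

# Hypothetical stand-ins for json.load(...) results from two metrics files.
parsed_files = [
    {"interval_minutes": 15, "start_date": "2024-01-01"},
    {"interval_minutes": 15, "start_date": "2024-01-02"},
]

interval_minutes = None
for metrics_from_file in parsed_files:
    if interval_minutes is None:
        interval_minutes = metrics_from_file.get("interval_minutes")
    elif interval_minutes != metrics_from_file["interval_minutes"]:
        # Files gathered at different query intervals are refused outright.
        sys.exit("Cannot process files with different intervals")

DEFAULT_INTERVAL = 15  # stands in for config.PROM_QUERY_INTERVAL_MINUTES
if interval_minutes is None:
    interval_minutes = DEFAULT_INTERVAL

print(interval_minutes)  # -> 15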

openshift_metrics/openshift_prometheus_metrics.py

Lines changed: 6 additions & 2 deletions
@@ -26,6 +26,7 @@
     OPENSHIFT_PROMETHEUS_URL,
     OPENSHIFT_TOKEN,
     S3_METRICS_BUCKET,
+    PROM_QUERY_INTERVAL_MINUTES,
 )

 logging.basicConfig(level=logging.INFO)
@@ -89,14 +90,17 @@ def main():
     output_file = f"metrics-{report_start_date}-to-{report_end_date}.json"

     logger.info(
-        f"Generating report starting {report_start_date} and ending {report_end_date} in {output_file}"
+        f"Generating report starting {report_start_date} and ending {report_end_date} in {output_file} with interval {PROM_QUERY_INTERVAL_MINUTES} minute"
     )

-    prom_client = PrometheusClient(openshift_url, OPENSHIFT_TOKEN)
+    prom_client = PrometheusClient(
+        openshift_url, OPENSHIFT_TOKEN, PROM_QUERY_INTERVAL_MINUTES
+    )

     metrics_dict = {}
     metrics_dict["start_date"] = report_start_date
     metrics_dict["end_date"] = report_end_date
+    metrics_dict["interval_minutes"] = PROM_QUERY_INTERVAL_MINUTES
     metrics_dict["cluster_name"] = URL_CLUSTER_NAME_MAPPING.get(
         args.openshift_url, args.openshift_url
     )
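
The net effect is that each generated metrics file now records the interval it was gathered at, which merge.py reads back. Roughly, the written JSON looks like the sketch below; only the keys visible in this diff are confirmed, and the example values (and any keys not shown here, such as the metric payload itself) are assumptions.

# Approximate shape of the metrics JSON written by openshift_prometheus_metrics.py
# and later consumed by merge.py. Values are illustrative.
metrics_dict = {
    "start_date": "2025-01-01",
    "end_date": "2025-01-02",
    "interval_minutes": 15,         # new in this PR; checked for consistency by merge.py
    "cluster_name": "ocp-example",  # hypothetical value from URL_CLUSTER_NAME_MAPPING
}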
