Skip to content

Commit dfece72

Browse files
committed
Addressing #144 in chunks: added config.py for infrastructure variables, updated import references
1 parent 440a773 commit dfece72

File tree

4 files changed

+40
-23
lines changed

4 files changed

+40
-23
lines changed

openshift_metrics/config.py

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,16 @@
1+
"""Infrastructure configuration for OpenShift metrics and S3 storage."""
2+
3+
import os
4+
5+
# OpenShift/Prometheus
6+
OPENSHIFT_PROMETHEUS_URL = os.getenv("OPENSHIFT_PROMETHEUS_URL")
7+
OPENSHIFT_TOKEN = os.getenv("OPENSHIFT_TOKEN")
8+
9+
# S3 Configuration
10+
S3_ENDPOINT_URL = os.getenv(
11+
"S3_OUTPUT_ENDPOINT_URL", "https://s3.us-east-005.backblazeb2.com"
12+
)
13+
S3_ACCESS_KEY_ID = os.getenv("S3_OUTPUT_ACCESS_KEY_ID")
14+
S3_SECRET_ACCESS_KEY = os.getenv("S3_OUTPUT_SECRET_ACCESS_KEY")
15+
S3_INVOICE_BUCKET = os.getenv("S3_INVOICE_BUCKET", "nerc-invoicing")
16+
S3_METRICS_BUCKET = os.getenv("S3_METRICS_BUCKET", "openshift_metrics")

openshift_metrics/merge.py

Lines changed: 7 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,6 @@
33
"""
44

55
import logging
6-
import os
76
import argparse
87
from datetime import datetime, UTC
98
import json
@@ -13,6 +12,7 @@
1312

1413
from openshift_metrics import utils, invoice
1514
from openshift_metrics.metrics_processor import MetricsProcessor
15+
from openshift_metrics.config import S3_INVOICE_BUCKET
1616

1717
logging.basicConfig(level=logging.INFO)
1818
logger = logging.getLogger(__name__)
@@ -219,29 +219,30 @@ def main():
219219
)
220220

221221
if args.upload_to_s3:
222-
bucket_name = os.environ.get("S3_INVOICE_BUCKET", "nerc-invoicing")
223222
primary_location = (
224223
f"Invoices/{report_month}/"
225224
f"Service Invoices/{cluster_name} {report_month}.csv"
226225
)
227-
utils.upload_to_s3(invoice_file, bucket_name, primary_location)
226+
utils.upload_to_s3(invoice_file, S3_INVOICE_BUCKET, primary_location)
228227

229228
timestamp = datetime.utcnow().strftime("%Y%m%dT%H%M%SZ")
230229
secondary_location = (
231230
f"Invoices/{report_month}/"
232231
f"Archive/{cluster_name} {report_month} {timestamp}.csv"
233232
)
234-
utils.upload_to_s3(invoice_file, bucket_name, secondary_location)
233+
utils.upload_to_s3(invoice_file, S3_INVOICE_BUCKET, secondary_location)
235234
pod_report_location = (
236235
f"Invoices/{report_month}/"
237236
f"Archive/Pod-{cluster_name} {report_month} {timestamp}.csv"
238237
)
239-
utils.upload_to_s3(pod_report_file, bucket_name, pod_report_location)
238+
utils.upload_to_s3(pod_report_file, S3_INVOICE_BUCKET, pod_report_location)
240239
class_invoice_location = (
241240
f"Invoices/{report_month}/"
242241
f"Archive/Class-{cluster_name} {report_month} {timestamp}.csv"
243242
)
244-
utils.upload_to_s3(class_invoice_file, bucket_name, class_invoice_location)
243+
utils.upload_to_s3(
244+
class_invoice_file, S3_INVOICE_BUCKET, class_invoice_location
245+
)
245246

246247

247248
if __name__ == "__main__":

openshift_metrics/openshift_prometheus_metrics.py

Lines changed: 8 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -15,14 +15,18 @@
1515

1616
import argparse
1717
from datetime import datetime, timedelta
18-
import os
1918
import sys
2019
import json
2120
import logging
2221

2322
from openshift_metrics import utils
2423
from openshift_metrics.prometheus_client import PrometheusClient
2524
from openshift_metrics.metrics_processor import MetricsProcessor
25+
from openshift_metrics.config import (
26+
OPENSHIFT_PROMETHEUS_URL,
27+
OPENSHIFT_TOKEN,
28+
S3_METRICS_BUCKET,
29+
)
2630

2731
logging.basicConfig(level=logging.INFO)
2832
logger = logging.getLogger(__name__)
@@ -47,7 +51,7 @@ def main():
4751
parser.add_argument(
4852
"--openshift-url",
4953
help="OpenShift Prometheus URL",
50-
default=os.getenv("OPENSHIFT_PROMETHEUS_URL"),
54+
default=OPENSHIFT_PROMETHEUS_URL,
5155
)
5256
parser.add_argument(
5357
"--report-start-date",
@@ -88,8 +92,7 @@ def main():
8892
f"Generating report starting {report_start_date} and ending {report_end_date} in {output_file}"
8993
)
9094

91-
token = os.environ.get("OPENSHIFT_TOKEN")
92-
prom_client = PrometheusClient(openshift_url, token)
95+
prom_client = PrometheusClient(openshift_url, OPENSHIFT_TOKEN)
9396

9497
metrics_dict = {}
9598
metrics_dict["start_date"] = report_start_date
@@ -151,8 +154,7 @@ def main():
151154
json.dump(metrics_dict, file)
152155

153156
if args.upload_to_s3:
154-
bucket_name = os.environ.get("S3_METRICS_BUCKET", "openshift_metrics")
155-
utils.upload_to_s3(output_file, bucket_name, s3_location)
157+
utils.upload_to_s3(output_file, S3_METRICS_BUCKET, s3_location)
156158

157159

158160
if __name__ == "__main__":

openshift_metrics/utils.py

Lines changed: 9 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -13,12 +13,16 @@
1313

1414
"""Holds bunch of utility functions"""
1515

16-
import os
1716
import csv
1817
import boto3
1918
import logging
2019

2120
from openshift_metrics import invoice
21+
from openshift_metrics.config import (
22+
S3_ENDPOINT_URL,
23+
S3_ACCESS_KEY_ID,
24+
S3_SECRET_ACCESS_KEY,
25+
)
2226
from decimal import Decimal
2327

2428
logging.basicConfig(level=logging.INFO)
@@ -30,22 +34,16 @@ class EmptyResultError(Exception):
3034

3135

3236
def upload_to_s3(file, bucket, location):
33-
s3_endpoint = os.getenv(
34-
"S3_OUTPUT_ENDPOINT_URL", "https://s3.us-east-005.backblazeb2.com"
35-
)
36-
s3_key_id = os.getenv("S3_OUTPUT_ACCESS_KEY_ID")
37-
s3_secret = os.getenv("S3_OUTPUT_SECRET_ACCESS_KEY")
38-
39-
if not s3_key_id or not s3_secret:
37+
if not S3_ACCESS_KEY_ID or not S3_SECRET_ACCESS_KEY:
4038
raise Exception(
4139
"Must provide S3_OUTPUT_ACCESS_KEY_ID and"
4240
" S3_OUTPUT_SECRET_ACCESS_KEY environment variables."
4341
)
4442
s3 = boto3.client(
4543
"s3",
46-
endpoint_url=s3_endpoint,
47-
aws_access_key_id=s3_key_id,
48-
aws_secret_access_key=s3_secret,
44+
endpoint_url=S3_ENDPOINT_URL,
45+
aws_access_key_id=S3_ACCESS_KEY_ID,
46+
aws_secret_access_key=S3_SECRET_ACCESS_KEY,
4947
)
5048
logger.info(f"Uploading {file} to s3://{bucket}/{location}")
5149
s3.upload_file(file, Bucket=bucket, Key=location)

0 commit comments

Comments
 (0)