16 changes: 16 additions & 0 deletions openshift_metrics/config.py
@@ -0,0 +1,16 @@
+"""Infrastructure configuration for OpenShift metrics and S3 storage."""
+
+import os
+
+# OpenShift/Prometheus
+OPENSHIFT_PROMETHEUS_URL = os.getenv("OPENSHIFT_PROMETHEUS_URL")
+OPENSHIFT_TOKEN = os.getenv("OPENSHIFT_TOKEN")
+
+# S3 Configuration
+S3_ENDPOINT_URL = os.getenv(
+    "S3_OUTPUT_ENDPOINT_URL", "https://s3.us-east-005.backblazeb2.com"
+)
+S3_ACCESS_KEY_ID = os.getenv("S3_OUTPUT_ACCESS_KEY_ID")
+S3_SECRET_ACCESS_KEY = os.getenv("S3_OUTPUT_SECRET_ACCESS_KEY")
+S3_INVOICE_BUCKET = os.getenv("S3_INVOICE_BUCKET", "nerc-invoicing")
+S3_METRICS_BUCKET = os.getenv("S3_METRICS_BUCKET", "openshift_metrics")
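
Note: the settings above are resolved with os.getenv once, when openshift_metrics.config is first imported, so the environment variables must be set before any importing module loads. A minimal usage sketch follows; the key values are made-up placeholders and are not part of this change:

# Hypothetical sketch: config values are plain module attributes,
# captured from the environment when the module is first imported.
import os

os.environ.setdefault("S3_OUTPUT_ACCESS_KEY_ID", "example-key-id")      # placeholder value
os.environ.setdefault("S3_OUTPUT_SECRET_ACCESS_KEY", "example-secret")  # placeholder value

from openshift_metrics import config  # must happen after the env vars are set

print(config.S3_ENDPOINT_URL)    # Backblaze default unless S3_OUTPUT_ENDPOINT_URL is set
print(config.S3_INVOICE_BUCKET)  # "nerc-invoicing" unless S3_INVOICE_BUCKET overrides it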

13 changes: 7 additions & 6 deletions openshift_metrics/merge.py
@@ -3,7 +3,6 @@
 """

 import logging
-import os
 import argparse
 from datetime import datetime, UTC
 import json
@@ -13,6 +12,7 @@

 from openshift_metrics import utils, invoice
 from openshift_metrics.metrics_processor import MetricsProcessor
+from openshift_metrics.config import S3_INVOICE_BUCKET

 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)
@@ -219,29 +219,30 @@ def main():
     )

     if args.upload_to_s3:
-        bucket_name = os.environ.get("S3_INVOICE_BUCKET", "nerc-invoicing")
         primary_location = (
             f"Invoices/{report_month}/"
             f"Service Invoices/{cluster_name} {report_month}.csv"
         )
-        utils.upload_to_s3(invoice_file, bucket_name, primary_location)
+        utils.upload_to_s3(invoice_file, S3_INVOICE_BUCKET, primary_location)

         timestamp = datetime.utcnow().strftime("%Y%m%dT%H%M%SZ")
         secondary_location = (
             f"Invoices/{report_month}/"
             f"Archive/{cluster_name} {report_month} {timestamp}.csv"
         )
-        utils.upload_to_s3(invoice_file, bucket_name, secondary_location)
+        utils.upload_to_s3(invoice_file, S3_INVOICE_BUCKET, secondary_location)
         pod_report_location = (
             f"Invoices/{report_month}/"
             f"Archive/Pod-{cluster_name} {report_month} {timestamp}.csv"
         )
-        utils.upload_to_s3(pod_report_file, bucket_name, pod_report_location)
+        utils.upload_to_s3(pod_report_file, S3_INVOICE_BUCKET, pod_report_location)
         class_invoice_location = (
             f"Invoices/{report_month}/"
             f"Archive/Class-{cluster_name} {report_month} {timestamp}.csv"
         )
-        utils.upload_to_s3(class_invoice_file, bucket_name, class_invoice_location)
+        utils.upload_to_s3(
+            class_invoice_file, S3_INVOICE_BUCKET, class_invoice_location
+        )


 if __name__ == "__main__":

14 changes: 8 additions & 6 deletions openshift_metrics/openshift_prometheus_metrics.py
@@ -15,14 +15,18 @@

 import argparse
 from datetime import datetime, timedelta
-import os
 import sys
 import json
 import logging

 from openshift_metrics import utils
 from openshift_metrics.prometheus_client import PrometheusClient
 from openshift_metrics.metrics_processor import MetricsProcessor
+from openshift_metrics.config import (
+    OPENSHIFT_PROMETHEUS_URL,
+    OPENSHIFT_TOKEN,
+    S3_METRICS_BUCKET,
+)

 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)
@@ -47,7 +51,7 @@ def main():
     parser.add_argument(
         "--openshift-url",
         help="OpenShift Prometheus URL",
-        default=os.getenv("OPENSHIFT_PROMETHEUS_URL"),
+        default=OPENSHIFT_PROMETHEUS_URL,
     )
     parser.add_argument(
         "--report-start-date",
@@ -88,8 +92,7 @@ def main():
         f"Generating report starting {report_start_date} and ending {report_end_date} in {output_file}"
     )

-    token = os.environ.get("OPENSHIFT_TOKEN")
-    prom_client = PrometheusClient(openshift_url, token)
+    prom_client = PrometheusClient(openshift_url, OPENSHIFT_TOKEN)

     metrics_dict = {}
     metrics_dict["start_date"] = report_start_date
@@ -151,8 +154,7 @@ def main():
         json.dump(metrics_dict, file)

     if args.upload_to_s3:
-        bucket_name = os.environ.get("S3_METRICS_BUCKET", "openshift_metrics")
-        utils.upload_to_s3(output_file, bucket_name, s3_location)
+        utils.upload_to_s3(output_file, S3_METRICS_BUCKET, s3_location)


 if __name__ == "__main__":

20 changes: 9 additions & 11 deletions openshift_metrics/utils.py
@@ -13,12 +13,16 @@

 """Holds bunch of utility functions"""

-import os
 import csv
 import boto3
 import logging

 from openshift_metrics import invoice
+from openshift_metrics.config import (
+    S3_ENDPOINT_URL,
+    S3_ACCESS_KEY_ID,
+    S3_SECRET_ACCESS_KEY,
+)
 from decimal import Decimal

 logging.basicConfig(level=logging.INFO)
@@ -30,22 +34,16 @@ class EmptyResultError(Exception):


 def upload_to_s3(file, bucket, location):
-    s3_endpoint = os.getenv(
-        "S3_OUTPUT_ENDPOINT_URL", "https://s3.us-east-005.backblazeb2.com"
-    )
-    s3_key_id = os.getenv("S3_OUTPUT_ACCESS_KEY_ID")
-    s3_secret = os.getenv("S3_OUTPUT_SECRET_ACCESS_KEY")
-
-    if not s3_key_id or not s3_secret:
+    if not S3_ACCESS_KEY_ID or not S3_SECRET_ACCESS_KEY:
         raise Exception(
             "Must provide S3_OUTPUT_ACCESS_KEY_ID and"
             " S3_OUTPUT_SECRET_ACCESS_KEY environment variables."
         )
     s3 = boto3.client(
         "s3",
-        endpoint_url=s3_endpoint,
-        aws_access_key_id=s3_key_id,
-        aws_secret_access_key=s3_secret,
+        endpoint_url=S3_ENDPOINT_URL,
+        aws_access_key_id=S3_ACCESS_KEY_ID,
+        aws_secret_access_key=S3_SECRET_ACCESS_KEY,
     )
     logger.info(f"Uploading {file} to s3://{bucket}/{location}")
     s3.upload_file(file, Bucket=bucket, Key=location)
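
For reference, a minimal sketch of how the refactored helper is now called, mirroring the invoice upload in merge.py. The file name, month, and cluster below are made-up examples, and the call performs a real upload, so valid S3 credentials must be present in the environment:

from openshift_metrics import utils
from openshift_metrics.config import S3_INVOICE_BUCKET

# Hypothetical values for illustration only.
invoice_file = "ocp-prod 2024-01.csv"
report_month = "2024-01"
cluster_name = "ocp-prod"

primary_location = (
    f"Invoices/{report_month}/"
    f"Service Invoices/{cluster_name} {report_month}.csv"
)
# Raises if S3_OUTPUT_ACCESS_KEY_ID / S3_OUTPUT_SECRET_ACCESS_KEY are unset.
utils.upload_to_s3(invoice_file, S3_INVOICE_BUCKET, primary_location)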