diff --git a/app/helpers/aws_helper.py b/app/helpers/aws_helper.py index 963aab9..fb75c8b 100644 --- a/app/helpers/aws_helper.py +++ b/app/helpers/aws_helper.py @@ -1,17 +1,18 @@ import boto3 -import os import json +from lib.application_configs import ApplicationConfigs + session = boto3.Session( - aws_access_key_id=os.environ['AWS_ACCESS_KEY_ID'], - aws_secret_access_key=os.environ['AWS_SECRET_ACCESS_KEY']) + aws_access_key_id=ApplicationConfigs.aws_access_key_id, + aws_secret_access_key=ApplicationConfigs.aws_secret_key) s3 = session.resource('s3', - region_name=os.environ['AWS_REGION']) - # endpoint_url=os.environ['ENDPOINT_URL']) + region_name=ApplicationConfigs.region_name, + endpoint_url=ApplicationConfigs.endpoint_url) sqs = session.client('sqs', - region_name=os.environ['AWS_REGION']) - # endpoint_url=os.environ['ENDPOINT_URL']) + region_name=ApplicationConfigs.region_name, + endpoint_url=ApplicationConfigs.endpoint_url) def get_key_from_message(body): diff --git a/app/lib/application_configs.py b/app/lib/application_configs.py new file mode 100644 index 0000000..8663525 --- /dev/null +++ b/app/lib/application_configs.py @@ -0,0 +1,11 @@ +import os +from pydantic.dataclasses import dataclass + +@dataclass +class ApplicationConfigs(): + region_name = os.environ.get('AWS_REGION', 'ca-central-1') + aws_access_key_id = os.environ.get('AWS_ACCESS_KEY_ID', '') + aws_secret_key = os.environ.get('AWS_SECRET_ACCESS_KEY', '') + sqs_queue = os.environ.get('SQS_QUEUE', '') + endpoint_url = os.environ.get('ENDPOINT_URL') or None + s3_processed_bucket = os.environ.get('S3_PROCESSED_BUCKET', 'measure-local-solver-processed') diff --git a/app/main.py b/app/main.py index 8dc92e7..59b6e9e 100644 --- a/app/main.py +++ b/app/main.py @@ -1,5 +1,6 @@ import os, sys, logging +from lib.application_configs import ApplicationConfigs from services.loft_service import LoftService from helpers import aws_helper @@ -27,12 +28,12 @@ def main(): logging.info('Starting Solver Service 
(v1.1.4)...') listener = ServiceListener( None, - os.environ['SQS_QUEUE'], + ApplicationConfigs.sqs_queue, create_queue=False, - region_name=os.environ['AWS_REGION'], - aws_access_key=os.environ['AWS_ACCESS_KEY_ID'], - aws_secret_key=os.environ['AWS_SECRET_ACCESS_KEY']) - # endpoint_url=os.environ['ENDPOINT_URL']) + region_name=ApplicationConfigs.region_name, + aws_access_key=ApplicationConfigs.aws_access_key_id, + aws_secret_key=ApplicationConfigs.aws_secret_key, + endpoint_url=ApplicationConfigs.endpoint_url) listener.listen() diff --git a/app/services/loft_service.py b/app/services/loft_service.py index 81c8160..7642a0f 100644 --- a/app/services/loft_service.py +++ b/app/services/loft_service.py @@ -2,6 +2,7 @@ import os, json, random, io, logging from pulp import LpProblem, LpVariable, LpMinimize, LpMaximize, LpAffineExpression, LpConstraint, LpStatus, lpSum +from lib.application_configs import ApplicationConfigs from helpers import aws_helper, tar_helper, csv_helper, service_helper, solver_helper from lib.errors.item_generation_error import ItemGenerationError @@ -201,14 +202,14 @@ class LoftService(Base): if error: logging.info('Streaming %s error response to s3 bucket - %s', - self.file_name, os.environ['S3_PROCESSED_BUCKET']) + self.file_name, ApplicationConfigs.s3_processed_bucket) solution_file = service_helper.error_to_file(buffer, error) else: logging.info('Streaming %s to s3 bucket - %s', self.file_name, - os.environ['S3_PROCESSED_BUCKET']) + ApplicationConfigs.s3_processed_bucket) solution_file = service_helper.solution_to_file( buffer, self.solver_run.total_form_items, self.solution.forms) # upload generated file to s3 and return result return aws_helper.file_stream_upload(solution_file, self.file_name, - os.environ['S3_PROCESSED_BUCKET']) + ApplicationConfigs.s3_processed_bucket)