Skip to content

Commit 6bae233

Browse files
authored
fix: make tags safer, refactor legacy lambda, fix rls (#1234)
1 parent cdbf50c commit 6bae233

File tree

6 files changed

+79
-46
lines changed

6 files changed

+79
-46
lines changed

bump-release.py

Lines changed: 20 additions & 11 deletions
Original file line number · Diff line number · Diff line change
@@ -1,14 +1,16 @@
1-
import os
21
import sys
2+
import subprocess
33

4-
os.system('git checkout main')
5-
os.system('git pull')
4+
5+
current_branch = subprocess.check_output('git branch --show-current', shell=True).decode('utf-8').strip()
6+
subprocess.check_output('git checkout main', shell=True)
7+
subprocess.check_output('git pull', shell=True)
68

79
from cid._version import __version__ as old_ver
810

9-
bump='patch'
10-
if len(sys.argv)>1:
11-
bump = sys.argv[1]
11+
bump = 'patch'
12+
if 'minor' in sys.argv:
13+
bump = 'minor'
1214

1315
maj, minor, patch = map(int, old_ver.split('.'))
1416

@@ -19,8 +21,7 @@
1921
else:
2022
raise NotImplementedError('only patch and minor are implemented')
2123

22-
os.system(f"git checkout -b 'release/{new_ver}'")
23-
24+
print(subprocess.check_output(f"git checkout -b 'release/{new_ver}'", shell=True).decode('utf-8').strip())
2425

2526
tx = open('cid/_version.py').read()
2627
with open('cid/_version.py', "w") as f:
@@ -31,6 +32,14 @@
3132
f.write(tx.replace(f"{old_ver}", f"{new_ver}"))
3233

3334

34-
os.system('git diff HEAD --unified=0')
35-
print('to undo:\n git checkout HEAD -- cfn-templates/cid-cfn.yml cid/_version.py')
36-
print(f"to continue:\n git commit -am 'release {new_ver}'; git push origin 'release/{new_ver}'")
35+
print(subprocess.check_output('git diff HEAD --unified=0 --color', shell=True).decode('utf-8'))
36+
37+
if '--merge' in sys.argv:
38+
print(subprocess.check_output(f"git commit -am 'release {new_ver}'", shell=True).decode('utf-8').strip())
39+
print(subprocess.check_output(f"git checkout {current_branch}", shell=True).decode('utf-8').strip())
40+
print(subprocess.check_output(f"git merge 'release/{new_ver}'", shell=True).decode('utf-8').strip())
41+
print(subprocess.check_output(f"git branch -D 'release/{new_ver}'", shell=True).decode('utf-8').strip())
42+
print(f'Merged to {current_branch}')
43+
else:
44+
print('to undo:\n git checkout HEAD -- cfn-templates/cid-cfn.yml cid/_version.py')
45+
print(f"to continue:\n git commit -am 'release {new_ver}'; git push origin 'release/{new_ver}'")

cfn-templates/cid-cfn.yml

Lines changed: 7 additions & 22 deletions
Original file line number · Diff line number · Diff line change
@@ -1,6 +1,6 @@
11
# https://github.com/aws-samples/aws-cudos-framework-deployment/blob/main/cfn-templates/cid-cfn.yml
22
AWSTemplateFormatVersion: '2010-09-09'
3-
Description: Deployment of Cloud Intelligence Dashboards v4.2.5 - AWS Solution SO9011
3+
Description: Deployment of Cloud Intelligence Dashboards v4.2.6 - AWS Solution SO9011
44
Metadata:
55
AWS::CloudFormation::Interface:
66
ParameterGroups:
@@ -605,26 +605,11 @@ Resources:
605605
Handler: 'index.lambda_handler'
606606
Code:
607607
ZipFile: |
608-
# This is a legacy lambda. You can delete it. This was kept to disable delete workgroup functionality.
609-
import json
610-
import urllib3
611-
608+
# Legacy lambda - will be deleted in next major update
609+
import cfnresponse
610+
612611
def lambda_handler(event, context):
613-
url = event.get('ResponseURL')
614-
json_body = json.dumps({
615-
'Status': 'SUCCESS',
616-
'Reason': 'legacy',
617-
'PhysicalResourceId': 'keep_it_constant',
618-
'StackId': event.get('StackId'),
619-
'RequestId': event.get('RequestId'),
620-
'LogicalResourceId': event.get('LogicalResourceId'),
621-
})
622-
try:
623-
http = urllib3.PoolManager()
624-
response = http.request('PUT', url, body=json_body, headers={'content-type' : '', 'content-length' : str(len(json_body))}, retries=False)
625-
print(f"Status code: {response}")
626-
except Exception as exc:
627-
print("Failed sending PUT to CFN: " + str(exc))
612+
cfnresponse.send(event, context, cfnresponse.SUCCESS, {}, 'keep_it_constant', 'legacy')
628613
Metadata:
629614
cfn_nag:
630615
rules_to_suppress:
@@ -2070,7 +2055,7 @@ Resources:
20702055
SourceBucket: !Ref ReferenceAssetsBucket
20712056
DestinationBucket: !Ref LocalAssetsBucket
20722057
Keys:
2073-
- 'cid-resource-lambda-layer/cid-4.2.5.zip' #replace version here if needed
2058+
- 'cid-resource-lambda-layer/cid-4.2.6.zip' #replace version here if needed
20742059

20752060
CidResourceLambdaLayer:
20762061
Type: AWS::Lambda::LayerVersion
@@ -2085,7 +2070,7 @@ Resources:
20852070
- LambdaLayerBucketPrefixIsManaged
20862071
- !FindInMap [RegionMap, !Ref 'AWS::Region', BucketName]
20872072
- !Sub '${LambdaLayerBucketPrefix}-${AWS::Region}' # Region added for backward compatibility
2088-
S3Key: 'cid-resource-lambda-layer/cid-4.2.5.zip' #replace version here if needed
2073+
S3Key: 'cid-resource-lambda-layer/cid-4.2.6.zip' #replace version here if needed
20892074
CompatibleRuntimes:
20902075
- python3.10
20912076
- python3.11

cid/_version.py

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -1,2 +1,2 @@
1-
__version__ = '4.2.5'
1+
__version__ = '4.2.6'
22

cid/common.py

Lines changed: 3 additions & 9 deletions
Original file line number · Diff line number · Diff line change
@@ -1668,17 +1668,11 @@ def create_or_update_dataset(self, dataset_definition: dict, dataset_id: str=Non
16681668
update_dataset = True
16691669
break
16701670

1671-
identical = False # check if dataset needs an update
1672-
if isinstance(found_dataset, Dataset):
1673-
identical = True
1674-
for key in 'PhysicalTableMap LogicalTableMap OutputColumns ImportMode DataSetUsageConfiguration RowLevelPermissionDataSet FieldFolders RowLevelPermissionTagConfiguration DatasetParameters'.split():
1675-
if found_dataset.raw.get(key) != compiled_dataset.get(key):
1676-
logger.trace(f'not identical {key} {found_dataset.raw.get(key)} != {compiled_dataset.get(key)}')
1677-
identical = False
1678-
logger.trace(f'identical to existing = {identical}')
1671+
identical = Dataset.datasets_are_identical(found_dataset, compiled_dataset) # check if dataset needs an update
16791672

16801673
if update_dataset and not identical:
1681-
self.qs.update_dataset(compiled_dataset)
1674+
merged_dataset = Dataset.merge_datasets(compiled_dataset, found_dataset)
1675+
self.qs.update_dataset(merged_dataset)
16821676
if compiled_dataset.get("ImportMode") == "SPICE":
16831677
dataset_id = compiled_dataset.get('DataSetId')
16841678
schedules_definitions = []

cid/helpers/quicksight/__init__.py

Lines changed: 3 additions & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -261,7 +261,9 @@ def set_tags(self, arn, **tags):
261261
return True
262262
except self.client.exceptions.AccessDeniedException as exc:
263263
logger.debug(f'Cannot tag {arn} (AccessDenied).')
264-
return False
264+
except self.client.exceptions.ClientError as exc:
265+
logger.debug(f'Cannot tag {arn} ({exc}).')
266+
return False
265267

266268

267269
def get_tags(self, arn):

cid/helpers/quicksight/dataset.py

Lines changed: 45 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -8,6 +8,12 @@
88

99
logger = logging.getLogger(__name__)
1010

11+
DATASET_PROPERTIES = [
12+
'AwsAccountId', 'DataSetId', 'Name', 'PhysicalTableMap', 'LogicalTableMap', 'ImportMode', 'ColumnGroups',
13+
'RowLevelPermissionDataSet', 'RowLevelPermissionTagConfiguration', 'FieldFolders', 'ColumnLevelPermissionRules',
14+
'DataSetUsageConfiguration', 'DatasetParameters', 'PerformanceConfiguration'
15+
]
16+
1117

1218
class Dataset(CidQsResource):
1319

@@ -189,7 +195,7 @@ def _athena_to_qs_type(col, athena_type):
189195
projected_cols.append(col)
190196

191197
# filter out all columns that cannot be used for dataset creation
192-
update_ = {key: value for key, value in dataset.items() if key in 'DataSetId, Name, PhysicalTableMap, LogicalTableMap, ImportMode, ColumnGroups, FieldFolders, RowLevelPermissionDataSet, RowLevelPermissionTagConfiguration, ColumnLevelPermissionRules, DataSetUsageConfiguration, DatasetParameters'.split(', ')}
198+
update_ = {key: value for key, value in dataset.items() if key in DATASET_PROPERTIES}
193199
logger.trace(f'update_ = {update_}')
194200
return update_
195201

@@ -230,4 +236,41 @@ def to_diffable_structure(self):
230236
for alias, join in join_clauses.items():
231237
if isinstance(data['Data'].get(alias), dict) :
232238
data['Data'][alias]['clause'] = join
233-
return (yaml.safe_dump(data))
239+
return (yaml.safe_dump(data))
240+
241+
@staticmethod
242+
def datasets_are_identical(dataset1, dataset2):
243+
''' Compare 2 datasets and returns True if no difference found
244+
'''
245+
if (not dataset1 and not dataset2):
246+
return True
247+
identical = False
248+
if (dataset1 and not dataset2) or (not dataset1 and dataset2):
249+
return identical
250+
dataset1 = dataset1 if isinstance(dataset1, Dataset) else Dataset(dataset1)
251+
dataset2 = dataset2 if isinstance(dataset2, Dataset) else Dataset(dataset2)
252+
identical = True
253+
for key in DATASET_PROPERTIES:
254+
if key in ['AwsAccountId', 'DataSetId']:
255+
continue
256+
if dataset1.raw.get(key) != dataset2.raw.get(key):
257+
logger.trace(f'not identical {key} {dataset1.raw.get(key)} != {dataset2.raw.get(key)}')
258+
identical = False
259+
logger.trace(f'identical to existing = {identical}')
260+
return identical
261+
262+
@staticmethod
263+
def merge_datasets(dataset1, dataset2):
264+
''' merge high level 2 datasets. Not a deep merge.
265+
'''
266+
if not dataset2:
267+
return dataset1
268+
if not dataset1:
269+
return dataset2
270+
dataset1 = dataset1 if isinstance(dataset1, Dataset) else Dataset(dataset1)
271+
dataset2 = dataset2 if isinstance(dataset2, Dataset) else Dataset(dataset2)
272+
result = {}
273+
for key in DATASET_PROPERTIES:
274+
if dataset1.raw.get(key) or dataset2.raw.get(key):
275+
result[key] = dataset1.raw.get(key) or dataset2.raw.get(key)
276+
return result

0 commit comments

Comments (0)