Skip to content

Commit 5c5005c

Browse files
PR feedback
1 parent c4e81c2 commit 5c5005c

5 files changed

Lines changed: 31 additions & 16 deletions

File tree

backend/compact-connect/lambdas/python/common/cc_common/data_model/data_client.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1065,7 +1065,7 @@ def _get_privilege_update_records_directly(
10651065

10661066
return [PrivilegeUpdateData.from_database_record(item) for item in response_items]
10671067

1068-
@logger_inject_kwargs(logger, 'compact', 'provider_id', 'detail', 'jurisdiction', 'license_type')
1068+
@logger_inject_kwargs(logger, 'compact', 'provider_id', 'detail', 'jurisdiction', 'license_type_abbr')
10691069
def get_privilege_data(
10701070
self,
10711071
*,

backend/compact-connect/lambdas/python/disaster-recovery/handlers/rollback_license_upload.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -181,7 +181,7 @@ def from_dict(cls, data: dict) -> 'RollbackResults':
181181
RevertedLicense(
182182
jurisdiction=reverted_license['jurisdiction'],
183183
license_type=reverted_license['licenseType'],
184-
revision_id=uuid4(),
184+
revision_id=reverted_license['revisionId'],
185185
action=reverted_license['action'],
186186
)
187187
for reverted_license in summary.get('licensesReverted', [])
@@ -190,7 +190,7 @@ def from_dict(cls, data: dict) -> 'RollbackResults':
190190
RevertedPrivilege(
191191
jurisdiction=reverted_privilege['jurisdiction'],
192192
license_type=reverted_privilege['licenseType'],
193-
revision_id=uuid4(),
193+
revision_id=reverted_privilege['revisionId'],
194194
action=reverted_privilege['action'],
195195
)
196196
for reverted_privilege in summary.get('privilegesReverted', [])

backend/compact-connect/lambdas/python/disaster-recovery/tests/function/test_rollback_license_upload.py

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1088,6 +1088,14 @@ def test_rollback_handles_pagination_when_provider_id_present_in_event_input(sel
10881088
self.assertEqual(0, result_first['providersFailed'])
10891089
self.assertEqual(mock_second_provider_id, result_first['continueFromProviderId'])
10901090

1091+
# Verify: S3 results contain first provider with revision id
1092+
s3_key = f'licenseUploadRollbacks/{MOCK_EXECUTION_NAME}/results.json'
1093+
s3_obj = self.config.s3_client.get_object(Bucket=self.config.disaster_recovery_results_bucket_name, Key=s3_key)
1094+
first_results_data = json.loads(s3_obj['Body'].read().decode('utf-8'))
1095+
1096+
# grab the revision id from the results which we will use when asserting on the final object
1097+
revision_id = first_results_data['revertedProviderSummaries'][0]['licensesReverted'][0]['revisionId']
1098+
10911099
# Execute: Second invocation (continue from where we left off)
10921100
# Reset mock time for second invocation
10931101
mock_time.time.side_effect = [0, 1] # Won't timeout this time
@@ -1117,7 +1125,7 @@ def test_rollback_handles_pagination_when_provider_id_present_in_event_input(sel
11171125
'action': 'REVERT',
11181126
'jurisdiction': 'oh',
11191127
'licenseType': 'speech-language pathologist',
1120-
'revisionId': ANY,
1128+
'revisionId': revision_id,
11211129
}
11221130
],
11231131
'privilegesReverted': [],
@@ -1132,6 +1140,7 @@ def test_rollback_handles_pagination_when_provider_id_present_in_event_input(sel
11321140
'action': 'REVERT',
11331141
'jurisdiction': 'oh',
11341142
'licenseType': 'speech-language pathologist',
1143+
# unknown random UUID, we won't check for it here
11351144
'revisionId': ANY,
11361145
}
11371146
],

backend/compact-connect/lambdas/python/migration/migrate_update_sort_keys/main.py

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -33,9 +33,10 @@ def on_delete(self, _properties: dict) -> CustomResourceResponse | None:
3333
def do_migration(_properties: dict) -> None:
3434
"""
3535
This migration performs the following:
36-
- Scans the provider table for all privilege update records
37-
- For each update record, adds effectiveDate and createDate equal to that update's dateOfUpdate
38-
- Handles batching for cases where there are more than 100 records to update
36+
- Scans the provider table for all update records
37+
- For each update record, load the record and serialize it again,
38+
so the schema classes will generate the new sort key patterns
39+
- Recreate the records by deleting the update records with the old sort key and storing the migrated records.
3940
"""
4041
logger.info('Starting update record sort key migration')
4142

backend/compact-connect/tests/smoke/rollback_license_upload_smoke_tests.py

Lines changed: 14 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -176,11 +176,15 @@ def wait_for_all_providers_created(staff_headers: dict, expected_count: int, max
176176
},
177177
}
178178

179-
last_key = None
180-
page_num = 1
181-
all_provider_ids = []
179+
# Use a set to track unique provider IDs across all retries
180+
unique_provider_ids = set()
181+
182182
while time.time() - start_time < max_wait_time:
183-
# Collect all providers across all pages
183+
# Reset pagination variables at the start of each outer loop iteration
184+
last_key = None
185+
page_num = 1
186+
187+
# Collect all providers across all pages for this iteration
184188
while True:
185189
query_body = base_query_body.copy()
186190
if last_key:
@@ -203,13 +207,13 @@ def wait_for_all_providers_created(staff_headers: dict, expected_count: int, max
203207
providers = response_data.get('providers', [])
204208
pagination = response_data.get('pagination', {})
205209

206-
# Collect provider IDs from this page
210+
# Collect provider IDs from this page and add to set (automatically handles duplicates)
207211
page_provider_ids = [p['providerId'] for p in providers]
208-
all_provider_ids.extend(page_provider_ids)
212+
unique_provider_ids.update(page_provider_ids)
209213

210214
logger.info(
211215
f'Page {page_num}: Found {len(page_provider_ids)} providers '
212-
f'(total: {len(all_provider_ids)}/{expected_count})'
216+
f'(unique total: {len(unique_provider_ids)}/{expected_count})'
213217
)
214218

215219
# Check if there are more pages
@@ -220,14 +224,15 @@ def wait_for_all_providers_created(staff_headers: dict, expected_count: int, max
220224

221225
page_num += 1
222226

223-
num_found = len(all_provider_ids)
227+
num_found = len(unique_provider_ids)
224228
logger.info(
225229
f'Found {num_found}/{expected_count} providers with family name "RollbackTest" (across {page_num} pages)'
226230
)
227231

228232
if num_found >= expected_count:
229233
logger.info(f'All {expected_count} providers found!')
230-
return all_provider_ids # Return only the expected count
234+
# Return a deterministic sorted list, sliced to expected_count
235+
return sorted(list(unique_provider_ids))[:expected_count]
231236

232237
elapsed = time.time() - start_time
233238
if elapsed < max_wait_time:

0 commit comments

Comments
 (0)