schema_manager.py
import ast
import yaml
import logging
import requests
import unicodedata
import concurrent.futures
from flask import Response
from datetime import datetime
# Don't confuse urllib (Python native library) with urllib3 (3rd-party library, requests also uses urllib3)
from requests.packages.urllib3.exceptions import InsecureRequestWarning
# Local modules
from schema import schema_errors
from schema import schema_triggers
from schema import schema_validators
from schema import schema_neo4j_queries
from schema.schema_constants import SchemaConstants
from schema.schema_constants import MetadataScopeEnum
from schema.schema_constants import TriggerTypeEnum
# HuBMAP commons
from hubmap_commons.hm_auth import AuthHelper
logger = logging.getLogger(__name__)
# Suppress InsecureRequestWarning warning when requesting status on https with ssl cert verify disabled
requests.packages.urllib3.disable_warnings(category = InsecureRequestWarning)
# In Python, "privacy" depends on "consenting adults'" levels of agreement, we can't force it.
# A single leading underscore means you're not supposed to access it "from the outside"
_schema = None
_uuid_api_url = None
_ingest_api_url = None
_entity_api_url = None
_ontology_api_url = None
_auth_helper = None
_neo4j_driver = None
_memcached_client = None
_memcached_prefix = None
_organ_types = None
####################################################################################################
## Provenance yaml schema initialization
####################################################################################################
"""
Initialize the schema_manager module by loading the schema yaml file
and setting the module-level neo4j driver instance (some trigger methods query neo4j)
Parameters
----------
valid_yaml_file : file
A valid yaml file
uuid_api_url : str
The uuid-api base URL
ingest_api_url : str
The ingest-api base URL
ontology_api_url : str
The ontology-api base URL
entity_api_url : str
The entity-api base URL
auth_helper_instance : AuthHelper
The auth helper instance
neo4j_driver_instance : neo4j_driver
The Neo4j driver instance
memcached_client_instance : PooledClient
The pooled client from Memcached connection
memcached_prefix : str
The application-specific prefix for Memcached data store
"""
def initialize(valid_yaml_file,
uuid_api_url,
ingest_api_url,
ontology_api_url,
entity_api_url,
auth_helper_instance,
neo4j_driver_instance,
memcached_client_instance,
memcached_prefix):
# Specify as module-scope variables
global _schema
global _uuid_api_url
global _ingest_api_url
global _ontology_api_url
global _entity_api_url
global _auth_helper
global _neo4j_driver
global _memcached_client
global _memcached_prefix
_schema = load_provenance_schema(valid_yaml_file)
if uuid_api_url is not None:
_uuid_api_url = uuid_api_url
else:
msg = f"Unable to initialize schema manager with uuid_api_url={uuid_api_url}."
logger.critical(msg=msg)
raise Exception(msg)
if ingest_api_url is not None:
_ingest_api_url = ingest_api_url
else:
msg = f"Unable to initialize schema manager with ingest_api_url={ingest_api_url}."
logger.critical(msg=msg)
raise Exception(msg)
if ontology_api_url is not None:
_ontology_api_url = ontology_api_url
else:
msg = f"Unable to initialize schema manager with ontology_api_url={ontology_api_url}."
logger.critical(msg=msg)
raise Exception(msg)
if entity_api_url is not None:
_entity_api_url = entity_api_url
else:
msg = f"Unable to initialize schema manager with entity_api_url={entity_api_url}."
logger.critical(msg=msg)
raise Exception(msg)
# Get the helper instances
_auth_helper = auth_helper_instance
_neo4j_driver = neo4j_driver_instance
_memcached_client = memcached_client_instance
_memcached_prefix = memcached_prefix
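# A minimal usage sketch: the application calls initialize() once at startup,
# roughly like the following (the config keys and instance names here are
# illustrative assumptions, not definitions from this module):
#
#   schema_manager.initialize(valid_yaml_file='provenance_schema.yaml',
#                             uuid_api_url=app.config['UUID_API_URL'],
#                             ingest_api_url=app.config['INGEST_API_URL'],
#                             ontology_api_url=app.config['ONTOLOGY_API_URL'],
#                             entity_api_url=app.config['ENTITY_API_URL'],
#                             auth_helper_instance=auth_helper,
#                             neo4j_driver_instance=neo4j_driver,
#                             memcached_client_instance=memcached_client,
#                             memcached_prefix='entity_api')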
####################################################################################################
## Provenance yaml schema loading
####################################################################################################
"""
Load the schema yaml file
Parameters
----------
valid_yaml_file : file
A valid yaml file
Returns
-------
dict
A dict containing the schema details
"""
def load_provenance_schema(valid_yaml_file):
with open(valid_yaml_file) as file:
schema_dict = yaml.safe_load(file)
logger.info(f"Provenance Schema yaml file loaded successfully from {valid_yaml_file} :)")
# For entities with properties set to None/Null, remove them, as these represent private values not inherited by a subclass
for entity in schema_dict['ENTITIES']:
schema_dict['ENTITIES'][entity]['properties'] = remove_none_values(schema_dict['ENTITIES'][entity]['properties'])
return schema_dict
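# For reference, the loader assumes a schema yaml shaped roughly like the
# sketch below (inferred from how this module reads the dict; the property
# and trigger names are placeholders, not the real schema):
#
#   ENTITIES:
#     Dataset:
#       excluded_properties_from_public_response:
#         - some_private_prop
#       properties:
#         some_prop:
#           on_read_trigger: some_trigger_method
#     Publication:
#       superclass: Dataset
#       properties:
#         ...
#   ACTIVITIES:
#     Activity:
#       properties:
#         ...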
####################################################################################################
## Helper functions
####################################################################################################
"""
Get a list of all the supported types in the schema yaml
Returns
-------
list
A list of types
"""
def get_all_types():
global _schema
entity_types = _schema['ENTITIES'].keys()
activity_types = _schema['ACTIVITIES'].keys()
    # Need to convert the dict_keys objects to lists
return list(entity_types) + list(activity_types)
"""
Get a list of all the supported entity types in the schema yaml
Returns
-------
list
A list of entity types
"""
def get_all_entity_types():
global _schema
dict_keys = _schema['ENTITIES'].keys()
    # Need to convert the dict_keys object to a list
return list(dict_keys)
"""
Get the optional superclass (if defined) of the given entity class
Parameters
----------
normalized_entity_class : str
The normalized target entity class
Returns
-------
string or None
One of the normalized entity classes if defined (currently only Publication has Dataset as superclass). None otherwise
"""
def get_entity_superclass(normalized_entity_class):
normalized_superclass = None
all_entity_types = get_all_entity_types()
if normalized_entity_class not in all_entity_types:
msg = f"Unrecognized value of 'normalized_entity_class': {normalized_entity_class}"
logger.error(msg)
raise ValueError(msg)
if 'superclass' in _schema['ENTITIES'][normalized_entity_class]:
normalized_superclass = normalize_entity_type(_schema['ENTITIES'][normalized_entity_class]['superclass'])
# Additional check to ensure no schema yaml mistake
if normalized_superclass not in all_entity_types:
msg = f"Invalid 'superclass' value defined for {normalized_entity_class}: {normalized_superclass}"
logger.error(msg)
raise ValueError(msg)
return normalized_superclass
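# For example, given the Publication -> Dataset relationship mentioned in the
# docstring above:
#
#   get_entity_superclass('Publication')  # -> 'Dataset'
#   get_entity_superclass('Dataset')      # -> None (no superclass defined)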
"""
Get the subclasses (if any) of the given entity class
Parameters
----------
normalized_entity_class : str
The normalized target entity class
Returns
-------
list
A list of the normalized entity classes that declare the given class as their superclass; empty if none
"""
def get_entity_subclasses(normalized_entity_class):
subclasses = []
all_entity_types = get_all_entity_types()
if normalized_entity_class not in all_entity_types:
raise ValueError(f"Unrecognized entity class: {normalized_entity_class}")
for name, data in _schema["ENTITIES"].items():
superclass = data.get("superclass")
if superclass and normalize_entity_type(superclass) == normalized_entity_class:
subclasses.append(normalize_entity_type(name))
return subclasses
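# This is the inverse lookup of get_entity_superclass(). With only the
# Publication -> Dataset relationship defined:
#
#   get_entity_subclasses('Dataset')      # -> ['Publication']
#   get_entity_subclasses('Publication')  # -> []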
"""
Determine if the Entity type with 'entity_type' is an instance of 'entity_class'.
Use this function if you already have the Entity type. Use `entity_instanceof(uuid, class)`
if you just have the Entity uuid.
Parameters
----------
entity_type : str
The entity type of the given entity
entity_class : str
The candidate (super)class to test against
Returns
-------
bool
"""
def entity_type_instanceof(entity_type: str, entity_class: str) -> bool:
    if entity_type is None:
        return False
    normalized_entity_class: str = normalize_entity_type(entity_class)
    super_entity_type: str = normalize_entity_type(entity_type)
    # Walk up the superclass chain until a match is found or the chain ends
    while super_entity_type is not None:
        if normalized_entity_class == super_entity_type:
            return True
        super_entity_type = get_entity_superclass(super_entity_type)
    return False
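# Since the check walks the superclass chain, e.g.:
#
#   entity_type_instanceof('Publication', 'Dataset')  # -> True
#   entity_type_instanceof('Dataset', 'Publication')  # -> False
#   entity_type_instanceof('Dataset', 'Dataset')      # -> True (a class is an instance of itself)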
"""
Determine if the Entity with 'entity_uuid' is an instance of 'entity_class'.
Parameters
----------
entity_uuid : str
The uuid of the given entity
entity_class : str
The superclass
Returns
-------
bool
"""
def entity_instanceof(entity_uuid: str, entity_class: str) -> bool:
entity_type: str =\
schema_neo4j_queries.get_entity_type(get_neo4j_driver_instance(), entity_uuid.strip())
return entity_type_instanceof(entity_type, entity_class)
"""
Retrieves fields designated in the provenance schema yaml under
excluded_properties_from_public_response and returns the fields in a list
Parameters
----------
normalized_class : str
the normalized entity type of the entity whose fields are to be removed
Returns
-------
list
A list of strings where each entry is a field to be excluded
"""
def get_fields_to_exclude(normalized_class=None):
# Determine the schema section based on class
excluded_fields = []
schema_section = _schema['ENTITIES']
exclude_list = schema_section[normalized_class].get('excluded_properties_from_public_response')
if exclude_list:
excluded_fields.extend(exclude_list)
return excluded_fields
"""
Removes specified fields from an existing dictionary
Parameters
----------
excluded_fields : list
A list of the fields to be excluded; entries may be plain strings or dicts describing nested fields
output_dict : dictionary
A dictionary representing the data to be modified
Returns
-------
dict
The modified data with removed fields
"""
def exclude_properties_from_response(excluded_fields, output_dict):
def delete_nested_field(data, nested_path):
if isinstance(nested_path, dict):
for key, value in nested_path.items():
if key in data:
if isinstance(value, list):
for nested_field in value:
if isinstance(nested_field, dict):
if isinstance(data[key], list):
for item in data[key]:
delete_nested_field(item, nested_field)
else:
delete_nested_field(data[key], nested_field)
elif isinstance(data[key], list):
for item in data[key]:
if nested_field in item:
del item[nested_field]
elif isinstance(data[key], dict) and nested_field in data[key]:
del data[key][nested_field]
elif isinstance(value, dict):
delete_nested_field(data[key], value)
elif nested_path in data:
if isinstance(data[nested_path], list):
for item in data[nested_path]:
if nested_path in item:
del item[nested_path]
else:
del data[nested_path]
for field in excluded_fields:
delete_nested_field(output_dict, field)
return output_dict
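# A small illustration with hypothetical data:
#
#   excluded_fields = ['status', {'direct_ancestors': ['files']}]
#   output_dict = {'status': 'New',
#                  'uuid': '...',
#                  'direct_ancestors': [{'uuid': '...', 'files': ['...']}]}
#
# After exclude_properties_from_response(excluded_fields, output_dict), the
# top-level 'status' key is removed and every item under 'direct_ancestors'
# loses its 'files' key.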
"""
Use the Flask request.args MultiDict to see if 'exclude' is a URL parameter passed in with the
request and parse the comma-separated properties to be excluded from final response
For now, only support one dot for nested fields (depth 2)
Parameters
----------
request_args: ImmutableMultiDict
The Flask request.args passed in from application request
Returns
-------
list
A flat list of strings containing top-level and/or nested dot-notated properties
Example: ['a.b', 'a.c', 'x']
"""
def get_excluded_query_props(request_args):
all_props_to_exclude = []
if 'exclude' in request_args:
# The query string values are case-sensitive as the property keys in schema yaml are case-sensitive
props_to_exclude_str = request_args.get('exclude')
if not validate_comma_separated_exclude_str(props_to_exclude_str):
raise ValueError(
"The 'exclude' query parameter must be a comma-separated list of properties that follow these rules: "
"[1] Each property must include at least one letter; "
"[2] Only lowercase letters and underscores '_' are allowed; "
"[3] Nested property is limited to 2 depths and must use single dot '.' for dot-notation (like 'a.b')."
)
all_props_to_exclude = [item.strip() for item in props_to_exclude_str.split(",")]
logger.info(f"User specified properties to exclude in request URL: {all_props_to_exclude}")
# More validation - ensure prohibited properties are not accepted
    # These two properties are required internally by `normalize_entity_result_for_response()`
    prohibited_properties = ['uuid', 'entity_type']
for item in all_props_to_exclude:
if item in prohibited_properties or ('.' in item and item.split('.')[1] in prohibited_properties):
raise ValueError(f"Entity property '{item}' is not allowed in the 'exclude' query parameter.")
return all_props_to_exclude
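# For example, a request like GET /entities/<id>?exclude=direct_ancestors.files,status
# yields ['direct_ancestors.files', 'status'], while ?exclude=uuid (or any
# nested '.uuid'/'.entity_type') raises a ValueError.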
"""
The 'exclude' query parameter must be a comma-separated list of properties that follow these rules:
[1] Each property must include at least one letter;
[2] Only lowercase letters and underscores '_' are allowed;
[3] Nested property is limited to 2 depths and must use single dot '.' for dot-notation (like 'a.b').
Parameters
----------
s : str
Comma-separated input string used to exclude entity properties
Returns
-------
bool
True if valid or False otherwise
"""
def validate_comma_separated_exclude_str(s: str):
# No empty string
if not s:
return False
# Split by commas
items = s.split(',')
# No empty items allowed (prevents ',,' or trailing comma)
if any(not item.strip() for item in items):
return False
def is_valid_item(item: str):
return (
all(c.islower() or c in '._' for c in item)
and any(c.isalpha() for c in item)
and item.count('.') <= 1
and not ((item.startswith('.') or item.endswith('.')))
)
return all(is_valid_item(item.strip()) for item in items)
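# A few concrete cases:
#
#   validate_comma_separated_exclude_str('a.b, a.c, x')  # True
#   validate_comma_separated_exclude_str('A.b')          # False - uppercase letter
#   validate_comma_separated_exclude_str('a..b')         # False - more than one dot
#   validate_comma_separated_exclude_str('a,')           # False - empty item after comma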
"""
Transform a flat list of dot-notated strings into a hybrid list that:
- keeps plain strings as-is
- converts entries with dot-notation (like 'direct_ancestors.files') into a dictionary, grouping by the prefix
Example: ['a.b', 'a.c', 'x'] -> ['x', {'a': ['b', 'c']}]
Used by `GET /entities/<id>?exclude=a.b, a.c, x` to build a JSON list
that can be further processed by `exclude_properties_from_response()`.
Parameters
----------
flat_list : list
A flat list of strings, dot-notated strings are optional and can be used to indicate nested fields
Example: ['a.b', 'a.c', 'x']
Returns
-------
list
A list mixing strings and grouped dicts, like ['x', {'a': ['b', 'c']}]
"""
def group_dot_notation_props(flat_list):
output_list = []
grouped_dict = {}
for item in flat_list:
# For now, only support one dot for nested fields (depth 2)
if '.' in item:
prefix, field = item.split('.', 1)
grouped_dict.setdefault(prefix, []).append(field)
else:
output_list.append(item)
# Add grouped items as dictionaries
for prefix, fields in grouped_dict.items():
output_list.append({prefix: fields})
return output_list
"""
Group properties by exclusion type
Example: ['a.b', 'a.c', 'x', 'y'] where
- x and y are top-level properties
- x is Neo4j node property, and y is generated via trigger method
- a.b and a.c are nested properties while a is a top-level property of either type
Parameters
----------
normalized_entity_type : str
One of the normalized entity types: Dataset, Collection, Sample, Donor, Upload, Publication
flat_list : list
A flat list of strings, dot-notated strings are optional and can be used to indicate nested fields
Example: ['a.b', 'a.c', 'x']
Returns
-------
tuple
Three lists: triggered top-level properties to skip, Neo4j top-level properties to skip, and Neo4j nested properties to skip
Example for Dataset:
- triggered_top_props_to_skip: ['direct_ancestors.files', 'direct_ancestors.ingest_metadata', 'upload.title']
- neo4j_top_props_to_skip: ['data_access_level']
- neo4j_nested_props_to_skip: ['status_history.status']
"""
def get_exclusion_types(normalized_entity_type, flat_list):
global _schema
triggered_top_props_to_skip = []
neo4j_top_props_to_skip = []
    neo4j_nested_props_to_skip = []
top_level_list = []
second_level_list = []
properties = _schema['ENTITIES'][normalized_entity_type]['properties']
# First find the top-level properties without using dot-notation
for item in flat_list:
if '.' not in item:
top_level_list.append(item)
else:
second_level_list.append(item)
# Only care about the properties defined in schema yaml
for item in top_level_list:
if item in properties:
if TriggerTypeEnum.ON_READ in properties[item]:
triggered_top_props_to_skip.append(item)
else:
neo4j_top_props_to_skip.append(item)
# Nested second-level properties, such as `direct_ancestors.files`, belong to `triggered_top_props_to_skip`
# `ingest_metadata.dag_provenance_list` belongs to `neo4j_nested_props_to_skip`
for item in second_level_list:
prefix = item.split('.')[0]
if prefix in properties:
if TriggerTypeEnum.ON_READ in properties[prefix]:
triggered_top_props_to_skip.append(item)
else:
neo4j_nested_props_to_skip.append(item)
logger.info(f"Determined property exclusion type - triggered_top_props_to_skip: {triggered_top_props_to_skip}")
logger.info(f"Determined property exclusion type - neo4j_top_props_to_skip: {neo4j_top_props_to_skip}")
logger.info(f"Determined property exclusion type - neo4j_nested_props_to_skip: {neo4j_nested_props_to_skip}")
# NOTE: Will need to convert the `neo4j_nested_props_to_skip` to a format that can be used by
# `exclude_properties_from_response()` - Zhou 10/1/2025
return triggered_top_props_to_skip, neo4j_top_props_to_skip, neo4j_nested_props_to_skip
"""
Generate triggered data based on the target events and methods
Parameters
----------
trigger_type : TriggerTypeEnum
One of the trigger types: before_create_trigger, after_create_trigger, before_update_trigger, after_update_trigger, on_read_trigger
normalized_class : str
One of the types defined in the schema yaml: Activity, Collection, Donor, Sample, Dataset
request_args: ImmutableMultiDict
The Flask request.args passed in from application request
user_token: str
The user's globus nexus token, 'on_read_trigger' doesn't really need this
existing_data_dict : dict
A dictionary that contains existing entity data
new_data_dict : dict
A dictionary that contains incoming entity data
properties_to_skip : list
Any properties to skip running triggers
Returns
-------
dict
A dictionary of trigger event methods generated data
"""
def generate_triggered_data(trigger_type: TriggerTypeEnum, normalized_class, request_args, user_token, existing_data_dict
, new_data_dict, properties_to_skip = []):
global _schema
schema_section = None
    # A bit of validation
validate_trigger_type(trigger_type)
# Use validate_normalized_class instead of validate_normalized_entity_type()
# to allow "Activity"
validate_normalized_class(normalized_class)
# Determine the schema section based on class
if normalized_class == 'Activity':
schema_section = _schema['ACTIVITIES']
else:
schema_section = _schema['ENTITIES']
# The ordering of properties of this entity class defined in the yaml schema
# decides the ordering of which trigger method gets to run first
properties = schema_section[normalized_class]['properties']
logger.info(f"Skipping triggered data generation for the following properties: {properties_to_skip}")
# Set each property value and put all resulting data into a dictionary for:
# before_create_trigger|before_update_trigger|on_read_trigger
# No property value to be set for: after_create_trigger|after_update_trigger
trigger_generated_data_dict = {}
for key in properties:
# Among those properties that have the target trigger type,
# we can skip the ones specified in the `properties_to_skip` by not running their triggers
if (trigger_type.value in properties[key]) and (key not in properties_to_skip):
# 'after_create_trigger' and 'after_update_trigger' don't generate property values
# E.g., create relationships between nodes in neo4j
# So just return the empty trigger_generated_data_dict
if trigger_type in [TriggerTypeEnum.AFTER_CREATE, TriggerTypeEnum.AFTER_UPDATE]:
                # Only call the triggers if the property key is present in the incoming data
# E.g., 'direct_ancestor_uuid' for Sample, 'dataset_uuids' for Collection
if key in new_data_dict:
trigger_method_name = properties[key][trigger_type.value]
try:
# Get the target trigger method defined in the schema_triggers.py module
trigger_method_to_call = getattr(schema_triggers, trigger_method_name)
                        target_uuid = existing_data_dict['uuid'] if existing_data_dict and 'uuid' in existing_data_dict else ''
logger.info(f"To run {trigger_type.value}: {trigger_method_name} for {normalized_class} {target_uuid}")
# No return values for 'after_create_trigger' and 'after_update_trigger'
# because the property value is already set and stored in neo4j
# Normally it's building linkages between entity nodes
trigger_method_to_call(key, normalized_class, request_args, user_token, existing_data_dict, new_data_dict)
except Exception:
msg = f"Failed to call the {trigger_type.value} method: {trigger_method_name}"
# Log the full stack trace, prepend a line with our message
logger.exception(msg)
if trigger_type == TriggerTypeEnum.AFTER_CREATE:
raise schema_errors.AfterCreateTriggerException
                        elif trigger_type == TriggerTypeEnum.AFTER_UPDATE:
raise schema_errors.AfterUpdateTriggerException
            elif trigger_type == TriggerTypeEnum.BEFORE_UPDATE:
# IMPORTANT! Call the triggers for the properties:
# Case 1: specified in request JSON to be updated explicitly
# Case 2: defined as `auto_update: true` in the schema yaml, meaning will always be updated if the entity gets updated
if (key in new_data_dict) or (('auto_update' in properties[key]) and properties[key]['auto_update']):
trigger_method_name = properties[key][trigger_type.value]
try:
trigger_method_to_call = getattr(schema_triggers, trigger_method_name)
                        target_uuid = existing_data_dict['uuid'] if existing_data_dict and 'uuid' in existing_data_dict else ''
logger.info(f"To run {trigger_type.value}: {trigger_method_name} for {normalized_class} {target_uuid}")
# Will set the trigger return value as the property value by default
                        # Unless the return value is to be assigned to a different target property key.
                        # The updated_peripherally tag is a temporary measure to correctly handle any
                        # attributes which are potentially updated by multiple triggers. We keep the
                        # state of the attribute(s) directly in the trigger_generated_data_dict
                        # dictionary, which is used to track and save all changes from triggers in
                        # general. The trigger methods for the 'updated_peripherally' attributes take
                        # an extra argument, the trigger_generated_data_dict, and must initialize this
                        # dictionary with the value for the attribute from the existing_data_dict, make
                        # any updates to this attribute within this dictionary, and return it so it can
                        # be saved in the scope of this loop and passed to other 'updated_peripherally'
                        # triggers
if 'updated_peripherally' in properties[key] and properties[key]['updated_peripherally']:
trigger_generated_data_dict = trigger_method_to_call(key, normalized_class, request_args, user_token, existing_data_dict, new_data_dict, trigger_generated_data_dict)
else:
target_key, target_value = trigger_method_to_call(key, normalized_class, request_args, user_token, existing_data_dict, new_data_dict)
trigger_generated_data_dict[target_key] = target_value
# Meanwhile, set the original property as None if target_key is different
# This is especially important when the returned target_key is different from the original key
# Because we'll be merging this trigger_generated_data_dict with the original user input
# and this will overwrite the original key so it doesn't get stored in Neo4j
if key != target_key:
trigger_generated_data_dict[key] = None
# If something wrong with file upload
except schema_errors.FileUploadException as e:
msg = f"Failed to call the {trigger_type.value} method: {trigger_method_name}"
# Log the full stack trace, prepend a line with our message
logger.exception(msg)
raise schema_errors.FileUploadException(e)
except Exception as e:
msg = f"Failed to call the {trigger_type.value} method: {trigger_method_name}"
# Log the full stack trace, prepend a line with our message
logger.exception(msg)
# We can't create/update the entity
# without successfully executing this trigger method
raise schema_errors.BeforeUpdateTriggerException
else:
# Handling of all other trigger types: before_create_trigger|on_read_trigger
trigger_method_name = properties[key][trigger_type.value]
try:
trigger_method_to_call = getattr(schema_triggers, trigger_method_name)
                    target_uuid = existing_data_dict['uuid'] if existing_data_dict and 'uuid' in existing_data_dict else ''
logger.info(f"To run {trigger_type.value}: {trigger_method_name} for {normalized_class} {target_uuid}")
# Will set the trigger return value as the property value by default
                    # Unless the return value is to be assigned to a different target property key
# the updated_peripherally tag is a temporary measure to correctly handle any attributes
# which are potentially updated by multiple triggers
# we keep the state of the attribute(s) directly in the trigger_generated_data_dict
# dictionary, which is used to track and save all changes from triggers in general
# the trigger methods for the 'updated_peripherally' attributes take an extra argument,
# the trigger_generated_data_dict, and must initialize this dictionary with the value for
# the attribute from the existing_data_dict as well as make any updates to this attribute
# within this dictionary and return it so it can be saved in the scope of this loop and
# passed to other 'updated_peripherally' triggers
if 'updated_peripherally' in properties[key] and properties[key]['updated_peripherally']:
# Such trigger methods get executed but really do nothing internally
trigger_generated_data_dict = trigger_method_to_call(key, normalized_class, request_args, user_token, existing_data_dict, new_data_dict, trigger_generated_data_dict)
else:
target_key, target_value = trigger_method_to_call(key, normalized_class, request_args, user_token, existing_data_dict, new_data_dict)
trigger_generated_data_dict[target_key] = target_value
# Meanwhile, set the original property as None if target_key is different
# This is especially important when the returned target_key is different from the original key
# Because we'll be merging this trigger_generated_data_dict with the original user input
# and this will overwrite the original key so it doesn't get stored in Neo4j
if key != target_key:
trigger_generated_data_dict[key] = None
except schema_errors.NoDataProviderGroupException as e:
msg = f"Failed to call the {trigger_type.value} method: {trigger_method_name}"
# Log the full stack trace, prepend a line with our message
logger.exception(msg)
raise schema_errors.NoDataProviderGroupException
except schema_errors.MultipleDataProviderGroupException as e:
msg = f"Failed to call the {trigger_type.value} method: {trigger_method_name}"
# Log the full stack trace, prepend a line with our message
logger.exception(msg)
raise schema_errors.MultipleDataProviderGroupException
except schema_errors.UnmatchedDataProviderGroupException as e:
msg = f"Failed to call the {trigger_type.value} method: {trigger_method_name}"
# Log the full stack trace, prepend a line with our message
logger.exception(msg)
raise schema_errors.UnmatchedDataProviderGroupException
# If something wrong with file upload
except schema_errors.FileUploadException as e:
msg = f"Failed to call the {trigger_type.value} method: {trigger_method_name}"
# Log the full stack trace, prepend a line with our message
logger.exception(msg)
raise schema_errors.FileUploadException(e)
except Exception as e:
msg = f"Failed to call the {trigger_type.value} method: {trigger_method_name}"
# Log the full stack trace, prepend a line with our message
logger.exception(msg)
if trigger_type == TriggerTypeEnum.BEFORE_CREATE:
# We can't create/update the entity
# without successfully executing this trigger method
raise schema_errors.BeforeCreateTriggerException
else:
# Assign the error message as the value of this property
# No need to raise exception
trigger_generated_data_dict[key] = msg
# Return after for loop
return trigger_generated_data_dict
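# For reference, the dispatch above assumes the trigger methods defined in
# schema_triggers follow one of these shapes (a sketch based on how they are
# called here, not on their actual definitions):
#
#   def some_trigger(property_key, normalized_class, request_args, user_token,
#                    existing_data_dict, new_data_dict):
#       # before_create_trigger/before_update_trigger/on_read_trigger methods
#       # return a (target_key, target_value) tuple
#       return target_key, target_value
#
# 'after_create_trigger'/'after_update_trigger' methods return nothing (they
# typically build neo4j linkages), and 'updated_peripherally' methods take
# trigger_generated_data_dict as an extra argument and return it.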
"""
Filter out the merged dict by getting rid of properties with None values
This method is used by get_complete_entity_result() for the 'on_read_trigger'
Parameters
----------
merged_dict : dict
A merged dict that may contain properties with None values
Returns
-------
dict
A filtered dict that removed all properties with None values
"""
def remove_none_values(merged_dict):
filtered_dict = {}
for k, v in merged_dict.items():
# Only keep the properties whose value is not None
if v is not None:
filtered_dict[k] = v
return filtered_dict
"""
Filter out the merged_dict by getting rid of the transient properties (not to be stored)
and properties with None value
Transient means the returned target property key is different from the original key
in the trigger method, e.g., Donor.image_files_to_add
Parameters
----------
merged_dict : dict
A merged dict that may contain properties with None values
normalized_entity_type : str
One of the normalized entity types: Dataset, Collection, Sample, Donor, Upload, Publication
Returns
-------
dict
A filtered dict that removed all transient properties and the ones with None values
"""
def remove_transient_and_none_values(merged_dict, normalized_entity_type):
global _schema
properties = _schema['ENTITIES'][normalized_entity_type]['properties']
filtered_dict = {}
for k, v in merged_dict.items():
        # Only keep the properties that are not flagged `transient: true`
        # and whose value is not None
        if (not properties[k].get('transient', False)) and (v is not None):
filtered_dict[k] = v
return filtered_dict
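# Tying this to the Donor.image_files_to_add example above: the trigger stores
# its result under a different target key, generate_triggered_data() then sets
# the original 'image_files_to_add' key to None, and this filter drops it so
# the transient key never reaches Neo4j.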
"""
Generate the complete entity record by running the read triggers
Parameters
----------
request_args: ImmutableMultiDict
The Flask request.args passed in from application request
token: str
Either the user's globus nexus token or the internal token
entity_dict : dict
The entity dict based on neo4j record
properties_to_skip : list
Any properties to skip running triggers
Returns
-------
dict
A dictionary of complete entity with all the generated 'on_read_trigger' data
"""
def get_complete_entity_result(request_args, token, entity_dict, properties_to_skip = []):
global _memcached_client
global _memcached_prefix
complete_entity = {}
# In case entity_dict is None or
# an incorrectly created entity that doesn't have the `entity_type` property
if entity_dict and ('entity_type' in entity_dict) and ('uuid' in entity_dict):
entity_uuid = entity_dict['uuid']
entity_type = entity_dict['entity_type']
cache_result = None
# Need both client and prefix when fetching the cache
# Do NOT fetch cache if properties_to_skip is specified
if _memcached_client and _memcached_prefix and (not properties_to_skip):
cache_key = f'{_memcached_prefix}_complete_{entity_uuid}'
cache_result = _memcached_client.get(cache_key)
        # Whenever `properties_to_skip` is specified or `?exclude` is used as a query parameter,
        # do not return the cached data and do not create a new cache entry either - Zhou 10/10/2025
if properties_to_skip or get_excluded_query_props(request_args):
logger.info(f'Skipped/excluded properties specified for {entity_type} {entity_uuid}. Always generate the {TriggerTypeEnum.ON_READ} data and do not cache the result.')
# No error handling here since if a 'on_read_trigger' method fails,
# the property value will be the error message
# Pass {} since no new_data_dict for 'on_read_trigger'
generated_on_read_trigger_data_dict = generate_triggered_data( trigger_type=TriggerTypeEnum.ON_READ
, normalized_class=entity_type
, request_args=request_args
, user_token=token
, existing_data_dict=entity_dict
, new_data_dict={}
, properties_to_skip=properties_to_skip)
# Merge the entity info and the generated on read data into one dictionary
complete_entity_dict = {**entity_dict, **generated_on_read_trigger_data_dict}
# Remove properties of None value
complete_entity = remove_none_values(complete_entity_dict)
else:
logger.info(f'Skipped/excluded properties NOT specified for {entity_type} {entity_uuid}.')
# Re-generate the triggered data and add to memcache
# Otherwise, use the cached data if found and still valid
if cache_result is None:
if _memcached_client and _memcached_prefix:
logger.info(f'Cache of complete entity of {entity_type} {entity_uuid} not found or expired at time {datetime.now()}')
# No error handling here since if a 'on_read_trigger' method fails,
# the property value will be the error message
# Pass {} since no new_data_dict for 'on_read_trigger'
generated_on_read_trigger_data_dict = generate_triggered_data( trigger_type=TriggerTypeEnum.ON_READ
, normalized_class=entity_type
, request_args=request_args
, user_token=token
, existing_data_dict=entity_dict
, new_data_dict={}
, properties_to_skip=properties_to_skip)
# Merge the entity info and the generated on read data into one dictionary
complete_entity_dict = {**entity_dict, **generated_on_read_trigger_data_dict}
# Remove properties of None value
complete_entity = remove_none_values(complete_entity_dict)
# Need both client and prefix when creating the cache
# Do NOT cache when properties_to_skip is specified
if _memcached_client and _memcached_prefix and (not properties_to_skip):
logger.info(f'Creating complete entity cache of {entity_type} {entity_uuid} at time {datetime.now()}')
cache_key = f'{_memcached_prefix}_complete_{entity_uuid}'
_memcached_client.set(cache_key, complete_entity, expire = SchemaConstants.MEMCACHED_TTL)
logger.debug(f"Following is the complete {entity_type} cache created at time {datetime.now()} using key {cache_key}:")
logger.debug(complete_entity)
else:
logger.info(f'Using complete entity cache of {entity_type} {entity_uuid} at time {datetime.now()}')
logger.debug(cache_result)
complete_entity = cache_result
else:
# Just return the original entity_dict otherwise
complete_entity = entity_dict
# One final return
return complete_entity
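# To summarize the caching rules above: the complete-entity cache (key
# '<memcached_prefix>_complete_<uuid>') is read and written only when neither
# properties_to_skip nor ?exclude properties are in play; otherwise the
# on_read triggers always run and the result is never cached.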
"""
Generate the entity metadata by reading Neo4j data and only running triggers for data which will go into an
OpenSearch document. Any data from Neo4j which will not go into the OSS document must also be removed, e.g.
local_directory_rel_path.
Parameters
----------
request: Flask request object
The instance of Flask request passed in from application request
token: str
Either the user's globus nexus token or the internal token
entity_dict : dict
The entity dict based on neo4j record
properties_to_skip : list
Any properties to skip running triggers
Returns
-------
dict
A dictionary of metadata to be included in an OpenSearch index document for the entity.
"""
def get_index_metadata(request, token, entity_dict, properties_to_skip=[]):
metadata_dict = _get_metadata_result(request=request
, token=token
, entity_dict=entity_dict
, metadata_scope=MetadataScopeEnum.INDEX
, properties_to_skip=properties_to_skip)
return metadata_dict
"""
Generate the complete entity records and filter the results for the response
Parameters
----------
request_args: ImmutableMultiDict
The Flask request.args passed in from application request
token: str
Either the user's globus nexus token or the internal token
entities_list : list
A list of entity dictionaries
properties_to_skip : list
Any properties to skip running triggers
Returns
-------
list
A list of complete entity dictionaries with all the normalized information
"""
def get_complete_entities_list(request_args, token, entities_list, properties_to_skip = []):