-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathInfoSWMM_Import_with_Cleanup_Exchange.rb
More file actions
1255 lines (1041 loc) · 46.1 KB
/
InfoSWMM_Import_with_Cleanup_Exchange.rb
File metadata and controls
1255 lines (1041 loc) · 46.1 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
# ============================================================================
# InfoSWMM Multi-Scenario Import WITH CLEANUP - EXCHANGE SCRIPT (Version 3)
# ============================================================================
#
# WHAT THIS SCRIPT DOES:
# Phase 1: Import each scenario to separate model groups
# + Clean up empty label lists after each import
#
# Phase 1.5: Analyze and deduplicate Rainfall Events across all scenarios
# (Note: Time Patterns and Climatology don't support deduplication)
#
# Phase 2: Create merged network with all scenarios combined
# + Copy only unique objects (deduplicated)
# + Add all scenarios to merged network
# + Delete inactive elements from each scenario
#
# NEW IN VERSION 3:
# - Automatically deletes empty label lists (created by InfoSWMM import)
# - Deduplicates objects by content (not name) before merge
# - Results in cleaner, more efficient merged model groups
#
# RUNS AUTOMATICALLY:
# Launched by InfoSWMM_Import_with_Cleanup_UI.rb (UI script)
# Reads configuration from YAML file
#
# ============================================================================
require 'yaml'
# ----------------------------------------------------------------------------
# Helper method for logging
# ----------------------------------------------------------------------------
def log(message, log_file = nil)
  # Always echo to the console; mirror into the log file when one is given.
  $stdout.puts(message)
  log_file&.puts(message)
end
# ----------------------------------------------------------------------------
# Helper method to check if a label list is empty
# ----------------------------------------------------------------------------
# NOTE: InfoSWMM imports always create empty label lists as artifacts
# This method checks the 'labels' field (blob) for content
# If it's nil or empty, the label list is considered empty
# ----------------------------------------------------------------------------
def is_label_list_empty?(label_list, log_file = nil)
  # The 'labels' blob holds the list's content; no blob means no labels.
  blob = label_list['labels']
  blob.nil? || blob.empty?
rescue => e
  log " WARNING: Error checking label list: #{e.message}", log_file
  # On error, assume it is NOT empty (safer to keep the list).
  false
end
# ----------------------------------------------------------------------------
# Read configuration
# ----------------------------------------------------------------------------
# Config file is saved in the log folder next to the model file.
# The UI script passes the location via the ICM_IMPORT_CONFIG environment
# variable; if that is missing we fall back to searching nearby folders.
config_file = ENV['ICM_IMPORT_CONFIG']
# Fallback: search for recent config files if not passed.
# Looks up to two directory levels above this script for any
# "ICM Import Log Files/import_config.yaml" and picks the newest by mtime.
unless config_file && File.exist?(config_file)
  script_dir = File.dirname(__FILE__)
  parent_dir = File.dirname(script_dir)
  grandparent_dir = File.dirname(parent_dir)
  search_paths = []
  [script_dir, parent_dir, grandparent_dir].each do |dir|
    Dir.glob(File.join(dir, "**", "ICM Import Log Files", "import_config.yaml")).each do |path|
      search_paths << path
    end
  end
  if search_paths.any?
    # Most recently modified config wins (assumed to be the latest UI run).
    config_file = search_paths.max_by { |f| File.mtime(f) }
  end
end
# Hard stop if no config could be located — the UI script must run first.
unless config_file && File.exist?(config_file)
  puts "ERROR: Configuration file not found"
  puts "Please run InfoSWMM_Import_with_Cleanup_UI.rb first to generate the config file."
  exit 1
end
config = YAML.load_file(config_file)
# Validate required configuration keys before touching any of them.
required_keys = ['file_path', 'scenarios', 'merge_scenarios', 'cleanup_empty_label_lists']
missing = required_keys - config.keys
if missing.any?
  puts "ERROR: Configuration missing required keys: #{missing.join(', ')}"
  puts "Please run the UI script again to regenerate the configuration."
  exit 1
end
# file_path: source InfoSWMM model (.mxd)
# scenario_input: comma-separated scenario names (parsed later)
# merge_scenarios: whether to run Phase 2 (boolean from YAML)
# cleanup_empty_label_lists: whether to delete empty label lists after import
file_path = config['file_path']
scenario_input = config['scenarios']
merge_scenarios = config['merge_scenarios']
cleanup_empty_label_lists = config['cleanup_empty_label_lists']
# Validate model file still exists (it may have moved since the UI ran).
unless File.exist?(file_path)
  puts "ERROR: InfoSWMM model file not found: #{file_path}"
  puts "The file may have been moved or deleted since the UI script ran."
  exit 1
end
# Validate file extension — only InfoSWMM .mxd projects are supported.
unless File.extname(file_path).downcase == ".mxd"
  puts "ERROR: File must be an InfoSWMM .mxd file"
  puts "Selected file: #{file_path}"
  exit 1
end
# Console banner summarising what is about to run.
puts "\n" + "="*70
puts " InfoSWMM Multi-Scenario Import with Cleanup (V3)"
puts "="*70
puts "\nModel: #{File.basename(file_path)}"
puts "Scenarios: #{scenario_input}"
puts "\n" + "="*70
# ----------------------------------------------------------------------------
# Open database
# ----------------------------------------------------------------------------
# WSApplication.open — presumably opens the current ICM database; this script
# must be run inside the ICM Exchange/UI Ruby environment.
begin
  db = WSApplication.open
rescue => e
  puts "Error opening database: #{e.message}"
  exit 1
end
if db.nil?
  puts "Failed to open the database."
  exit 1
end
# File validation already done above after config loading
# Additional validation handled earlier in the script
# ----------------------------------------------------------------------------
# Parse scenarios
# ----------------------------------------------------------------------------
# Comma-separated list → array of trimmed, non-empty scenario names.
scenarios = scenario_input.split(',').map(&:strip).reject(&:empty?)
if scenarios.empty?
  puts "ERROR: No valid scenario names provided."
  exit 1
end
# ----------------------------------------------------------------------------
# Setup logging
# ----------------------------------------------------------------------------
# Log folder sits next to the source model file; one timestamped log per run.
log_dir = File.join(File.dirname(file_path), "ICM Import Log Files")
Dir.mkdir(log_dir) unless Dir.exist?(log_dir)
log_filename = File.join(log_dir, "Import_Cleanup_#{Time.now.strftime('%Y%m%d_%H%M%S')}.log")
log_file = File.open(log_filename, 'w')
log "\n" + "="*70, log_file
log "InfoSWMM Multi-Scenario Import with Cleanup (V3) - #{Time.now}", log_file
log "="*70, log_file
log "Database GUID: #{db.guid}", log_file
log "Source File: #{file_path}", log_file
log "Scenarios to import: #{scenarios.join(', ')}", log_file
log "Cleanup empty label lists: #{cleanup_empty_label_lists}", log_file
log "="*70 + "\n", log_file
# ============================================================================
# PHASE 1: Import each scenario to separate model groups
# ============================================================================
puts "+" + "="*68 + "+"
puts "|" + " "*27 + "PHASE 1" + " "*35 + "|"
puts "|" + " "*17 + "Import Individual Scenarios" + " "*24 + "|"
puts "+" + "="*68 + "+"
puts ""
log "\n" + "="*70, log_file
log "PHASE 1: Individual Scenario Import", log_file
log "="*70, log_file
# Accumulators shared by Phase 1 and consumed again in Phases 1.5 and 2.
successful_imports = []
failed_imports = []
imported_model_groups = {} # Track model group IDs for phase 2
# Running totals across ALL scenarios (not reset per scenario).
cleanup_stats = { label_lists_found: 0, label_lists_deleted: 0, label_lists_kept: 0 }
# Phase 1 main loop: one model group per scenario. Each iteration imports the
# scenario, optionally deletes empty label lists, and commits the network so
# its data is persisted for Phase 2.
scenarios.each_with_index do |scenario_name, index|
  puts "[#{index + 1}/#{scenarios.length}] #{scenario_name}"
  puts " " + "-"*66
  log "\n[#{index + 1}/#{scenarios.length}] Processing scenario: #{scenario_name}", log_file
  log "-" * 70, log_file
  begin
    # Create model group named "<model basename> - <scenario>".
    model_group_name = "#{File.basename(file_path, '.mxd')} - #{scenario_name}"
    log "Creating model group: #{model_group_name}", log_file
    begin
      model_group = db.new_model_object('Model Group', model_group_name)
      log "Model group created with ID: #{model_group.id}", log_file
    rescue => e
      # Duplicate-name collisions abort the whole run with guidance;
      # any other creation error is re-raised to the outer rescue.
      if e.message.include?("already exists")
        error_msg = "ERROR: Model group '#{model_group_name}' already exists in database.\n\n" +
        "Please delete or rename the existing model group before running this script again."
        log error_msg, log_file
        puts ""
        puts "="*70
        puts "ERROR: Duplicate Model Group Detected"
        puts "="*70
        puts ""
        puts "A model group with this name already exists:"
        puts " '#{model_group_name}'"
        puts ""
        puts "Please delete or rename the existing model group"
        puts "before running this script again."
        puts ""
        puts "="*70
        log_file.close
        exit 1
      else
        raise # Re-raise if it's a different error
      end
    end
    # Per-scenario import log written by the importer itself.
    import_log_path = File.join(log_dir, "#{scenario_name}_#{Time.now.strftime('%Y%m%d_%H%M%S')}.txt")
    # Import — presumably returns the list of created model objects;
    # verify against the ICM Exchange API docs.
    log "Importing scenario '#{scenario_name}' from #{File.basename(file_path)}...", log_file
    imported_objects = model_group.import_all_sw_model_objects(
      file_path,
      "mxd",
      scenario_name,
      import_log_path
    )
    # Check success: nil/empty result is treated as a failed import.
    if imported_objects.nil? || imported_objects.empty?
      puts " FAILED: No objects imported"
      log "WARNING: No objects imported for scenario '#{scenario_name}'", log_file
      # Echo the importer's own log into ours to aid diagnosis.
      if File.exist?(import_log_path)
        log "Import log contents:", log_file
        File.foreach(import_log_path) do |line|
          log " #{line.strip}", log_file
        end
      end
      log "Import failed for scenario '#{scenario_name}'", log_file
      failed_imports << scenario_name
      # Clean up the now-empty model group (best effort).
      begin
        model_group.delete
        log "Deleted empty model group", log_file
      rescue => e
        log "Could not delete empty model group: #{e.message}", log_file
      end
    else
      # Success!
      puts " > Imported #{imported_objects.length} objects"
      log "SUCCESS: Imported #{imported_objects.length} objects for scenario '#{scenario_name}'", log_file
      log "Imported objects:", log_file
      imported_objects.each do |obj|
        log " - #{obj.type}: #{obj.name} (ID: #{obj.id})", log_file
      end
      # ==================================================================
      # CLEANUP EMPTY LABEL LISTS
      # InfoSWMM imports create empty 'Label List' artifacts; when enabled,
      # delete any whose 'labels' blob is empty (see is_label_list_empty?).
      # ==================================================================
      if cleanup_empty_label_lists
        log "\nCleaning up empty label lists...", log_file
        label_lists_to_delete = []
        # Find all label lists in the imported objects
        imported_objects.each do |obj|
          if obj.type == 'Label List'
            cleanup_stats[:label_lists_found] += 1
            log " Found Label List: #{obj.name} (ID: #{obj.id})", log_file
            # Check if it's empty
            if is_label_list_empty?(obj, log_file)
              log " Label list is empty - marking for deletion", log_file
              label_lists_to_delete << obj
            else
              log " Label list has content - keeping", log_file
              cleanup_stats[:label_lists_kept] += 1
            end
          end
        end
        # Delete empty label lists (each deletion is individually rescued
        # so one failure does not stop the rest).
        if label_lists_to_delete.any?
          log " Deleting #{label_lists_to_delete.length} empty label list(s)...", log_file
          puts " > Cleaning up: #{label_lists_to_delete.length} empty label list(s) removed"
          label_lists_to_delete.each do |label_list|
            begin
              label_list.delete
              cleanup_stats[:label_lists_deleted] += 1
              log " Deleted: #{label_list.name}", log_file
            rescue => e
              log " ERROR deleting label list '#{label_list.name}': #{e.message}", log_file
              # Failed deletions are counted as "kept".
              cleanup_stats[:label_lists_kept] += 1
            end
          end
          # NOTE(review): this count is cumulative across all scenarios so
          # far, not just this one — confirm whether per-scenario totals
          # were intended.
          log " Cleanup complete: deleted #{cleanup_stats[:label_lists_deleted]} label lists", log_file
        else
          log " No empty label lists to clean up", log_file
        end
      end
      # ==================================================================
      # Find and commit the SWMM network so it's saved for Phase 2.
      # Note: only the FIRST 'SWMM network' object is committed.
      imported_network = nil
      imported_objects.each do |obj|
        if obj.type == 'SWMM network'
          imported_network = obj
          break
        end
      end
      if imported_network
        begin
          log "Committing imported network: #{imported_network.name}", log_file
          net = imported_network.open
          net.commit("Imported from InfoSWMM - #{scenario_name}")
          log "Network committed successfully", log_file
        rescue => e
          log "WARNING: Could not commit network: #{e.message}", log_file
        end
      end
      # Record success for Phases 1.5 and 2.
      successful_imports << { scenario: scenario_name, group_id: model_group.id, count: imported_objects.length }
      imported_model_groups[scenario_name] = model_group.id
    end
  rescue => e
    # Any unexpected error fails this scenario but lets the loop continue.
    puts " ERROR: #{e.message}"
    log "ERROR importing scenario '#{scenario_name}': #{e.message}", log_file
    log "Backtrace: #{e.backtrace.join("\n")}", log_file
    failed_imports << scenario_name
  end
end
# Log cleanup statistics (grand totals across all scenarios).
if cleanup_empty_label_lists
  log "\n" + "="*70, log_file
  log "CLEANUP STATISTICS", log_file
  log "="*70, log_file
  log "Label lists found: #{cleanup_stats[:label_lists_found]}", log_file
  log "Label lists deleted: #{cleanup_stats[:label_lists_deleted]}", log_file
  log "Label lists kept: #{cleanup_stats[:label_lists_kept]}", log_file
  log "="*70 + "\n", log_file
  if cleanup_stats[:label_lists_deleted] > 0
    puts "\n" + "-" * 70
    puts " Cleanup Summary: Deleted #{cleanup_stats[:label_lists_deleted]} empty label list(s)"
    puts "-" * 70
  end
end
# ============================================================================
# PHASE 1.5: Deduplicate Rainfall Events across scenarios
# (Time Patterns and Climatology cannot be deduplicated — no export support;
# see DEDUP_OBJECT_TYPES below.)
# ============================================================================
puts "+" + "="*68 + "+"
puts "|" + " "*26 + "PHASE 1.5" + " "*33 + "|"
puts "|" + " "*13 + "Analyze & Deduplicate Rainfall Events" + " "*18 + "|"
puts "+" + "="*68 + "+"
puts ""
log "\n" + "="*70, log_file
log "PHASE 1.5: Object Deduplication Analysis", log_file
log "="*70, log_file
# Content-fingerprints a model object: export it to a temp file, strip the
# lines that legitimately differ between otherwise-identical objects (the
# bare object name plus Name/Description/Created/Modified/GUID/ID metadata),
# and return a SHA256 hex digest of what remains.
# Returns nil when the export fails, yields an empty file, or raises.
def get_object_hash(obj, log_file = nil)
  require 'digest'
  begin
    # Temp workspace lives beside this script; created on first use.
    work_dir = File.join(File.dirname(__FILE__), "temp_object_compare")
    Dir.mkdir(work_dir) unless Dir.exist?(work_dir)
    # Filesystem-safe export name derived from the object type + id.
    type_token = obj.type.gsub(' ', '_').gsub(/[^a-zA-Z0-9_]/, '')
    export_path = File.join(work_dir, "#{type_token}_#{obj.id}.txt")
    # Empty format string = default InfoWorks export format.
    obj.export(export_path, '')
    unless File.exist?(export_path)
      log " ERROR: Export failed - file not created for #{obj.type} '#{obj.name}'", log_file if log_file
      return nil
    end
    if File.size(export_path) == 0
      log " WARNING: Export created empty file (0 bytes) for #{obj.type} '#{obj.name}'", log_file if log_file
      File.delete(export_path) if File.exist?(export_path)
      return nil
    end
    raw = File.read(export_path)
    # Drop per-scenario metadata lines so identical data hashes identically.
    metadata_re = /^(?:Name|Description|Created|Modified|GUID|ID)[:\s]/i
    kept_lines = raw.each_line.reject do |line|
      line.strip == obj.name || line.match(metadata_re)
    end
    cleaned = kept_lines.join
    # One-time debug dump per object type (only when a log file is given).
    $first_export_logged = {} if log_file && $first_export_logged.nil?
    if log_file && !$first_export_logged[obj.type]
      $first_export_logged[obj.type] = true
      log " DEBUG: First #{obj.type} export (after metadata stripping, first 300 chars):", log_file
      log " #{cleaned[0..300].inspect}", log_file
      log " Original size: #{raw.length} bytes, Cleaned size: #{cleaned.length} bytes", log_file
    end
    fingerprint = Digest::SHA256.hexdigest(cleaned)
    File.delete(export_path) if File.exist?(export_path)
    fingerprint
  rescue => e
    log " ERROR: Exception hashing #{obj.type} '#{obj.name}': #{e.message}", log_file if log_file
    log " Backtrace: #{e.backtrace.first(3).join("\n ")}", log_file if log_file
    nil
  end
end
# Object types eligible for content-based deduplication.
# Note: IWSW Time Patterns and IWSW Climatology don't support the export
# method, so only Rainfall Events can be compared by exported content:
# - 'IWSW Time Patterns' (no export support)
# - 'IWSW Climatology' (no export support)
# Frozen so the constant cannot be mutated accidentally at runtime.
DEDUP_OBJECT_TYPES = ['Rainfall Event'].freeze
# Collect all objects from all successful imports.
# all_objects_by_type:  object_type => { scenario_name => [objects] }
# object_stats_by_type: object_type => { total_found:, unique_count:, duplicate_count: }
all_objects_by_type = {}
object_stats_by_type = {}
DEDUP_OBJECT_TYPES.each do |obj_type|
  all_objects_by_type[obj_type] = {}
  object_stats_by_type[obj_type] = { total_found: 0, unique_count: 0, duplicate_count: 0 }
end
# Gather every dedup-eligible object from each successfully imported group.
successful_imports.each do |import_info|
  scenario_name = import_info[:scenario]
  group_id = import_info[:group_id]
  # Re-fetch the model group by id; skip silently if it no longer resolves.
  model_group = db.model_object_from_type_and_id('Model Group', group_id)
  next unless model_group
  # Find objects of each type in this group
  DEDUP_OBJECT_TYPES.each do |obj_type|
    all_objects_by_type[obj_type][scenario_name] = []
    model_group.children.each do |child|
      if child.type == obj_type
        all_objects_by_type[obj_type][scenario_name] << child
        object_stats_by_type[obj_type][:total_found] += 1
      end
    end
    count = all_objects_by_type[obj_type][scenario_name].length
    log "Found #{count} #{obj_type} object(s) in #{scenario_name}", log_file if count > 0
  end
end
# Analyze each object type for uniqueness. Objects are keyed by content
# hash (see get_object_hash), so identically-named objects with different
# data stay separate, and differently-named objects with identical data
# collapse into one entry.
unique_objects_by_type = {} # Hash: object_type => { hash => { object, model_names, scenarios } }
DEDUP_OBJECT_TYPES.each do |obj_type|
  unique_objects_by_type[obj_type] = {}
  next if object_stats_by_type[obj_type][:total_found] == 0
  log "\nAnalyzing #{obj_type}...", log_file
  puts "Analyzing #{obj_type}..."
  all_objects_by_type[obj_type].each do |scenario_name, objects|
    objects.each do |obj|
      obj_model_name = obj.name
      obj_hash = get_object_hash(obj, log_file)
      # If hash is nil (export failed), treat as unique to avoid data loss
      if obj_hash.nil?
        log " WARNING: Could not hash '#{obj_model_name}' from #{scenario_name} - treating as unique", log_file
        # Synthetic key guarantees unhashable objects never collide.
        unique_key = "UNHASHABLE_#{obj.id}"
      else
        # Use the hash as the unique key
        unique_key = obj_hash
      end
      # Enhanced logging to help debug
      hash_display = obj_hash.nil? ? "FAILED" : "#{obj_hash[0..10]}..."
      log " '#{obj_model_name}' (#{scenario_name}): hash=#{hash_display}", log_file
      if unique_objects_by_type[obj_type].key?(unique_key)
        # Duplicate found (same data content, regardless of model object name)
        object_stats_by_type[obj_type][:duplicate_count] += 1
        unique_objects_by_type[obj_type][unique_key][:scenarios] << scenario_name
        unique_objects_by_type[obj_type][unique_key][:model_names] << obj_model_name
        log " -> DUPLICATE (matches '#{unique_objects_by_type[obj_type][unique_key][:model_names].first}')", log_file
      else
        # New unique object (different data content)
        unique_objects_by_type[obj_type][unique_key] = {
          object: obj,
          model_names: [obj_model_name],
          hash: obj_hash,
          scenarios: [scenario_name]
        }
        log " -> UNIQUE", log_file
      end
    end
  end
  object_stats_by_type[obj_type][:unique_count] = unique_objects_by_type[obj_type].length
end
# Log summary of the analysis (per object type).
log "\nDeduplication analysis complete:", log_file
DEDUP_OBJECT_TYPES.each do |obj_type|
  stats = object_stats_by_type[obj_type]
  next if stats[:total_found] == 0
  log " #{obj_type}:", log_file
  log " Total found: #{stats[:total_found]}", log_file
  log " Unique (by contents): #{stats[:unique_count]}", log_file
  log " Duplicates: #{stats[:duplicate_count]}", log_file
  puts " > Found: #{stats[:total_found]} total | Unique: #{stats[:unique_count]} | Duplicates: #{stats[:duplicate_count]}"
end
puts ""
# Cleanup temp directory used for object comparison (best effort: the inline
# `rescue nil` deliberately ignores per-file deletion failures).
begin
  script_dir = File.dirname(__FILE__)
  temp_dir = File.join(script_dir, "temp_object_compare")
  if Dir.exist?(temp_dir)
    Dir.glob(File.join(temp_dir, "*")).each { |f| File.delete(f) rescue nil }
    Dir.delete(temp_dir) rescue nil
    log "Cleaned up temporary object comparison directory", log_file
  end
rescue => e
  log "WARNING: Could not clean up temp directory: #{e.message}", log_file
end
# ============================================================================
# PHASE 2: Create merged network with scenarios
# ============================================================================
if merge_scenarios && successful_imports.length > 0
puts "+" + "="*68 + "+"
puts "|" + " "*27 + "PHASE 2" + " "*35 + "|"
puts "|" + " "*14 + "Create Merged Network with Scenarios" + " "*17 + "|"
puts "+" + "="*68 + "+"
puts ""
puts "This phase will:"
puts " 1. Create a new master network based on BASE"
puts " 2. Add scenarios for each imported network"
puts " 3. Remove inactive elements from each scenario"
puts ""
log "\n" + "="*70, log_file
log "PHASE 2: Merged Network Creation", log_file
log "="*70, log_file
begin
# Find BASE scenario
base_scenario = successful_imports.find { |s| s[:scenario].upcase == 'BASE' }
if base_scenario.nil?
# Use first scenario as base if no BASE found
base_scenario = successful_imports.first
log "WARNING: No BASE scenario found, using '#{base_scenario[:scenario]}' as master", log_file
puts " WARNING: Using '#{base_scenario[:scenario]}' as master (no BASE found)"
else
log "Using BASE scenario as master network", log_file
puts " Using BASE as master network"
end
# Create merged model group
merged_group_name = "#{File.basename(file_path, '.mxd')} - Merged Scenarios"
log "Creating merged model group: #{merged_group_name}", log_file
puts "Step 1: Creating merged model group '#{merged_group_name}'..."
begin
merged_group = db.new_model_object('Model Group', merged_group_name)
log "Merged group created with ID: #{merged_group.id}", log_file
puts " Model group created successfully"
rescue => e
if e.message.include?("already exists")
error_msg = "ERROR: Model group '#{merged_group_name}' already exists.\n\n" +
"Please delete or rename the existing merged model group before running this script again."
log error_msg, log_file
puts ""
puts "="*70
puts "ERROR: Duplicate Merged Model Group Detected"
puts "="*70
puts ""
puts "A merged model group already exists:"
puts " '#{merged_group_name}'"
puts ""
puts "Please delete or rename the existing merged group"
puts "before running this script again."
puts ""
puts "="*70
log_file.close
exit 1
else
raise # Re-raise if it's a different error
end
end
# Get the BASE model group to find the network
base_group = db.model_object_from_type_and_id('Model Group', base_scenario[:group_id])
if base_group.nil?
raise "Could not find BASE model group with ID #{base_scenario[:group_id]}"
end
# Find the SWMM Network in the BASE group
base_network = nil
log "Searching for network in BASE group, children:", log_file
base_group.children.each do |child|
log " - Type: '#{child.type}', Name: '#{child.name}'", log_file
# Note: type is 'SWMM network' with lowercase 'n'
if child.type == 'SWMM network'
base_network = child
break
end
end
if base_network.nil?
# Log what we found and raise error
child_types = []
base_group.children.each { |c| child_types << c.type }
error_msg = "Could not find SWMM network in BASE model group. Found types: #{child_types.join(', ')}"
log error_msg, log_file
raise error_msg
end
log "Found BASE network: #{base_network.name} (ID: #{base_network.id})", log_file
puts " Found BASE network: #{base_network.name}"
# Check if BASE network has data
base_net = base_network.open
node_count = base_net.row_objects('_nodes').length
link_count = base_net.row_objects('_links').length
sub_count = base_net.row_objects('_subcatchments').length
log "BASE network contains: #{node_count} nodes, #{link_count} links, #{sub_count} subcatchments", log_file
puts " BASE network has: #{node_count} nodes, #{link_count} links, #{sub_count} subs"
if node_count == 0 && link_count == 0
raise "BASE network is empty! Cannot create merged network from empty source."
end
# Copy the BASE network into the merged group
log "Copying BASE network to merged group...", log_file
puts " Copying BASE network..."
# Use ICM's built-in copy method - copies all elements, structures, and relationships correctly
merged_network = merged_group.copy_here(base_network, false, false)
if merged_network.nil?
raise "Failed to copy BASE network - copy_here returned nil"
end
# Rename the copied network
merged_network_name = "#{File.basename(file_path, '.mxd')} - Merged"
merged_network.name = merged_network_name
log "Copied network as: #{merged_network_name} (ID: #{merged_network.id})", log_file
# Verify the copy worked by checking element counts
merged_net = merged_network.open
merged_node_count = merged_net.row_objects('_nodes').length
merged_link_count = merged_net.row_objects('_links').length
merged_sub_count = merged_net.row_objects('_subcatchments').length
log "Merged network after copy: #{merged_node_count} nodes, #{merged_link_count} links, #{merged_sub_count} subcatchments", log_file
puts " Merged network: #{merged_node_count} nodes, #{merged_link_count} links, #{merged_sub_count} subs"
if merged_node_count == 0 && merged_link_count == 0
log "WARNING: copy_here created empty network, attempting manual commit...", log_file
puts " WARNING: Network copy is empty, trying alternative approach..."
# The copy might need to be committed - try that
merged_net.commit("Initial import from BASE")
# Re-check
merged_node_count = merged_net.row_objects('_nodes').length
if merged_node_count == 0
raise "Copied network is empty even after commit - copy_here may not work for SWMM networks"
end
end
log "Successfully created merged network with BASE data", log_file
puts "Step 1: Create master network from BASE"
puts " > Created: #{node_count} nodes, #{link_count} links, #{sub_count} subcatchments"
puts ""
# ==================================================================
# DEDUPLICATE OBJECTS IN MERGED GROUP
# ==================================================================
log "\nDeduplicating objects in merged group...", log_file
puts "Step 2: Copy unique Rainfall Events to merged group"
total_copied = 0
total_failed = 0
total_duplicates_skipped = 0
DEDUP_OBJECT_TYPES.each do |obj_type|
next if object_stats_by_type[obj_type][:total_found] == 0
log "\n Processing #{obj_type}...", log_file
# First, delete all objects of this type that came with the BASE network copy
existing_objects = []
merged_group.children.each do |child|
if child.type == obj_type
existing_objects << child
end
end
log " Found #{existing_objects.length} existing #{obj_type} object(s) (will be replaced)", log_file
existing_objects.each do |obj|
begin
obj.delete
log " Deleted: #{obj.name}", log_file
rescue => e
log " WARNING: Could not delete '#{obj.name}': #{e.message}", log_file
end
end
# Now copy only unique objects
unique_count = unique_objects_by_type[obj_type].length
log " Copying #{unique_count} unique #{obj_type} object(s)...", log_file
objects_copied = 0
objects_failed = 0
unique_objects_by_type[obj_type].each do |unique_key, obj_info|
obj = obj_info[:object]
begin
# Copy the object to merged group
copied_obj = merged_group.copy_here(obj, false, false)
# Rename to include all scenarios that use this data (scenario names only)
if obj_info[:scenarios].length > 1
# Multiple scenarios share this data - create name showing all
new_name = obj_info[:scenarios].join('-')
# Rename the copied object using the name= method
copied_obj.name = new_name
log " Copied and renamed to: '#{new_name}' (represents: #{obj_info[:scenarios].join(', ')})", log_file
else
# Single scenario - just use the scenario name
new_name = obj_info[:scenarios].first
copied_obj.name = new_name
log " Copied and renamed to: '#{new_name}' (from: #{new_name})", log_file
end
objects_copied += 1
rescue => e
objects_failed += 1
log " ERROR copying '#{obj_info[:model_names].first}': #{e.message}", log_file
end
end
duplicates_skipped = object_stats_by_type[obj_type][:duplicate_count]
log " #{obj_type} complete: #{objects_copied} copied, #{objects_failed} failed, #{duplicates_skipped} duplicates skipped", log_file
puts " > Copied: #{objects_copied} unique | Skipped: #{duplicates_skipped} duplicates"
total_copied += objects_copied
total_failed += objects_failed
total_duplicates_skipped += duplicates_skipped
end
log "\n Object deduplication complete:", log_file
log " Total objects copied: #{total_copied}", log_file
log " Total failed: #{total_failed}", log_file
log " Total duplicates skipped: #{total_duplicates_skipped}", log_file
puts ""
# ==================================================================
# Now add other scenarios
other_scenarios = successful_imports.reject { |s| s[:scenario] == base_scenario[:scenario] }
if other_scenarios.any?
log "\nAdding #{other_scenarios.length} additional scenario(s) to merged network...", log_file
puts ""
puts "Step 3: Add #{other_scenarios.length} scenario(s) to merged network"
# Open the merged network ONCE for all scenarios
merged_net_work = merged_network.open
# --- For each non-base scenario: locate its import artifacts, create a matching
# scenario in the merged network, and prepare a field-by-field data copy. ---
other_scenarios.each_with_index do |scenario_info, idx|
scenario_name = scenario_info[:scenario]
puts "  [#{idx + 1}/#{other_scenarios.length}] #{scenario_name}"
log "\nAdding scenario: #{scenario_name}", log_file
begin
# Get the source model group for this scenario
# (group_id was recorded when the scenario was imported earlier in the script).
scenario_group = db.model_object_from_type_and_id('Model Group', scenario_info[:group_id])
if scenario_group.nil?
raise "Could not find model group for scenario #{scenario_name}"
end
# Find selection list in this group (created by import)
selection_list = nil
scenario_group.children.each do |child|
# InfoSWMM import typically creates selection lists with scenario name
if child.type == 'Selection List'
log "  Found Selection List: #{child.name} (ID: #{child.id})", log_file
# Use the first selection list found (should be the one from import)
selection_list = child
break
end
end
if selection_list.nil?
# No selection list means the import did not produce the expected artifacts;
# log what IS in the group to aid diagnosis, then skip this scenario.
log "  WARNING: No selection list found for scenario #{scenario_name}", log_file
log "  Available children:", log_file
scenario_group.children.each do |child|
log "    - #{child.type}: #{child.name}", log_file
end
puts "  WARNING: No selection list found - skipping"
next
end
log "  Using selection list: #{selection_list.name}", log_file
puts "  Found selection list: #{selection_list.name}"
# Find the source network for this scenario
source_network = nil
scenario_group.children.each do |child|
if child.type == 'SWMM network'
source_network = child
break
end
end
if source_network.nil?
log "  WARNING: No SWMM network found in scenario group - skipping", log_file
puts "  WARNING: No source network found - skipping"
next
end
log "  Found source network: #{source_network.name}", log_file
# Set to base scenario before creating new scenario
# (new scenarios are derived from whichever scenario is current/specified).
merged_net_work.current_scenario = 'Base'
# Create new scenario in merged network (based on Base scenario)
merged_net_work.add_scenario(scenario_name, 'Base', "Imported from InfoSWMM - #{scenario_name}")
log "  Created scenario '#{scenario_name}' in merged network", log_file
# Switch to the new scenario
# so the field writes below land in this scenario, not in Base.
merged_net_work.current_scenario = scenario_name
log "  Switched to scenario: #{scenario_name}", log_file
# Copy data from source network to this scenario (field-by-field)
log "  Copying scenario-specific data from source network...", log_file
source_net = source_network.open
fields_updated = 0
fields_skipped = 0
# Begin transaction for field updates
# NOTE(review): the matching transaction_commit is outside this excerpt — verify it
# exists after the subcatchment copy, and that the rescue path rolls back or commits.
merged_net_work.transaction_begin
# Copy node data
# Nodes are matched by ID between source and merged networks; nodes that exist
# only in the source are silently skipped (merged network topology is fixed here).
source_net.row_objects('_nodes').each do |source_node|
target_node = merged_net_work.row_object('_nodes', source_node.id)
next unless target_node
# Copy each field value
source_node.table_info.fields.each do |field|
field_name = field.name
# Skip only the object's own ID (not reference fields)
next if field_name.downcase == 'node_id'
begin
source_value = source_node[field_name]
target_value = target_node[field_name]
# Only write when values differ, to keep the update (and undo log) minimal.
if source_value != target_value
target_node[field_name] = source_value
fields_updated += 1
end
rescue => e
# Skip read-only or incompatible fields
# Only the first 10 skips are logged to keep the log readable.
fields_skipped += 1
log "    Skipped node field '#{field_name}': #{e.message}", log_file if fields_skipped <= 10
end
end
# Persist this node's accumulated field changes to the open network.
target_node.write
end
log "  Node fields: #{fields_updated} updated, #{fields_skipped} skipped", log_file
# Copy link data
# Same pattern as nodes, but with separate counters and extra skipped fields:
# a link's upstream/downstream node IDs define its topology and must not change.
link_fields_updated = 0
link_fields_skipped = 0
source_net.row_objects('_links').each do |source_link|
target_link = merged_net_work.row_object('_links', source_link.id)
next unless target_link
source_link.table_info.fields.each do |field|
field_name = field.name
# Skip the object's own ID and topology (us/ds nodes define the link)
next if ['link_id', 'us_node_id', 'ds_node_id'].include?(field_name.downcase)
begin
source_value = source_link[field_name]
target_value = target_link[field_name]
# Write only changed values.
if source_value != target_value
target_link[field_name] = source_value
link_fields_updated += 1
end
rescue => e
# Read-only/incompatible field — count and (for the first 10) log it.
link_fields_skipped += 1
log "    Skipped link field '#{field_name}': #{e.message}", log_file if link_fields_skipped <= 10
end
end
# Persist this link's changes.
target_link.write
end
log "  Link fields: #{link_fields_updated} updated, #{link_fields_skipped} skipped", log_file
# Copy subcatchment data
# Same per-field copy as nodes/links; only the subcatchment's own ID is excluded.
# (This loop's rescue/end and the transaction commit continue past this excerpt.)
sub_fields_updated = 0
sub_fields_skipped = 0
source_net.row_objects('_subcatchments').each do |source_sub|
target_sub = merged_net_work.row_object('_subcatchments', source_sub.id)
next unless target_sub
source_sub.table_info.fields.each do |field|
field_name = field.name
# Skip only the object's own ID
next if field_name.downcase == 'subcatchment_id'
begin
source_value = source_sub[field_name]
target_value = target_sub[field_name]
# Write only changed values.
if source_value != target_value
target_sub[field_name] = source_value
sub_fields_updated += 1
end
rescue => e
sub_fields_skipped += 1