From a32a4b90321bda345f8354a63fb88ce0b94b24fd Mon Sep 17 00:00:00 2001 From: james <81617086+je-cook@users.noreply.github.com> Date: Fri, 27 Feb 2026 11:17:02 +0000 Subject: [PATCH 01/17] mv --- process/core/io/{write_new_in_dat.py => in_dat/create.py} | 0 process/core/io/{ => plot/costs}/costs_bar.py | 0 process/core/io/{ => plot}/plot_stress_tf.py | 0 3 files changed, 0 insertions(+), 0 deletions(-) rename process/core/io/{write_new_in_dat.py => in_dat/create.py} (100%) rename process/core/io/{ => plot/costs}/costs_bar.py (100%) rename process/core/io/{ => plot}/plot_stress_tf.py (100%) diff --git a/process/core/io/write_new_in_dat.py b/process/core/io/in_dat/create.py similarity index 100% rename from process/core/io/write_new_in_dat.py rename to process/core/io/in_dat/create.py diff --git a/process/core/io/costs_bar.py b/process/core/io/plot/costs/costs_bar.py similarity index 100% rename from process/core/io/costs_bar.py rename to process/core/io/plot/costs/costs_bar.py diff --git a/process/core/io/plot_stress_tf.py b/process/core/io/plot/plot_stress_tf.py similarity index 100% rename from process/core/io/plot_stress_tf.py rename to process/core/io/plot/plot_stress_tf.py From 88ef30ed42cde7e6f6735133ac8a921b4f284cc1 Mon Sep 17 00:00:00 2001 From: james <81617086+je-cook@users.noreply.github.com> Date: Mon, 23 Feb 2026 08:56:46 +0000 Subject: [PATCH 02/17] Overhall CI --- process/core/caller.py | 2 +- process/core/io/{in_dat.py => in_dat/base.py} | 5 - process/core/io/in_dat/cli.py | 28 + process/core/io/in_dat/create.py | 122 +- process/core/io/mfile/cli.py | 51 + process/core/io/{ => mfile}/mfile.py | 0 process/core/io/{ => mfile}/mfile2dict.py | 14 - .../core/io/{ => mfile}/mfile_comparison.py | 196 +-- process/core/io/{ => mfile}/mfile_to_csv.py | 133 +- .../io/{ => mfile}/mfile_to_csv_vars.json | 0 process/core/io/mfile_utils.py | 43 - process/core/io/plot/cli.py | 255 ++++ process/core/io/plot/costs/cli.py | 71 ++ process/core/io/plot/costs/costs_bar.py | 
297 ++--- process/core/io/{ => plot/costs}/costs_pie.py | 49 +- .../io/{ => plot/images}/alpha_particle.png | Bin .../images}/blanket_with_coolant.png | Bin .../core/io/{ => plot/images}/divertor.png | Bin process/core/io/{ => plot/images}/fw.png | Bin .../core/io/{ => plot/images}/generator.png | Bin .../io/{ => plot/images}/hcd_injector.png | Bin process/core/io/{ => plot/images}/neutron.png | Bin process/core/io/{ => plot/images}/plasma.png | Bin process/core/io/{ => plot/images}/pylon.png | Bin process/core/io/{ => plot/images}/turbine.png | Bin process/core/io/{ => plot/images}/vv.png | Bin process/core/io/{ => plot}/plot_proc.py | 107 +- process/core/io/{ => plot}/plot_scans.py | 212 ---- process/core/io/{ => plot}/plot_solutions.py | 0 process/core/io/plot/plot_stress_tf.py | 1081 ++++++++--------- .../{ => plot/sankey}/plot_plotly_sankey.py | 4 + .../core/io/{ => plot/sankey}/plot_sankey.py | 0 .../core/io/{ => plot/sankey}/sankey_funcs.py | 530 ++++++-- process/core/io/process_config.py | 4 +- process/core/io/process_funcs.py | 4 +- process/core/io/tools.py | 75 ++ process/main.py | 257 ++-- pyproject.toml | 2 +- tracking/tracking_data.py | 37 - 39 files changed, 1773 insertions(+), 1806 deletions(-) rename process/core/io/{in_dat.py => in_dat/base.py} (99%) create mode 100644 process/core/io/in_dat/cli.py create mode 100644 process/core/io/mfile/cli.py rename process/core/io/{ => mfile}/mfile.py (100%) rename process/core/io/{ => mfile}/mfile2dict.py (98%) rename process/core/io/{ => mfile}/mfile_comparison.py (62%) rename process/core/io/{ => mfile}/mfile_to_csv.py (59%) rename process/core/io/{ => mfile}/mfile_to_csv_vars.json (100%) delete mode 100644 process/core/io/mfile_utils.py create mode 100644 process/core/io/plot/cli.py create mode 100644 process/core/io/plot/costs/cli.py rename process/core/io/{ => plot/costs}/costs_pie.py (85%) rename process/core/io/{ => plot/images}/alpha_particle.png (100%) rename process/core/io/{ => 
plot/images}/blanket_with_coolant.png (100%) rename process/core/io/{ => plot/images}/divertor.png (100%) rename process/core/io/{ => plot/images}/fw.png (100%) rename process/core/io/{ => plot/images}/generator.png (100%) rename process/core/io/{ => plot/images}/hcd_injector.png (100%) rename process/core/io/{ => plot/images}/neutron.png (100%) rename process/core/io/{ => plot/images}/plasma.png (100%) rename process/core/io/{ => plot/images}/pylon.png (100%) rename process/core/io/{ => plot/images}/turbine.png (100%) rename process/core/io/{ => plot/images}/vv.png (100%) rename process/core/io/{ => plot}/plot_proc.py (99%) rename process/core/io/{ => plot}/plot_scans.py (88%) rename process/core/io/{ => plot}/plot_solutions.py (100%) rename process/core/io/{ => plot/sankey}/plot_plotly_sankey.py (98%) rename process/core/io/{ => plot/sankey}/plot_sankey.py (100%) rename process/core/io/{ => plot/sankey}/sankey_funcs.py (63%) create mode 100644 process/core/io/tools.py diff --git a/process/core/caller.py b/process/core/caller.py index 6de2edcd9..59994ad25 100644 --- a/process/core/caller.py +++ b/process/core/caller.py @@ -11,7 +11,7 @@ from process import data_structure from process.core import constants from process.core.final import finalise -from process.core.io.mfile import MFile +from process.core.io.mfile.mfile import MFile from process.core.process_output import OutputFileManager, ovarre from process.core.solver.iteration_variables import set_scaled_iteration_variable from process.core.solver.objectives import objective_function diff --git a/process/core/io/in_dat.py b/process/core/io/in_dat/base.py similarity index 99% rename from process/core/io/in_dat.py rename to process/core/io/in_dat/base.py index 06ccbf360..40d3cdb4c 100644 --- a/process/core/io/in_dat.py +++ b/process/core/io/in_dat/base.py @@ -1823,8 +1823,3 @@ def get_param_value(self, var_name): value = var_dict.get("value") return value - - -if __name__ == "__main__": - i = 
InDat(filename="IN.DAT") - i.write_in_dat() diff --git a/process/core/io/in_dat/cli.py b/process/core/io/in_dat/cli.py new file mode 100644 index 000000000..8c744f2ff --- /dev/null +++ b/process/core/io/in_dat/cli.py @@ -0,0 +1,28 @@ +import click + +from process.io.in_dat.create import write_indat +from process.io.tools import indat_opt, mfile_opt + + +@click.command("indat", no_args_is_help=True) +@mfile_opt(exists=True) +@indat_opt() +@click.option( + "-o", + "--indat-out", + "indat_out", + type=str, + default="new_IN.DAT", + help="IN.DAT to write out", +) +@click.option( + "-fpi", + "--feasuble-point-index", + type=int, + default=-1, + help="Create indat from the Nth feasible point in mfile", +) +def new_indat(mfile, indat, indat_out, feasuble_point_index): + """Creates a new IN.DAT using MFILE.DAT iteration variables.""" + + write_indat(mfile, indat, indat_out, feasuble_point_index) diff --git a/process/core/io/in_dat/create.py b/process/core/io/in_dat/create.py index b311e72f9..46e5ec40b 100644 --- a/process/core/io/in_dat/create.py +++ b/process/core/io/in_dat/create.py @@ -5,14 +5,13 @@ """ -import argparse import re -import process.core.io.mfile as mf -from process.core.io.in_dat import InDat +import process.io.mfile.mfile as mf +from process.io.in_dat.base import InDat -def feasible_point(filename, position): +def feasible_point(filename, position: int): """Function to check for feasible solution before creating new IN.DAT, or to determine the first or last feasible point in a scan Parameters @@ -20,44 +19,38 @@ def feasible_point(filename, position): filename : name of MFILE.DAT to read position : - e.g first or last + feasible position index Returns ------- - scanPoint: + scan_point: scan number to use when writing new file """ mfile_data = mf.MFile(filename) - finished = False scan_point = 0 - num_scans = 1 + num_scans = int(mfile_data.get("isweep", scan=-1) or 1) + + if position == -1: + position = num_scans + + position = max(1, position) + + if 
position > num_scans: + position = num_scans + print(f"Only {num_scans} in mfile selecting last feasible_point") + + check_point = 1 for value in mfile_data.data: - if "isweep" in value: - num_scans = int(mfile_data.data["isweep"].get_scan(-1)) - break - - # Assign start point - check_point = 1 if position == "first" else num_scans - - while finished is False: - for value in mfile_data.data: - # Look for feasible scan points (with ifail = 1) - if "ifail" in value and "vmcon_error_flag_(ifail)" not in value: - if mfile_data.data[value].get_scan(check_point) == 1: - finished = True - scan_point = check_point - else: - if position == "last": - if check_point == 1: - finished = True - else: - check_point = check_point - 1 - elif position == "first": - if check_point == num_scans: - finished = True - else: - check_point = check_point + 1 + # Look for feasible scan points (with ifail = 1) + if "ifail" in value and "vmcon_error_flag_(ifail)" not in value: + if mfile_data.get(value, scan=check_point) == 1: + scan_point = check_point + if check_point == position: + break + check_point += 1 + else: + raise ValueError("No feasible point found") return scan_point @@ -112,71 +105,18 @@ def replace_iteration_variables(iteration_vars, in_data): return in_data -def main(args=None): - parser = argparse.ArgumentParser( - description="Creates a new IN.DAT using iteration variable values from MFILE.DAT." 
- ) - - parser.add_argument( - "-f", - metavar="f", - type=str, - default="MFILE.DAT", - help='File to read as MFILE.DAT (default="MFILE.DAT")', - ) - - parser.add_argument( - "-i", - metavar="i", - type=str, - default="IN.DAT", - help='File to read as IN.DAT (default="IN.DAT")', - ) - - parser.add_argument( - "-o", - metavar="o", - type=str, - default="new_IN.DAT", - help='File to write as new IN.DAT (default="new_IN.DAT")', - ) - - parser.add_argument( - "-lfp", - "--lfp", - help="use last feasible point in a scan (default)", - action="store_true", - ) - - parser.add_argument( - "-ffp", "--ffp", help="use first feasible point in a scan", action="store_true" - ) - - args = parser.parse_args(args) - - if args.ffp: - # Determine first feasible scan point - scan = feasible_point(args.f, "first") - else: - # Determine last feasible scan point - default - scan = feasible_point(args.f, "last") - if scan == 0: - print("No feasible points in scan") - raise SystemExit +def write_indat(mfile, indat, output, feasible_point_index): + scan = feasible_point(mfile, feasible_point_index) print("Using scan number = ", scan) # Get iteration variables from MFILE.DAT - it_vars = get_iteration_variables(args.f, scan) + it_vars = get_iteration_variables(mfile, scan) # Read IN.DAT - in_dat_data = InDat(args.i) + in_dat_data = InDat(indat) # Amend the values for the iteration variables in_dat_data = replace_iteration_variables(it_vars, in_dat_data) # Write a new IN.DAT - in_dat_data.write_in_dat(output_filename=args.o) - - -if __name__ == "__main__": - main() + in_dat_data.write_in_dat(output_filename=output) diff --git a/process/core/io/mfile/cli.py b/process/core/io/mfile/cli.py new file mode 100644 index 000000000..b950d2f9e --- /dev/null +++ b/process/core/io/mfile/cli.py @@ -0,0 +1,51 @@ +import click + +from process.io.mfile.mfile_comparison import compare_mfiles +from process.io.mfile.mfile_to_csv import to_csv +from process.io.tools import mfile_arg, mfile_opt, save + + 
+@click.group() +def mfile(): + """MFile tools""" + + +@mfile.command("convert", no_args_is_help=True) +@mfile_opt(exists=True) +@click.option( + "-v", + "--variables", + type=str, + help="Optional list of variables or file with list of variables to extract", +) +@click.option( + "-fmt", + "--format", + "format_", + type=click.Choice(["json", "csv", "toml", "yaml", "pickle"]), +) +def convert(mfile, variables, format_): + """Convert MFile to other formats.""" + to_csv(mfile, variables) + + +@mfile.command("compare", no_args_is_help=True) +@mfile_arg +@save("Save output to file called comp.txt") +@click.option( + "-t", + "--comparison-type", + "comparison", + type=click.Choice(["defaults", "baseline", "blanket", "generic", "all"]), + default="all", + help="Format to save the eqdsk file in.", +) +@click.option("-v", "--verbose", default=False, is_flag=True) +@click.option("--acc", type=float, default=5.0) +def compare(mfiles, save, comparison, acc, verbose): + """Produce a comparison between two PROCESS MFILEs. + + User Can specify level of differences to show. 
+ + """ + compare_mfiles(mfiles, comparison, acc, save, verbose) diff --git a/process/core/io/mfile.py b/process/core/io/mfile/mfile.py similarity index 100% rename from process/core/io/mfile.py rename to process/core/io/mfile/mfile.py diff --git a/process/core/io/mfile2dict.py b/process/core/io/mfile/mfile2dict.py similarity index 98% rename from process/core/io/mfile2dict.py rename to process/core/io/mfile/mfile2dict.py index c9a4e87ec..2a8f78048 100644 --- a/process/core/io/mfile2dict.py +++ b/process/core/io/mfile/mfile2dict.py @@ -387,17 +387,3 @@ def write(self, output_filename: str): raise RuntimeError(f"Unrecognised file format '{suffix}'") self._logger.info("File was written successfully.") - - -if __name__ in "__main__": - import argparse - - parser = argparse.ArgumentParser() - - parser.add_argument("input_mfile") - parser.add_argument("output_file") - - args = parser.parse_args() - - parser = MFILEParser(args.input_mfile) - parser.write(args.output_file) diff --git a/process/core/io/mfile_comparison.py b/process/core/io/mfile/mfile_comparison.py similarity index 62% rename from process/core/io/mfile_comparison.py rename to process/core/io/mfile/mfile_comparison.py index af5a775dd..8d9172d42 100644 --- a/process/core/io/mfile_comparison.py +++ b/process/core/io/mfile/mfile_comparison.py @@ -10,7 +10,6 @@ generation script imports, and inspects, process. 
""" -import argparse import sys import numpy as np @@ -271,7 +270,15 @@ class BColors: ENDC = "\033[0m" -def main(arg): +comparison_dict = { + "defaults": DEFAULT_COMPARE_PARAMS, + "baseline": BASELINE_LIST, + "blanket": BLANKET_COMPARE_PARAMS, + "generic": GENERIC_LIST, +} + + +def compare_mfiles(files, comparison, acc, save, verbose): """Main function for comparing MFILEs Parameters @@ -283,9 +290,9 @@ def main(arg): print_counter = 0 n = 2 mfile_list = [] - for item in arg.f: + for item in files: mfile = mf.MFile(filename=item) - if mfile.data["error_status"].get_scan(-1) == 3: + if mfile.get("error_status", scan=-1) == 3: raise RuntimeError( f"{item} is an MFile from a PROCESS run that did not converge" " and instead results from an error during the run" @@ -293,10 +300,7 @@ def main(arg): mfile_list.append(mfile) - var_list = [] - missing_vars = [] - diff_list = [] - within_list = [] + var_list, missing_vars, diff_list, within_list = [], [], [], [] key_list = mfile_list[0].data.keys() for var in key_list: @@ -309,38 +313,30 @@ def main(arg): if store: var_list.append(var) - if arg.defaults: - var_list = DEFAULT_COMPARE_PARAMS - - if arg.blanket: - var_list = BLANKET_COMPARE_PARAMS - - if arg.baseline: - var_list = BASELINE_LIST - - if arg.generic: - var_list = GENERIC_LIST + if comparison != "all": + var_list = comparison_dict[comparison] + dts = get_dicts() for v in var_list: if "normres" in v: continue values = np.zeros(n) # replaced scipy with numpy - if v not in get_dicts()["DICT_VAR_TYPE"]: + if v not in dts["DICT_VAR_TYPE"]: try: - eval(mfile_list[0].data[v].get_scan(-1)) + eval(mfile_list[0].get(v, scan=-1)) except NameError: pass except TypeError: for m in range(len(mfile_list)): - values[m] = mfile_list[m].data[v].get_scan(-1) + values[m] = mfile_list[m].get(v, scan=-1) except SyntaxError: pass elif ( - get_dicts()["DICT_VAR_TYPE"][v] == "real_variable" - or get_dicts()["DICT_VAR_TYPE"][v] == "int_variable" + dts["DICT_VAR_TYPE"][v] == "real_variable" 
+ or dts["DICT_VAR_TYPE"][v] == "int_variable" ): for m in range(len(mfile_list)): values[m] = mfile_list[m].data[v].get_scan(-1) @@ -348,150 +344,32 @@ def main(arg): norm_vals = [] if values[0] != 0 and isfinite(values[0]): norm_vals = values / values[0] - # else: - # print(key, values[0]) if len(norm_vals) >= 1: key = v.strip(".").strip(" ") - des = get_dicts()["DICT_DESCRIPTIONS"].get(key, "-") - a = norm_vals >= 1.0 + arg.acc / 100.0 - b = norm_vals <= 1.0 - arg.acc / 100.0 - if a[1]: - diff_list.append(v) - line = ( - BColors.ENDC - + v - + "\t" - + des - + "\t" - + str(values[0]) - + "\t" - + str(values[1]) - + "\t" - + BColors.FAIL - + str(round((norm_vals[1] - 1) * 100.0, 2)) - + " %" - ) - wline = ( - v - + "\t" - + des - + "\t" - + "\t" - + str(values[0]) - + "\t" - + str(values[1]) - + "\t" - + str(round((norm_vals[1] - 1) * 100.0, 2)) - + " %" - ) - print(line) - print_counter += 1 - if arg.save: - with open("comp.txt", "a") as ofile: - ofile.write(wline + "\n") - elif b[1]: + des = dts["DICT_DESCRIPTIONS"].get(key, "-") + a = norm_vals >= 1.0 + acc / 100.0 + b = norm_vals <= 1.0 - acc / 100.0 + rounded = round((norm_vals[1] - 1) * 100.0, 2) + vals = f"{values[0]}\t{values[1]}" + if a[1] or b[1]: diff_list.append(v) - line = ( - BColors.ENDC - + v - + "\t" - + des - + "\t" - + str(values[0]) - + "\t" - + str(values[1]) - + "\t" - + BColors.FAIL - + str(round((norm_vals[1] - 1) * 100.0, 2)) - + " %" - ) - wline = ( - v - + "\t" - + des - + "\t" - + str(values[0]) - + "\t" - + str(values[1]) - + "\t" - + str(round((norm_vals[1] - 1) * 100.0, 2)) - + " %" - ) - print(line) - print_counter += 1 - if arg.save: - with open("comp.txt", "a") as ofile: - ofile.write(wline + "\n") + rv = f"{BColors.FAIL}{rounded} %" else: within_list.append(v) - line = ( - BColors.ENDC - + v - + "\t" - + des - + "\t" - + str(values[0]) - + "\t" - + str(values[1]) - + "\t" - + str(round((norm_vals[1] - 1) * 100.0, 2)) - + " %" - ) - wline = ( - v - + "\t" - + des - + "\t" - + 
str(values[0]) - + "\t" - + str(values[1]) - + "\t" - + str(round((norm_vals[1] - 1) * 100.0, 2)) - + " %" - ) - if arg.verbose: - print(line) - print_counter += 1 + rv = f"{rounded} %" + + wline = "\t".join([v, f"{des}\t" if a[1] else des, vals, f"{rounded} %"]) + if a[1] or b[1] or verbose: + print("\t".join([f"{BColors.ENDC}{v}", des, vals, rv])) + print_counter += 1 + if save: with open("comp.txt", "a") as ofile: ofile.write(wline + "\n") - if arg.baseline and arg.acc >= 10.0: + print(BColors.ENDC) + if comparison == "baseline" and acc >= 10.0: if print_counter == 0: sys.exit(0) else: - sys.exit(f"Differences in baseline output by more than {arg.acc}%") - - -if __name__ == "__main__": - parser = argparse.ArgumentParser( - description="Produce a comparison " - "between two PROCESS " - "MFILEs. User Can specify " - "level of differences to show. " - "For info contact " - "james.morris2@ccfe.ac.uk" - ) - - parser.add_argument("-f", metavar="f", type=str, nargs="+", help="Files to compare") - - parser.add_argument( - "-s", "--save", help="Save output to file called comp.txt", action="store_true" - ) - - parser.add_argument("--acc", type=float, default=5.0) - - parser.add_argument("--verbose", type=float, default=0.0) - - parser.add_argument("--defaults", action="store_true") - - parser.add_argument("--baseline", action="store_true") - - parser.add_argument("--blanket", action="store_true") - - parser.add_argument("--generic", action="store_true") - - args = parser.parse_args() - - main(args) - print(BColors.ENDC) + sys.exit(f"Differences in baseline output by more than {acc}%") diff --git a/process/core/io/mfile_to_csv.py b/process/core/io/mfile/mfile_to_csv.py similarity index 59% rename from process/core/io/mfile_to_csv.py rename to process/core/io/mfile/mfile_to_csv.py index 72d3e1ccf..f43eb32f4 100644 --- a/process/core/io/mfile_to_csv.py +++ b/process/core/io/mfile/mfile_to_csv.py @@ -12,51 +12,40 @@ .csv will be saved to the directory of the input file """ -# 
== import modules == -# standard python modules -import argparse -import csv import json +from collections.abc import Sequence from pathlib import Path, PurePath +<<<<<<<< HEAD:process/core/io/mfile_to_csv.py # PROCESS-specific modules from process.core.io.mfile import MFile - -# == define functions == - - -def parse_args(args): - """Parse supplied arguments. - - Parameters - ---------- - args : list, None - arguments to parse - - Returns - ------- - Namespace - parsed arguments - """ - parser = argparse.ArgumentParser( - description="Read from a PROCESS MFILE and write values into a csv." - ) - parser.add_argument( - "-f", - "--mfile", - type=str, - default="MFILE.DAT", - help="Specify input mfile name, default = MFILE.DAT", - ) - parser.add_argument( - "-v", - "--varfile", - type=str, - default="mfile_to_csv_vars.json", - help="Specify file holding variable names, default = mfile_to_csv_vars.json", - ) - - return parser.parse_args(args) +======== +import numpy as np +>>>>>>>> 71bdc991 (Overhall CI):process/core/io/mfile/mfile_to_csv.py + +from process.io.mfile.mfile import MFile + +default_vars = ( + "minmax", + "p_hcd_injected_max", + "p_plant_electric_net_required_mw", + "ripple_b_tf_plasma_edge_max", + "t_burn_min", + "alstroh", + "sig_tf_wp_max", + "dx_tf_turn_steel", + "f_j_cs_start_pulse_end_flat_top", + "alstroh", + "rmajor", + "dr_tf_inboard", + "dr_cs", + "c_tf_turn", + "dr_tf_wp_with_insulation", + "dr_cryostat", + "dr_shld_outboard", + "dz_divertor", + "rmajor", +) def get_vars(vfile="mfile_to_csv_vars.json"): @@ -110,10 +99,11 @@ def read_mfile(mfilename="MFILE.DAT", variables=None): print(f"Variable '{var_name}' not in MFILE. 
Skipping and moving on...") else: # In case of a file containing multiple scans, (scan = -1) uses the last scan value - var_val = m_file.data[var_name].get_scan(-1) - description = m_file.data[var_name].var_description - var_data = (description, var_name, var_val) - output_vars.append(var_data) + output_vars.append(( + m_file.data[var_name].var_description, + var_name, + m_file.get(var_name, scan=-1), + )) return output_vars @@ -153,47 +143,34 @@ def write_to_csv(csv_outfile, output_data=None): output_data : (Default value = None) """ - if output_data is None: - output_data = [] - with open(csv_outfile, "w") as csv_file: - print("Writing to csv file:", csv_outfile) - writer = csv.writer(csv_file, delimiter=",") - writer.writerow(["Description", "Varname", "Value"]) - - for vardesc in output_data: - writer.writerow(vardesc) + print("Writing to csv file:", csv_outfile) + np.savetxt( + csv_outfile, + output_data or [], + fmt="%.5e", + delimiter=",", + header=", ".join(["Description", "Varname", "Value"]), + footer="", + comments="", + ) -def main(args=None): +def to_csv(mfile, variables: Sequence[str] | str = default_vars): """Extract certain variables from an MFILE.DAT and output to CSV. 
Parameters ---------- - args : list, optional - optional command-line args for testing, defaults to None + mfile: + Mfile to convert + variables: + variable file with variables to extract """ - # read from command line inputs - args = parse_args(args) - - # read list of required variables from input json file - jvars = get_vars(args.varfile) - - # read required data from input mfile - output_data = read_mfile(args.mfile, jvars) - - # identify save location - output_file = get_savenamepath(args.mfile) - - # write to csv - write_to_csv(output_file, output_data) + write_to_csv( + get_savenamepath(mfile), + read_mfile( + mfile, get_vars(variables) if isinstance(variables, str) else variables + ), + ) # write final line to screen print("Complete.") - - -# == program == - -if __name__ == "__main__": - main() - -# == end == diff --git a/process/core/io/mfile_to_csv_vars.json b/process/core/io/mfile/mfile_to_csv_vars.json similarity index 100% rename from process/core/io/mfile_to_csv_vars.json rename to process/core/io/mfile/mfile_to_csv_vars.json diff --git a/process/core/io/mfile_utils.py b/process/core/io/mfile_utils.py deleted file mode 100644 index b81d12ccc..000000000 --- a/process/core/io/mfile_utils.py +++ /dev/null @@ -1,43 +0,0 @@ -import re -from pathlib import Path - -import process.core.solver.iteration_variables as iteration_variables -import process.data_structure as data_structure -from process.main import SingleRun - - -def get_mfile_initial_ixc_values(file_path: Path): - """Initialise the input file and obtain the initial values of the iteration variables - - Parameters - ---------- - file_path : - The path to the MFile to get the initial iteration variable values from. - - Notes - ----- - This method initialises a SingleRun. At present, this involves mutating the global - data structure so it is not safe to run this method during a PROCESS run. 
- """ - SingleRun(file_path.as_posix()) - iteration_variables.load_iteration_variables() - - iteration_variable_names = [] - iteration_variable_values = [] - - for i in range(data_structure.numerics.nvar): - ivar = data_structure.numerics.ixc[i].item() - - itv = iteration_variables.ITERATION_VARIABLES[ivar] - - iteration_variable_names.append(itv.name) - if array := re.match(r"(\w+)\(([0-9]+)\)", itv.name): - var_name = array.group(1) - index = array.group(2) - iteration_variable_values.append( - getattr(itv.module, var_name)[int(index) - 1] - ) - else: - iteration_variable_values.append(getattr(itv.module, itv.name)) - - return iteration_variable_names, iteration_variable_values diff --git a/process/core/io/plot/cli.py b/process/core/io/plot/cli.py new file mode 100644 index 000000000..2d28ffdb3 --- /dev/null +++ b/process/core/io/plot/cli.py @@ -0,0 +1,255 @@ +from pathlib import Path + +import click + +from process.io.plot.plot_proc import setup_plot +from process.io.plot.plot_stress_tf import plot_stress +from process.io.tools import LazyGroup, mfile_arg, mfile_opt + + +@click.group( + cls=LazyGroup, + lazy_subcommands={ + "costs": "process.io.plot.costs.cli.costs", + # "sankey": "process.io.plot.sankey.cli", + }, +) +def plot(): + """Plotting utilities for PROCESS""" + + +@plot.command("scans", no_args_is_help=True) +@mfile_arg +# At least one output variable must be supplied in order to plot +@click.option( + "-yv", + "--y-vars", + required=True, + help=( + "Select the output variables\nMore than one output can be plotted " + "eg: -yv 'var1 var2'\nA separate plot will be created for each " + "inputs" + ), +) +@click.option( + "-yv2", + "--y-vars2", + default="", + help=( + "Select the 2nd axis output variable\n " + "eg: -yv2 'var'\n 2nd variable will be plotted on shared figure " + "inputs" + ), +) +@click.option( + "-o", + "--outputdir", + default=Path.cwd(), + help="Output directory for plots, defaults to current working directory.", +) +@click.option( + 
"-out", + "--term_output", + is_flag=True, + help="Option to show scans values on terminal", +) +@click.option( + "-sf", + "--save-format", + nargs="?", + default="pdf", + help="Output format (default='pdf') ", +) +@click.option( + "-afs", + "--axis-font-size", + nargs="?", + default=18, + help="Axis label font size selection (default=18)", + type=int, +) +@click.option( + "-ats", + "--axis-ticklabel-size", + nargs="?", + default=16, + help="Axis tick label font size selection (default=16)", + type=int, +) +@click.option( + "-x%", + "--x-axis-percent", + is_flag=True, + help=("Used to set the x axis ticks to percentages in place of absolute \nvalues."), +) +@click.option( + "-xm", + "--x-axis-max", + default="", + help=( + "Used to set the x value corresponding to 100 percent when \nconverting from absolute to percent values." + ), +) +@click.option( + "-xr", + "--x-axis-range", + default="", + help=("Used to set the range for x axis"), +) +@click.option( + "-y%", + "--y-axis-percent", + is_flag=True, + help=("Used to set the y axis ticks to percentages in place of absolute \nvalues."), +) +@click.option( + "-y2%", + "--y-axis2-percent", + is_flag=True, + help=( + "Used to set the y axis ticks to percentages in place of absolute \nvalues. For the twinned axis if present." + ), +) +@click.option( + "-ym", + "--y-axis-max", + default="", + help=( + "Used to set the y value corresponding to 100 percent when \nconverting from absolute to percent values." + ), +) +@click.option( + "-ym2", + "--y-axis2-max", + default="", + help=( + "Used to set the y value corresponding to 100 percent when \nconverting from absolute to percent values." + "For the twinned axis if present." + ), +) +@click.option( + "-yr", + "--y-axis-range", + default="", + help=("Used to set the range for y axis"), +) +@click.option( + "-yr2", + "--y-axis2-range", + default="", + help=("Used to set the range for y axis. 
For the twinned axis if present."), +) +@click.option( + "-ln", + "--label-name", + default="", + help=( + "Label names for plot legend. If multiple input files used then \n" + "list the same number of label names eg: -nl 'leg1 leg2'\n" + "(default = MFile file name) " + ), +) +@click.option( + "-2DC", + "--two-dimensional-contour", + is_flag=True, + help=( + "Option to plot 2D scans as a coloured contour plot instead of a line plot \n " + "Note: Non convergent points will show up with a value of zero \n " + "Note: The scan paramters must both be in increasing orderl \n " + ), +) +@click.option( + "-stc", + "--stack-plots", + is_flag=True, + help=( + "Option to plot multiple 1D plots in a column of subplots \n " + "Variables will be plotted in order of input" + ), +) +def plot_scans(): + """Plot optimisation information""" + + return parser.parse_args(args) + + +@plot.command("tf-stress", no_args_is_help=True) +@click.option( + "-p", + "--plot-selec", + multiple=True, + default="all", + type=click.Choice(["all", "sig", "disp", "strain", "sm_sig"]), + help="""\b +Plot selection string : +- If it containts 'sig' -> Stress radial dependency +- If it containts 'strain' -> Strain +- If it containts 'disp' -> Displacement +- If it containts 'all' -> all the mentioned plots (default value) +""", +) +@click.option( + "-sf", + "--save-format", + default="pdf", + help="output format (default='pdf') ", +) +@click.option( + "-as", + "--axis-font-size", + default=18, + help="Axis label font size selection (default=18)", + type=int, +) +@click.option( + "-out", + "--term-output", + is_flag=True, + help="Option to show stress on terminal output", +) +@click.option( + "-f", + "--input-file", + default="SIG_TF.json", + help="specify input file path (default = SIG_TF.json)", +) +def plot_tf_stress(plot_selec, save_format, axis_font_size, term_output, input_file): + """TF coil inboard mid-plane stress/strain summary plots""" + plot_stress(plot_selec, save_format, axis_font_size, 
term_output, input_file) + + +@plot.command("summary", no_args_is_help=True) +@mfile_opt(exists=True) +@click.option("-n", "scan", type=int, default=-1, help="Which scan to plot?") +@click.option( + "-d", + "--DEMO-ranges", + "demo_ranges", + help="Uses the DEMO dimensions as ranges for all graphics", + is_flag=True, +) +@click.option( + "-c", + "--colour", + help=( + "Which colour scheme to use for cross-section plots\n" + "1: Original PROCESS (default)\n" + "2: BLUEMIRA" + ), + default=1, + type=click.Choice([1, 2]), +) +@click.option( + "-o", + "--output-format", + help=( + "Output file format\npdf: pdf output (default)\npng: png output\nnone: no output file written" + ), + default="pdf", + type=click.Choice(["pdf", "png", "none"]), +) +@click.option("-s", "--show", help="show plot", is_flag=True) +def plot_proc(mfile, scan, demo_ranges, colour, output_format, show): + """Produces a summary of the PROCESS MFILE output.""" + setup_plot(mfile, scan, demo_ranges, colour, output_format, show) diff --git a/process/core/io/plot/costs/cli.py b/process/core/io/plot/costs/cli.py new file mode 100644 index 000000000..d282c52c7 --- /dev/null +++ b/process/core/io/plot/costs/cli.py @@ -0,0 +1,71 @@ +import sys + +import click + +import process.io.mfile.mfile as mf +from process.io.plot.costs.costs_bar import cost_comp_1990, cost_comp_2014 +from process.io.plot.costs.costs_pie import new_cost_model, orig_cost_model +from process.io.tools import mfile_arg, mfile_opt, save + +save = save("Save figure") + + +@click.group() +def costs(): + """Cost plotting utilities""" + + +@costs.command("pie", no_args_is_help=True) +@mfile_opt(exists=True) +@save +def pie_plot(mfile, save): + """Displays the cost breakdown as a pie chart.""" + + m_file = mf.MFile(mfile) + + # Check which cost model is being used + if "c21" in m_file.data: + orig_cost_model(m_file, save) + elif "s01" in m_file.data: + new_cost_model(m_file, save) + else: + print("ERROR: Cannot identify cost data, check MFILE!") 
+ + +@costs.command("bar", no_args_is_help=True) +@mfile_arg +@save +@click.option( + "-inf", + "--inflate", + type=float, + help="Inflation Factor (multiplies costs)", + default=1.0, +) +def bar_plot(mfile, save, inflate): + """Displays the cost breakdown as a bar chart. + + Multiple MFILEs can be given and will be plotted on the same chart. + """ + # Get file names + mfile_list = [mf.MFile(filename=item) for item in mfile] + + # Check which cost model is being used + if "c21" in mfile_list[0].data: + # Check all MFILEs use original cost model + for item in mfile_list: + if "c21" not in item.data: + sys.exit("ERROR: Inconsistent cost models used between MFILEs!") + + cost_comp_1990(mfile_list=mfile_list, inflate=inflate, save=save) + + elif "s01" in mfile_list[0].data: + # Check all MFILEs use new cost model + for item in mfile_list: + if "s01" not in item.data: + sys.exit("ERROR: Inconsistent cost models used between MFILEs!") + + cost_comp_2014(mfile_list=mfile_list, inflate=inflate, save=save) + + else: + print("ERROR: Failed to identify cost data, check MFILE!") diff --git a/process/core/io/plot/costs/costs_bar.py b/process/core/io/plot/costs/costs_bar.py index b2e65646c..1e556653a 100644 --- a/process/core/io/plot/costs/costs_bar.py +++ b/process/core/io/plot/costs/costs_bar.py @@ -5,66 +5,91 @@ """ -# Imported libraries -import argparse -import sys +from operator import itemgetter import matplotlib.pyplot as plt import numpy as np -import process.core.io.mfile as mf +import process.io.mfile.mfile as mf -def comp_orig(args, mfile_list: list[str], inflate: float): - """Plot bar chart for the orginal 1990 cost model. 
+def _format_fig(ax, fig, label, save, filename, index, inflate, ylabel_suffix, n_mfiles): + ax.set_xticks(index + (n_mfiles - 1) * 0.5 * (0.7 / n_mfiles)) + + ax.set_xticklabels(label, rotation=90) + ax.legend() + + if inflate: + ax.set_ylabel(f"{inflate:.2f} x {ylabel_suffix}") + else: + ax.set_ylabel(ylabel_suffix) + + fig.tight_layout() + + if save: + fig.savefig(filename) + else: + plt.show() + + +def cost_comp_1990(mfile_list: list[mf.MFile], inflate: float, save: bool) -> None: + """ + Plot bar chart for the orginal 1990 cost model. Two plots produced: (1) Breakdown of the direct costs and (2) Direct, indirect, etc. """ - # Setup figures - labels = [ - "Magnets and\n Power Conditioning", - "Site and Buildings", - "Maintenance\n Equipment", - "Power Injection", - "Reactor Systems", - "Fuel Handling\n System", - "Instrumentation\n and Control", - "Turbine Plant\n Equipment", - "Heat Transport\n System", - "Other", - ] - labels2 = [ - "Plant Direct\n Cost", - "Indirect\n Cost", - "Total\n Contingency", - "Interest during\n Construction", - ] - index = np.arange(len(labels)) - index2 = np.arange(len(labels2)) - bar_width = 0.7 / len(mfile_list) + fnames = ["direct_cost_bar.pdf", "cost_bar.pdf"] + n_mfiles = len(mfile_list) + bar_width = 0.7 / n_mfiles + ylabel_suffix = "(1990 M$)" fig, ax = plt.subplots() fig2, ax2 = plt.subplots() + variables = ( + "c21", # Site and Buildings + "c221", # Reactor Systems + "c222", # Magnets + "c223", # Power Injection + "c224", # Vacuum Systems + "c225", # Power Conditioning + "c226", # Heat Transport System + "c227", # Fuel Handling System + "c228", # Instrumentation and Control + "c229", # Maintenance Equipment + "c23", # Turbine Plant Equipment + "c24", # Electric Plant Equipment + "c25", # Miscellaneous Plant Equipment + "c26", # Heat Rejection System + "cdirt", # Plant Direct Cost + "c9", # Indirect Cost + "ccont", # Total Contingency + "moneyint", # Interest during Construction + ) + labels = [ + [ + "Magnets and\n Power 
Conditioning", + "Site and Buildings", + "Maintenance\n Equipment", + "Power Injection", + "Reactor Systems", + "Fuel Handling\n System", + "Instrumentation\n and Control", + "Turbine Plant\n Equipment", + "Heat Transport\n System", + "Other", + ], + [ + "Plant Direct\n Cost", + "Indirect\n Cost", + "Total\n Contingency", + "Interest during\n Construction", + ], + ] + index = np.arange(len(labels[0])) + index2 = np.arange(len(labels[1])) + # Read cost data for identity, item in enumerate(mfile_list): - cost = np.zeros(18) - cost[0] = item.data["c21"].get_scan(-1) # Site and Buildings - cost[1] = item.data["c221"].get_scan(-1) # Reactor Systems - cost[2] = item.data["c222"].get_scan(-1) # Magnets - cost[3] = item.data["c223"].get_scan(-1) # Power Injection - cost[4] = item.data["c224"].get_scan(-1) # Vacuum Systems - cost[5] = item.data["c225"].get_scan(-1) # Power Conditioning - cost[6] = item.data["c226"].get_scan(-1) # Heat Transport System - cost[7] = item.data["c227"].get_scan(-1) # Fuel Handling System - cost[8] = item.data["c228"].get_scan(-1) # Instrumentation and Control - cost[9] = item.data["c229"].get_scan(-1) # Maintenance Equipment - cost[10] = item.data["c23"].get_scan(-1) # Turbine Plant Equipment - cost[11] = item.data["c24"].get_scan(-1) # Electric Plant Equipment - cost[12] = item.data["c25"].get_scan(-1) # Miscellaneous Plant Equipment - cost[13] = item.data["c26"].get_scan(-1) # Heat Rejection System - cost[14] = item.data["cdirt"].get_scan(-1) # Plant Direct Cost - cost[15] = item.data["c9"].get_scan(-1) # Indirect Cost - cost[16] = item.data["ccont"].get_scan(-1) # Total Contingency - cost[17] = item.data["moneyint"].get_scan(-1) # Interest during Construction + cost = np.array(item.get_variables(*variables, scan=-1), dtype=float) # Explain why moneyint is missing if "moneyint" not in item.data: @@ -72,61 +97,47 @@ def comp_orig(args, mfile_list: list[str], inflate: float): "Interest during construction (moneyint) is only calculated for 
ireactor = 1" ) - # Inflate costs using value parsed if specified - if args.inf: - cost = inflate * cost + if inflate: # Inflate costs using value parsed if specified + cost *= inflate # Simplify grouping sizes = [ cost[2] + cost[5], - cost[0], - cost[9], - cost[3], - cost[1], - cost[7], - cost[8], - cost[10], - cost[6], - cost[4] + cost[11] + cost[12] + cost[13], + *itemgetter(0, 9, 3, 1, 7, 8, 6)(cost), + sum(itemgetter(4, 11, 12, 13)(cost)), ] # Direct, indirect costs etc. for second plot - sizes2 = [cost[14], cost[15], cost[16], cost[17]] + sizes2 = itemgetter(14, 15, 16, 17)(cost) # Plot bar charts - ax.bar(index + identity * bar_width, sizes, bar_width, label=args.f[identity]) - ax2.bar(index2 + identity * bar_width, sizes2, bar_width, label=args.f[identity]) + ax.bar(index + identity * bar_width, sizes, bar_width, label=item.filename) + ax2.bar(index2 + identity * bar_width, sizes2, bar_width, label=item.filename) - # Plot labels - ax.set_xticks(index + (len(mfile_list) - 1) * 0.5 * bar_width) - ax2.set_xticks(index2 + (len(mfile_list) - 1) * 0.5 * bar_width) - ax.set_xticklabels(labels, rotation=90) - ax2.set_xticklabels(labels2, rotation=90) - ax.legend() - ax2.legend() - - # Adjust axis label depending on if inflation factor is used - if args.inf: - ax.set_ylabel(f"{inflate:.2f} x (1990 M$)") - ax2.set_ylabel(f"{inflate:.2f} x (1990 M$)") - else: - ax.set_ylabel("1990 M$") - ax2.set_ylabel("1990 M$") - - fig.tight_layout() - fig2.tight_layout() - - # Save plots if option selected - if args.save: - fig.savefig("direct_cost_bar.pdf") - fig2.savefig("cost_bar.pdf") - else: - plt.show() + for _ax, _fig, _lab, _save_name, _ind in zip( + [ax, ax2], [fig, fig2], labels, fnames, [index, index2], strict=True + ): + _format_fig( + _ax, _fig, _lab, save, _save_name, _ind, inflate, ylabel_suffix, n_mfiles + ) -def comp_new(args, mfile_list: list[str], inflate: float): +def cost_comp_2014(mfile_list: list[mf.MFile], inflate: float, save: bool): """Plot bar chart for 
the new 2014 cost model.""" - # Setup figures + variables = ( + "s09", # Buildings + "s13", # Land + "s21", # TF Coils + "s27", # First wall and blanket + "s31", # Active maintenance and remote handling + "s34", # Vacuum vessel and liquid nitrogen plant + "s35", # System for converting heat to electricity + "s36", # CS and PF coils + "s51", # Cryoplant and distribution + "s52", # Electrical power supply and distribution + "s59", # Additional project expenditure + "s61", # Remaining subsystems + ) labels = [ "Land and Buildings", "TF Coils", @@ -139,118 +150,30 @@ def comp_new(args, mfile_list: list[str], inflate: float): "Additional project\n expenditure", "Other subsystems", ] + n_mfiles = len(mfile_list) + bar_width = 0.7 / n_mfiles index = np.arange(len(labels)) - bar_width = 0.7 / len(mfile_list) fig, ax = plt.subplots() # Read cost data for identity, item in enumerate(mfile_list): - cost = np.zeros(12) - cost[0] = item.data["s09"].get_scan(-1) # Buildings - cost[1] = item.data["s13"].get_scan(-1) # Land - cost[2] = item.data["s21"].get_scan(-1) # TF Coils - cost[3] = item.data["s27"].get_scan(-1) # First wall and blanket - cost[4] = item.data["s31"].get_scan(-1) # Active maintenance and remote handling - cost[5] = item.data["s34"].get_scan( - -1 - ) # Vacuum vessel and liquid nitrogen plant - cost[6] = item.data["s35"].get_scan( - -1 - ) # System for converting heat to electricity - cost[7] = item.data["s36"].get_scan(-1) # CS and PF coils - cost[8] = item.data["s51"].get_scan(-1) # Cryoplant and distribution - cost[9] = item.data["s52"].get_scan( - -1 - ) # Electrical power supply and distribution - cost[10] = item.data["s59"].get_scan(-1) # Additional project expenditure - cost[11] = item.data["s61"].get_scan(-1) # Remaining subsystems + cost = np.array(item.get_variables(*variables, scan=-1), dtype=float) # Inflate costs using value parsed if specified - if args.inf: - cost = inflate * cost + if inflate: + cost *= inflate # Split up Remaining Subsystems as 
it is too large sizes = [ cost[0] + cost[1], - cost[2], - cost[3], - cost[4], - cost[5], - cost[7], - cost[8], - cost[9], - cost[10], + *cost[2:6], + *cost[7:11], cost[6] + cost[11] - cost[7] - cost[8] - cost[9] - cost[10], ] # Plot bar chart - ax.bar(index + identity * bar_width, sizes, bar_width, label=args.f[identity]) - - # Plot labels - ax.set_xticks(index + (len(mfile_list) - 1) * 0.5 * bar_width) - ax.set_xticklabels(labels, rotation=90) - ax.legend() - - # Adjust axis label depending on if inflation factor is used - if args.inf: - ax.set_ylabel(f"{inflate:.2f} x (2014 M$)") - else: - ax.set_ylabel("2014 M$") - - fig.tight_layout() - - # Save plots if option selected - if args.save: - fig.savefig("cost_bar.pdf") - else: - plt.show() - - -def main(args=None): - # Setup command line arguments - parser = argparse.ArgumentParser( - description="Displays the cost breakdown as a bar chart. " - "Multiple MFILEs can be given and will be plotted on the same chart. " - ) + ax.bar(index + identity * bar_width, sizes, bar_width, label=item.filename) - parser.add_argument( - "-f", metavar="f", type=str, nargs="+", help="specify the MFILE(s) to plot" + _format_fig( + ax, fig, labels, save, "cost_bar.pdf", index, inflate, "(2014 M$)", n_mfiles ) - - parser.add_argument("-s", "--save", help="save figure", action="store_true") - - parser.add_argument( - "-inf", type=float, help="Inflation Factor (multiplies costs)", default=1.0 - ) - - args = parser.parse_args(args) - - # Get inflation factor if specified - inflate = args.inf - - # Get file names - mfile_list = [mf.MFile(filename=item) for item in args.f] - - # Check which cost model is being used - if "c21" in mfile_list[0].data: - # Check all MFILEs use original cost model - for item in mfile_list: - if "c21" not in item.data: - sys.exit("ERROR: Inconsistent cost models used between MFILEs!") - - comp_orig(args=args, mfile_list=mfile_list, inflate=inflate) - - elif "s01" in mfile_list[0].data: - # Check all MFILEs use new 
cost model - for item in mfile_list: - if "s01" not in item.data: - sys.exit("ERROR: Inconsistent cost models used between MFILEs!") - - comp_new(args=args, mfile_list=mfile_list, inflate=inflate) - - else: - print("ERROR: Failed to identify cost data, check MFILE!") - - -if __name__ == "__main__": - main() diff --git a/process/core/io/costs_pie.py b/process/core/io/plot/costs/costs_pie.py similarity index 85% rename from process/core/io/costs_pie.py rename to process/core/io/plot/costs/costs_pie.py index 6de9b0b1f..e6a187cc6 100644 --- a/process/core/io/costs_pie.py +++ b/process/core/io/plot/costs/costs_pie.py @@ -2,15 +2,14 @@ Code to display the cost breakdown as a pie chart """ -# Imported libraries -import argparse - import matplotlib.pyplot as plt +<<<<<<<< HEAD:process/core/io/costs_pie.py import process.core.io.mfile as mf +======== +>>>>>>>> 71bdc991 (Overhall CI):process/core/io/plot/costs/costs_pie.py - -def orig_cost_model(m_file, args): +def orig_cost_model(m_file, save): """Plot pie chart for the orginal 1990 cost model. Two plots produced: (1) Breakdown of the direct costs and (2) Direct, indirect, etc. """ @@ -113,14 +112,14 @@ def orig_cost_model(m_file, args): ax2.axis("equal") # Equal aspect ratio ensures that pie is drawn as a circle. # Save figures if option selected - if args.save: + if save: fig1.savefig("direct_cost_pie.pdf") fig2.savefig("cost_pie.pdf") else: plt.show() -def new_cost_model(m_file, args): +def new_cost_model(m_file, save): """Plot pie chart for the new 2014 cost model.""" # Read Cost Values s09 = m_file.data["s09"].get_scan(-1) # Buildings @@ -169,41 +168,7 @@ def new_cost_model(m_file, args): ax1.axis("equal") # Equal aspect ratio ensures that pie is drawn as a circle. 
# Save figures if option selected - if args.save: + if save: fig1.savefig("cost_pie.pdf") else: plt.show() - - -def main(args=None): - # Setup command line arguments - parser = argparse.ArgumentParser( - description="Displays the cost breakdown as a pie chart. " - ) - - parser.add_argument( - "-f", - metavar="MFILE", - type=str, - default="MFILE.DAT", - help="specify the MFILE (default=MFILE.DAT)", - ) - - parser.add_argument("-s", "--save", help="save figure", action="store_true") - - args = parser.parse_args(args) - - m_file = mf.MFile(args.f) - - # Check which cost model is being used - if "c21" in m_file.data: - orig_cost_model(m_file, args) - elif "s01" in m_file.data: - new_cost_model(m_file, args) - else: - print("ERROR: Cannot identify cost data, check MFILE!") - - -# Main code -if __name__ == "__main__": - main() diff --git a/process/core/io/alpha_particle.png b/process/core/io/plot/images/alpha_particle.png similarity index 100% rename from process/core/io/alpha_particle.png rename to process/core/io/plot/images/alpha_particle.png diff --git a/process/core/io/blanket_with_coolant.png b/process/core/io/plot/images/blanket_with_coolant.png similarity index 100% rename from process/core/io/blanket_with_coolant.png rename to process/core/io/plot/images/blanket_with_coolant.png diff --git a/process/core/io/divertor.png b/process/core/io/plot/images/divertor.png similarity index 100% rename from process/core/io/divertor.png rename to process/core/io/plot/images/divertor.png diff --git a/process/core/io/fw.png b/process/core/io/plot/images/fw.png similarity index 100% rename from process/core/io/fw.png rename to process/core/io/plot/images/fw.png diff --git a/process/core/io/generator.png b/process/core/io/plot/images/generator.png similarity index 100% rename from process/core/io/generator.png rename to process/core/io/plot/images/generator.png diff --git a/process/core/io/hcd_injector.png b/process/core/io/plot/images/hcd_injector.png similarity index 100% 
rename from process/core/io/hcd_injector.png rename to process/core/io/plot/images/hcd_injector.png diff --git a/process/core/io/neutron.png b/process/core/io/plot/images/neutron.png similarity index 100% rename from process/core/io/neutron.png rename to process/core/io/plot/images/neutron.png diff --git a/process/core/io/plasma.png b/process/core/io/plot/images/plasma.png similarity index 100% rename from process/core/io/plasma.png rename to process/core/io/plot/images/plasma.png diff --git a/process/core/io/pylon.png b/process/core/io/plot/images/pylon.png similarity index 100% rename from process/core/io/pylon.png rename to process/core/io/plot/images/pylon.png diff --git a/process/core/io/turbine.png b/process/core/io/plot/images/turbine.png similarity index 100% rename from process/core/io/turbine.png rename to process/core/io/plot/images/turbine.png diff --git a/process/core/io/vv.png b/process/core/io/plot/images/vv.png similarity index 100% rename from process/core/io/vv.png rename to process/core/io/plot/images/vv.png diff --git a/process/core/io/plot_proc.py b/process/core/io/plot/plot_proc.py similarity index 99% rename from process/core/io/plot_proc.py rename to process/core/io/plot/plot_proc.py index 40a1b1139..3001b18ac 100644 --- a/process/core/io/plot_proc.py +++ b/process/core/io/plot/plot_proc.py @@ -7,12 +7,10 @@ generation script imports, and inspects, process. 
""" -import argparse import json import os import pathlib import textwrap -from argparse import RawTextHelpFormatter from dataclasses import dataclass from importlib import resources from typing import Any, Literal @@ -31,11 +29,19 @@ import process.core.constants as constants import process.core.io.mfile as mf import process.data_structure.pfcoil_variables as pfcoil_variables +<<<<<<<< HEAD:process/core/io/plot_proc.py +======== +import process.io.mfile.mfile as mf +>>>>>>>> 71bdc991 (Overhall CI):process/core/io/plot/plot_proc.py import process.models.physics.confinement_time as confine import process.models.tfcoil.superconducting as sctf from process.core.io.mfile import MFileErrorClass from process.core.solver.objectives import OBJECTIVE_NAMES from process.data_structure import impurity_radiation_module, physics_variables +<<<<<<<< HEAD:process/core/io/plot_proc.py +======== +from process.io.mfile.mfile import MFileErrorClass +>>>>>>>> 71bdc991 (Overhall CI):process/core/io/plot/plot_proc.py from process.models.build import Build from process.models.geometry.blanket import ( blanket_geometry_double_null, @@ -78,70 +84,6 @@ class RadialBuild: cumulative_radial: dict[str, float] -def parse_args(args): - """Parse supplied arguments. - - Parameters - ---------- - args : list, None - arguments to parse - - Returns - ------- - Namespace - parsed arguments - """ - # Setup command line arguments - parser = argparse.ArgumentParser( - description="Produces a summary of the PROCESS MFILE output, using the MFILE. 
" - "For info please see https://github.com/ukaea/PROCESS?tab=readme-ov-file#contacts ", - formatter_class=RawTextHelpFormatter, - ) - - parser.add_argument( - "-f", - metavar="FILENAME", - type=str, - default="", - help="specify input/output file path", - ) - parser.add_argument("-s", "--show", help="show plot", action="store_true") - - parser.add_argument("-n", type=int, help="Which scan to plot?") - - parser.add_argument( - "-d", - "--DEMO-ranges", - help="Uses the DEMO dimensions as ranges for all graphics", - action="store_true", - ) - - parser.add_argument( - "-c", - "--colour", - type=int, - help=( - "Which colour scheme to use for cross-section plots\n" - "1: Original PROCESS (default)\n" - "2: BLUEMIRA" - ), - default=1, - choices=[1, 2], - ) - parser.add_argument( - "-o", - "--output-format", - type=str, - help=( - "Output file format\npdf: pdf output (default)\npng: png output\nnone: no output file written" - ), - default="pdf", - choices=["pdf", "png", "none"], - ) - - return parser.parse_args(args) - - # Colours are PROCESS defualt, BLUEMIRA SOLENOID_COLOUR = ["pink", "#1764ab"] CSCOMPRESSION_COLOUR = ["maroon", "#33CCCC"] @@ -13470,9 +13412,14 @@ def create_thickness_builds(m_file, scan: int): ) -def main(args=None): - args = parse_args(args) - +def setup_plot( + mfile: Path, + scan: int = -1, + demo_ranges: bool = False, + colour: Literal[1, 2] = 1, + output_format: str = "pdf", + show: bool = False, +): # create main plot # Increase range when adding new page pages = [plt.figure(figsize=(12, 9), dpi=80) for i in range(30)] @@ -13480,28 +13427,24 @@ def main(args=None): # run main_plot main_plot( pages, - mf.MFile(args.f) if args.f != "" else mf.MFile("MFILE.DAT"), - scan=args.n or -1, - demo_ranges=bool(args.DEMO_ranges), - colour_scheme=int(args.colour), + mf.MFile(mfile) if mfile != "" else mf.MFile("MFILE.DAT"), + scan=scan or -1, + demo_ranges=demo_ranges, + colour_scheme=colour, ) - if args.output_format == "pdf": - with bpdf.PdfPages(args.f + 
"SUMMARY.pdf") as pdf: + if output_format == "pdf": + with bpdf.PdfPages(mfile + "SUMMARY.pdf") as pdf: for p in pages: pdf.savefig(p) - elif args.output_format == "png": - folder = pathlib.Path(args.f.removesuffix(".DAT") + "_SUMMARY") + elif output_format == "png": + folder = pathlib.Path(mfile.removesuffix(".DAT") + "_SUMMARY") folder.mkdir(parents=True, exist_ok=True) for no, page in enumerate(pages): page.savefig(pathlib.Path(folder, f"page{no}.png"), format="png") # show fig if option used - if args.show: + if show: plt.show(block=True) plt.close("all") - - -if __name__ == "__main__": - main() diff --git a/process/core/io/plot_scans.py b/process/core/io/plot/plot_scans.py similarity index 88% rename from process/core/io/plot_scans.py rename to process/core/io/plot/plot_scans.py index ebf9d63c8..4f62a2abc 100644 --- a/process/core/io/plot_scans.py +++ b/process/core/io/plot/plot_scans.py @@ -22,11 +22,8 @@ - If the file is a folder, the contained MFILE is used as an input. """ -import argparse import math import os -from argparse import RawTextHelpFormatter -from pathlib import Path import matplotlib.pyplot as plt import matplotlib.ticker as mtick @@ -37,211 +34,6 @@ from process.core.io.variable_metadata import var_dicts as meta -def parse_args(args): - """Parse supplied arguments. - - Parameters - ---------- - args : list, None - arguments to parse - - Returns - ------- - Namespace - parsed arguments - """ - parser = argparse.ArgumentParser( - description="Plot optimization information", - formatter_class=RawTextHelpFormatter, - ) - - parser.add_argument( - "-f", - "--input_files", - default="MFILE.DAT", - help=( - "Specify input file(s) path(s) (default = MFILE.DAT)\n" - "More than one input file can be used eg: -f 'A_MFILE.DAT " - "B_MFILE.DAT'.\nYou can only specify the folder containing the " - "MFILE.\nThe different files scan will be plotted on the same " - "graph.\nThe scans must use the same scan variation." 
- ), - ) - - # At least one output variable must be supplied in order to plot - parser.add_argument( - "-yv", - "--y_vars", - required=True, - help=( - "Select the output variables\nMore than one output can be plotted " - "eg: -yv 'var1 var2'\nA separate plot will be created for each " - "inputs" - ), - ) - - parser.add_argument( - "-yv2", - "--y_vars2", - default="", - help=( - "Select the 2nd axis output variable\n " - "eg: -yv2 'var'\n 2nd variable will be plotted on shared figure " - "inputs" - ), - ) - - parser.add_argument( - "-o", - "--outputdir", - default=Path.cwd(), - help="Output directory for plots, defaults to current working directory.", - ) - - parser.add_argument( - "-out", - "--term_output", - action="store_true", - help="Option to show scans values on terminal", - ) - - parser.add_argument( - "-sf", - "--save_format", - nargs="?", - default="pdf", - help="Output format (default='pdf') ", - ) - - parser.add_argument( - "-afs", - "--axis_font_size", - nargs="?", - default=18, - help="Axis label font size selection (default=18)", - type=int, - ) - - parser.add_argument( - "-ats", - "--axis_ticklabel_size", - nargs="?", - default=16, - help="Axis tick label font size selection (default=16)", - type=int, - ) - - parser.add_argument( - "-x%", - "--x_axis_percent", - action="store_true", - help=( - "Used to set the x axis ticks to percentages in place of absolute \nvalues." - ), - ) - - parser.add_argument( - "-xm", - "--x_axis_max", - default="", - help=( - "Used to set the x value corresponding to 100 percent when \nconverting from absolute to percent values." - ), - ) - - parser.add_argument( - "-xr", - "--x_axis_range", - default="", - help=("Used to set the range for x axis"), - ) - - parser.add_argument( - "-y%", - "--y_axis_percent", - action="store_true", - help=( - "Used to set the y axis ticks to percentages in place of absolute \nvalues." 
- ), - ) - - parser.add_argument( - "-y2%", - "--y_axis_2_percent", - action="store_true", - help=( - "Used to set the y axis ticks to percentages in place of absolute \nvalues. For the twinned axis if present." - ), - ) - - parser.add_argument( - "-ym", - "--y_axis_max", - default="", - help=( - "Used to set the y value corresponding to 100 percent when \nconverting from absolute to percent values." - ), - ) - - parser.add_argument( - "-ym2", - "--y_axis_2_max", - default="", - help=( - "Used to set the y value corresponding to 100 percent when \nconverting from absolute to percent values." - "For the twinned axis if present." - ), - ) - - parser.add_argument( - "-yr", - "--y_axis_range", - default="", - help=("Used to set the range for y axis"), - ) - - parser.add_argument( - "-yr2", - "--y_axis_2_range", - default="", - help=("Used to set the range for y axis. For the twinned axis if present."), - ) - - parser.add_argument( - "-ln", - "--label_name", - default="", - help=( - "Label names for plot legend. If multiple input files used then \n" - "list the same number of label names eg: -nl 'leg1 leg2'\n" - "(default = MFile file name) " - ), - ) - - parser.add_argument( - "-2DC", - "--two_dimensional_contour", - action="store_true", - help=( - "Option to plot 2D scans as a coloured contour plot instead of a line plot \n " - "Note: Non convergent points will show up with a value of zero \n " - "Note: The scan paramters must both be in increasing orderl \n " - ), - ) - - parser.add_argument( - "-stc", - "--stack_plots", - action="store_true", - help=( - "Option to plot multiple 1D plots in a column of subplots \n " - "Variables will be plotted in order of input" - ), - ) - - return parser.parse_args(args) - - def main(args=None): """Main plot scans script. 
@@ -1208,7 +1000,3 @@ def main(args=None): # Display plot (used in Jupyter notebooks) plt.show() plt.clf() - - -if __name__ == "__main__": - main() diff --git a/process/core/io/plot_solutions.py b/process/core/io/plot/plot_solutions.py similarity index 100% rename from process/core/io/plot_solutions.py rename to process/core/io/plot/plot_solutions.py diff --git a/process/core/io/plot/plot_stress_tf.py b/process/core/io/plot/plot_stress_tf.py index c7466ddbc..ce9930471 100644 --- a/process/core/io/plot/plot_stress_tf.py +++ b/process/core/io/plot/plot_stress_tf.py @@ -6,70 +6,43 @@ SIG_TF.json """ -import argparse import json import os -from argparse import RawTextHelpFormatter +from dataclasses import dataclass +from operator import itemgetter from pathlib import Path import matplotlib.pyplot as plt -def main(args=None): - # PARSING USER PARAMETERS - # please execute 'python plot_stress_tf.py -h' for input information - # Option definition - # ----------------- - parser = argparse.ArgumentParser( - description="Plot optimization information", - formatter_class=RawTextHelpFormatter, - ) - parser.add_argument( - "-p", - "--plot_selec", - nargs="?", - default="all", - help=( - "Plot selection string :\n - If it containts 'sig' -> Stress radial dependency \n#" - " - If it containts 'strain' -> Strain \n - If it containts 'disp' -> Displacement \n" - " - If it containts 'all' -> all the mentioned plots (default value)" - ), - ) - parser.add_argument( - "-sf", - "--save_format", - nargs="?", - default="pdf", - help="output format (default='pdf') ", - ) - parser.add_argument( - "-as", - "--axis_font_size", - nargs="?", - default=18, - help="Axis label font size selection (default=18)", - type=int, - ) - parser.add_argument( - "-out", - "--term_output", - action="store_true", - help="Option to show stress on terminal output", - ) - parser.add_argument( - "-f", - "--input_file", - default="SIG_TF.json", - help="specify input file path (default = SIG_TF.json)", - ) - - # 
Option argument extraction - # -------------------------- - args = parser.parse_args(args) - plot_selection = str(args.plot_selec) - save_format = str(args.save_format) - axis_font_size = int(args.axis_font_size) - term_output = args.term_output +@dataclass +class StressPlotConfig: + axis_font_size: float + axis_tick_size: int = 16 + legend_size: int = 12 + mark_size: int = 13 + line_width: float = 3.5 + outdir: Path | None = None + + def __post_init__(self): + if self.outdir is None: + self.outdir = Path.cwd() + if not os.path.isdir(self.outdir): + os.mkdir(self.outdir) + + +def plot_stress( + plot_selection, + save_format, + axis_font_size, + input_file, + term_output, + plot_conf: StressPlotConfig | dict | None = None, +): + if plot_conf is None: + plot_conf = StressPlotConfig(axis_font_size) + elif isinstance(plot_conf, dict): + plot_conf = StressPlotConfig(axis_font_size, **plot_conf) # Boolean swiches for plot selection # ----------------------------------- @@ -83,22 +56,32 @@ def main(args=None): # Number of physical quantity value per coil layer n_radial_array_layer = 0 + with open(input_file) as f: + sig_file_data = json.load(f) + + # Getting the data to be plotted + n_radial_array_layer = sig_file_data["Points per layers"] + n_points = len(sig_file_data["Radius (m)"]) + n_layers = int(n_points / n_radial_array_layer) + + # Assumes n_layers >= 1 + # Physical quantities : full vectors - radius = [] - radial_smeared_stress = [] - toroidal_smeared_stress = [] - vertical_smeared_stress = [] - tresca_smeared_stress = [] - radial_stress = [] - toroidal_stress = [] - vertical_stress = [] - vm_stress = [] - tresca_stress = [] - cea_tresca_stress = [] - radial_strain = [] - toroidal_strain = [] - vertical_strain = [] - radial_displacement = [] + radius = [[] * n_layers] + radial_smeared_stress = [[] * n_layers] + toroidal_smeared_stress = [[] * n_layers] + vertical_smeared_stress = [[] * n_layers] + tresca_smeared_stress = [[] * n_layers] + radial_stress = [[] * 
n_layers] + toroidal_stress = [[] * n_layers] + vertical_stress = [[] * n_layers] + vm_stress = [[] * n_layers] + tresca_stress = [[] * n_layers] + cea_tresca_stress = [[] * n_layers] + radial_strain = [[] * n_layers] + toroidal_strain = [[] * n_layers] + vertical_strain = [[] * n_layers] + radial_displacement = [[] * n_layers] # Physical quantity : WP stress wp_vertical_stress = [] @@ -120,159 +103,74 @@ def main(args=None): bound_vertical_strain = [] bound_radial_displacement = [] - with open(args.input_file) as f: - sig_file_data = json.load(f) - - # Getting the data to be plotted - n_radial_array_layer = sig_file_data["Points per layers"] - n_points = len(sig_file_data["Radius (m)"]) - n_layers = int(n_points / n_radial_array_layer) for ii in range(n_layers): # Full vector - radius.append([]) - radial_stress.append([]) - toroidal_stress.append([]) - vertical_stress.append([]) - radial_smeared_stress.append([]) - toroidal_smeared_stress.append([]) - vertical_smeared_stress.append([]) - vm_stress.append([]) - tresca_stress.append([]) - cea_tresca_stress.append([]) - radial_displacement.append([]) + lb_ind = ii * n_radial_array_layer + ub_ind = (ii + 1) * n_radial_array_layer - 1 + lb_ub = itemgetter(lb_ind, ub_ind) for jj in range(n_radial_array_layer): - radius[ii].append( - sig_file_data["Radius (m)"][ii * n_radial_array_layer + jj] - ) - radial_stress[ii].append( - sig_file_data["Radial stress (MPa)"][ii * n_radial_array_layer + jj] - ) - toroidal_stress[ii].append( - sig_file_data["Toroidal stress (MPa)"][ii * n_radial_array_layer + jj] + ij_ind = lb_ind + jj + + radius[ii].append(sig_file_data["Radius (m)"][ij_ind]) + radial_stress[ii].append(sig_file_data["Radial stress (MPa)"][ij_ind]) + toroidal_stress[ii].append(sig_file_data["Toroidal stress (MPa)"][ij_ind]) + vertical_stress[ii].append( + sig_file_data["Vertical stress (MPa)"][ + 0 if len(sig_file_data["Vertical stress (MPa)"]) == 1 else ij_ind + ] ) - if len(sig_file_data["Vertical stress (MPa)"]) == 
1: - vertical_stress[ii].append(sig_file_data["Vertical stress (MPa)"][0]) - else: - vertical_stress[ii].append( - sig_file_data["Vertical stress (MPa)"][ - ii * n_radial_array_layer + jj - ] - ) radial_smeared_stress[ii].append( - sig_file_data["Radial smear stress (MPa)"][ - ii * n_radial_array_layer + jj - ] + sig_file_data["Radial smear stress (MPa)"][ij_ind] ) toroidal_smeared_stress[ii].append( - sig_file_data["Toroidal smear stress (MPa)"][ - ii * n_radial_array_layer + jj - ] + sig_file_data["Toroidal smear stress (MPa)"][ij_ind] ) vertical_smeared_stress[ii].append( - sig_file_data["Vertical smear stress (MPa)"][ - ii * n_radial_array_layer + jj - ] - ) - vm_stress[ii].append( - sig_file_data["Von-Mises stress (MPa)"][ii * n_radial_array_layer + jj] - ) - tresca_stress[ii].append( - sig_file_data["CEA Tresca stress (MPa)"][ii * n_radial_array_layer + jj] + sig_file_data["Vertical smear stress (MPa)"][ij_ind] ) + vm_stress[ii].append(sig_file_data["Von-Mises stress (MPa)"][ij_ind]) + tresca_stress[ii].append(sig_file_data["CEA Tresca stress (MPa)"][ij_ind]) cea_tresca_stress[ii].append( - sig_file_data["CEA Tresca stress (MPa)"][ii * n_radial_array_layer + jj] + sig_file_data["CEA Tresca stress (MPa)"][ij_ind] ) radial_displacement[ii].append( - sig_file_data["rad. displacement (mm)"][ii * n_radial_array_layer + jj] + sig_file_data["rad. 
displacement (mm)"][ij_ind] ) - # Layer lower boundaries values - bound_radius.append(sig_file_data["Radius (m)"][ii * n_radial_array_layer]) - bound_radial_stress.append( - sig_file_data["Radial stress (MPa)"][ii * n_radial_array_layer] - ) - bound_toroidal_stress.append( - sig_file_data["Toroidal stress (MPa)"][ii * n_radial_array_layer] - ) + # Layer lower/upper boundaries values + bound_radius.extend(lb_ub(sig_file_data["Radius (m)"])) + bound_radial_stress.extend(lb_ub(sig_file_data["Radial stress (MPa)"])) + bound_toroidal_stress.extend(lb_ub(sig_file_data["Toroidal stress (MPa)"])) + if len(sig_file_data["Vertical stress (MPa)"]) == 1: - bound_vertical_stress.append(sig_file_data["Vertical stress (MPa)"][0]) + bvs_l = bvs_u = 0 else: - bound_vertical_stress.append( - sig_file_data["Vertical stress (MPa)"][ii * n_radial_array_layer] - ) - bound_radial_smeared_stress.append( - sig_file_data["Radial smear stress (MPa)"][ii * n_radial_array_layer] - ) - bound_toroidal_smeared_stress.append( - sig_file_data["Toroidal smear stress (MPa)"][ii * n_radial_array_layer] - ) - bound_vertical_smeared_stress.append( - sig_file_data["Vertical smear stress (MPa)"][ii * n_radial_array_layer] - ) - bound_vm_stress.append( - sig_file_data["Von-Mises stress (MPa)"][ii * n_radial_array_layer] - ) - bound_tresca_stress.append( - sig_file_data["CEA Tresca stress (MPa)"][ii * n_radial_array_layer] + bvs_l = lb_ind + bvs_u = ub_ind + bound_vertical_stress.extend([ + sig_file_data["Vertical stress (MPa)"][bvs_l], + sig_file_data["Vertical stress (MPa)"][bvs_u], + ]) + + bound_radial_smeared_stress.extend( + lb_ub(sig_file_data["Radial smear stress (MPa)"]) ) - bound_cea_tresca_stress.append( - sig_file_data["CEA Tresca stress (MPa)"][ii * n_radial_array_layer] + bound_toroidal_smeared_stress.extend( + lb_ub(sig_file_data["Toroidal smear stress (MPa)"]) ) - bound_radial_displacement.append( - sig_file_data["rad. 
displacement (mm)"][ii * n_radial_array_layer] + bound_vertical_smeared_stress.extend( + lb_ub(sig_file_data["Vertical smear stress (MPa)"]) ) + bound_vm_stress.extend(lb_ub(sig_file_data["Von-Mises stress (MPa)"])) + bound_tresca_stress.extend(lb_ub(sig_file_data["CEA Tresca stress (MPa)"])) + bound_cea_tresca_stress.extend(lb_ub(sig_file_data["CEA Tresca stress (MPa)"])) + bound_radial_displacement.extend(lb_ub(sig_file_data["rad. displacement (mm)"])) # Layer upper boundaries values - bound_radius.append( - sig_file_data["Radius (m)"][(ii + 1) * n_radial_array_layer - 1] - ) - bound_radial_stress.append( - sig_file_data["Radial stress (MPa)"][(ii + 1) * n_radial_array_layer - 1] - ) - bound_toroidal_stress.append( - sig_file_data["Toroidal stress (MPa)"][(ii + 1) * n_radial_array_layer - 1] - ) - if len(sig_file_data["Vertical stress (MPa)"]) == 1: - bound_vertical_stress.append(sig_file_data["Vertical stress (MPa)"][0]) - else: - bound_vertical_stress.append( - sig_file_data["Vertical stress (MPa)"][ - (ii + 1) * n_radial_array_layer - 1 - ] - ) - bound_radial_smeared_stress.append( - sig_file_data["Radial smear stress (MPa)"][ - (ii + 1) * n_radial_array_layer - 1 - ] - ) - bound_toroidal_smeared_stress.append( - sig_file_data["Toroidal smear stress (MPa)"][ - (ii + 1) * n_radial_array_layer - 1 - ] - ) - bound_vertical_smeared_stress.append( - sig_file_data["Vertical smear stress (MPa)"][ - (ii + 1) * n_radial_array_layer - 1 - ] - ) - bound_vm_stress.append( - sig_file_data["Von-Mises stress (MPa)"][(ii + 1) * n_radial_array_layer - 1] - ) - bound_tresca_stress.append( - sig_file_data["CEA Tresca stress (MPa)"][(ii + 1) * n_radial_array_layer - 1] - ) - bound_cea_tresca_stress.append( - sig_file_data["CEA Tresca stress (MPa)"][(ii + 1) * n_radial_array_layer - 1] - ) - bound_radial_displacement.append( - sig_file_data["rad. 
displacement (mm)"][(ii + 1) * n_radial_array_layer - 1] - ) # TRESCA smeared stress [MPa] for ii in range(n_layers): - tresca_smeared_stress.append([]) - bound_tresca_smeared_stress.extend([ max(abs(radial_smeared_stress[ii][0]), abs(toroidal_smeared_stress[ii][0])) + vertical_smeared_stress[ii][0], @@ -294,434 +192,419 @@ def main(args=None): # Strains if len(sig_file_data) > 16: for ii in range(n_layers): - radial_strain.append([]) - toroidal_strain.append([]) - vertical_strain.append([]) - - bound_radial_strain.extend([ - sig_file_data["Radial strain"][ii * n_radial_array_layer], - sig_file_data["Radial strain"][(ii + 1) * n_radial_array_layer - 1], - ]) - bound_toroidal_strain.extend([ - sig_file_data["Toroidal strain"][ii * n_radial_array_layer], - sig_file_data["Toroidal strain"][(ii + 1) * n_radial_array_layer - 1], - ]) - bound_vertical_strain.extend([ - sig_file_data["Vertical strain"][ii * n_radial_array_layer], - sig_file_data["Vertical strain"][(ii + 1) * n_radial_array_layer - 1], - ]) + bound_radial_strain.extend([lb_ub(sig_file_data["Radial strain"])]) + bound_toroidal_strain.extend([lb_ub(sig_file_data["Toroidal strain"])]) + bound_vertical_strain.extend([lb_ub(sig_file_data["Vertical strain"])]) for jj in range(n_radial_array_layer): - radial_strain[ii].append( - sig_file_data["Radial strain"][ii * n_radial_array_layer + jj] - ) - toroidal_strain[ii].append( - sig_file_data["Toroidal strain"][ii * n_radial_array_layer + jj] - ) - vertical_strain[ii].append( - sig_file_data["Vertical strain"][ii * n_radial_array_layer + jj] - ) + ij_ind = lb_ind + jj + + radial_strain[ii].append(sig_file_data["Radial strain"][ij_ind]) + toroidal_strain[ii].append(sig_file_data["Toroidal strain"][ij_ind]) + vertical_strain[ii].append(sig_file_data["Vertical strain"][ij_ind]) if "WP smeared stress (MPa)" in sig_file_data: wp_vertical_stress.append( sig_file_data["WP smeared stress (MPa)"][jj] ) - # Terminal output - # --------------- if term_output: - ii_ins = 0 - 
ii_mids = int(0.5 * float(n_radial_array_layer)) - ii_outs = n_radial_array_layer - 1 - - print() - print() - print("Layer stress details") - print("____________________") - - for ii in range(n_layers): - print(f"Layer {ii + 1}") - print("------------------------------") - print( - "steel radial stress in the inner/middle/out point:" - f" {radial_stress[ii][ii_ins]}/{radial_stress[ii][ii_mids]}/{radial_stress[ii][ii_outs]} MPa" - ) - print( - "steel toroidal stress in the inner/middle/out point:" - f" {toroidal_stress[ii][ii_ins]}/{toroidal_stress[ii][ii_mids]}/{toroidal_stress[ii][ii_outs]} MPa" - ) - print( - "steel vertical stress in the inner/middle/out point:" - f" {vertical_stress[ii][ii_ins]}/{vertical_stress[ii][ii_mids]}/{vertical_stress[ii][ii_outs]} MPa" - ) - print( - "steel TRESCA stress in the inner/middle/out point:" - f" {tresca_stress[ii][ii_ins]}/{tresca_stress[ii][ii_mids]}/{tresca_stress[ii][ii_outs]} MPa" - ) - print() - print( - "smeared radial stress in the inner/middle/out point :" - f" {radial_smeared_stress[ii][ii_ins]}/{radial_smeared_stress[ii][ii_mids]}/{radial_smeared_stress[ii][ii_outs]} MPa" - ) - print( - "smeared toroidal stress in the inner/middle/out point :" - f" {toroidal_smeared_stress[ii][ii_ins]}/{toroidal_smeared_stress[ii][ii_mids]}/{toroidal_smeared_stress[ii][ii_outs]} MPa" - ) - print( - "smeared vertical stress in the inner/middle/out point :" - f" {vertical_smeared_stress[ii][ii_ins]}/{vertical_smeared_stress[ii][ii_mids]}/{vertical_smeared_stress[ii][ii_outs]} MPa" - ) - print( - "smeared TRESCA stress in the inner/middle/out point :" - f" {tresca_smeared_stress[ii][ii_ins]}/{tresca_smeared_stress[ii][ii_mids]}/{tresca_smeared_stress[ii][ii_outs]} MPa" - ) - print() - - if len(sig_file_data) > 16: - print( - "radial strain in the inner/middle/out point :" - f" {radial_strain[ii][ii_ins]}/{radial_strain[ii][ii_mids]}/{radial_strain[ii][ii_outs]}" - ) - print( - "toroidal strain in the inner/middle/out point :" - f" 
{toroidal_strain[ii][ii_ins]}/{toroidal_strain[ii][ii_mids]}/{toroidal_strain[ii][ii_outs]}" - ) - print(f"vertical strain : {vertical_strain[ii][0]}") - print() - - if len(wp_vertical_stress) != 0: - print( - "smeared WP vertical stress in the inner/middle/out point :" - f" {wp_vertical_stress[0]}/{wp_vertical_stress[ii_mids]}/{wp_vertical_stress[ii_outs]} MPa" - ) - print() - - outdir = Path.cwd() - if not os.path.isdir(outdir): - os.mkdir(outdir) - - axis_tick_size = 16 - legend_size = 12 - mark_size = 13 - line_width = 3.5 + terminal_output( + n_layers, + n_radial_array_layer, + sig_file_data, + radial_stress, + toroidal_stress, + vertical_stress, + tresca_stress, + wp_vertical_stress, + radial_smeared_stress, + toroidal_smeared_stress, + vertical_smeared_stress, + tresca_smeared_stress, + radial_strain, + toroidal_strain, + vertical_strain, + ) - # PLOT 1 : Stress summary - # ------------------------ if plot_sig: - for ii in range(n_layers): - plt.plot( - radius[ii], - radial_stress[ii], - "-", - linewidth=line_width, - color="lightblue", - ) - plt.plot( - radius[ii], - toroidal_stress[ii], - "-", - linewidth=line_width, - color="wheat", - ) - plt.plot( - radius[ii], - vertical_stress[ii], - "-", - linewidth=line_width, - color="lightgrey", - ) - plt.plot( - radius[ii], tresca_stress[ii], "-", linewidth=line_width, color="pink" - ) - plt.plot( - radius[ii], vm_stress[ii], "-", linewidth=line_width, color="violet" - ) - plt.plot( - radius[0], - radial_stress[0], - "--", - color="dodgerblue", - label=r"$\sigma_{rr}$", - ) - plt.plot( - radius[0], - toroidal_stress[0], - "--", - color="orange", - label=r"$\sigma_{\theta\theta}$", - ) - plt.plot( - radius[0], - vertical_stress[0], - "--", - color="mediumseagreen", - label=r"$\sigma_{zz}$", - ) - plt.plot( - radius[0], - tresca_stress[0], - "-", - color="crimson", - label=r"$\sigma_{TRESCA}$", - ) - plt.plot( - radius[0], - vm_stress[0], - "-", - color="darkviolet", - label=r"$\sigma_{Von\ mises}$", - ) - for ii in 
range(1, n_layers): - plt.plot(radius[ii], radial_stress[ii], "--", color="dodgerblue") - plt.plot(radius[ii], toroidal_stress[ii], "--", color="orange") - plt.plot(radius[ii], vertical_stress[ii], "--", color="mediumseagreen") - plt.plot(radius[ii], tresca_stress[ii], "-", color="crimson") - plt.plot(radius[ii], vm_stress[ii], "-", color="darkviolet") - plt.plot( + stress_summary( + n_layers, + radius, bound_radius, + radial_stress, + toroidal_stress, + vertical_stress, + tresca_stress, + vm_stress, bound_radial_stress, - "|", - markersize=mark_size, - color="dodgerblue", - ) - plt.plot( - bound_radius, bound_toroidal_stress, - "|", - markersize=mark_size, - color="orange", - ) - plt.plot( - bound_radius, bound_vertical_stress, - "|", - markersize=mark_size, - color="mediumseagreen", - ) - plt.plot( - bound_radius, bound_tresca_stress, - "|", - markersize=mark_size, - color="crimson", - ) - plt.plot( - bound_radius, bound_vm_stress, "|", markersize=mark_size, color="darkviolet" + bound_vm_stress, + save_format, + plot_conf, ) - plt.grid(True) - plt.ylabel(r"$\sigma$ [$MPa$]", fontsize=axis_font_size) - plt.xlabel(r"$R$ [$m$]", fontsize=axis_font_size) - plt.legend(loc="best", fontsize=legend_size) - plt.xticks(size=axis_tick_size) - plt.yticks(size=axis_tick_size) - plt.tight_layout() - plt.savefig(f"{outdir}/structure_stress.{save_format}") - plt.clf() - plt.cla() - - # PLOT 2 : Smeared stress summary - # ------------------------ + if plot_sm_sig: - for ii in range(n_layers): - plt.plot( - radius[ii], - radial_smeared_stress[ii], - "-", - linewidth=line_width, - color="lightblue", - ) - plt.plot( - radius[ii], - toroidal_smeared_stress[ii], - "-", - linewidth=line_width, - color="wheat", - ) - plt.plot( - radius[ii], - vertical_smeared_stress[ii], - "-", - linewidth=line_width, - color="lightgrey", - ) - plt.plot( - radius[ii], - tresca_smeared_stress[ii], - "-", - linewidth=line_width, - color="pink", - ) - plt.plot( - radius[0], - radial_smeared_stress[0], - 
"--", - color="dodgerblue", - label=r"$\sigma_{rr}^\mathrm{smeared}$", - ) - plt.plot( - radius[0], - toroidal_smeared_stress[0], - "--", - color="orange", - label=r"$\sigma_{\theta\theta}^\mathrm{smeared}$", - ) - plt.plot( - radius[0], - vertical_smeared_stress[0], - "--", - color="mediumseagreen", - label=r"$\sigma_{zz}^\mathrm{smeared}$", - ) - plt.plot( - radius[0], - tresca_smeared_stress[0], - "-", - color="crimson", - label=r"$\sigma_{TRESCA}^\mathrm{smeared}$", - ) - for ii in range(1, n_layers): - plt.plot(radius[ii], radial_smeared_stress[ii], "--", color="dodgerblue") - plt.plot(radius[ii], toroidal_smeared_stress[ii], "--", color="orange") - plt.plot( - radius[ii], vertical_smeared_stress[ii], "--", color="mediumseagreen" - ) - plt.plot(radius[ii], tresca_smeared_stress[ii], "-", color="crimson") - plt.plot( + smeared_stress_summary( + n_layers, + radius, bound_radius, + radial_smeared_stress, + toroidal_smeared_stress, + vertical_smeared_stress, + tresca_smeared_stress, bound_radial_smeared_stress, - "|", - markersize=mark_size, - color="dodgerblue", - ) - plt.plot( - bound_radius, bound_toroidal_smeared_stress, - "|", - markersize=mark_size, - color="orange", - ) - plt.plot( - bound_radius, bound_vertical_smeared_stress, - "|", - markersize=mark_size, - color="mediumseagreen", - ) - plt.plot( - bound_radius, bound_tresca_smeared_stress, - "|", - markersize=mark_size, - color="crimson", + save_format, + plot_conf, ) - plt.grid(True) - plt.ylabel(r"$\sigma$ [$MPa$]", fontsize=axis_font_size) - plt.xlabel(r"$R$ [$m$]", fontsize=axis_font_size) - plt.legend(loc="best", fontsize=legend_size) - plt.xticks(size=axis_tick_size) - plt.yticks(size=axis_tick_size) - plt.tight_layout() - plt.savefig(f"{outdir}/smeared_stress.{save_format}") - plt.clf() - plt.cla() - - # PLOT 3 : Strain summary - # ------------------------ + if plot_strain and len(sig_file_data) > 15: - for ii in range(n_layers): - plt.plot( - radius[ii], - radial_strain[ii], - "-", - 
def terminal_output(
    n_layers,
    n_radial_array_layer,
    sig_file_data,
    radial_stress,
    toroidal_stress,
    vertical_stress,
    tresca_stress,
    wp_vertical_stress,
    radial_smeared_stress,
    toroidal_smeared_stress,
    vertical_smeared_stress,
    tresca_smeared_stress,
    radial_strain,
    toroidal_strain,
    vertical_strain,
):
    """Print a per-layer summary of the stress (and strain) radial profiles.

    For every layer, the steel and smeared stresses at the inner, middle and
    outer radial points are printed to stdout.  Strain lines are only printed
    when the SIG_TF data contains them (more than 16 keys), and the WP
    vertical stress line only when ``wp_vertical_stress`` is non-empty.

    :param n_layers: number of layers in the stress data
    :param n_radial_array_layer: number of radial points stored per layer
    :param sig_file_data: parsed SIG_TF JSON data (used only for its key count)
    :param wp_vertical_stress: WP smeared vertical stress samples (may be empty)

    The remaining parameters are per-layer lists of per-point values.
    """
    # Indices of the inner / middle / outer radial points within one layer.
    ii_ins = 0
    ii_mids = int(0.5 * float(n_radial_array_layer))
    ii_outs = n_radial_array_layer - 1
    dg = itemgetter(ii_ins, ii_mids, ii_outs)

    print("\n\nLayer stress details\n____________________")

    frame = """Layer {}
------------------------------
steel radial stress in the inner/middle/out point: {} MPa
steel toroidal stress in the inner/middle/out point: {} MPa
steel vertical stress in the inner/middle/out point: {} MPa
steel TRESCA stress in the inner/middle/out point: {} MPa
smeared radial stress in the inner/middle/out point: {} MPa
smeared toroidal stress in the inner/middle/out point: {} MPa
smeared vertical stress in the inner/middle/out point: {} MPa
smeared TRESCA stress in the inner/middle/out point: {} MPa

"""
    frame2 = """
radial strain in the inner/middle/out point: {}
toroidal strain in the inner/middle/out point: {}
vertical strain: {}

"""
    # BUG FIX: this template previously had a single "{}" placeholder while
    # three values (inner/middle/out) were passed to format(), so the middle
    # and outer WP vertical stresses were silently dropped from the output.
    frame3 = "smeared WP vertical stress in the inner/middle/out point: {}/{}/{} MPa"
    layer_line = "{}/{}/{}"
    for ii in range(n_layers):
        layer = ii + 1
        s_radial = layer_line.format(*dg(radial_stress[ii]))
        s_toro = layer_line.format(*dg(toroidal_stress[ii]))
        s_vert = layer_line.format(*dg(vertical_stress[ii]))
        s_tres = layer_line.format(*dg(tresca_stress[ii]))
        sm_rad = layer_line.format(*dg(radial_smeared_stress[ii]))
        sm_toro = layer_line.format(*dg(toroidal_smeared_stress[ii]))
        sm_vert = layer_line.format(*dg(vertical_smeared_stress[ii]))
        sm_tres = layer_line.format(*dg(tresca_smeared_stress[ii]))

        print(
            frame.format(
                layer,
                s_radial,
                s_toro,
                s_vert,
                s_tres,
                sm_rad,
                sm_toro,
                sm_vert,
                sm_tres,
            )
        )

        # Strain entries only exist in newer SIG_TF files (> 16 keys).
        if len(sig_file_data) > 16:
            r_strain = layer_line.format(*dg(radial_strain[ii]))
            t_strain = layer_line.format(*dg(toroidal_strain[ii]))
            print(frame2.format(r_strain, t_strain, vertical_strain[ii][0]))

        if len(wp_vertical_stress) != 0:
            # The WP array is indexed from its own start, hence [0] rather
            # than dg(); this mirrors the pre-refactor behaviour.
            print(
                frame3.format(
                    wp_vertical_stress[0],
                    wp_vertical_stress[ii_mids],
                    wp_vertical_stress[ii_outs],
                )
            )
            print()
def stress_summary(
    n_layers,
    radius,
    bound_radius,
    radial_stress,
    toroidal_stress,
    vertical_stress,
    tresca_stress,
    vm_stress,
    bound_radial_stress,
    bound_toroidal_stress,
    bound_vertical_stress,
    bound_tresca_stress,
    bound_vm_stress,
    save_format,
    plot_conf,
):
    """Plot the steel stress radial distributions and save the figure.

    Writes ``structure_stress.<save_format>`` into ``plot_conf.outdir``.
    ``plot_conf`` supplies the styling attributes read below (line_width,
    mark_size, axis_tick_size, axis_font_size, legend_size, outdir).

    NOTE(review): the order of the plt calls is significant — it fixes the
    z-order of the curves and the legend entry order.
    """
    lw = plot_conf.line_width
    ms = plot_conf.mark_size
    ats = plot_conf.axis_tick_size
    afs = plot_conf.axis_font_size
    # Thick pale background line per layer, one per stress component.
    for ii in range(n_layers):
        plt.plot(radius[ii], radial_stress[ii], "-", linewidth=lw, color="lightblue")
        plt.plot(radius[ii], toroidal_stress[ii], "-", linewidth=lw, color="wheat")
        plt.plot(radius[ii], vertical_stress[ii], "-", linewidth=lw, color="lightgrey")
        plt.plot(radius[ii], tresca_stress[ii], "-", linewidth=lw, color="pink")
        plt.plot(radius[ii], vm_stress[ii], "-", linewidth=lw, color="violet")
    # Layer 0 is plotted separately so each quantity gets one legend entry.
    plt.plot(
        radius[0], radial_stress[0], "--", color="dodgerblue", label=r"$\sigma_{rr}$"
    )
    plt.plot(
        radius[0],
        toroidal_stress[0],
        "--",
        color="orange",
        label=r"$\sigma_{\theta\theta}$",
    )
    plt.plot(
        radius[0],
        vertical_stress[0],
        "--",
        color="mediumseagreen",
        label=r"$\sigma_{zz}$",
    )
    plt.plot(
        radius[0], tresca_stress[0], "-", color="crimson", label=r"$\sigma_{TRESCA}$"
    )
    plt.plot(
        radius[0], vm_stress[0], "-", color="darkviolet", label=r"$\sigma_{Von\ mises}$"
    )
    # Remaining layers: same colours, no labels (avoid duplicate legend rows).
    for ii in range(1, n_layers):
        plt.plot(radius[ii], radial_stress[ii], "--", color="dodgerblue")
        plt.plot(radius[ii], toroidal_stress[ii], "--", color="orange")
        plt.plot(radius[ii], vertical_stress[ii], "--", color="mediumseagreen")
        plt.plot(radius[ii], tresca_stress[ii], "-", color="crimson")
        plt.plot(radius[ii], vm_stress[ii], "-", color="darkviolet")
    # "|" markers flag the values at the layer boundaries.
    plt.plot(bound_radius, bound_radial_stress, "|", markersize=ms, color="dodgerblue")
    plt.plot(bound_radius, bound_toroidal_stress, "|", markersize=ms, color="orange")
    plt.plot(
        bound_radius, bound_vertical_stress, "|", markersize=ms, color="mediumseagreen"
    )
    plt.plot(bound_radius, bound_tresca_stress, "|", markersize=ms, color="crimson")
    plt.plot(bound_radius, bound_vm_stress, "|", markersize=ms, color="darkviolet")
    plt.grid(True)
    plt.ylabel(r"$\sigma$ [$MPa$]", fontsize=afs)
    plt.xlabel(r"$R$ [$m$]", fontsize=afs)
    plt.legend(loc="best", fontsize=plot_conf.legend_size)
    plt.xticks(size=ats)
    plt.yticks(size=ats)
    plt.tight_layout()
    plt.savefig(f"{plot_conf.outdir}/structure_stress.{save_format}")
    # Reset the pyplot state machine for the next figure.
    plt.clf()
    plt.cla()
def smeared_stress_summary(
    n_layers,
    radius,
    bound_radius,
    radial_smeared_stress,
    toroidal_smeared_stress,
    vertical_smeared_stress,
    tresca_smeared_stress,
    bound_radial_smeared_stress,
    bound_toroidal_smeared_stress,
    bound_vertical_smeared_stress,
    bound_tresca_smeared_stress,
    save_format,
    plot_conf,
):
    """Plot the smeared stress radial distributions and save the figure.

    Writes ``smeared_stress.<save_format>`` into ``plot_conf.outdir``.
    Mirrors :func:`stress_summary` but for the smeared quantities (no
    Von-Mises curve here).  plt call order fixes z-order and legend order.
    """
    lw = plot_conf.line_width
    ms = plot_conf.mark_size
    ats = plot_conf.axis_tick_size
    afs = plot_conf.axis_font_size
    # Thick pale background line per layer, one per smeared component.
    for ii in range(n_layers):
        plt.plot(
            radius[ii], radial_smeared_stress[ii], "-", linewidth=lw, color="lightblue"
        )
        plt.plot(
            radius[ii], toroidal_smeared_stress[ii], "-", linewidth=lw, color="wheat"
        )
        plt.plot(
            radius[ii], vertical_smeared_stress[ii], "-", linewidth=lw, color="lightgrey"
        )
        plt.plot(radius[ii], tresca_smeared_stress[ii], "-", linewidth=lw, color="pink")
    # Layer 0 plotted separately so each quantity gets one legend entry.
    plt.plot(
        radius[0],
        radial_smeared_stress[0],
        "--",
        color="dodgerblue",
        label=r"$\sigma_{rr}^\mathrm{smeared}$",
    )
    plt.plot(
        radius[0],
        toroidal_smeared_stress[0],
        "--",
        color="orange",
        label=r"$\sigma_{\theta\theta}^\mathrm{smeared}$",
    )
    plt.plot(
        radius[0],
        vertical_smeared_stress[0],
        "--",
        color="mediumseagreen",
        label=r"$\sigma_{zz}^\mathrm{smeared}$",
    )
    plt.plot(
        radius[0],
        tresca_smeared_stress[0],
        "-",
        color="crimson",
        label=r"$\sigma_{TRESCA}^\mathrm{smeared}$",
    )
    # Remaining layers: same colours, no labels.
    for ii in range(1, n_layers):
        plt.plot(radius[ii], radial_smeared_stress[ii], "--", color="dodgerblue")
        plt.plot(radius[ii], toroidal_smeared_stress[ii], "--", color="orange")
        plt.plot(radius[ii], vertical_smeared_stress[ii], "--", color="mediumseagreen")
        plt.plot(radius[ii], tresca_smeared_stress[ii], "-", color="crimson")
    # "|" markers flag the values at the layer boundaries.
    plt.plot(
        bound_radius, bound_radial_smeared_stress, "|", markersize=ms, color="dodgerblue"
    )
    plt.plot(
        bound_radius, bound_toroidal_smeared_stress, "|", markersize=ms, color="orange"
    )
    plt.plot(
        bound_radius,
        bound_vertical_smeared_stress,
        "|",
        markersize=ms,
        color="mediumseagreen",
    )
    plt.plot(
        bound_radius, bound_tresca_smeared_stress, "|", markersize=ms, color="crimson"
    )
    plt.grid(True)
    plt.ylabel(r"$\sigma$ [$MPa$]", fontsize=afs)
    plt.xlabel(r"$R$ [$m$]", fontsize=afs)
    plt.legend(loc="best", fontsize=plot_conf.legend_size)
    plt.xticks(size=ats)
    plt.yticks(size=ats)
    plt.tight_layout()
    plt.savefig(f"{plot_conf.outdir}/smeared_stress.{save_format}")
    # Reset the pyplot state machine for the next figure.
    plt.clf()
    plt.cla()
def strain_summary(
    n_layers,
    radius,
    bound_radius,
    radial_strain,
    bound_radial_strain,
    toroidal_strain,
    bound_toroidal_strain,
    vertical_strain,
    bound_vertical_strain,
    save_format,
    plot_conf,
):
    """Plot the strain radial distributions and save the figure.

    Writes ``strains.<save_format>`` into ``plot_conf.outdir``.  Same layout
    convention as :func:`stress_summary`: pale thick lines per layer, layer 0
    re-plotted with labels for the legend, "|" markers at layer boundaries.
    """
    lw = plot_conf.line_width
    ms = plot_conf.mark_size
    ats = plot_conf.axis_tick_size
    afs = plot_conf.axis_font_size
    # Thick pale background line per layer, one per strain component.
    for ii in range(n_layers):
        plt.plot(radius[ii], radial_strain[ii], "-", linewidth=lw, color="lightblue")
        plt.plot(radius[ii], toroidal_strain[ii], "-", linewidth=lw, color="wheat")
        plt.plot(radius[ii], vertical_strain[ii], "-", linewidth=lw, color="lightgrey")
    # Layer 0 plotted separately so each quantity gets one legend entry.
    plt.plot(
        radius[0], radial_strain[0], "--", color="dodgerblue", label=r"$\epsilon_{rr}$"
    )
    plt.plot(
        radius[0],
        toroidal_strain[0],
        "--",
        color="orange",
        label=r"$\epsilon_{\theta\theta}$",
    )
    plt.plot(
        radius[0],
        vertical_strain[0],
        "--",
        color="mediumseagreen",
        label=r"$\epsilon_{zz}$",
    )
    # Remaining layers: same colours, no labels.
    for ii in range(1, n_layers):
        plt.plot(radius[ii], radial_strain[ii], "--", color="dodgerblue")
        plt.plot(radius[ii], toroidal_strain[ii], "--", color="orange")
        plt.plot(radius[ii], vertical_strain[ii], "--", color="mediumseagreen")
    # "|" markers flag the values at the layer boundaries.
    plt.plot(bound_radius, bound_radial_strain, "|", markersize=ms, color="dodgerblue")
    plt.plot(bound_radius, bound_toroidal_strain, "|", markersize=ms, color="orange")
    plt.plot(
        bound_radius,
        bound_vertical_strain,
        "|",
        markersize=ms,
        color="mediumseagreen",
    )
    plt.grid(True)
    plt.ylabel(r"$\epsilon$", fontsize=afs)
    plt.xlabel(r"$R$ [$m$]", fontsize=afs)
    plt.legend(loc="best", fontsize=plot_conf.legend_size)
    plt.xticks(size=ats)
    plt.yticks(size=ats)
    plt.tight_layout()
    plt.savefig(f"{plot_conf.outdir}/strains.{save_format}")
    # Reset the pyplot state machine for the next figure.
    plt.clf()
    plt.cla()
def displacement(n_layers, radius, radial_displacement, save_format, plot_conf):
    """Plot the radial displacement profile of every layer and save it.

    Writes ``displacement.<save_format>`` into ``plot_conf.outdir``, then
    clears the pyplot state for whatever figure comes next.
    """
    afs = plot_conf.axis_font_size
    ats = plot_conf.axis_tick_size
    # First layer plotted explicitly, remaining layers in the same colour.
    plt.plot(radius[0], radial_displacement[0], color="dodgerblue")
    for layer_idx in range(1, n_layers):
        plt.plot(radius[layer_idx], radial_displacement[layer_idx], color="dodgerblue")
    plt.grid(True)
    plt.ylabel(r"$u_{r}$ [mm]", fontsize=afs)
    plt.xlabel(r"$R$ [$m$]", fontsize=afs)
    plt.xticks(size=ats)
    plt.yticks(size=ats)
    plt.tight_layout()
    plt.savefig(f"{plot_conf.outdir}/displacement.{save_format}")
    plt.clf()
    plt.cla()
diff --git a/process/core/io/plot_plotly_sankey.py b/process/core/io/plot/sankey/plot_plotly_sankey.py similarity index 98% rename from process/core/io/plot_plotly_sankey.py rename to process/core/io/plot/sankey/plot_plotly_sankey.py index 549e7601f..dfafabf47 100644 --- a/process/core/io/plot_plotly_sankey.py +++ b/process/core/io/plot/sankey/plot_plotly_sankey.py @@ -9,7 +9,11 @@ except ImportError: PLOT_SANKEY = False +<<<<<<<< HEAD:process/core/io/plot_plotly_sankey.py from process.core.io.mfile import MFile +======== +from process.io.mfile.mfile import MFile +>>>>>>>> 71bdc991 (Overhall CI):process/core/io/plot/sankey/plot_plotly_sankey.py def main(args=None): diff --git a/process/core/io/plot_sankey.py b/process/core/io/plot/sankey/plot_sankey.py similarity index 100% rename from process/core/io/plot_sankey.py rename to process/core/io/plot/sankey/plot_sankey.py diff --git a/process/core/io/sankey_funcs.py b/process/core/io/plot/sankey/sankey_funcs.py similarity index 63% rename from process/core/io/sankey_funcs.py rename to process/core/io/plot/sankey/sankey_funcs.py index 3c6e49b76..41366b98e 100644 --- a/process/core/io/sankey_funcs.py +++ b/process/core/io/plot/sankey/sankey_funcs.py @@ -10,6 +10,348 @@ from process.core.io.mfile import MFile +def power_balance_sankey(m_file): + m_file = MFile(m_file) + p_hcd_injected_total_mw = m_file.get("p_hcd_injected_total_mw", scan=-1) + p_plasma_ohmic_mw = m_file.get("p_plasma_ohmic_mw", scan=-1) + p_alpha_total_mw = m_file.get("p_alpha_total_mw", scan=-1) + p_neutron_total_mw = m_file.get("p_neutron_total_mw", scan=-1) + p_plasma_rad_mw = m_file.get("p_plasma_rad_mw", scan=-1) + p_fw_rad_total_mw = m_file.get("p_fw_rad_total_mw", scan=-1) + p_fw_alpha_mw = p_alpha_total_mw * ( + 1 - m_file.get("f_p_alpha_plasma_deposited", scan=-1) + ) + p_blkt_nuclear_heat_total_mw = m_file.get("p_blkt_nuclear_heat_total_mw", scan=-1) + + # Define node labels (linearized flow) + labels = [ + "H&CD injector", # 0 + "Ohmic", # 1 + 
"Plasma Fusion Power", # 2 + "Alpha particles", # 3 + "Neutrons", # 4 + "Radiation", # 5 + "First Wall", # 6 + "Blanket", # 7 + "Divertor", # 8 + "FW+Blkt", # 9 + "Primary Thermal", # 10 + "Turbine", # 11 + "Gross Electric", # 12 + "Net Electric", # 13 + "HCD Electric Power", # 14 + "HCD electric losses", # 15 + "Core systems", # 16 + "Cryo plant", # 17 + "Base plant load", # 18 + "TF power supplies", # 19 + "PF power supplies", # 20 + "Vacuum pumps", # 21 + "Tritium plant", # 22 + "Coolant pumps electric", # 23 + "Coolant pump electric losses", # 24 + "Divertor pump", # 25 + "FW+Blkt pumps", # 26 + "Shield pump", # 27 + "Shield", # 28 + "Secondary heat", # 29 + "TF nuclear heat", # 30 + "H&CD & Diagnostics", # 31 + "Total Secondary Heat", # 32 + "Turbine Loss", # 33 + "Blanket neutron multiplication", # 34 + ] + + # Define links (source, target, value) for a more linear flow + sources = [ + 0, # 0: H&CD to Fusion + 1, # 1: Ohmic to Fusion + 2, # 2: Fusion to Alpha + 2, # 3: Fusion to Neutrons + 2, # 4: Fusion to Radiation + 3, # 5: Alpha to First Wall + 4, # 6: Neutrons to Blanket + 5, # 7: Radiation to First Wall + 4, # 8: Neutrons to Divertor + 5, # 9: Radiation to Divertor + 6, # 10: First Wall to FW+Blkt + 7, # 11: Blanket to FW+Blkt + 8, # 12: Divertor to FW+Blkt + 9, # 13: FW+Blkt to Primary Thermal + 10, # 14: Primary Thermal to Turbine + 11, # 15: Turbine to Gross Electric + 12, # 16: Gross Electric to Net Electric + 12, # 17: Gross Electric to HCD Electric Power + 14, # 18: HCD Electric Power to HCD electric losses + 14, # 19: HCD Electric Power to H&CD + 12, # 20: Gross Electric to Core systems + 16, # 21: Core systems to Cryo plant + 16, # 22: Core systems to Base plant load + 16, # 23: Core systems to TF coils + 16, # 24: Core systems to PF coils + 16, # 25: Core systems to Vacuum pumps + 16, # 26: Core systems to Tritium plant + 12, # 27: Gross Electric to Coolant pumps electric + 23, # 28: Coolant pumps electric to Coolant pump electric losses + 23, 
# 29: Coolant pumps electric to Divertor pump + 23, # 30: Coolant pumps electric to FW+Blkt pumps + 26, # 31: FW+Blkt pumps to FW+Blkt + 25, # 32: Divertor pump to Divertor + 23, # 33: Coolant pumps electric to Shield pump + 27, # 34: Shield pump to Shield + 28, # 35: Shield to primary thermal + 4, # 36: Neutrons to shield + 17, # 37: Cryo plant to secondary heat + 18, # 38: Base plant load to secondary heat + 19, # 39: TF coils to secondary heat + 20, # 40: PF coils to secondary heat + 21, # 41: Vacuum pumps to secondary heat + 22, # 42: Tritium plant to secondary heat + 4, # 43: Neutrons to tf + 30, # 44: TF nuclear heat to secondary heat + 15, # 45: HCD electric losses to secondary heat + 24, # 46: Coolant pumps electric to secondary heat + 6, # 47: FW pump to primary heat, Should only show if FW and Bkt pumps are separate + 7, # 48: Blkt pump to primary heat, Should only show if FW and Blkt pumps are separate + 2, # 49 Should show in beams are present + 2, # 50: Should show in beams are present + 4, # 51 Neutrons to CP shield, should only show if CP shield is present + 2, # 52 Plasma separatrix power to divertor + 8, # 53 Divertor secondary heat, + 28, # 54 Shield secondary heat + 4, # 55 Neutron power to H&CD & Diagnostics + 5, # 56: Radiation to H&CD & Diagnostics + 29, # 57: Total Secondary Heat + 31, # 58: H&CD & Diagnostics secondary heat + 11, # 59: Turbine Loss + 4, # 60: FW nuclear heat + 3, # 61: Alpha particles back to plasma + 34, # 62: Blanket neutron multiplication + ] + targets = [ + 2, # 0: H&CD to Fusion + 2, # 1: Ohmic to Fusion + 3, # 2: Fusion to Alpha + 4, # 3: Fusion to Neutrons + 5, # 4: Fusion to Radiation + 6, # 5: Alpha to First Wall + 7, # 6: Neutrons to Blanket + 6, # 7: Radiation to First Wall + 8, # 8: Neutrons to Divertor + 8, # 9: Radiation to Divertor + 9, # 10: First Wall to FW+Blkt + 9, # 11: Blanket to FW+Blkt + 10, # 12: Divertor to FW+Blkt + 10, # 13: FW+Blkt to Primary Thermal + 11, # 14: Primary Thermal to Turbine + 12, # 
15: Turbine to Gross Electric + 13, # 16: Gross Electric to Net Electric + 14, # 17: Gross Electric to HCD Electric Power + 15, # 18: HCD Electric Power to HCD electric losses + 0, # 19: HCD Electric Power to H&CD + 16, # 20: Gross Electric to Core systems + 17, # 21: Core systems to Cryo plant + 18, # 22: Core systems to Base plant load + 19, # 23: Core systems to TF coils + 20, # 24: Core systems to PF coils + 21, # 25: Core systems to Vacuum pumps + 22, # 26: Core systems to Tritium plant + 23, # 27: Gross Electric to Coolant pumps electric + 24, # 28: Coolant pumps electric to Coolant pump electric losses + 25, # 29: Coolant pumps electric to Divertor pump + 26, # 30: Coolant pumps electric to FW+Blkt pumps + 9, # 31: FW+Blkt pumps to FW+Blkt + 8, # 32: Divertor pump to Divertor + 27, # 33: Coolant pumps electric to Shield pump + 28, # 34: Shield pump to Shield + 10, # 35: Shield to primary thermal + 28, # 36: Neutrons to shield + 29, # 37: Cryo plant to secondary heat + 29, # 38: Base plant load to secondary heat + 29, # 39: TF coils to secondary heat + 29, # 40: PF coils to secondary heat + 29, # 41: Vacuum pumps to secondary heat + 29, # 42: Tritium plant to secondary heat + 30, # 43: Neutrons to tf + 29, # 44: TF nuclear heat to secondary heat + 29, # 45: HCD electric losses to secondary heat + 29, # 46: Coolant pumps electric to secondary heat + 9, # 47: FW pump to primary heat, Should only show if FW and Bkt pumps are separate + 9, # 48: Blkt pump to primary heat, Should only show if FW and Blkt pumps are separate + 6, # 49 Should show in beams are present + 6, # 50: Should show in beams are present + 28, # 51 Neutrons to CP shield, should only show if CP shield is present + 8, # 52 Plasma separatrix power to divertor + 29, # 53 Divertor secondary heat, + 29, # 54 Shield secondary heat + 31, # 55 Neutron power to H&CD & Diagnostics + 31, # 56: Radiation to H&CD & Diagnostics + 32, # 57: Total Secondary Heat + 32, # 58: H&CD & Diagnostics secondary heat + 
33, # 59: Turbine Loss + 6, # 60: FW nuclear heat + 2, # 61: Alpha particles back to plasma + 7, # 62: Blanket neutron multiplication + ] + values = [ + p_hcd_injected_total_mw, # 0 + p_plasma_ohmic_mw, # 1 + p_alpha_total_mw, # 2 + p_neutron_total_mw, # 3 + p_plasma_rad_mw, # 4 + p_fw_alpha_mw, # 5 + p_blkt_nuclear_heat_total_mw + - m_file.get("p_blkt_multiplication_mw", scan=-1), # 6 + p_fw_rad_total_mw, # 7 + m_file.get("p_div_nuclear_heat_total_mw", scan=-1), # 8 + m_file.get("p_div_rad_total_mw", scan=-1), # 9 + m_file.get("p_fw_heat_deposited_mw", scan=-1), # 10 + m_file.get("p_blkt_heat_deposited_mw", scan=-1), # 11 + m_file.get("p_div_heat_deposited_mw", scan=-1), # 12 + m_file.get("p_fw_blkt_heat_deposited_mw", scan=-1), # 13 + m_file.get("p_plant_primary_heat_mw", scan=-1), # 14 + m_file.get("p_plant_electric_gross_mw", scan=-1), # 15 + m_file.get("p_plant_electric_net_mw", scan=-1), # 16 + m_file.get("p_hcd_electric_total_mw", scan=-1), # 17 + m_file.get("p_hcd_electric_loss_mw", scan=-1), # 18 + p_hcd_injected_total_mw, # 19 + m_file.get("p_plant_core_systems_elec_mw", scan=-1), # 20 + m_file.get("p_cryo_plant_electric_mw", scan=-1), # 21 + m_file.get("p_plant_electric_base_total_mw", scan=-1), # 22 + m_file.get("p_tf_electric_supplies_mw", scan=-1), # 23 + m_file.get("p_pf_electric_supplies_mw", scan=-1), # 24 + m_file.get("vachtmw", scan=-1), # 25 + m_file.get("p_tritium_plant_electric_mw", scan=-1), # 26 + m_file.get("p_coolant_pump_elec_total_mw", scan=-1), # 27 + m_file.get("p_coolant_pump_loss_total_mw", scan=-1), # 28 + m_file.get("p_div_coolant_pump_mw", scan=-1), # 29 + m_file.get("p_fw_blkt_coolant_pump_mw", scan=-1), # 30 + m_file.get("p_fw_blkt_coolant_pump_mw", scan=-1), # 31 + m_file.get("p_div_coolant_pump_mw", scan=-1), # 32 + m_file.get("p_shld_coolant_pump_mw", scan=-1), # 33 + m_file.get("p_shld_coolant_pump_mw", scan=-1), # 34 + m_file.get("p_shld_heat_deposited_mw", scan=-1), # 35 + m_file.get("p_shld_nuclear_heat_mw", scan=-1), # 
36 + m_file.get("p_cryo_plant_electric_mw", scan=-1), # 37 + m_file.get("p_plant_electric_base_total_mw", scan=-1), # 38 + m_file.get("p_tf_electric_supplies_mw", scan=-1), # 39 + m_file.get("p_pf_electric_supplies_mw", scan=-1), # 40 + m_file.get("vachtmw", scan=-1), # 41 + m_file.get("p_tritium_plant_electric_mw", scan=-1), # 42 + m_file.get("p_tf_nuclear_heat_mw", scan=-1), # 43 + m_file.get("p_tf_nuclear_heat_mw", scan=-1), # 44 + m_file.get("p_hcd_electric_loss_mw", scan=-1), # 45 + m_file.get("p_coolant_pump_loss_total_mw", scan=-1), # 46 + # + # Should only show if FW and Bkt pumps are seperate + m_file.get("p_fw_coolant_pump_mw", scan=-1), # 47 + m_file.get("p_blkt_coolant_pump_mw", scan=-1), # 48 + # + # Should show in beams are present + m_file.get("p_beam_shine_through_mw", scan=-1), # 49 + m_file.get("p_beam_orbit_loss_mw", scan=-1), # 50 + # + # Neutrons to CP shield, should only show if CP shield is present + m_file.get("p_cp_shield_nuclear_heat_mw", scan=-1), # 51 + # + m_file.get("p_plasma_separatrix_mw", scan=-1), # 52 + m_file.get("p_div_secondary_heat_mw", scan=-1), # 53 + m_file.get("p_shld_secondary_heat_mw", scan=-1), # 54 + m_file.get("p_fw_hcd_nuclear_heat_mw", scan=-1), # + m_file.get("p_fw_hcd_rad_total_mw", scan=-1), # 56 + m_file.get("p_plant_secondary_heat_mw", scan=-1), # 57 + m_file.get("p_hcd_secondary_heat_mw", scan=-1), # 58 + m_file.get("p_turbine_loss_mw", scan=-1), # 59 + m_file.get("p_fw_nuclear_heat_total_mw", scan=-1), # 60 + # + # Alpha particles back to plasma + p_alpha_total_mw * m_file.get("f_p_alpha_plasma_deposited", scan=-1), # 61 + m_file.get("p_blkt_multiplication_mw", scan=-1), + ] + + # Define colors for each node (hex or rgba) + node_colors = [ + "#1f77b4", # 0: H&CD injector + "#ff7f0e", # 1: Ohmic + "#2ca02c", # 2: Plasma Fusion Power + "#d62728", # 3: Alpha particles + "#9467bd", # 4: Neutrons + "#8c564b", # 5: Radiation + "#e377c2", # 6: First Wall + "#7f7f7f", # 7: Blanket + "#bcbd22", # 8: Divertor + 
"#17becf", # 9: FW+Blkt + "#aec7e8", # 10: Primary Thermal + "#ffbb78", # 11: Turbine + "#98df8a", # 12: Gross Electric + "#ff9896", # 13: Net Electric + "#c5b0d5", # 14: HCD Electric Power + "#c49c94", # 15: HCD electric losses + "#f7b6d2", # 16: Core systems + "#c7c7c7", # 17: Cryo plant + "#dbdb8d", # 18: Base plant load + "#9edae5", # 19: TF coils + "#393b79", # 20: PF coils + "#637939", # 21: Vacuum pumps + "#8c6d31", # 22: Tritium plant + "#843c39", # 23: Coolant pumps electric + "#7b4173", # 24: Coolant pump electric losses + "#5254a3", # 25: Divertor pump + "#6b6ecf", # 26: FW+Blkt pumps + "#b5cf6b", # 27: Shield pump + "#cedb9c", # 28: Shield + "#9c9ede", # 29: Secondary heat + "#e7ba52", # 30: TF nuclear heat + "#ad494a", # 31: H&CD & Diagnostics + "#a55194", # 32: Total Secondary Heat + "#393b79", # 33: Turbine Loss + "#637939", # 34: Blanket neutron multiplication + ] + + # Assign link colors to match their source node + link_colors = [node_colors[src] for src in sources] + + # Add value labels to the links + value_labels = [f"{v:.3f} MW" for v in values] + + return { + "type": "sankey", + "node": { + "pad": 30, + "thickness": 20, + "line": {"color": "black", "width": 0.5}, + "label": labels, + "color": node_colors, + }, + "link": { + "source": sources, + "target": targets, + "value": values, + "label": value_labels, + "color": link_colors, + }, + } + + +def plotly(sankey_dict, mfile): + fig = go.Figure(data=[sankey_dict]) + + fig.update_layout({ + "title_text": "Fusion Power Balance Sankey Diagram", + "font_size": 7, + "autosize": True, + "margin": {"l": 40, "r": 40, "t": 40, "b": 40}, + }) + # Strip 'MFILE' from the filename for the HTML output + # Remove the character before "MFILE" and "MFILE" itself from the filename + html_output_path = pathlib.Path( + re.sub(r"(.)?[ \.\_]?MFILE", r"\1_plotly_sankey", m_file.filename) + ).with_suffix(".html") + fig.write_html(str(html_output_path)) + print(f"Interactive Sankey diagram saved to {html_output_path}") 
+ return fig + + def plot_full_sankey( mfilename="MFILE.DAT", ): # Plots the power flow from PROCESS as a Sankey Diagram @@ -18,72 +360,70 @@ def plot_full_sankey( m_file = MFile(mfilename) # Used in [PLASMA] - p_fusion_total_mw = m_file.data["p_fusion_total_mw"].get_scan( - -1 - ) # Fusion power (MW) - p_hcd_injected_total_mw = m_file.data["p_hcd_injected_total_mw"].get_scan( - -1 + p_fusion_total_mw = m_file.get("p_fusion_total_mw", scan=-1) # Fusion power (MW) + p_hcd_injected_total_mw = m_file.get( + "p_hcd_injected_total_mw", scan=-1 ) # Total auxiliary injected power (MW) - p_plasma_ohmic_mw = m_file.data["p_plasma_ohmic_mw"].get_scan( - -1 + p_plasma_ohmic_mw = m_file.get( + "p_plasma_ohmic_mw", scan=-1 ) # Ohmic heating power (MW) totalplasma = ( p_fusion_total_mw + p_hcd_injected_total_mw + p_plasma_ohmic_mw ) # Total Power in plasma (MW) - p_neutron_total_mw = m_file.data["p_neutron_total_mw"].get_scan( - -1 + p_neutron_total_mw = m_file.get( + "p_neutron_total_mw", scan=-1 ) # Neutron fusion power (MW) - p_non_alpha_charged_mw = m_file.data["p_non_alpha_charged_mw"].get_scan( - -1 + p_non_alpha_charged_mw = m_file.get( + "p_non_alpha_charged_mw", scan=-1 ) # Non-alpha charged particle power (MW) pcharohmmw = ( p_non_alpha_charged_mw + p_plasma_ohmic_mw ) # The ohmic and charged particle power (MW) - p_alpha_total_mw = m_file.data["p_alpha_total_mw"].get_scan(-1) # Alpha power (MW) + p_alpha_total_mw = m_file.get("p_alpha_total_mw", scan=-1) # Alpha power (MW) palpinjmw = ( p_alpha_total_mw + p_hcd_injected_total_mw ) # Alpha particle and HC&D power (MW) # Used in [NEUTRONICS] - p_blkt_multiplication_mw = m_file.data["p_blkt_multiplication_mw"].get_scan( - -1 + p_blkt_multiplication_mw = m_file.get( + "p_blkt_multiplication_mw", scan=-1 ) # Energy multiplication in blanket (MW) - p_blkt_nuclear_heat_total_mw = m_file.data["p_blkt_nuclear_heat_total_mw"].get_scan( - -1 + p_blkt_nuclear_heat_total_mw = m_file.get( + "p_blkt_nuclear_heat_total_mw", scan=-1 ) 
# Total Nuclear heating in the blanket (MW) pnucemblkt = ( p_blkt_nuclear_heat_total_mw - p_blkt_multiplication_mw ) # External nuclear heating in blanket (MW) - p_div_nuclear_heat_total_mw = m_file.data["p_div_nuclear_heat_total_mw"].get_scan( - -1 + p_div_nuclear_heat_total_mw = m_file.get( + "p_div_nuclear_heat_total_mw", scan=-1 ) # Nuclear heating in the divertor (MW) - p_fw_nuclear_heat_total_mw = m_file.data["p_fw_nuclear_heat_total_mw"].get_scan( - -1 + p_fw_nuclear_heat_total_mw = m_file.get( + "p_fw_nuclear_heat_total_mw", scan=-1 ) # Nuclear heating in the first wall (MW) - p_shld_nuclear_heat_mw = m_file.data["p_shld_nuclear_heat_mw"].get_scan( - -1 + p_shld_nuclear_heat_mw = m_file.get( + "p_shld_nuclear_heat_mw", scan=-1 ) # Nuclear heating in the shield (MW) - p_tf_nuclear_heat_mw = m_file.data["p_tf_nuclear_heat_mw"].get_scan( - -1 + p_tf_nuclear_heat_mw = m_file.get( + "p_tf_nuclear_heat_mw", scan=-1 ) # Nuclear heating in the TF coil (MW) # Used in [CHARGEP] - p_plasma_separatrix_mw = m_file.data["p_plasma_separatrix_mw"].get_scan( - -1 + p_plasma_separatrix_mw = m_file.get( + "p_plasma_separatrix_mw", scan=-1 ) # Charged particle power deposited on divertor (MW) - f_p_alpha_plasma_deposited = m_file.data["f_p_alpha_plasma_deposited"].get_scan( - -1 + f_p_alpha_plasma_deposited = m_file.get( + "f_p_alpha_plasma_deposited", scan=-1 ) # Fraction of alpha power deposited in plasma p_fw_alpha_mw = p_alpha_total_mw * ( 1 - f_p_alpha_plasma_deposited ) # Alpha particles hitting first wall (MW) - p_plasma_rad_mw = m_file.data["p_plasma_rad_mw"].get_scan( - -1 + p_plasma_rad_mw = m_file.get( + "p_plasma_rad_mw", scan=-1 ) # Total radiation Power (MW) # Used in [RADIATION] - p_div_rad_total_mw = p_plasma_rad_mw * m_file.data["f_ster_div_single"].get_scan( - -1 + p_div_rad_total_mw = p_plasma_rad_mw * m_file.get( + "f_ster_div_single", scan=-1 ) # Radiation deposited on the divertor (MW) p_fw_hcd_rad_total_mw = p_plasma_rad_mw * m_file.data[ 
"f_a_fw_outboard_hcd" @@ -93,19 +433,19 @@ def plot_full_sankey( ) # Radiation deposited in the FW (MW) # Used in [DIVERTOR] - p_div_coolant_pump_mw = m_file.data["p_div_coolant_pump_mw"].get_scan( - -1 + p_div_coolant_pump_mw = m_file.get( + "p_div_coolant_pump_mw", scan=-1 ) # Divertor coolant pumping power - p_div_heat_deposited_mw = m_file.data["p_div_heat_deposited_mw"].get_scan( - -1 + p_div_heat_deposited_mw = m_file.get( + "p_div_heat_deposited_mw", scan=-1 ) # Total power extracted from divertor (MW) # Used in [FIRST_WALL] - p_fw_blkt_heat_deposited_mw = m_file.data["p_fw_blkt_heat_deposited_mw"].get_scan( - -1 + p_fw_blkt_heat_deposited_mw = m_file.get( + "p_fw_blkt_heat_deposited_mw", scan=-1 ) # Power extracted blanket & FW (MW) - p_fw_blkt_coolant_pump_mw = m_file.data["p_fw_blkt_coolant_pump_mw"].get_scan( - -1 + p_fw_blkt_coolant_pump_mw = m_file.get( + "p_fw_blkt_coolant_pump_mw", scan=-1 ) # Pump Power in FW and blanket (MW) htpmwblkt = p_fw_blkt_coolant_pump_mw / 2 # Pump power in blanket (MW) htpmwfw = p_fw_blkt_coolant_pump_mw / 2 # Pump power in FW (MW) @@ -500,36 +840,34 @@ def plot_sankey(mfilename="MFILE.DAT"): # Plot simplified power flow Sankey Dia m_file = MFile(mfilename) # Used in [PLASMA] - p_fusion_total_mw = m_file.data["p_fusion_total_mw"].get_scan( - -1 - ) # Fusion Power (MW) - p_hcd_injected_total_mw = m_file.data["p_hcd_injected_total_mw"].get_scan( - -1 + p_fusion_total_mw = m_file.get("p_fusion_total_mw", scan=-1) # Fusion Power (MW) + p_hcd_injected_total_mw = m_file.get( + "p_hcd_injected_total_mw", scan=-1 ) # Total auxiliary injected Power (MW) - p_plasma_ohmic_mw = m_file.data["p_plasma_ohmic_mw"].get_scan( - -1 + p_plasma_ohmic_mw = m_file.get( + "p_plasma_ohmic_mw", scan=-1 ) # Ohmic heating Power (MW) totalplasma = ( p_fusion_total_mw + p_hcd_injected_total_mw + p_plasma_ohmic_mw ) # Total Power in plasma (MW) # Used in [DEPOSITION] - p_plasma_rad_mw = m_file.data["p_plasma_rad_mw"].get_scan( - -1 + p_plasma_rad_mw = 
m_file.get( + "p_plasma_rad_mw", scan=-1 ) # Total radiation Power (MW) - f_ster_div_single = m_file.data["f_ster_div_single"].get_scan( - -1 + f_ster_div_single = m_file.get( + "f_ster_div_single", scan=-1 ) # Area fraction taken up by divertor - fdiv_2 = m_file.data["2*f_ster_div_single"].get_scan( - -1 + fdiv_2 = m_file.get( + "2*f_ster_div_single", scan=-1 ) # Area fraction taken up by double null divertor if fdiv_2 > 0: # Takes into account old MFILE representation of double null divertor f_ster_div_single = fdiv_2 p_div_rad_total_mw = ( p_plasma_rad_mw * f_ster_div_single ) # Radiation deposited on the divertor (MW) - f_a_fw_outboard_hcd = m_file.data["f_a_fw_outboard_hcd"].get_scan( - -1 + f_a_fw_outboard_hcd = m_file.get( + "f_a_fw_outboard_hcd", scan=-1 ) # Area fraction covered by HCD and diagnostics p_fw_hcd_rad_total_mw = ( p_plasma_rad_mw * f_a_fw_outboard_hcd @@ -537,35 +875,35 @@ def plot_sankey(mfilename="MFILE.DAT"): # Plot simplified power flow Sankey Dia p_fw_rad_total_mw = ( p_plasma_rad_mw - p_div_rad_total_mw - p_fw_hcd_rad_total_mw ) # Radiation deposited in the blanket (MW) - p_plasma_separatrix_mw = m_file.data["p_plasma_separatrix_mw"].get_scan( - -1 + p_plasma_separatrix_mw = m_file.get( + "p_plasma_separatrix_mw", scan=-1 ) # power to conducted to the divertor region (MW) - p_div_nuclear_heat_total_mw = m_file.data["p_div_nuclear_heat_total_mw"].get_scan( - -1 + p_div_nuclear_heat_total_mw = m_file.get( + "p_div_nuclear_heat_total_mw", scan=-1 ) # nuclear heating in the divertor (MW) - p_fw_nuclear_heat_total_mw = m_file.data["p_fw_nuclear_heat_total_mw"].get_scan( - -1 + p_fw_nuclear_heat_total_mw = m_file.get( + "p_fw_nuclear_heat_total_mw", scan=-1 ) # nuclear heating in the first wall (MW) - p_blkt_nuclear_heat_total_mw = m_file.data["p_blkt_nuclear_heat_total_mw"].get_scan( - -1 + p_blkt_nuclear_heat_total_mw = m_file.get( + "p_blkt_nuclear_heat_total_mw", scan=-1 ) # nuclear heating in the blanket (MW) - p_shld_nuclear_heat_mw = 
m_file.data["p_shld_nuclear_heat_mw"].get_scan( - -1 + p_shld_nuclear_heat_mw = m_file.get( + "p_shld_nuclear_heat_mw", scan=-1 ) # nuclear heating in the shield (MW) - p_cp_shield_nuclear_heat_mw = m_file.data["p_cp_shield_nuclear_heat_mw"].get_scan( - -1 + p_cp_shield_nuclear_heat_mw = m_file.get( + "p_cp_shield_nuclear_heat_mw", scan=-1 ) # nuclear heating in the CP shield (MW) - p_blkt_multiplication_mw = m_file.data["p_blkt_multiplication_mw"].get_scan( - -1 + p_blkt_multiplication_mw = m_file.get( + "p_blkt_multiplication_mw", scan=-1 ) # Blanket energy multiplication (MW) - p_alpha_total_mw = m_file.data["p_alpha_total_mw"].get_scan(-1) # Alpha power (MW) - f_p_alpha_plasma_deposited = m_file.data["f_p_alpha_plasma_deposited"].get_scan( - -1 + p_alpha_total_mw = m_file.get("p_alpha_total_mw", scan=-1) # Alpha power (MW) + f_p_alpha_plasma_deposited = m_file.get( + "f_p_alpha_plasma_deposited", scan=-1 ) # Fraction of alpha power deposited in plasma p_fw_alpha_mw = p_alpha_total_mw * ( 1 - f_p_alpha_plasma_deposited ) # Alpha power hitting 1st wall (MW) - itart = m_file.data["itart"].get_scan(-1) # switch for spherical tokamak (ST) models + itart = m_file.get("itart", scan=-1) # switch for spherical tokamak (ST) models # Power deposited on divertor (MW) totaldivetc = ( @@ -589,46 +927,46 @@ def plot_sankey(mfilename="MFILE.DAT"): # Plot simplified power flow Sankey Dia totalcpetc = p_cp_shield_nuclear_heat_mw # Used in [BLANKETSETC] - p_fw_blkt_heat_deposited_mw = m_file.data["p_fw_blkt_heat_deposited_mw"].get_scan( - -1 + p_fw_blkt_heat_deposited_mw = m_file.get( + "p_fw_blkt_heat_deposited_mw", scan=-1 ) # Heat for electricity (MW) - p_fw_blkt_coolant_pump_mw = m_file.data["p_fw_blkt_coolant_pump_mw"].get_scan( - -1 + p_fw_blkt_coolant_pump_mw = m_file.get( + "p_fw_blkt_coolant_pump_mw", scan=-1 ) # 1st wall & blanket pumping (MW) pthermmw_p = ( p_fw_blkt_heat_deposited_mw - p_fw_blkt_coolant_pump_mw ) # Heat - pumping power (MW) # Used in [PRIMARY] - 
p_plant_electric_gross_mw = m_file.data["p_plant_electric_gross_mw"].get_scan( - -1 + p_plant_electric_gross_mw = m_file.get( + "p_plant_electric_gross_mw", scan=-1 ) # gross electric power (MW) # Used in [NET] - p_plant_electric_net_mw = m_file.data["p_plant_electric_net_mw"].get_scan( - -1 + p_plant_electric_net_mw = m_file.get( + "p_plant_electric_net_mw", scan=-1 ) # net electric power (MW) p_plant_electric_recirc_mw = ( p_plant_electric_gross_mw - p_plant_electric_net_mw ) # Recirculating power (MW) # Used in [RECIRC] - p_cryo_plant_electric_mw = m_file.data["p_cryo_plant_electric_mw"].get_scan( - -1 + p_cryo_plant_electric_mw = m_file.get( + "p_cryo_plant_electric_mw", scan=-1 ) # cryogenic plant power (MW) - fachtmw = m_file.data["fachtmw"].get_scan(-1) # facility heat removal (MW) - p_tf_electric_supplies_mw = m_file.data["p_tf_electric_supplies_mw"].get_scan( - -1 + fachtmw = m_file.get("fachtmw", scan=-1) # facility heat removal (MW) + p_tf_electric_supplies_mw = m_file.get( + "p_tf_electric_supplies_mw", scan=-1 ) # total steady state TF coil AC power demand (MW) - p_tritium_plant_electric_mw = m_file.data["p_tritium_plant_electric_mw"].get_scan( - -1 + p_tritium_plant_electric_mw = m_file.get( + "p_tritium_plant_electric_mw", scan=-1 ) # power required for tritium processing (MW) - vachtmw = m_file.data["vachtmw"].get_scan(-1) # vacuum pump power (MW) - p_pf_electric_supplies_mw = m_file.data["p_pf_electric_supplies_mw"].get_scan( - -1 + vachtmw = m_file.get("vachtmw", scan=-1) # vacuum pump power (MW) + p_pf_electric_supplies_mw = m_file.get( + "p_pf_electric_supplies_mw", scan=-1 ) # Total mean wall plug power for PFC & CS (MW) p_cp_coolant_pump_elec_mw = ( - m_file.data["p_cp_coolant_pump_elec"].get_scan(-1) / 1e6 + m_file.get("p_cp_coolant_pump_elec", scan=-1) / 1e6 ) # Set pumping power to MW by dividing by 1e6 # Energy required for rest of power plant (MW) @@ -641,11 +979,11 @@ def plot_sankey(mfilename="MFILE.DAT"): # Plot simplified power flow 
Sankey Dia + p_pf_electric_supplies_mw + p_cp_coolant_pump_elec_mw ) - p_hcd_electric_total_mw = m_file.data["p_hcd_electric_total_mw"].get_scan( - -1 + p_hcd_electric_total_mw = m_file.get( + "p_hcd_electric_total_mw", scan=-1 ) # injector wall plug power (MW) - p_coolant_pump_elec_total_mw = m_file.data["p_coolant_pump_elec_total_mw"].get_scan( - -1 + p_coolant_pump_elec_total_mw = m_file.get( + "p_coolant_pump_elec_total_mw", scan=-1 ) # heat transport system electrical pump power (MW) # Initialising x and y variables for adjusting 'Plasma Heating' branch tip location diff --git a/process/core/io/process_config.py b/process/core/io/process_config.py index dbc1a02ff..47e791342 100644 --- a/process/core/io/process_config.py +++ b/process/core/io/process_config.py @@ -15,8 +15,8 @@ from numpy.random import default_rng -from process.core.io.in_dat import InDat -from process.core.io.mfile import MFile +from process.core.io.in_dat.base import InDat +from process.core.io.mfile.mfile import MFile from process.core.io.process_funcs import ( check_in_dat, set_variable_in_indat, diff --git a/process/core/io/process_funcs.py b/process/core/io/process_funcs.py index 7b45a37b0..949ed4782 100644 --- a/process/core/io/process_funcs.py +++ b/process/core/io/process_funcs.py @@ -9,8 +9,8 @@ from time import sleep from process.core.io.data_structure_dicts import get_dicts -from process.core.io.in_dat import InDat -from process.core.io.mfile import MFile +from process.core.io.in_dat.base import InDat +from process.core.io.mfile.mfile import MFile from process.data_structure import numerics logger = logging.getLogger(__name__) diff --git a/process/core/io/tools.py b/process/core/io/tools.py new file mode 100644 index 000000000..47699b565 --- /dev/null +++ b/process/core/io/tools.py @@ -0,0 +1,75 @@ +import importlib + +import click + + +def mfile_opt(exists: bool = False): + return click.option( + "-f", + "--mfile", + "mfile", + default="MFILE.DAT", + type=click.Path(exists=exists), 
+ help="The mfile to read", + ) + + +mfile_arg = click.argument("mfiles", nargs=-1, type=click.Path(exists=True)) + + +def indat_opt(default="IN.DAT"): + return click.option( + "-i", + "--input", + "indat", + type=click.Path(exists=True), + help="The path to the input file", + default=default, + ) + + +def save(help_): + return click.option("-s", "--save", "save", default=False, is_flag=True, help=help_) + + +def split_callback(ctx: click.Context, param, value: str | None) -> list[str] | None: # noqa: ARG001 + return value.split(":") if isinstance(value, str) else value + + +### Taken from click documentation +class LazyGroup(click.Group): + def __init__(self, *args, lazy_subcommands=None, **kwargs): + super().__init__(*args, **kwargs) + # lazy_subcommands is a map of the form: + # + # {command-name} -> {module-name}.{command-object-name} + # + self.lazy_subcommands = lazy_subcommands or {} + + def list_commands(self, ctx): + base = super().list_commands(ctx) + lazy = sorted(self.lazy_subcommands.keys()) + return sorted(base + lazy) + + def get_command(self, ctx, cmd_name): + if cmd_name in self.lazy_subcommands: + return self._lazy_load(cmd_name) + return super().get_command(ctx, cmd_name) + + def _lazy_load(self, cmd_name): + # lazily loading a command, first get the module name and attribute name + import_path = self.lazy_subcommands[cmd_name] + modname, cmd_object_name = import_path.rsplit(".", 1) + # do the import + mod = importlib.import_module(modname) + # get the Command object from that module + cmd_object = getattr(mod, cmd_object_name) + # check the result to make debugging easier + if not isinstance(cmd_object, click.Command): + raise ValueError( + f"Lazy loading of {import_path} failed by returning a non-command object" + ) + return cmd_object + + +### diff --git a/process/main.py b/process/main.py index 8165ede28..3e5099698 100644 --- a/process/main.py +++ b/process/main.py @@ -39,22 +39,21 @@ Box file T&M/PKNIGHT/PROCESS (from 24/01/12) """ -import 
argparse import logging import os from pathlib import Path -from typing import Any, Protocol +from typing import Protocol + +import click import process import process.core.init as init import process.data_structure as data_structure from process.core import constants -from process.core.io import ( - mfile, - plot_plotly_sankey, - plot_proc, -) from process.core.io import obsolete_vars as ov +from process.core.io.mfile import mfile +from process.core.io.plot import plot_proc +from process.core.io.plot.sankey import plot_plotly_sankey # For VaryRun from process.core.io.process_config import RunProcessConfig @@ -67,6 +66,7 @@ process_warnings, vary_iteration_variables, ) +from process.core.io.tools import LazyGroup, indat_opt from process.core.log import logging_model_handler, show_errors from process.core.process_output import OutputFileManager, oheadr from process.core.scan import Scan @@ -124,145 +124,113 @@ logger = logging.getLogger("process") -class Process: - """The main Process class.""" - - def __init__(self, args: list[Any] | None = None): - """Run Process. 
+@click.group( + cls=LazyGroup, + lazy_subcommands={ + "mfile": "process.io.mfile.cli.mfile", + "plot": "process.io.plot.cli.plot", + "indat": "process.io.in_dat.cli.new_indat", + }, + invoke_without_command=True, + no_args_is_help=True, +) +@click.version_option() +@indat_opt(default=None) +@click.option( + "-s", + "--solver", + default="vmcon", + type=str, + help="Specify which solver to use: only 'vmcon' at the moment", +) +@click.option( + "-v", + "--varyiterparams", + is_flag=True, + help="Vary iteration parameters", +) +@click.option( + "-c", + "--varyiterparamsconfig", + "config_file", + default="run_process.conf", + help="configuration file for varying iteration parameters", +) +@click.option( + "-m", + "--mfile", + "mfile_path", + default="MFILE.DAT", + help="mfile for post-processing/plotting", +) +@click.option( + "-mj", + "--mfilejson", + is_flag=True, + help="Produce a filled json from --mfile arg in working dir", +) +@click.option( + "--update-obsolete", + is_flag=True, + help="Automatically update obsolete variables in the IN.DAT file", +) +@click.option( + "--full-output", + is_flag=True, + help="Run all summary plotting scripts for the output", +) +@click.pass_context +def process_cli( + ctx, + indat, + solver, + varyiterparams, + config_file, + mfile_path, + mfilejson, + update_obsolete, + full_output, +): + """ + \b + PROCESS + Power Reactor Optimisation Code + Copyright (c) [2023] [United Kingdom Atomic Energy Authority] - :param args: Arguments to parse, defaults to None - """ - self.parse_args(args) - self.run_mode() - self.post_process() + \b + Contact + James Morris : james.morris2@ukaea.uk + Jonathan Maddock : jonathan.maddock@ukaea.uk - def parse_args(self, args: list[Any] | None): - """Parse the command-line arguments, such as the input filename. 
+ GitHub : https://github.com/ukaea/PROCESS + """ + if ctx.invoked_subcommand is None: + if indat is None: + raise click.BadParameter("IN.DAT not specified") + if varyiterparams: + runtype = VaryRun(config_file, solver) + else: + runtype = SingleRun(indat, solver, update_obsolete=update_obsolete) - Parameters - ---------- - args : - Arguments to parse - """ - parser = argparse.ArgumentParser( - formatter_class=argparse.RawDescriptionHelpFormatter, - description=( - "PROCESS\n" - "Power Reactor Optimisation Code\n" - "Copyright (c) [2023] [United Kingdom Atomic Energy Authority]\n" - "\n" - "Contact\n" - "James Morris : james.morris2@ukaea.uk\n" - "Jonathan Maddock : jonathan.maddock@ukaea.uk\n" - "\n" - "GitHub : https://github.com/ukaea/PROCESS\n" - ), - ) + runtype.run() - # Optional args - parser.add_argument( - "-i", - "--input", - default="IN.DAT", - metavar="input_file_path", - type=str, - help="The path to the input file that Process runs on", - ) - parser.add_argument( - "-s", - "--solver", - default="vmcon", - metavar="solver_name", - type=str, - help="Specify which solver to use: only 'vmcon' at the moment", - ) - parser.add_argument( - "-v", - "--varyiterparams", - action="store_true", - help="Vary iteration parameters", - ) - parser.add_argument( - "-c", - "--varyiterparamsconfig", - metavar="config_file", - default="run_process.conf", - help="configuration file for varying iteration parameters", - ) - parser.add_argument( - "-m", - "--mfile", - default="MFILE.DAT", - help="mfile for post-processing/plotting", - ) - parser.add_argument( - "-mj", - "--mfilejson", - action="store_true", - help="Produce a filled json from --mfile arg in working dir", - ) - parser.add_argument( - "--version", - action="store_true", - help="Print the version of PROCESS to the terminal", - ) - parser.add_argument( - "--update-obsolete", - action="store_true", - help="Automatically update obsolete variables in the IN.DAT file", - ) - parser.add_argument( - "--full-output", - 
action="store_true", - help="Run all summary plotting scripts for the output", - ) - - # If args is not None, then parse the supplied arguments. This is likely - # to come from the test suite when testing command-line arguments; the - # method is being run from the test suite. - # If args is None, then use actual command-line arguments (e.g. - # sys.argv), as the method is being run from the command-line. - self.args = parser.parse_args(args) - # Store namespace object of the args - - def run_mode(self): - """Determine how to run Process.""" - if self.args.version: - print(process.__version__) - return - # Store run object: useful for testing - if self.args.varyiterparams: - self.run = VaryRun(self.args.varyiterparamsconfig, self.args.solver) - else: - self.run = SingleRun( - self.args.input, - self.args.solver, - update_obsolete=self.args.update_obsolete, - ) - self.run.run() - - def post_process(self): - """Perform post-run actions, like plotting the mfile.""" - # TODO Currently, Process will always run on an input file beforehand. - # It would be better to not require this, so just plot_proc could be - # run, for example. - if self.args.mfilejson: + if mfilejson: # Produce a json file containing mfile output, useful for VVUQ work. 
- mfile_path = Path(self.args.mfile) + mfile_path = Path(mfile_path) mfile_data = mfile.MFile(filename=mfile_path) mfile_data.open_mfile() mfile_data.write_to_json() - if self.args.full_output: + + if full_output: # Run all summary plotting scripts for the output - mfile_path = Path(str(self.args.input).replace("IN.DAT", "MFILE.DAT")) - mfile_str = str(mfile_path.resolve()) - print(f"Plotting mfile {mfile_str}") + mfile_path = Path(mfile_path) if mfile_path.exists(): - plot_proc.main(args=["-f", mfile_str]) + mfile_str = mfile_path.resolve().as_posix() + print(f"Plotting mfile {mfile_str}") + plot_proc.setup_plot(mfile_path) plot_plotly_sankey.main(args=["-m", mfile_str]) - else: - logger.error("mfile to be used for plotting doesn't exist") + logger.error(f"Cannot find mfile for plotting {mfile_path}") class VaryRun: @@ -788,24 +756,3 @@ def setup_loggers(working_directory_log_path: Path | None = None): logging_file_input_location_handler.setLevel(logging.INFO) logging_file_input_location_handler.setFormatter(logging_formatter) logger.addHandler(logging_file_input_location_handler) - - -def main(args: list[Any] | None = None): - """Run Process. - - The args parameter is used to control command-line arguments when running - tests. Optional args can be supplied by different tests, which are then - used instead of command-line arguments by argparse. This allows testing of - different command-line arguments from the test suite. 
- - Parameters - ---------- - args : - Arguments to parse, defaults to None - """ - - Process(args) - - -if __name__ == "__main__": - main() diff --git a/pyproject.toml b/pyproject.toml index 8653807cc..1ab12f00e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -79,7 +79,7 @@ lint = ["pre-commit>=2.16.0", "ruff==0.9.3", "ty"] all = ["process[test,docs,lint,examples,plotly]"] [project.scripts] -process = "process.main:main" +process = "process.main:process_cli" [tool.hatch.build.targets.wheel] artifacts = [ diff --git a/tracking/tracking_data.py b/tracking/tracking_data.py index ce35f3e87..29bb2bdd9 100644 --- a/tracking/tracking_data.py +++ b/tracking/tracking_data.py @@ -31,7 +31,6 @@ e.g. FOO.bar says `bar`'s parent module is `FOO`. """ -import argparse import datetime import itertools import json @@ -559,39 +558,3 @@ def plot_entrypoint(arguments): output=arguments.out, tracking_variables_file=arguments.tracking_variables_file, ) - - -if __name__ == "__main__": - parser = argparse.ArgumentParser() - - parser.add_argument("mode", type=str, choices=["track", "plot"]) - - parser.add_argument("db", type=str) - parser.add_argument("-o", "--out", type=str, default=None) - parser.add_argument("-m", "--mfile", type=str, default=None) - parser.add_argument( - "--commit", - type=str, - default=None, - help="The current commit message. If not provided, the code attempts to query to Git repository.", - ) - parser.add_argument( - "--hash", - type=str, - default=None, - help="The current commit hash. If not provided, the code attempts to query to Git repository.", - ) - parser.add_argument( - "--tracking-variables-file", - type=pathlib.Path, - default=None, - help="A JSON file containing a list of variables to track." 
- "See the description of DEFAULT_TRACKING_VARIABLES for details on formatting the strings in the list.", - ) - - arguments = parser.parse_args() - - if arguments.mode == "track": - track_entrypoint(arguments) - elif arguments.mode == "plot": - plot_entrypoint(arguments) From 01898045724f01d75ebbad523a6b5984d73b8f13 Mon Sep 17 00:00:00 2001 From: james <81617086+je-cook@users.noreply.github.com> Date: Tue, 24 Feb 2026 14:39:31 +0000 Subject: [PATCH 03/17] plot_scans --- process/core/io/plot/cli.py | 77 ++++++++-- process/core/io/plot/plot_scans.py | 216 ++++++++++++----------------- 2 files changed, 154 insertions(+), 139 deletions(-) diff --git a/process/core/io/plot/cli.py b/process/core/io/plot/cli.py index 2d28ffdb3..020804cbe 100644 --- a/process/core/io/plot/cli.py +++ b/process/core/io/plot/cli.py @@ -3,8 +3,9 @@ import click from process.io.plot.plot_proc import setup_plot +from process.io.plot.plot_scans import plot_scan from process.io.plot.plot_stress_tf import plot_stress -from process.io.tools import LazyGroup, mfile_arg, mfile_opt +from process.io.tools import LazyGroup, mfile_arg, mfile_opt, split_callback @click.group( @@ -24,16 +25,20 @@ def plot(): @click.option( "-yv", "--y-vars", + "output_names", + callback=split_callback, required=True, help=( "Select the output variables\nMore than one output can be plotted " - "eg: -yv 'var1 var2'\nA separate plot will be created for each " + "eg: -yv 'var1:var2'\nA separate plot will be created for each " "inputs" ), ) @click.option( "-yv2", "--y-vars2", + "output_names2", + callback=split_callback, default="", help=( "Select the 2nd axis output variable\n " @@ -45,33 +50,31 @@ def plot(): "-o", "--outputdir", default=Path.cwd(), + type=click.Path(), help="Output directory for plots, defaults to current working directory.", ) @click.option( "-out", - "--term_output", + "--term-output", is_flag=True, help="Option to show scans values on terminal", ) @click.option( "-sf", "--save-format", - nargs="?", 
default="pdf", help="Output format (default='pdf') ", ) @click.option( "-afs", "--axis-font-size", - nargs="?", default=18, help="Axis label font size selection (default=18)", type=int, ) @click.option( "-ats", - "--axis-ticklabel-size", - nargs="?", + "--axis-tick-size", default=16, help="Axis tick label font size selection (default=16)", type=int, @@ -85,14 +88,17 @@ def plot(): @click.option( "-xm", "--x-axis-max", + callback=split_callback, default="", help=( - "Used to set the x value corresponding to 100 percent when \nconverting from absolute to percent values." + "Used to set the x value corresponding to 100 percent when " + "converting from absolute to percent values. Multiple values separated with :" ), ) @click.option( "-xr", "--x-axis-range", + callback=split_callback, default="", help=("Used to set the range for x axis"), ) @@ -105,6 +111,7 @@ def plot(): @click.option( "-y2%", "--y-axis2-percent", + "y_axis_percent2", is_flag=True, help=( "Used to set the y axis ticks to percentages in place of absolute \nvalues. For the twinned axis if present." @@ -113,6 +120,7 @@ def plot(): @click.option( "-ym", "--y-axis-max", + callback=split_callback, default="", help=( "Used to set the y value corresponding to 100 percent when \nconverting from absolute to percent values." @@ -121,6 +129,7 @@ def plot(): @click.option( "-ym2", "--y-axis2-max", + callback=split_callback, default="", help=( "Used to set the y value corresponding to 100 percent when \nconverting from absolute to percent values." @@ -130,12 +139,15 @@ def plot(): @click.option( "-yr", "--y-axis-range", + callback=split_callback, default="", help=("Used to set the range for y axis"), ) @click.option( "-yr2", "--y-axis2-range", + "y_axis_range2", + callback=split_callback, default="", help=("Used to set the range for y axis. For the twinned axis if present."), ) @@ -143,6 +155,7 @@ def plot(): "-ln", "--label-name", default="", + callback=split_callback, help=( "Label names for plot legend. 
If multiple input files used then \n" "list the same number of label names eg: -nl 'leg1 leg2'\n" @@ -152,6 +165,7 @@ def plot(): @click.option( "-2DC", "--two-dimensional-contour", + "twoD_contour", is_flag=True, help=( "Option to plot 2D scans as a coloured contour plot instead of a line plot \n " @@ -168,10 +182,51 @@ def plot(): "Variables will be plotted in order of input" ), ) -def plot_scans(): +def plot_scans_cli( + mfiles, + output_names, + output_names2, + outputdir, + term_output, + save_format, + axis_font_size, + axis_tick_size, + x_axis_percent, + x_axis_max, + x_axis_range, + y_axis_percent, + y_axis_percent2, + y_axis_max, + y_axis2_max, + y_axis_range, + y_axis_range2, + label_name, + twod_contour, + stack_plots, +): """Plot optimisation information""" - - return parser.parse_args(args) + return plot_scan( + mfiles, + output_names, + output_names2, + outputdir, + term_output, + save_format, + axis_font_size, + axis_tick_size, + x_axis_percent, + list(map(float, x_axis_max)), + list(map(float, x_axis_range)), + y_axis_percent, + y_axis_percent2, + list(map(float, y_axis_max)), + list(map(float, y_axis2_max)), + list(map(float, y_axis_range)), + list(map(float, y_axis_range2)), + label_name, + twod_contour, + stack_plots, + ) @plot.command("tf-stress", no_args_is_help=True) diff --git a/process/core/io/plot/plot_scans.py b/process/core/io/plot/plot_scans.py index 4f62a2abc..d0cbd6f0a 100644 --- a/process/core/io/plot/plot_scans.py +++ b/process/core/io/plot/plot_scans.py @@ -23,63 +23,50 @@ """ import math -import os +from pathlib import Path import matplotlib.pyplot as plt import matplotlib.ticker as mtick import numpy as np -# PROCESS libraries -import process.core.io.mfile as mf +import process.core.io.mfile.mfile as mf from process.core.io.variable_metadata import var_dicts as meta -def main(args=None): - """Main plot scans script. 
- - Parameters - ---------- - args : list, optional - optional command-line args from test function, defaults to None - """ - args = parse_args(args) - - # Parameters to be used as function input - # --------------------------------------- - input_files = str(args.input_files) - output_names = str(args.y_vars) - output_names2 = str(args.y_vars2) - save_format = str(args.save_format) - term_output = args.term_output - label_name = str(args.label_name) - x_axis_percentage = args.x_axis_percent - x_max_input = list(filter(None, args.x_axis_max.split(" "))) - y_axis_percentage = args.y_axis_percent - y_max_input = list(filter(None, args.y_axis_max.split(" "))) - y_axis_percentage2 = args.y_axis_2_percent - y_max2_input = list(filter(None, args.y_axis_2_max.split(" "))) - two_dimensional_contour = args.two_dimensional_contour - stack_plots = args.stack_plots - # --------------------------------------- - - # Input checks - # ------------ - # Formting the inputs - output_names = list(filter(None, output_names.split(" "))) - output_names2 = list(filter(None, output_names2.split(" "))) - input_files = list(filter(None, input_files.split(" "))) - label_name = list(filter(None, label_name.split(" "))) +def plot_scan( + mfiles: list[Path], + output_names, + output_names2, + outputdir: Path, + term_output, + save_format, + axis_font_size, + axis_tick_size, + x_axis_percent, + x_axis_max: list[float], + x_axis_range, + y_axis_percent, + y_axis_percent2, + y_axis_max, + y_axis2_max, + y_axis_range, + y_axis_range2, + label_name, + twod_contour, + stack_plots, +): + """Main plot scans script.""" + + input_files = mfiles + x_max_input = x_axis_max + + y_max_input = y_axis_max + y_max2_input = y_axis2_max # If the input file is a directory, add MFILE.DAT for ii in range(len(input_files)): - if os.path.isdir(input_files[ii]): - input_files[ii] = input_files[ii].replace("/", "") - input_files[ii] = input_files[ii] + "/MFILE.DAT" - - # Check for the existence of the MFILE - if not 
os.path.isfile(input_files[ii]): - print(f"ERROR : The {input_files[ii]} MFILE does not exist, skipping it") - input_files.remove(input_files[ii]) + if input_files[ii].is_dir(): + input_files[ii] = input_files[ii] / "MFILE.DAT" # nsweep varible dict # ------------------- @@ -220,62 +207,38 @@ def main(args=None): # Plot settings # ------------- # Plot cosmetic settings - axis_tick_size = args.axis_ticklabel_size - legend_size = 12 - axis_font_size = args.axis_font_size - x_axis_range = list(filter(None, args.x_axis_range.split(" "))) - if x_axis_range != []: - x_axis_range = list(np.float64(x_axis_range)) - y_axis_range = list(filter(None, args.y_axis_range.split(" "))) - if y_axis_range != []: - y_axis_range = list(np.float64(y_axis_range)) - y_axis_range2 = list(filter(None, args.y_axis_2_range.split(" "))) - if y_axis_range2 != []: - y_axis_range2 = list(np.float64(y_axis_range2)) - - if len(x_max_input) != len(output_names): + def _format_lists(inp, output_names): x_max = [] - for i in range(len(output_names)): - if x_max_input != []: - j = 0 - try: - x_max += [float(x_max_input[i])] - j += 1 - except IndexError: - x_max += [float(x_max_input[j])] - else: - x_max += [None] - else: - x_max = np.float64(x_max_input) - if len(y_max_input) != len(output_names): - y_max = [] - for i in range(len(output_names)): - if y_max_input != []: + if inp != []: + for i in range(len(output_names)): j = 0 try: - y_max += [float(y_max_input[i])] + x_max += [float(inp[i])] j += 1 except IndexError: - y_max += [float(y_max_input[j])] - else: - y_max += [None] - else: - y_max = np.float64(y_max_input) - if output_names2 != []: - if len(y_max2_input) != len(output_names): - y_max2 = [] - for i in range(len(output_names)): - if y_max2_input != []: - j = 0 - try: - y_max2 += [float(y_max2_input[i])] - j += 1 - except IndexError: - y_max2 += [float(y_max2_input[j])] - else: - y_max2 += [None] + x_max += [float(inp[j])] else: - y_max2 = np.float64(y_max2_input) + x_max = [None] * 
len(output_names) + + return x_max + + legend_size = 12 + x_max = ( + _format_lists(x_max_input, output_names) + if len(x_max_input) != len(output_names) + else np.float64(x_max_input) + ) + y_max = ( + _format_lists(y_max_input, output_names) + if len(y_max_input) != len(output_names) + else np.float64(y_max_input) + ) + if output_names2 != []: + y_max2 = ( + _format_lists(y_max2_input, output_names) + if len(y_max2_input) != len(output_names) + else np.float64(y_max2_input) + ) else: y_max2 = y_max2_input # ------------- @@ -417,12 +380,7 @@ def main(args=None): for input_file in input_files: # Legend label formating if label_name == []: - labl = input_file - if "/MFILE.DAT" in input_file: - labl = input_file[:-10] - elif "MFILE.DAT" in input_file: - labl = input_file[:-9] - labl = labl.replace("_", " ") + labl = input_file.name else: labl = label_name[kk] kk = kk + 1 @@ -438,7 +396,7 @@ def main(args=None): ) if y_axis_range != []: y_divisions = (y_axis_range[1] - y_axis_range[0]) / 10 - if y_axis_percentage: + if y_axis_percent: if y_max[index] is None: y_max[index] = max( np.abs(output_arrays[input_file][output_name]) @@ -454,13 +412,13 @@ def main(args=None): ) ax.yaxis.set_major_formatter(yticks) if y_axis_range != []: - if y_axis_percentage is False: + if y_axis_percent is False: y_range = y_axis_range ax.set_ylim(y_range[0], y_range[1]) ax.yaxis.set_major_locator(mtick.MultipleLocator(y_divisions)) if x_axis_range != []: x_divisions = (x_axis_range[1] - x_axis_range[0]) / 10 - if x_axis_percentage: + if x_axis_percent: if x_max[index] is None: x_max[index] = max(np.abs(scan_var_array[input_file])) xticks = mtick.PercentFormatter(x_max[index]) @@ -476,7 +434,7 @@ def main(args=None): plt.rc("xtick", labelsize=axis_tick_size) plt.rc("ytick", labelsize=axis_tick_size) if x_axis_range != []: - if x_axis_percentage is False: + if x_axis_percent is False: x_range = x_axis_range plt.xlim(x_range[0], x_range[1]) 
ax.xaxis.set_major_locator(mtick.MultipleLocator(x_divisions)) @@ -492,7 +450,7 @@ def main(args=None): ) if y_axis_range != []: y_divisions = (y_axis_range[1] - y_axis_range[0]) / 10 - if y_axis_percentage: + if y_axis_percent: if y_max[index] is None: y_max[index] = max( np.abs(output_arrays[input_file][output_name]) @@ -510,7 +468,7 @@ def main(args=None): output_names.index(output_name) ].yaxis.set_major_formatter(yticks) if y_axis_range != []: - if y_axis_percentage is False: + if y_axis_percent is False: y_range = y_axis_range axs[output_names.index(output_name)].set_ylim( y_range[0], y_range[1] @@ -520,7 +478,7 @@ def main(args=None): ) if x_axis_range != []: x_divisions = (x_axis_range[1] - x_axis_range[0]) / 10 - if x_axis_percentage: + if x_axis_percent: if x_max[index] is None: x_max[index] = max(np.abs(scan_var_array[input_file])) xticks = mtick.PercentFormatter(x_max[index]) @@ -536,7 +494,7 @@ def main(args=None): output_names.index(output_name) ].xaxis.set_major_formatter(xticks) if x_axis_range != []: - if x_axis_percentage is False: + if x_axis_percent is False: x_range = x_axis_range plt.xlim(x_range[0], x_range[1]) axs[output_names.index(output_name)].xaxis.set_major_locator( @@ -555,7 +513,7 @@ def main(args=None): ) if y_axis_range != []: y_divisions = (y_axis_range[1] - y_axis_range[0]) / 10 - if y_axis_percentage: + if y_axis_percent: if y_max[index] is None: y_max[index] = max( np.abs(output_arrays[input_file][output_name]) @@ -571,7 +529,7 @@ def main(args=None): ) ax.yaxis.set_major_formatter(yticks) if y_axis_range != []: - if y_axis_percentage is False: + if y_axis_percent is False: y_range = y_axis_range ax.set_ylim(y_range[0], y_range[1]) ax.yaxis.set_major_locator( @@ -579,7 +537,7 @@ def main(args=None): ) if x_axis_range != []: x_divisions = (x_axis_range[1] - x_axis_range[0]) / 10 - if x_axis_percentage: + if x_axis_percent: if x_max[index] is None: x_max[index] = max(np.abs(scan_var_array[input_file])) xticks = 
mtick.PercentFormatter(x_max[index]) @@ -593,7 +551,7 @@ def main(args=None): ) ax.xaxis.set_major_formatter(xticks) if x_axis_range != []: - if x_axis_percentage is False: + if x_axis_percent is False: x_range = x_axis_range plt.xlim(x_range[0], x_range[1]) ax.xaxis.set_major_locator( @@ -621,7 +579,7 @@ def main(args=None): ) if y_axis_range2 != []: y_divisions2 = (y_axis_range2[1] - y_axis_range2[0]) / 10 - if y_axis_percentage2: + if y_axis_percent2: if y_max2[index] is None: y_max2[index] = max( np.abs(output_arrays2[input_file][output_name2]) @@ -637,7 +595,7 @@ def main(args=None): ) ax2.yaxis.set_major_formatter(yticks2) if y_axis_range2 != []: - if y_axis_percentage2 is False: + if y_axis_percent2 is False: y_range2 = y_axis_range2 ax2.set_ylim(y_range2[0], y_range2[1]) ax2.yaxis.set_major_locator(mtick.MultipleLocator(y_divisions2)) @@ -749,7 +707,7 @@ def main(args=None): extra_str = f"{output_name}{f'_vs_{output_name2}' if output_names2 != [] else ''}" plt.savefig( - f"{args.outputdir}/scan_{scan_var_name}_vs_{extra_str}.{save_format}", + outputdir / f"scan_{scan_var_name}_vs_{extra_str}.{save_format}", dpi=300, ) if not stack_plots: # Display plot (used in Jupyter notebooks) @@ -802,7 +760,7 @@ def main(args=None): # Declaring the outputs output_arrays = [] - if two_dimensional_contour: + if twod_contour: output_contour_z = np.zeros((n_scan_1, n_scan_2)) x_contour = [ m_file.data[scan_2_var_name].get_scan(i + 1) for i in range(n_scan_2) @@ -861,7 +819,7 @@ def main(args=None): ) if y_axis_range != []: y_divisions = (y_axis_range[1] - y_axis_range[0]) / 10 - if y_axis_percentage: + if y_axis_percent: if y_max[index] is None: y_max[index] = max(np.abs(y_contour)) yticks = mtick.PercentFormatter(y_max[index]) @@ -873,13 +831,13 @@ def main(args=None): ) ax.yaxis.set_major_formatter(yticks) if y_axis_range != []: - if y_axis_percentage is False: + if y_axis_percent is False: y_range = y_axis_range ax.set_ylim(y_range[0], y_range[1]) 
ax.yaxis.set_major_locator(mtick.MultipleLocator(y_divisions)) if x_axis_range != []: x_divisions = (x_axis_range[1] - x_axis_range[0]) / 10 - if x_axis_percentage: + if x_axis_percent: if x_max[index] is None: x_max[index] = max(np.abs(x_contour)) xticks = mtick.PercentFormatter(x_max[index]) @@ -891,7 +849,7 @@ def main(args=None): ) ax.xaxis.set_major_formatter(xticks) if x_axis_range != []: - if x_axis_percentage is False: + if x_axis_percent is False: x_range = x_axis_range plt.xlim(x_range[0], x_range[1]) ax.xaxis.set_major_locator(mtick.MultipleLocator(x_divisions)) @@ -899,7 +857,8 @@ def main(args=None): plt.rc("ytick", labelsize=axis_tick_size) plt.tight_layout() plt.savefig( - f"{args.outputdir}/scan_{output_name}_vs_{scan_var_name}_{scan_2_var_name}.{save_format}" + outputdir + / f"scan_{output_name}_vs_{scan_var_name}_{scan_2_var_name}.{save_format}" ) plt.grid(True) plt.show() @@ -953,7 +912,7 @@ def main(args=None): ] if y_axis_range != []: y_divisions = (y_axis_range[1] - y_axis_range[0]) / 10 - if y_axis_percentage: + if y_axis_percent: if y_max[index] is None: y_max[index] = max(np.abs(y_data)) yticks = mtick.PercentFormatter(y_max[index]) @@ -965,7 +924,7 @@ def main(args=None): ) ax.yaxis.set_major_formatter(yticks) if y_axis_range != []: - if y_axis_percentage is False: + if y_axis_percent is False: y_range = y_axis_range ax.set_ylim(y_range[0], y_range[1]) ax.yaxis.set_major_locator(mtick.MultipleLocator(y_divisions)) @@ -974,7 +933,7 @@ def main(args=None): ] if x_axis_range != []: x_divisions = (x_axis_range[1] - x_axis_range[0]) / 10 - if x_axis_percentage: + if x_axis_percent: if x_max[index] is None: x_max[index] = max(np.abs(x_data)) xticks = mtick.PercentFormatter(x_max[index]) @@ -986,7 +945,7 @@ def main(args=None): ) ax.xaxis.set_major_formatter(xticks) if x_axis_range != []: - if x_axis_percentage is False: + if x_axis_percent is False: x_range = x_axis_range plt.xlim(x_range[0], x_range[1]) 
ax.xaxis.set_major_locator(mtick.MultipleLocator(x_divisions)) @@ -994,7 +953,8 @@ def main(args=None): plt.rc("ytick", labelsize=8) plt.tight_layout() plt.savefig( - f"{args.outputdir}/scan_{output_name}_vs_{scan_var_name}_{scan_2_var_name}.{save_format}" + outputdir + / f"scan_{output_name}_vs_{scan_var_name}_{scan_2_var_name}.{save_format}" ) # Display plot (used in Jupyter notebooks) From 493fb08b60f8fc35cba2f0bdaa03b24065b25b84 Mon Sep 17 00:00:00 2001 From: james <81617086+je-cook@users.noreply.github.com> Date: Tue, 24 Feb 2026 15:35:04 +0000 Subject: [PATCH 04/17] sankey --- process/core/io/plot/cli.py | 2 +- process/core/io/plot/sankey/cli.py | 24 ++ .../core/io/plot/sankey/plot_plotly_sankey.py | 401 ------------------ process/core/io/plot/sankey/plot_sankey.py | 41 -- process/core/io/plot/sankey/sankey_funcs.py | 392 +++++++++-------- process/main.py | 4 +- 6 files changed, 220 insertions(+), 644 deletions(-) create mode 100644 process/core/io/plot/sankey/cli.py delete mode 100644 process/core/io/plot/sankey/plot_plotly_sankey.py delete mode 100644 process/core/io/plot/sankey/plot_sankey.py diff --git a/process/core/io/plot/cli.py b/process/core/io/plot/cli.py index 020804cbe..e68c94c3d 100644 --- a/process/core/io/plot/cli.py +++ b/process/core/io/plot/cli.py @@ -12,7 +12,7 @@ cls=LazyGroup, lazy_subcommands={ "costs": "process.io.plot.costs.cli.costs", - # "sankey": "process.io.plot.sankey.cli", + "sankey": "process.io.plot.sankey.cli.sankey", }, ) def plot(): diff --git a/process/core/io/plot/sankey/cli.py b/process/core/io/plot/sankey/cli.py new file mode 100644 index 000000000..817c9e0c8 --- /dev/null +++ b/process/core/io/plot/sankey/cli.py @@ -0,0 +1,24 @@ +""" +Code to display the power flow of a PROCESS run in a Sankey diagram + +Input file: +MFILE.DAT +""" + +import click + +from process.io.plot.sankey.sankey_funcs import plot_sankey, plot_sankey_plotly +from process.io.tools import mfile_opt + + +@click.command("sankey", no_args_is_help=True) 
+@mfile_opt +@click.option("-fmt", "--format", "format_", default="pdf", help="file format []") +def sankey(mfile, format_): + """Plot the power flow in PROCESS using a Sankey diagram.""" + if format_ in {"html", "plotly"}: + out = plot_sankey_plotly(mfile) + if out is not None: + return out + + return plot_sankey(mfile, format_) diff --git a/process/core/io/plot/sankey/plot_plotly_sankey.py b/process/core/io/plot/sankey/plot_plotly_sankey.py deleted file mode 100644 index dfafabf47..000000000 --- a/process/core/io/plot/sankey/plot_plotly_sankey.py +++ /dev/null @@ -1,401 +0,0 @@ -import argparse -import pathlib -import re - -try: - import plotly.graph_objects as go - - PLOT_SANKEY = True -except ImportError: - PLOT_SANKEY = False - -<<<<<<<< HEAD:process/core/io/plot_plotly_sankey.py -from process.core.io.mfile import MFile -======== -from process.io.mfile.mfile import MFile ->>>>>>>> 71bdc991 (Overhall CI):process/core/io/plot/sankey/plot_plotly_sankey.py - - -def main(args=None): - if not PLOT_SANKEY: - print( - "\nPlotly is not installed, unable to create sankey diagram!\n" - "Install plotly by installing the optional 'plotly' dependency " - "e.g. \"pip install -e '.[plotly]'\"" - ) - return - - parser = argparse.ArgumentParser( - description="Program to plot the power flow in PROCESS using a Sankey diagram." 
- ) - - parser.add_argument("-e", "--end", default="pdf", help="file format, default = pdf") - - parser.add_argument( - "-m", "--mfile", default="MFILE.DAT", help="mfile name, default = MFILE.DAT" - ) - - args = parser.parse_args(args) - - plot_power_balance_sankey(args.mfile) - - -def plot_power_balance_sankey(m_file): - m_file = MFile(m_file) - p_hcd_injected_total_mw = m_file.data["p_hcd_injected_total_mw"].get_scan(-1) - p_plasma_ohmic_mw = m_file.data["p_plasma_ohmic_mw"].get_scan(-1) - p_alpha_total_mw = m_file.data["p_alpha_total_mw"].get_scan(-1) - p_neutron_total_mw = m_file.data["p_neutron_total_mw"].get_scan(-1) - p_plasma_rad_mw = m_file.data["p_plasma_rad_mw"].get_scan(-1) - p_fw_rad_total_mw = m_file.data["p_fw_rad_total_mw"].get_scan(-1) - p_fw_alpha_mw = p_alpha_total_mw * ( - 1 - m_file.data["f_p_alpha_plasma_deposited"].get_scan(-1) - ) - p_blkt_nuclear_heat_total_mw = m_file.data["p_blkt_nuclear_heat_total_mw"].get_scan( - -1 - ) - - # Define node labels (linearized flow) - labels = [ - "H&CD injector", # 0 - "Ohmic", # 1 - "Plasma Fusion Power", # 2 - "Alpha particles", # 3 - "Neutrons", # 4 - "Radiation", # 5 - "First Wall", # 6 - "Blanket", # 7 - "Divertor", # 8 - "FW+Blkt", # 9 - "Primary Thermal", # 10 - "Turbine", # 11 - "Gross Electric", # 12 - "Net Electric", # 13 - "HCD Electric Power", # 14 - "HCD electric losses", # 15 - "Core systems", # 16 - "Cryo plant", # 17 - "Base plant load", # 18 - "TF power supplies", # 19 - "PF power supplies", # 20 - "Vacuum pumps", # 21 - "Tritium plant", # 22 - "Coolant pumps electric", # 23 - "Coolant pump electric losses", # 24 - "Divertor pump", # 25 - "FW+Blkt pumps", # 26 - "Shield pump", # 27 - "Shield", # 28 - "Secondary heat", # 29 - "TF nuclear heat", # 30 - "H&CD & Diagnostics", # 31 - "Total Secondary Heat", # 32 - "Turbine Loss", # 33 - "Blanket neutron multiplication", # 34 - ] - - # Define links (source, target, value) for a more linear flow - sources = [ - 0, # 0: H&CD to Fusion - 1, # 1: 
Ohmic to Fusion - 2, # 2: Fusion to Alpha - 2, # 3: Fusion to Neutrons - 2, # 4: Fusion to Radiation - 3, # 5: Alpha to First Wall - 4, # 6: Neutrons to Blanket - 5, # 7: Radiation to First Wall - 4, # 8: Neutrons to Divertor - 5, # 9: Radiation to Divertor - 6, # 10: First Wall to FW+Blkt - 7, # 11: Blanket to FW+Blkt - 8, # 12: Divertor to FW+Blkt - 9, # 13: FW+Blkt to Primary Thermal - 10, # 14: Primary Thermal to Turbine - 11, # 15: Turbine to Gross Electric - 12, # 16: Gross Electric to Net Electric - 12, # 17: Gross Electric to HCD Electric Power - 14, # 18: HCD Electric Power to HCD electric losses - 14, # 19: HCD Electric Power to H&CD - 12, # 20: Gross Electric to Core systems - 16, # 21: Core systems to Cryo plant - 16, # 22: Core systems to Base plant load - 16, # 23: Core systems to TF coils - 16, # 24: Core systems to PF coils - 16, # 25: Core systems to Vacuum pumps - 16, # 26: Core systems to Tritium plant - 12, # 27: Gross Electric to Coolant pumps electric - 23, # 28: Coolant pumps electric to Coolant pump electric losses - 23, # 29: Coolant pumps electric to Divertor pump - 23, # 30: Coolant pumps electric to FW+Blkt pumps - 26, # 31: FW+Blkt pumps to FW+Blkt - 25, # 32: Divertor pump to Divertor - 23, # 33: Coolant pumps electric to Shield pump - 27, # 34: Shield pump to Shield - 28, # 35: Shield to primary thermal - 4, # 36: Neutrons to shield - 17, # 37: Cryo plant to secondary heat - 18, # 38: Base plant load to secondary heat - 19, # 39: TF coils to secondary heat - 20, # 40: PF coils to secondary heat - 21, # 41: Vacuum pumps to secondary heat - 22, # 42: Tritium plant to secondary heat - 4, # 43: Neutrons to tf - 30, # 44: TF nuclear heat to secondary heat - 15, # 45: HCD electric losses to secondary heat - 24, # 46: Coolant pumps electric to secondary heat - 6, # 47: FW pump to primary heat, Should only show if FW and Bkt pumps are separate - 7, # 48: Blkt pump to primary heat, Should only show if FW and Blkt pumps are separate - 2, # 49 
Should show in beams are present - 2, # 50: Should show in beams are present - 4, # 51 Neutrons to CP shield, should only show if CP shield is present - 2, # 52 Plasma separatrix power to divertor - 8, # 53 Divertor secondary heat, - 28, # 54 Shield secondary heat - 4, # 55 Neutron power to H&CD & Diagnostics - 5, # 56: Radiation to H&CD & Diagnostics - 29, # 57: Total Secondary Heat - 31, # 58: H&CD & Diagnostics secondary heat - 11, # 59: Turbine Loss - 4, # 60: FW nuclear heat - 3, # 61: Alpha particles back to plasma - 34, # 62: Blanket neutron multiplication - ] - targets = [ - 2, # 0: H&CD to Fusion - 2, # 1: Ohmic to Fusion - 3, # 2: Fusion to Alpha - 4, # 3: Fusion to Neutrons - 5, # 4: Fusion to Radiation - 6, # 5: Alpha to First Wall - 7, # 6: Neutrons to Blanket - 6, # 7: Radiation to First Wall - 8, # 8: Neutrons to Divertor - 8, # 9: Radiation to Divertor - 9, # 10: First Wall to FW+Blkt - 9, # 11: Blanket to FW+Blkt - 10, # 12: Divertor to FW+Blkt - 10, # 13: FW+Blkt to Primary Thermal - 11, # 14: Primary Thermal to Turbine - 12, # 15: Turbine to Gross Electric - 13, # 16: Gross Electric to Net Electric - 14, # 17: Gross Electric to HCD Electric Power - 15, # 18: HCD Electric Power to HCD electric losses - 0, # 19: HCD Electric Power to H&CD - 16, # 20: Gross Electric to Core systems - 17, # 21: Core systems to Cryo plant - 18, # 22: Core systems to Base plant load - 19, # 23: Core systems to TF coils - 20, # 24: Core systems to PF coils - 21, # 25: Core systems to Vacuum pumps - 22, # 26: Core systems to Tritium plant - 23, # 27: Gross Electric to Coolant pumps electric - 24, # 28: Coolant pumps electric to Coolant pump electric losses - 25, # 29: Coolant pumps electric to Divertor pump - 26, # 30: Coolant pumps electric to FW+Blkt pumps - 9, # 31: FW+Blkt pumps to FW+Blkt - 8, # 32: Divertor pump to Divertor - 27, # 33: Coolant pumps electric to Shield pump - 28, # 34: Shield pump to Shield - 10, # 35: Shield to primary thermal - 28, # 36: Neutrons 
to shield - 29, # 37: Cryo plant to secondary heat - 29, # 38: Base plant load to secondary heat - 29, # 39: TF coils to secondary heat - 29, # 40: PF coils to secondary heat - 29, # 41: Vacuum pumps to secondary heat - 29, # 42: Tritium plant to secondary heat - 30, # 43: Neutrons to tf - 29, # 44: TF nuclear heat to secondary heat - 29, # 45: HCD electric losses to secondary heat - 29, # 46: Coolant pumps electric to secondary heat - 9, # 47: FW pump to primary heat, Should only show if FW and Bkt pumps are separate - 9, # 48: Blkt pump to primary heat, Should only show if FW and Blkt pumps are separate - 6, # 49 Should show in beams are present - 6, # 50: Should show in beams are present - 28, # 51 Neutrons to CP shield, should only show if CP shield is present - 8, # 52 Plasma separatrix power to divertor - 29, # 53 Divertor secondary heat, - 29, # 54 Shield secondary heat - 31, # 55 Neutron power to H&CD & Diagnostics - 31, # 56: Radiation to H&CD & Diagnostics - 32, # 57: Total Secondary Heat - 32, # 58: H&CD & Diagnostics secondary heat - 33, # 59: Turbine Loss - 6, # 60: FW nuclear heat - 2, # 61: Alpha particles back to plasma - 7, # 62: Blanket neutron multiplication - ] - values = [ - p_hcd_injected_total_mw, # 0 - p_plasma_ohmic_mw, # 1 - p_alpha_total_mw, # 2 - p_neutron_total_mw, # 3 - p_plasma_rad_mw, # 4 - p_fw_alpha_mw, # 5 - p_blkt_nuclear_heat_total_mw - - m_file.data["p_blkt_multiplication_mw"].get_scan(-1), # 6 - p_fw_rad_total_mw, # 7 - m_file.data["p_div_nuclear_heat_total_mw"].get_scan(-1), # 8 - m_file.data["p_div_rad_total_mw"].get_scan(-1), # 9 - m_file.data["p_fw_heat_deposited_mw"].get_scan(-1), # 10 - m_file.data["p_blkt_heat_deposited_mw"].get_scan(-1), # 11 - m_file.data["p_div_heat_deposited_mw"].get_scan(-1), # 12 - m_file.data["p_fw_blkt_heat_deposited_mw"].get_scan(-1), # 13 - m_file.data["p_plant_primary_heat_mw"].get_scan(-1), # 14 - m_file.data["p_plant_electric_gross_mw"].get_scan(-1), # 15 - 
m_file.data["p_plant_electric_net_mw"].get_scan(-1), # 16 - m_file.data["p_hcd_electric_total_mw"].get_scan(-1), # 17 - m_file.data["p_hcd_electric_loss_mw"].get_scan(-1), # 18 - p_hcd_injected_total_mw, # 19 - m_file.data["p_plant_core_systems_elec_mw"].get_scan(-1), # 20 - m_file.data["p_cryo_plant_electric_mw"].get_scan(-1), # 21 - m_file.data["p_plant_electric_base_total_mw"].get_scan(-1), # 22 - m_file.data["p_tf_electric_supplies_mw"].get_scan(-1), # 23 - m_file.data["p_pf_electric_supplies_mw"].get_scan(-1), # 24 - m_file.data["vachtmw"].get_scan(-1), # 25 - m_file.data["p_tritium_plant_electric_mw"].get_scan(-1), # 26 - m_file.data["p_coolant_pump_elec_total_mw"].get_scan(-1), # 27 - m_file.data["p_coolant_pump_loss_total_mw"].get_scan(-1), # 28 - m_file.data["p_div_coolant_pump_mw"].get_scan(-1), # 29 - m_file.data["p_fw_blkt_coolant_pump_mw"].get_scan(-1), # 30 - m_file.data["p_fw_blkt_coolant_pump_mw"].get_scan(-1), # 31 - m_file.data["p_div_coolant_pump_mw"].get_scan(-1), # 32 - m_file.data["p_shld_coolant_pump_mw"].get_scan(-1), # 33 - m_file.data["p_shld_coolant_pump_mw"].get_scan(-1), # 34 - m_file.data["p_shld_heat_deposited_mw"].get_scan(-1), # 35 - m_file.data["p_shld_nuclear_heat_mw"].get_scan(-1), # 36 - m_file.data["p_cryo_plant_electric_mw"].get_scan(-1), # 37 - m_file.data["p_plant_electric_base_total_mw"].get_scan(-1), # 38 - m_file.data["p_tf_electric_supplies_mw"].get_scan(-1), # 39 - m_file.data["p_pf_electric_supplies_mw"].get_scan(-1), # 40 - m_file.data["vachtmw"].get_scan(-1), # 41 - m_file.data["p_tritium_plant_electric_mw"].get_scan(-1), # 42 - m_file.data["p_tf_nuclear_heat_mw"].get_scan(-1), # 43 - m_file.data["p_tf_nuclear_heat_mw"].get_scan(-1), # 44 - m_file.data["p_hcd_electric_loss_mw"].get_scan(-1), # 45 - m_file.data["p_coolant_pump_loss_total_mw"].get_scan(-1), # 46 - m_file.data["p_fw_coolant_pump_mw"].get_scan( - -1 - ), # 47 Should only show if FW and Bkt pumps are seperate - 
m_file.data["p_blkt_coolant_pump_mw"].get_scan( - -1 - ), # 48 Should only show if FW and Blkt pumps are seperate - m_file.data["p_beam_shine_through_mw"].get_scan( - -1 - ), # 49 Should show in beams are present - m_file.data["p_beam_orbit_loss_mw"].get_scan( - -1 - ), # 50 Should show in beams are present - m_file.data["p_cp_shield_nuclear_heat_mw"].get_scan( - -1 - ), # 51 Neutrons to CP shield, should only show if CP shield is present - m_file.data["p_plasma_separatrix_mw"].get_scan( - -1 - ), # 52 Plasma separatrix power to divertor - m_file.data["p_div_secondary_heat_mw"].get_scan( - -1 - ), # 53 Divertor secondary heat, - m_file.data["p_shld_secondary_heat_mw"].get_scan(-1), # 54 Shield secondary heat - m_file.data["p_fw_hcd_nuclear_heat_mw"].get_scan( - -1 - ), # 55 Neutron power to H&CD & Diagnostics - m_file.data["p_fw_hcd_rad_total_mw"].get_scan( - -1 - ), # 56: Radiation to H&CD & Diagnostics - m_file.data["p_plant_secondary_heat_mw"].get_scan( - -1 - ), # 57: Total Secondary Heat - m_file.data["p_hcd_secondary_heat_mw"].get_scan( - -1 - ), # 58: H&CD & Diagnostics secondary heat - m_file.data["p_turbine_loss_mw"].get_scan(-1), # 59: Turbine Loss - m_file.data["p_fw_nuclear_heat_total_mw"].get_scan(-1), # 60: FW nuclear heat - p_alpha_total_mw - * m_file.data["f_p_alpha_plasma_deposited"].get_scan( - -1 - ), # 61: Alpha particles back to plasma - m_file.data["p_blkt_multiplication_mw"].get_scan(-1), - ] - - # Define colors for each node (hex or rgba) - node_colors = [ - "#1f77b4", # 0: H&CD injector - "#ff7f0e", # 1: Ohmic - "#2ca02c", # 2: Plasma Fusion Power - "#d62728", # 3: Alpha particles - "#9467bd", # 4: Neutrons - "#8c564b", # 5: Radiation - "#e377c2", # 6: First Wall - "#7f7f7f", # 7: Blanket - "#bcbd22", # 8: Divertor - "#17becf", # 9: FW+Blkt - "#aec7e8", # 10: Primary Thermal - "#ffbb78", # 11: Turbine - "#98df8a", # 12: Gross Electric - "#ff9896", # 13: Net Electric - "#c5b0d5", # 14: HCD Electric Power - "#c49c94", # 15: HCD electric 
losses - "#f7b6d2", # 16: Core systems - "#c7c7c7", # 17: Cryo plant - "#dbdb8d", # 18: Base plant load - "#9edae5", # 19: TF coils - "#393b79", # 20: PF coils - "#637939", # 21: Vacuum pumps - "#8c6d31", # 22: Tritium plant - "#843c39", # 23: Coolant pumps electric - "#7b4173", # 24: Coolant pump electric losses - "#5254a3", # 25: Divertor pump - "#6b6ecf", # 26: FW+Blkt pumps - "#b5cf6b", # 27: Shield pump - "#cedb9c", # 28: Shield - "#9c9ede", # 29: Secondary heat - "#e7ba52", # 30: TF nuclear heat - "#ad494a", # 31: H&CD & Diagnostics - "#a55194", # 32: Total Secondary Heat - "#393b79", # 33: Turbine Loss - "#637939", # 34: Blanket neutron multiplication - ] - - # Assign link colors to match their source node - link_colors = [node_colors[src] for src in sources] - - # Add value labels to the links - value_labels = [f"{v:.3f} MW" for v in values] - - sankey_dict = { - "type": "sankey", - "node": { - "pad": 30, - "thickness": 20, - "line": {"color": "black", "width": 0.5}, - "label": labels, - "color": node_colors, - }, - "link": { - "source": sources, - "target": targets, - "value": values, - "label": value_labels, - "color": link_colors, - }, - } - fig = go.Figure(data=[sankey_dict]) - - fig.update_layout({ - "title_text": "Fusion Power Balance Sankey Diagram", - "font_size": 7, - "autosize": True, - "margin": {"l": 40, "r": 40, "t": 40, "b": 40}, - }) - # Strip 'MFILE' from the filename for the HTML output - # Remove the character before "MFILE" and "MFILE" itself from the filename - html_output_path = pathlib.Path( - re.sub(r"(.)?[ \.\_]?MFILE", r"\1_plotly_sankey", m_file.filename) - ).with_suffix(".html") - fig.write_html(str(html_output_path)) - print(f"Interactive Sankey diagram saved to {html_output_path}") - return fig - - -if __name__ == "__main__": - main() diff --git a/process/core/io/plot/sankey/plot_sankey.py b/process/core/io/plot/sankey/plot_sankey.py deleted file mode 100644 index 619793973..000000000 --- 
a/process/core/io/plot/sankey/plot_sankey.py +++ /dev/null @@ -1,41 +0,0 @@ -""" -Code to display the power flow of a PROCESS run in a Sankey diagram - -Input file: -MFILE.DAT -""" - -import argparse -import pathlib - -from pylab import savefig, show - -from process.core.io.sankey_funcs import plot_sankey - - -def main(args=None): - parser = argparse.ArgumentParser( - description="Program to plot the power flow in PROCESS using a Sankey diagram." - ) - - parser.add_argument("-e", "--end", default="pdf", help="file format, default = pdf") - - parser.add_argument( - "-m", "--mfile", default="MFILE.DAT", help="mfile name, default = MFILE.DAT" - ) - - args = parser.parse_args(args) - - plot_sankey(args.mfile) - - # Get directory of mfile - mfile_path = pathlib.Path(args.mfile).resolve() - mfile_dir = mfile_path.parent - output_path = mfile_dir / f"SankeyPowerFlow.{args.end}" - savefig(str(output_path)) - - show() - - -if __name__ == "__main__": - main() diff --git a/process/core/io/plot/sankey/sankey_funcs.py b/process/core/io/plot/sankey/sankey_funcs.py index 41366b98e..7533402e4 100644 --- a/process/core/io/plot/sankey/sankey_funcs.py +++ b/process/core/io/plot/sankey/sankey_funcs.py @@ -2,12 +2,32 @@ Library of Sankey plotting routine """ +from pathlib import Path + import matplotlib.pyplot as plt import numpy as np from matplotlib.sankey import Sankey from numpy import sqrt -from process.core.io.mfile import MFile +from process.core.io.mfile.mfile import MFile + +try: + import plotly.graph_objects as go + + PLOT_SANKEY = True +except ImportError: + PLOT_SANKEY = False + + +def plot_sankey_plotly(m_file): + if not PLOT_SANKEY: + print( + "\nPlotly is not installed, unable to create sankey diagram!\n" + "Install plotly by installing the optional 'plotly' dependency " + "e.g. 
\"pip install -e '.[plotly]'\"" + ) + return None + return plotly(power_balance_sankey(m_file), m_file) def power_balance_sankey(m_file): @@ -333,7 +353,7 @@ def power_balance_sankey(m_file): } -def plotly(sankey_dict, mfile): +def plotly(sankey_dict, m_file): fig = go.Figure(data=[sankey_dict]) fig.update_layout({ @@ -343,9 +363,8 @@ def plotly(sankey_dict, mfile): "margin": {"l": 40, "r": 40, "t": 40, "b": 40}, }) # Strip 'MFILE' from the filename for the HTML output - # Remove the character before "MFILE" and "MFILE" itself from the filename - html_output_path = pathlib.Path( - re.sub(r"(.)?[ \.\_]?MFILE", r"\1_plotly_sankey", m_file.filename) + html_output_path = m_file.with_stem( + m_file.stem.replace("MFILE", "plotly_sankey") ).with_suffix(".html") fig.write_html(str(html_output_path)) print(f"Interactive Sankey diagram saved to {html_output_path}") @@ -358,95 +377,84 @@ def plot_full_sankey( # ------------------------------- Pulling values from the MFILE ------------------------------- m_file = MFile(mfilename) + variables = [ + # Used in [PLASMA] + "p_fusion_total_mw", # Fusion Power (MW) + "p_hcd_injected_total_mw", # Total auxiliary injected Power (MW) + "p_plasma_ohmic_mw", # Ohmic heating Power (MW) + "p_neutron_total_mw", # Neutron fusion power (MW) + "p_non_alpha_charged_mw", # Non-alpha charged particle power (MW) + "p_alpha_total_mw", # Alpha power (MW) + # Used in [NEUTRONICS] + "p_blkt_multiplication_mw", # Energy multiplication in blanket (MW) + "p_blkt_nuclear_heat_total_mw", # Total Nuclear heating in the blanket (MW) + "p_div_nuclear_heat_total_mw", # Nuclear heating in the divertor (MW) + "p_fw_nuclear_heat_total_mw", # Nuclear heating in the first wall (MW) + "p_shld_nuclear_heat_mw", # Nuclear heating in the shield (MW) + "p_tf_nuclear_heat_mw", # Nuclear heating in the TF coil (MW) + # Used in [CHARGEP] + "p_plasma_separatrix_mw", # Charged particle power deposited on divertor (MW) + "f_p_alpha_plasma_deposited", # Fraction of alpha power 
deposited in plasma + "p_plasma_rad_mw", # Total radiation Power (MW) + # Used in [RADIATION] + "f_ster_div_single" + "f_a_fw_outboard_hcd" + # Used in [DIVERTOR] + "p_div_coolant_pump_mw", # Divertor coolant pumping power + "p_div_heat_deposited_mw", # Total power extracted from divertor (MW) + # Used in [FIRST_WALL] + "p_fw_blkt_heat_deposited_mw", # Power extracted blanket & FW (MW) + "p_fw_blkt_coolant_pump_mw", # Pump Power in FW and blanket (MW) + ] + ( + p_fusion_total_mw, + p_hcd_injected_total_mw, + p_plasma_ohmic_mw, + p_neutron_total_mw, + p_non_alpha_charged_mw, + p_alpha_total_mw, + p_blkt_multiplication_mw, + p_blkt_nuclear_heat_total_mw, + p_div_nuclear_heat_total_mw, + p_fw_nuclear_heat_total_mw, + p_shld_nuclear_heat_mw, + p_tf_nuclear_heat_mw, + p_plasma_separatrix_mw, + f_p_alpha_plasma_deposited, + p_plasma_rad_mw, + f_ster_div_single, + f_a_fw_outboard_hcd, + p_div_coolant_pump_mw, + p_div_heat_deposited_mw, + p_fw_blkt_heat_deposited_mw, + p_fw_blkt_coolant_pump_mw, + ) = m_file.get_variables(*variables, scan=-1) # Used in [PLASMA] - p_fusion_total_mw = m_file.get("p_fusion_total_mw", scan=-1) # Fusion power (MW) - p_hcd_injected_total_mw = m_file.get( - "p_hcd_injected_total_mw", scan=-1 - ) # Total auxiliary injected power (MW) - p_plasma_ohmic_mw = m_file.get( - "p_plasma_ohmic_mw", scan=-1 - ) # Ohmic heating power (MW) - totalplasma = ( - p_fusion_total_mw + p_hcd_injected_total_mw + p_plasma_ohmic_mw - ) # Total Power in plasma (MW) - p_neutron_total_mw = m_file.get( - "p_neutron_total_mw", scan=-1 - ) # Neutron fusion power (MW) - p_non_alpha_charged_mw = m_file.get( - "p_non_alpha_charged_mw", scan=-1 - ) # Non-alpha charged particle power (MW) - pcharohmmw = ( - p_non_alpha_charged_mw + p_plasma_ohmic_mw - ) # The ohmic and charged particle power (MW) - p_alpha_total_mw = m_file.get("p_alpha_total_mw", scan=-1) # Alpha power (MW) - palpinjmw = ( - p_alpha_total_mw + p_hcd_injected_total_mw - ) # Alpha particle and HC&D power (MW) + # 
Total Power in plasma (MW) + totalplasma = p_fusion_total_mw + p_hcd_injected_total_mw + p_plasma_ohmic_mw + # The ohmic and charged particle power (MW) + pcharohmmw = p_non_alpha_charged_mw + p_plasma_ohmic_mw + # Alpha particle and HC&D power (MW) + palpinjmw = p_alpha_total_mw + p_hcd_injected_total_mw # Used in [NEUTRONICS] - p_blkt_multiplication_mw = m_file.get( - "p_blkt_multiplication_mw", scan=-1 - ) # Energy multiplication in blanket (MW) - p_blkt_nuclear_heat_total_mw = m_file.get( - "p_blkt_nuclear_heat_total_mw", scan=-1 - ) # Total Nuclear heating in the blanket (MW) - pnucemblkt = ( - p_blkt_nuclear_heat_total_mw - p_blkt_multiplication_mw - ) # External nuclear heating in blanket (MW) - p_div_nuclear_heat_total_mw = m_file.get( - "p_div_nuclear_heat_total_mw", scan=-1 - ) # Nuclear heating in the divertor (MW) - p_fw_nuclear_heat_total_mw = m_file.get( - "p_fw_nuclear_heat_total_mw", scan=-1 - ) # Nuclear heating in the first wall (MW) - p_shld_nuclear_heat_mw = m_file.get( - "p_shld_nuclear_heat_mw", scan=-1 - ) # Nuclear heating in the shield (MW) - p_tf_nuclear_heat_mw = m_file.get( - "p_tf_nuclear_heat_mw", scan=-1 - ) # Nuclear heating in the TF coil (MW) + # External nuclear heating in blanket (MW) + pnucemblkt = p_blkt_nuclear_heat_total_mw - p_blkt_multiplication_mw # Used in [CHARGEP] - p_plasma_separatrix_mw = m_file.get( - "p_plasma_separatrix_mw", scan=-1 - ) # Charged particle power deposited on divertor (MW) - f_p_alpha_plasma_deposited = m_file.get( - "f_p_alpha_plasma_deposited", scan=-1 - ) # Fraction of alpha power deposited in plasma - p_fw_alpha_mw = p_alpha_total_mw * ( - 1 - f_p_alpha_plasma_deposited - ) # Alpha particles hitting first wall (MW) - p_plasma_rad_mw = m_file.get( - "p_plasma_rad_mw", scan=-1 - ) # Total radiation Power (MW) + # Alpha particles hitting first wall (MW) + p_fw_alpha_mw = p_alpha_total_mw * (1 - f_p_alpha_plasma_deposited) # Used in [RADIATION] - p_div_rad_total_mw = p_plasma_rad_mw * m_file.get( - 
"f_ster_div_single", scan=-1 - ) # Radiation deposited on the divertor (MW) - p_fw_hcd_rad_total_mw = p_plasma_rad_mw * m_file.data[ - "f_a_fw_outboard_hcd" - ].get_scan(-1) # Radiation deposited on HCD (MW) - p_fw_rad_total_mw = ( - p_plasma_rad_mw - p_div_rad_total_mw - p_fw_hcd_rad_total_mw - ) # Radiation deposited in the FW (MW) - - # Used in [DIVERTOR] - p_div_coolant_pump_mw = m_file.get( - "p_div_coolant_pump_mw", scan=-1 - ) # Divertor coolant pumping power - p_div_heat_deposited_mw = m_file.get( - "p_div_heat_deposited_mw", scan=-1 - ) # Total power extracted from divertor (MW) + # Radiation deposited on the divertor (MW) + p_div_rad_total_mw = p_plasma_rad_mw * f_ster_div_single + # Radiation deposited on HCD (MW) + p_fw_hcd_rad_total_mw = p_plasma_rad_mw * f_a_fw_outboard_hcd + # Radiation deposited in the FW (MW) + p_fw_rad_total_mw = p_plasma_rad_mw - p_div_rad_total_mw - p_fw_hcd_rad_total_mw # Used in [FIRST_WALL] - p_fw_blkt_heat_deposited_mw = m_file.get( - "p_fw_blkt_heat_deposited_mw", scan=-1 - ) # Power extracted blanket & FW (MW) - p_fw_blkt_coolant_pump_mw = m_file.get( - "p_fw_blkt_coolant_pump_mw", scan=-1 - ) # Pump Power in FW and blanket (MW) htpmwblkt = p_fw_blkt_coolant_pump_mw / 2 # Pump power in blanket (MW) htpmwfw = p_fw_blkt_coolant_pump_mw / 2 # Pump power in FW (MW) p_fw_heat_deposited_mw = ( @@ -834,76 +842,101 @@ def plot_full_sankey( y += 1""" -def plot_sankey(mfilename="MFILE.DAT"): # Plot simplified power flow Sankey Diagram +def plot_sankey( + mfilename=Path("MFILE.DAT"), format_: str = "pdf" +): # Plot simplified power flow Sankey Diagram # ------------------------------- Pulling values from the MFILE ------------------------------- - + mfilename = Path(mfilename) m_file = MFile(mfilename) - # Used in [PLASMA] - p_fusion_total_mw = m_file.get("p_fusion_total_mw", scan=-1) # Fusion Power (MW) - p_hcd_injected_total_mw = m_file.get( - "p_hcd_injected_total_mw", scan=-1 - ) # Total auxiliary injected Power (MW) - 
p_plasma_ohmic_mw = m_file.get( - "p_plasma_ohmic_mw", scan=-1 - ) # Ohmic heating Power (MW) - totalplasma = ( - p_fusion_total_mw + p_hcd_injected_total_mw + p_plasma_ohmic_mw - ) # Total Power in plasma (MW) - - # Used in [DEPOSITION] - p_plasma_rad_mw = m_file.get( - "p_plasma_rad_mw", scan=-1 - ) # Total radiation Power (MW) - f_ster_div_single = m_file.get( - "f_ster_div_single", scan=-1 - ) # Area fraction taken up by divertor - fdiv_2 = m_file.get( - "2*f_ster_div_single", scan=-1 - ) # Area fraction taken up by double null divertor + variables = [ + # Used in [PLASMA] + "p_fusion_total_mw", # Fusion Power (MW) + "p_hcd_injected_total_mw", # Total auxiliary injected Power (MW) + "p_plasma_ohmic_mw", # Ohmic heating Power (MW) + # Used in [DEPOSITION] + "p_plasma_rad_mw", # Total radiation Power (MW) + "f_ster_div_single", # Area fraction taken up by divertor + "2*f_ster_div_single", # Area fraction taken up by double null divertor + "f_a_fw_outboard_hcd", # Area fraction covered by HCD and diagnostics + "p_plasma_separatrix_mw", # power to conducted to the divertor region (MW) + "p_div_nuclear_heat_total_mw", # nuclear heating in the divertor (MW) + "p_fw_nuclear_heat_total_mw", # nuclear heating in the first wall (MW) + "p_blkt_nuclear_heat_total_mw", # nuclear heating in the blanket (MW) + "p_shld_nuclear_heat_mw", # nuclear heating in the shield (MW) + "p_cp_shield_nuclear_heat_mw", # nuclear heating in the CP shield (MW) + "p_blkt_multiplication_mw", # Blanket energy multiplication (MW) + "p_alpha_total_mw", # Alpha power (MW) + "f_p_alpha_plasma_deposited", # Fraction of alpha power deposited in plasma + "itart", # switch for spherical tokamak (ST) models + # Used in [BLANKETSETC] + "p_fw_blkt_heat_deposited_mw", # Heat for electricity (MW) + "p_fw_blkt_coolant_pump_mw", # 1st wall & blanket pumping (MW) + # Used in [PRIMARY] + "p_plant_electric_gross_mw", # gross electric power (MW) + # Used in [NET] + "p_plant_electric_net_mw", # net electric power 
(MW) + # Used in [RECIRC] + "p_cryo_plant_electric_mw", # cryogenic plant power (MW) + "fachtmw", # facility heat removal (MW) + "p_tf_electric_supplies_mw", # total steady state TF coil AC power demand (MW) + "p_tritium_plant_electric_mw", # power required for tritium processing (MW) + "vachtmw", # vacuum pump power (MW) + "p_pf_electric_supplies_mw", # Total mean wall plug power for PFC & CS (MW) + "p_hcd_electric_total_mw", # injector wall plug power (MW) + "p_coolant_pump_elec_total_mw", # heat transport system electrical pump power (MW) + "p_cp_coolant_pump_elec", # pumping power + ] + ( + p_fusion_total_mw, + p_hcd_injected_total_mw, + p_plasma_ohmic_mw, + p_plasma_rad_mw, + f_ster_div_single, + fdiv_2, + f_a_fw_outboard_hcd, + p_plasma_separatrix_mw, + p_div_nuclear_heat_total_mw, + p_fw_nuclear_heat_total_mw, + p_blkt_nuclear_heat_total_mw, + p_shld_nuclear_heat_mw, + p_cp_shield_nuclear_heat_mw, + p_blkt_multiplication_mw, + p_alpha_total_mw, + f_p_alpha_plasma_deposited, + itart, + p_fw_blkt_heat_deposited_mw, + p_fw_blkt_coolant_pump_mw, + p_plant_electric_gross_mw, + p_plant_electric_net_mw, + p_plant_electric_recirc_mw, + p_cryo_plant_electric_mw, + fachtmw, + p_tf_electric_supplies_mw, + p_tritium_plant_electric_mw, + vachtmw, + p_pf_electric_supplies_mw, + p_hcd_electric_total_mw, + p_coolant_pump_elec_total_mw, + p_cp_coolant_pump_elec, + ) = m_file.get_variables(*variables, scan=-1) + + p_cp_coolant_pump_elec_mw = p_cp_coolant_pump_elec / 1e6 + + # Total Power in plasma (MW) + totalplasma = p_fusion_total_mw + p_hcd_injected_total_mw + p_plasma_ohmic_mw + if fdiv_2 > 0: # Takes into account old MFILE representation of double null divertor f_ster_div_single = fdiv_2 - p_div_rad_total_mw = ( - p_plasma_rad_mw * f_ster_div_single - ) # Radiation deposited on the divertor (MW) - f_a_fw_outboard_hcd = m_file.get( - "f_a_fw_outboard_hcd", scan=-1 - ) # Area fraction covered by HCD and diagnostics - p_fw_hcd_rad_total_mw = ( - p_plasma_rad_mw * 
f_a_fw_outboard_hcd - ) # Radiation deposited on HCD and diagnostics (MW) - p_fw_rad_total_mw = ( - p_plasma_rad_mw - p_div_rad_total_mw - p_fw_hcd_rad_total_mw - ) # Radiation deposited in the blanket (MW) - p_plasma_separatrix_mw = m_file.get( - "p_plasma_separatrix_mw", scan=-1 - ) # power to conducted to the divertor region (MW) - p_div_nuclear_heat_total_mw = m_file.get( - "p_div_nuclear_heat_total_mw", scan=-1 - ) # nuclear heating in the divertor (MW) - p_fw_nuclear_heat_total_mw = m_file.get( - "p_fw_nuclear_heat_total_mw", scan=-1 - ) # nuclear heating in the first wall (MW) - p_blkt_nuclear_heat_total_mw = m_file.get( - "p_blkt_nuclear_heat_total_mw", scan=-1 - ) # nuclear heating in the blanket (MW) - p_shld_nuclear_heat_mw = m_file.get( - "p_shld_nuclear_heat_mw", scan=-1 - ) # nuclear heating in the shield (MW) - p_cp_shield_nuclear_heat_mw = m_file.get( - "p_cp_shield_nuclear_heat_mw", scan=-1 - ) # nuclear heating in the CP shield (MW) - p_blkt_multiplication_mw = m_file.get( - "p_blkt_multiplication_mw", scan=-1 - ) # Blanket energy multiplication (MW) - p_alpha_total_mw = m_file.get("p_alpha_total_mw", scan=-1) # Alpha power (MW) - f_p_alpha_plasma_deposited = m_file.get( - "f_p_alpha_plasma_deposited", scan=-1 - ) # Fraction of alpha power deposited in plasma - p_fw_alpha_mw = p_alpha_total_mw * ( - 1 - f_p_alpha_plasma_deposited - ) # Alpha power hitting 1st wall (MW) - itart = m_file.get("itart", scan=-1) # switch for spherical tokamak (ST) models + + # Radiation deposited on the divertor (MW) + p_div_rad_total_mw = p_plasma_rad_mw * f_ster_div_single + # Radiation deposited on HCD and diagnostics (MW) + p_fw_hcd_rad_total_mw = p_plasma_rad_mw * f_a_fw_outboard_hcd + # Radiation deposited in the blanket (MW) + p_fw_rad_total_mw = p_plasma_rad_mw - p_div_rad_total_mw - p_fw_hcd_rad_total_mw + # Alpha power hitting 1st wall (MW) + p_fw_alpha_mw = p_alpha_total_mw * (1 - f_p_alpha_plasma_deposited) # Power deposited on divertor (MW) totaldivetc = ( 
@@ -926,48 +959,11 @@ def plot_sankey(mfilename="MFILE.DAT"): # Plot simplified power flow Sankey Dia # Power deposited in CP (MW) totalcpetc = p_cp_shield_nuclear_heat_mw - # Used in [BLANKETSETC] - p_fw_blkt_heat_deposited_mw = m_file.get( - "p_fw_blkt_heat_deposited_mw", scan=-1 - ) # Heat for electricity (MW) - p_fw_blkt_coolant_pump_mw = m_file.get( - "p_fw_blkt_coolant_pump_mw", scan=-1 - ) # 1st wall & blanket pumping (MW) - pthermmw_p = ( - p_fw_blkt_heat_deposited_mw - p_fw_blkt_coolant_pump_mw - ) # Heat - pumping power (MW) - - # Used in [PRIMARY] - p_plant_electric_gross_mw = m_file.get( - "p_plant_electric_gross_mw", scan=-1 - ) # gross electric power (MW) - - # Used in [NET] - p_plant_electric_net_mw = m_file.get( - "p_plant_electric_net_mw", scan=-1 - ) # net electric power (MW) - p_plant_electric_recirc_mw = ( - p_plant_electric_gross_mw - p_plant_electric_net_mw - ) # Recirculating power (MW) - - # Used in [RECIRC] - p_cryo_plant_electric_mw = m_file.get( - "p_cryo_plant_electric_mw", scan=-1 - ) # cryogenic plant power (MW) - fachtmw = m_file.get("fachtmw", scan=-1) # facility heat removal (MW) - p_tf_electric_supplies_mw = m_file.get( - "p_tf_electric_supplies_mw", scan=-1 - ) # total steady state TF coil AC power demand (MW) - p_tritium_plant_electric_mw = m_file.get( - "p_tritium_plant_electric_mw", scan=-1 - ) # power required for tritium processing (MW) - vachtmw = m_file.get("vachtmw", scan=-1) # vacuum pump power (MW) - p_pf_electric_supplies_mw = m_file.get( - "p_pf_electric_supplies_mw", scan=-1 - ) # Total mean wall plug power for PFC & CS (MW) - p_cp_coolant_pump_elec_mw = ( - m_file.get("p_cp_coolant_pump_elec", scan=-1) / 1e6 - ) # Set pumping power to MW by dividing by 1e6 + # Heat - pumping power (MW) + pthermmw_p = p_fw_blkt_heat_deposited_mw - p_fw_blkt_coolant_pump_mw + + # Recirculating power (MW) + p_plant_electric_recirc_mw = p_plant_electric_gross_mw - p_plant_electric_net_mw # Energy required for rest of power plant (MW) 
p_plant_core_systems_elec_mw = ( @@ -979,12 +975,6 @@ def plot_sankey(mfilename="MFILE.DAT"): # Plot simplified power flow Sankey Dia + p_pf_electric_supplies_mw + p_cp_coolant_pump_elec_mw ) - p_hcd_electric_total_mw = m_file.get( - "p_hcd_electric_total_mw", scan=-1 - ) # injector wall plug power (MW) - p_coolant_pump_elec_total_mw = m_file.get( - "p_coolant_pump_elec_total_mw", scan=-1 - ) # heat transport system electrical pump power (MW) # Initialising x and y variables for adjusting 'Plasma Heating' branch tip location x_adj, y_adj = 0, 0 @@ -1163,7 +1153,7 @@ def plot_sankey(mfilename="MFILE.DAT"): # Plot simplified power flow Sankey Dia labels=[None, "Plasma Heating", "Losses"], ) - # Colelcting Sankey diagram and applying a condensed layout + # Collecting Sankey diagram and applying a condensed layout diagrams = sankey.finish() fig.tight_layout() @@ -1251,3 +1241,9 @@ def plot_sankey(mfilename="MFILE.DAT"): # Plot simplified power flow Sankey Dia * ((p_hcd_electric_total_mw - p_hcd_injected_total_mw) / totalplasma) - 0.2, )) + + # Get directory of mfile + fig.savefig(mfilename.parent / f"SankeyPowerFlow.{format_}") + + plt.show() + return fig diff --git a/process/main.py b/process/main.py index 3e5099698..574697271 100644 --- a/process/main.py +++ b/process/main.py @@ -54,8 +54,6 @@ from process.core.io.mfile import mfile from process.core.io.plot import plot_proc from process.core.io.plot.sankey import plot_plotly_sankey - -# For VaryRun from process.core.io.process_config import RunProcessConfig from process.core.io.process_funcs import ( check_input_error, @@ -228,7 +226,7 @@ def process_cli( mfile_str = mfile_path.resolve().as_posix() print(f"Plotting mfile {mfile_str}") plot_proc.setup_plot(mfile_path) - plot_plotly_sankey.main(args=["-m", mfile_str]) + plot_sankey_plotly(mfile_path) else: logger.error(f"Cannot find mfile for plotting {mfile_path}") From e0d06afbda101ad83d076e17e29818b88fdc27cc Mon Sep 17 00:00:00 2001 From: james 
<81617086+je-cook@users.noreply.github.com> Date: Tue, 24 Feb 2026 16:38:50 +0000 Subject: [PATCH 05/17] fix costs --- process/core/io/plot/costs/cli.py | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/process/core/io/plot/costs/cli.py b/process/core/io/plot/costs/cli.py index d282c52c7..f1ed3df54 100644 --- a/process/core/io/plot/costs/cli.py +++ b/process/core/io/plot/costs/cli.py @@ -5,7 +5,7 @@ import process.io.mfile.mfile as mf from process.io.plot.costs.costs_bar import cost_comp_1990, cost_comp_2014 from process.io.plot.costs.costs_pie import new_cost_model, orig_cost_model -from process.io.tools import mfile_arg, mfile_opt, save +from process.io.tools import mfile_arg, save save = save("Save figure") @@ -16,20 +16,20 @@ def costs(): @costs.command("pie", no_args_is_help=True) -@mfile_opt(exists=True) +@mfile_arg @save -def pie_plot(mfile, save): +def pie_plot(mfiles, save): """Displays the cost breakdown as a pie chart.""" + for m_file in mfiles: + m_file = mf.MFile(m_file) - m_file = mf.MFile(mfile) - - # Check which cost model is being used - if "c21" in m_file.data: - orig_cost_model(m_file, save) - elif "s01" in m_file.data: - new_cost_model(m_file, save) - else: - print("ERROR: Cannot identify cost data, check MFILE!") + # Check which cost model is being used + if "c21" in m_file.data: + orig_cost_model(m_file, save) + elif "s01" in m_file.data: + new_cost_model(m_file, save) + else: + print("ERROR: Cannot identify cost data, check MFILE!") @costs.command("bar", no_args_is_help=True) @@ -42,13 +42,13 @@ def pie_plot(mfile, save): help="Inflation Factor (multiplies costs)", default=1.0, ) -def bar_plot(mfile, save, inflate): +def bar_plot(mfiles, save, inflate): """Displays the cost breakdown as a bar chart. Multiple MFILEs can be given and will be plotted on the same chart. 
""" # Get file names - mfile_list = [mf.MFile(filename=item) for item in mfile] + mfile_list = [mf.MFile(filename=item) for item in mfiles] # Check which cost model is being used if "c21" in mfile_list[0].data: From 69e8cde1d19e1fe3f4b1b339e9d94dd2f2756ad4 Mon Sep 17 00:00:00 2001 From: james <81617086+je-cook@users.noreply.github.com> Date: Wed, 25 Feb 2026 10:19:54 +0000 Subject: [PATCH 06/17] fix some bugs --- process/core/io/plot/cli.py | 2 +- process/core/io/plot/plot_proc.py | 40 ++++++++++-------------- process/core/io/plot/plot_stress_tf.py | 43 ++++++++++++++------------ 3 files changed, 41 insertions(+), 44 deletions(-) diff --git a/process/core/io/plot/cli.py b/process/core/io/plot/cli.py index e68c94c3d..5e7c0331e 100644 --- a/process/core/io/plot/cli.py +++ b/process/core/io/plot/cli.py @@ -234,7 +234,7 @@ def plot_scans_cli( "-p", "--plot-selec", multiple=True, - default="all", + default=["all"], type=click.Choice(["all", "sig", "disp", "strain", "sm_sig"]), help="""\b Plot selection string : diff --git a/process/core/io/plot/plot_proc.py b/process/core/io/plot/plot_proc.py index 3001b18ac..da1a9f5e1 100644 --- a/process/core/io/plot/plot_proc.py +++ b/process/core/io/plot/plot_proc.py @@ -27,21 +27,13 @@ from scipy.interpolate import interp1d import process.core.constants as constants -import process.core.io.mfile as mf +import process.core.io.mfile.mfile as mf import process.data_structure.pfcoil_variables as pfcoil_variables -<<<<<<<< HEAD:process/core/io/plot_proc.py -======== -import process.io.mfile.mfile as mf ->>>>>>>> 71bdc991 (Overhall CI):process/core/io/plot/plot_proc.py import process.models.physics.confinement_time as confine import process.models.tfcoil.superconducting as sctf -from process.core.io.mfile import MFileErrorClass +from process.core.io.mfile.mfile import MFileErrorClass from process.core.solver.objectives import OBJECTIVE_NAMES from process.data_structure import impurity_radiation_module, physics_variables -<<<<<<<< 
HEAD:process/core/io/plot_proc.py -======== -from process.io.mfile.mfile import MFileErrorClass ->>>>>>>> 71bdc991 (Overhall CI):process/core/io/plot/plot_proc.py from process.models.build import Build from process.models.geometry.blanket import ( blanket_geometry_double_null, @@ -405,7 +397,7 @@ def plot_main_power_flow(axis: plt.Axes, mfile: mf.MFile, scan: int, fig: plt.Fi # =========================================== # Load the plasma image - with resources.path("process.core.io", "plasma.png") as img_path: + with resources.path("process.core.io.plot.images", "plasma.png") as img_path: plasma = mpimg.imread(img_path.open("rb")) # Display the plasma image over the figure, not the axes @@ -427,7 +419,7 @@ def plot_main_power_flow(axis: plt.Axes, mfile: mf.MFile, scan: int, fig: plt.Fi fontsize=11, ) # Load the neutron image - with resources.path("process.core.io", "neutron.png") as img_path: + with resources.path("process.core.io.plot.images", "neutron.png") as img_path: neutron = mpimg.imread(img_path.open("rb")) new_ax = axis.inset_axes( @@ -573,7 +565,7 @@ def plot_main_power_flow(axis: plt.Axes, mfile: mf.MFile, scan: int, fig: plt.Fi ) # Load the alpha particle image - with resources.path("process.core.io", "alpha_particle.png") as img_path: + with resources.path("process.core.io.plot.images", "alpha_particle.png") as img_path: alpha = mpimg.imread(img_path.open("rb")) # Display the alpha particle image over the figure, not the axes @@ -659,7 +651,7 @@ def plot_main_power_flow(axis: plt.Axes, mfile: mf.MFile, scan: int, fig: plt.Fi ) # Load the HCD injector image - with resources.path("process.core.io", "hcd_injector.png") as img_path: + with resources.path("process.core.io.plot.images", "hcd_injector.png") as img_path: hcd_injector_1 = hcd_injector_2 = mpimg.imread(img_path.open("rb")) # Display the injector image over the figure, not the axes @@ -927,7 +919,7 @@ def plot_main_power_flow(axis: plt.Axes, mfile: mf.MFile, scan: int, fig: plt.Fi # 
=========================================== # Load the turbine image - with resources.path("process.core.io", "turbine.png") as img_path: + with resources.path("process.core.io.plot.images", "turbine.png") as img_path: turbine = mpimg.imread(img_path.open("rb")) # Display the turbine image over the figure, not the axes @@ -983,7 +975,7 @@ def plot_main_power_flow(axis: plt.Axes, mfile: mf.MFile, scan: int, fig: plt.Fi ) # Load the generator image - with resources.path("process.core.io", "generator.png") as img_path: + with resources.path("process.core.io.plot.images", "generator.png") as img_path: generator = mpimg.imread(img_path.open("rb")) # Display the generator image over the figure, not the axes @@ -1052,7 +1044,7 @@ def plot_main_power_flow(axis: plt.Axes, mfile: mf.MFile, scan: int, fig: plt.Fi ) # Load the pylon image - with resources.path("process.core.io", "pylon.png") as img_path: + with resources.path("process.core.io.plot.images", "pylon.png") as img_path: pylon = mpimg.imread(img_path.open("rb")) # Display the pylon image over the figure, not the axes @@ -1261,7 +1253,7 @@ def plot_main_power_flow(axis: plt.Axes, mfile: mf.MFile, scan: int, fig: plt.Fi # ================================ # Load the first wall image - with resources.path("process.core.io", "fw.png") as img_path: + with resources.path("process.core.io.plot.images", "fw.png") as img_path: fw = mpimg.imread(img_path.open("rb")) # Display the first wall image over the figure, not the axes @@ -1517,7 +1509,9 @@ def plot_main_power_flow(axis: plt.Axes, mfile: mf.MFile, scan: int, fig: plt.Fi ) # Load the blanket image - with resources.path("process.core.io", "blanket_with_coolant.png") as img_path: + with resources.path( + "process.core.io.plot.images", "blanket_with_coolant.png" + ) as img_path: blanket = mpimg.imread(img_path.open("rb")) # Display the blanket image over the figure, not the axes @@ -1560,7 +1554,7 @@ def plot_main_power_flow(axis: plt.Axes, mfile: mf.MFile, scan: int, fig: 
plt.Fi ) # Load the vacuum vessel image - with resources.path("process.core.io", "vv.png") as img_path: + with resources.path("process.core.io.plot.images", "vv.png") as img_path: vv = mpimg.imread(img_path.open("rb")) # Display the vacuum vessel image over the figure, not the axes @@ -1664,7 +1658,7 @@ def plot_main_power_flow(axis: plt.Axes, mfile: mf.MFile, scan: int, fig: plt.Fi ) # Load the divertor image - with resources.path("process.core.io", "divertor.png") as img_path: + with resources.path("process.core.io.plot.images", "divertor.png") as img_path: divertor = mpimg.imread(img_path.open("rb")) # Display the divertor image over the figure, not the axes @@ -2792,7 +2786,7 @@ def plot_main_plasma_information( # Load the neutron image with resources.path( - "process.core.io", "alpha_particle.png" + "process.core.io.plot.images", "alpha_particle.png" ) as alpha_particle_image_path: # Use importlib.resources to locate the image alpha_particle = mpimg.imread(alpha_particle_image_path.open("rb")) @@ -2827,7 +2821,7 @@ def plot_main_plasma_information( ) # ========================================= - with resources.path("process.core.io", "neutron.png") as neutron_image_path: + with resources.path("process.core.io.plot.images", "neutron.png") as neutron_image_path: neutron = mpimg.imread(neutron_image_path.open("rb")) new_ax = axis.inset_axes( [0.975, 0.75, 0.075, 0.075], transform=axis.transAxes, zorder=10 diff --git a/process/core/io/plot/plot_stress_tf.py b/process/core/io/plot/plot_stress_tf.py index ce9930471..da82c2be1 100644 --- a/process/core/io/plot/plot_stress_tf.py +++ b/process/core/io/plot/plot_stress_tf.py @@ -35,8 +35,8 @@ def plot_stress( plot_selection, save_format, axis_font_size, - input_file, term_output, + input_file, plot_conf: StressPlotConfig | dict | None = None, ): if plot_conf is None: @@ -46,10 +46,10 @@ def plot_stress( # Boolean swiches for plot selection # ----------------------------------- - plot_sig = ("sig" in plot_selection) or 
plot_selection == "all" - plot_disp = ("disp" in plot_selection) or plot_selection == "all" - plot_strain = ("strain" in plot_selection) or plot_selection == "all" - plot_sm_sig = ("sm_sig" in plot_selection) or plot_selection == "all" + plot_sig = ("sig" in plot_selection) or ("all" in plot_selection) + plot_disp = ("disp" in plot_selection) or ("all" in plot_selection) + plot_strain = ("strain" in plot_selection) or ("all" in plot_selection) + plot_sm_sig = ("sm_sig" in plot_selection) or ("all" in plot_selection) # Step 1 : Data extraction # ---------------------------------------------------------------------------------------------- @@ -67,21 +67,24 @@ def plot_stress( # Assumes n_layers >= 1 # Physical quantities : full vectors - radius = [[] * n_layers] - radial_smeared_stress = [[] * n_layers] - toroidal_smeared_stress = [[] * n_layers] - vertical_smeared_stress = [[] * n_layers] - tresca_smeared_stress = [[] * n_layers] - radial_stress = [[] * n_layers] - toroidal_stress = [[] * n_layers] - vertical_stress = [[] * n_layers] - vm_stress = [[] * n_layers] - tresca_stress = [[] * n_layers] - cea_tresca_stress = [[] * n_layers] - radial_strain = [[] * n_layers] - toroidal_strain = [[] * n_layers] - vertical_strain = [[] * n_layers] - radial_displacement = [[] * n_layers] + def _empty_lists(n_l): + return [[] for _ in range(n_l)] + + radius = _empty_lists(n_layers) + radial_smeared_stress = _empty_lists(n_layers) + toroidal_smeared_stress = _empty_lists(n_layers) + vertical_smeared_stress = _empty_lists(n_layers) + tresca_smeared_stress = _empty_lists(n_layers) + radial_stress = _empty_lists(n_layers) + toroidal_stress = _empty_lists(n_layers) + vertical_stress = _empty_lists(n_layers) + vm_stress = _empty_lists(n_layers) + tresca_stress = _empty_lists(n_layers) + cea_tresca_stress = _empty_lists(n_layers) + radial_strain = _empty_lists(n_layers) + toroidal_strain = _empty_lists(n_layers) + vertical_strain = _empty_lists(n_layers) + radial_displacement = 
_empty_lists(n_layers) # Physical quantity : WP stress wp_vertical_stress = [] From 6a2fd1bbd70cb7800067cbfc36844968cecf74bc Mon Sep 17 00:00:00 2001 From: james <81617086+je-cook@users.noreply.github.com> Date: Wed, 25 Feb 2026 10:53:30 +0000 Subject: [PATCH 07/17] sankey fix --- process/core/io/plot/cli.py | 19 +++++++++++---- .../{sankey/sankey_funcs.py => sankey.py} | 1 - process/core/io/plot/sankey/cli.py | 24 ------------------- process/main.py | 2 +- 4 files changed, 16 insertions(+), 30 deletions(-) rename process/core/io/plot/{sankey/sankey_funcs.py => sankey.py} (99%) delete mode 100644 process/core/io/plot/sankey/cli.py diff --git a/process/core/io/plot/cli.py b/process/core/io/plot/cli.py index 5e7c0331e..5af14f767 100644 --- a/process/core/io/plot/cli.py +++ b/process/core/io/plot/cli.py @@ -5,20 +5,31 @@ from process.io.plot.plot_proc import setup_plot from process.io.plot.plot_scans import plot_scan from process.io.plot.plot_stress_tf import plot_stress +from process.io.plot.sankey import plot_sankey, plot_sankey_plotly from process.io.tools import LazyGroup, mfile_arg, mfile_opt, split_callback @click.group( cls=LazyGroup, - lazy_subcommands={ - "costs": "process.io.plot.costs.cli.costs", - "sankey": "process.io.plot.sankey.cli.sankey", - }, + lazy_subcommands={"costs": "process.io.plot.costs.cli.costs"}, ) def plot(): """Plotting utilities for PROCESS""" +@plot.command("sankey", no_args_is_help=True) +@mfile_opt(exists=True) +@click.option("-fmt", "--format", "format_", default="pdf", help="file format []") +def sankey(mfile, format_): + """Plot the power flow in PROCESS using a Sankey diagram.""" + if format_ in {"html", "plotly"}: + out = plot_sankey_plotly(mfile) + if out is not None: + return out + + return plot_sankey(mfile, format_) + + @plot.command("scans", no_args_is_help=True) @mfile_arg # At least one output variable must be supplied in order to plot diff --git a/process/core/io/plot/sankey/sankey_funcs.py 
b/process/core/io/plot/sankey.py similarity index 99% rename from process/core/io/plot/sankey/sankey_funcs.py rename to process/core/io/plot/sankey.py index 7533402e4..ea3c9062a 100644 --- a/process/core/io/plot/sankey/sankey_funcs.py +++ b/process/core/io/plot/sankey.py @@ -909,7 +909,6 @@ def plot_sankey( p_fw_blkt_coolant_pump_mw, p_plant_electric_gross_mw, p_plant_electric_net_mw, - p_plant_electric_recirc_mw, p_cryo_plant_electric_mw, fachtmw, p_tf_electric_supplies_mw, diff --git a/process/core/io/plot/sankey/cli.py b/process/core/io/plot/sankey/cli.py deleted file mode 100644 index 817c9e0c8..000000000 --- a/process/core/io/plot/sankey/cli.py +++ /dev/null @@ -1,24 +0,0 @@ -""" -Code to display the power flow of a PROCESS run in a Sankey diagram - -Input file: -MFILE.DAT -""" - -import click - -from process.io.plot.sankey.sankey_funcs import plot_sankey, plot_sankey_plotly -from process.io.tools import mfile_opt - - -@click.command("sankey", no_args_is_help=True) -@mfile_opt -@click.option("-fmt", "--format", "format_", default="pdf", help="file format []") -def sankey(mfile, format_): - """Plot the power flow in PROCESS using a Sankey diagram.""" - if format_ in {"html", "plotly"}: - out = plot_sankey_plotly(mfile) - if out is not None: - return out - - return plot_sankey(mfile, format_) diff --git a/process/main.py b/process/main.py index 574697271..361a6c5ce 100644 --- a/process/main.py +++ b/process/main.py @@ -51,7 +51,7 @@ import process.data_structure as data_structure from process.core import constants from process.core.io import obsolete_vars as ov -from process.core.io.mfile import mfile +from process.core.io.mfile.mfile import mfile from process.core.io.plot import plot_proc from process.core.io.plot.sankey import plot_plotly_sankey from process.core.io.process_config import RunProcessConfig From d675f7f147ac3a5fe9c6d568373974055881c9a0 Mon Sep 17 00:00:00 2001 From: james <81617086+je-cook@users.noreply.github.com> Date: Thu, 26 Feb 2026 16:34:02 
+0000 Subject: [PATCH 08/17] rewrite sankey --- process/core/io/plot/cli.py | 2 +- process/core/io/plot/sankey.py | 978 ++++++++++++--------------------- 2 files changed, 350 insertions(+), 630 deletions(-) diff --git a/process/core/io/plot/cli.py b/process/core/io/plot/cli.py index 5af14f767..46824e277 100644 --- a/process/core/io/plot/cli.py +++ b/process/core/io/plot/cli.py @@ -27,7 +27,7 @@ def sankey(mfile, format_): if out is not None: return out - return plot_sankey(mfile, format_) + return plot_sankey(mfile) @plot.command("scans", no_args_is_help=True) diff --git a/process/core/io/plot/sankey.py b/process/core/io/plot/sankey.py index ea3c9062a..7cdb99975 100644 --- a/process/core/io/plot/sankey.py +++ b/process/core/io/plot/sankey.py @@ -2,12 +2,15 @@ Library of Sankey plotting routine """ +from collections.abc import Iterable +from copy import deepcopy from pathlib import Path import matplotlib.pyplot as plt import numpy as np from matplotlib.sankey import Sankey from numpy import sqrt +from scipy.optimize import minimize from process.core.io.mfile.mfile import MFile @@ -363,483 +366,199 @@ def plotly(sankey_dict, m_file): "margin": {"l": 40, "r": 40, "t": 40, "b": 40}, }) # Strip 'MFILE' from the filename for the HTML output - html_output_path = m_file.with_stem( - m_file.stem.replace("MFILE", "plotly_sankey") - ).with_suffix(".html") + html_output_path = ( + Path(m_file) + .with_stem(Path(m_file).stem.replace("MFILE", "plotly_sankey")) + .with_suffix(".html") + ) fig.write_html(str(html_output_path)) print(f"Interactive Sankey diagram saved to {html_output_path}") return fig -def plot_full_sankey( - mfilename="MFILE.DAT", -): # Plots the power flow from PROCESS as a Sankey Diagram - # ------------------------------- Pulling values from the MFILE ------------------------------- - - m_file = MFile(mfilename) - variables = [ - # Used in [PLASMA] - "p_fusion_total_mw", # Fusion Power (MW) - "p_hcd_injected_total_mw", # Total auxiliary injected Power (MW) - 
"p_plasma_ohmic_mw", # Ohmic heating Power (MW) - "p_neutron_total_mw", # Neutron fusion power (MW) - "p_non_alpha_charged_mw", # Non-alpha charged particle power (MW) - "p_alpha_total_mw", # Alpha power (MW) - # Used in [NEUTRONICS] - "p_blkt_multiplication_mw", # Energy multiplication in blanket (MW) - "p_blkt_nuclear_heat_total_mw", # Total Nuclear heating in the blanket (MW) - "p_div_nuclear_heat_total_mw", # Nuclear heating in the divertor (MW) - "p_fw_nuclear_heat_total_mw", # Nuclear heating in the first wall (MW) - "p_shld_nuclear_heat_mw", # Nuclear heating in the shield (MW) - "p_tf_nuclear_heat_mw", # Nuclear heating in the TF coil (MW) - # Used in [CHARGEP] - "p_plasma_separatrix_mw", # Charged particle power deposited on divertor (MW) - "f_p_alpha_plasma_deposited", # Fraction of alpha power deposited in plasma - "p_plasma_rad_mw", # Total radiation Power (MW) - # Used in [RADIATION] - "f_ster_div_single" - "f_a_fw_outboard_hcd" - # Used in [DIVERTOR] - "p_div_coolant_pump_mw", # Divertor coolant pumping power - "p_div_heat_deposited_mw", # Total power extracted from divertor (MW) - # Used in [FIRST_WALL] - "p_fw_blkt_heat_deposited_mw", # Power extracted blanket & FW (MW) - "p_fw_blkt_coolant_pump_mw", # Pump Power in FW and blanket (MW) - ] - ( - p_fusion_total_mw, - p_hcd_injected_total_mw, - p_plasma_ohmic_mw, - p_neutron_total_mw, - p_non_alpha_charged_mw, - p_alpha_total_mw, - p_blkt_multiplication_mw, - p_blkt_nuclear_heat_total_mw, - p_div_nuclear_heat_total_mw, - p_fw_nuclear_heat_total_mw, - p_shld_nuclear_heat_mw, - p_tf_nuclear_heat_mw, - p_plasma_separatrix_mw, - f_p_alpha_plasma_deposited, - p_plasma_rad_mw, - f_ster_div_single, - f_a_fw_outboard_hcd, - p_div_coolant_pump_mw, - p_div_heat_deposited_mw, - p_fw_blkt_heat_deposited_mw, - p_fw_blkt_coolant_pump_mw, - ) = m_file.get_variables(*variables, scan=-1) - - # Used in [PLASMA] - # Total Power in plasma (MW) - totalplasma = p_fusion_total_mw + p_hcd_injected_total_mw + 
p_plasma_ohmic_mw - # The ohmic and charged particle power (MW) - pcharohmmw = p_non_alpha_charged_mw + p_plasma_ohmic_mw - # Alpha particle and HC&D power (MW) - palpinjmw = p_alpha_total_mw + p_hcd_injected_total_mw - - # Used in [NEUTRONICS] - # External nuclear heating in blanket (MW) - pnucemblkt = p_blkt_nuclear_heat_total_mw - p_blkt_multiplication_mw - - # Used in [CHARGEP] - # Alpha particles hitting first wall (MW) - p_fw_alpha_mw = p_alpha_total_mw * (1 - f_p_alpha_plasma_deposited) - - # Used in [RADIATION] - # Radiation deposited on the divertor (MW) - p_div_rad_total_mw = p_plasma_rad_mw * f_ster_div_single - # Radiation deposited on HCD (MW) - p_fw_hcd_rad_total_mw = p_plasma_rad_mw * f_a_fw_outboard_hcd - # Radiation deposited in the FW (MW) - p_fw_rad_total_mw = p_plasma_rad_mw - p_div_rad_total_mw - p_fw_hcd_rad_total_mw - - # Used in [FIRST_WALL] - htpmwblkt = p_fw_blkt_coolant_pump_mw / 2 # Pump power in blanket (MW) - htpmwfw = p_fw_blkt_coolant_pump_mw / 2 # Pump power in FW (MW) - p_fw_heat_deposited_mw = ( - p_fw_blkt_heat_deposited_mw - htpmwblkt - p_blkt_nuclear_heat_total_mw - ) # Power extracted 1st wall (MW) - # porbitloss = m_file.data['porbitloss'].get_scan(-1) # Charged P. 
on FW before thermalising - # p_beam_shine_through_mw = m_file.data['p_beam_shine_through_mw'].get_scan(-1) # Injection shine-through to 1st wall - - # Initialising x and y variables for adjusting 'Plasma Heating' branch tip location - y_adj_1 = 0 - y_adj_2 = 0 - - # Loop 1 to get 'Plasma Heating' branch tip coords; loop 2 to match 'PLASMA' branch - for _ in range(2): - # The visual settings of the Sankey Plot - plt.rcParams.update({"font.size": 9}) - fig = plt.figure() - ax = fig.add_subplot(1, 1, 1, xticks=[], yticks=[], frameon=False) - sankey = Sankey( - ax=ax, unit="MW", margin=0.5, format="%1.0f", scale=1.0 / (totalplasma) - ) - - # --------------------------------------- PLASMA - 0 -------------------------------------- - - # Fusion, Injected, Ohmic, -Charged P.-Ohmic, -Alphas-Injected, -Neutrons - plasma = [ - p_fusion_total_mw, - p_hcd_injected_total_mw, - p_plasma_ohmic_mw, - -pcharohmmw, - -palpinjmw, - -p_neutron_total_mw, - ] - sankey.add( - flows=plasma, - # [left(in), down(in), down(in), up(out), up(out), right(out)] - orientations=[0, -1, -1, 1, 1, 0], - trunklength=0.5, - pathlengths=[0.5, 0.25, 0.25, 0.75, 0.25 + 0.5 * y_adj_1, 0.0], - # labels=["Fusion","H&CD", "Ohmic", "Charged P.", "Alphas", "Neutrons"]) - labels=[None, None, None, None, None, None], - ) - - # Check to see if the fusion components balance - if _ == 0 and sqrt(sum(plasma) ** 2) > 0.1: - print("FUSION power balance =", sum(plasma), "\n") - exit() - - if _ == 1: - print(sankey.finish()[0]) - if _ == 1: - print(sankey.finish()[0].patch) - if _ == 1: - print(type(sankey.finish()[0].patch)) - - # ------------------------------------- NEUTRONICS - 1 ------------------------------------ - - # Neutrons, -Divertor, -1st wall, -Shield, -TF coils, -Blanket+Energy Mult. 
- neutrons = [ - p_neutron_total_mw, - -p_div_nuclear_heat_total_mw, - -p_fw_nuclear_heat_total_mw, - -p_shld_nuclear_heat_mw, - -p_tf_nuclear_heat_mw, - -pnucemblkt, - ] - sankey.add( - flows=neutrons, - # left(in), up(out), up(out), up(out), up(out), right(out) - orientations=[0, 1, 1, 1, 1, 0], - trunklength=0.5, - pathlengths=[0.3, 0.25, 0.25, 0.25, 0.25, 0.15], - prior=0, # PLASMA - connect=(5, 0), # Neutrons - # labels=["Neutrons", "Divertor", "1st Wall", "Shield", "TF coils", "Blanket"]) - labels=[None, None, None, None, None, None], - ) - - # Checking to see if the neutronics components balance - if _ == 0 and sqrt(sum(neutrons) ** 2) > 0.1: - print("NEUTRONS power balance =", sum(neutrons), "\n") - exit() - - # Check to see if connections balance - if _ == 0: - check = sankey.finish() - diff1_1 = check[0].flows[5] + check[1].flows[0] - plt.close() - if diff1_1 > 0.1: - print("Neutrons [0][5] and [1][0] difference =", diff1_1) - exit() - - # --------------------------------- CHARGED PARTICLES - 2 --------------------------------- - - # Charge P.+Ohmic, Alpha+Injected, -Divertor, -1st Wall, -Photons - chargedp = [ - pcharohmmw, - palpinjmw, - -p_plasma_separatrix_mw, - -p_fw_alpha_mw, - -p_plasma_rad_mw, - ] - sankey.add( - flows=chargedp, - # down(in), down(in), up(out), up(out), right(out) - orientations=[-1, -1, 1, -1, 0], - trunklength=0.5, - pathlengths=[0.75, 0.25 + 0.5 * y_adj_1, 0.25, 0.25, 0.25], - prior=0, # PLASMA - connect=(3, 0), # Charged P.+Ohmic - # labels=["Charged P.", "Alphas", "Divertor", "1st Wall", "Photons"]) - labels=[None, None, None, None, None], - ) - - if _ == 0 and sqrt(sum(chargedp) ** 2) > 0.1: - print("CHARGEDP power balance =", sum(chargedp)) - exit() - - # Check to see if connections balance - if _ == 0: - check = sankey.finish() - diff2_1 = check[0].flows[3] + check[2].flows[0] - diff2_2 = check[0].flows[4] + check[2].flows[1] - plt.close() - if diff2_1 > 0.1: - print("Charged P.+Ohmic [0][3] and [2][0] difference =", 
diff2_1) - exit() - if diff2_2 > 0.1: - print("Alphas+Injected [0][4] and [2][1] difference =", diff2_2) - exit() - - # ------------------------------------- RADIATION - 3 ------------------------------------- - - # Photons, -1st Wall, -Divertor, -H&CD - radiation = [ - p_plasma_rad_mw, - -p_fw_rad_total_mw, - -p_div_rad_total_mw, - -p_fw_hcd_rad_total_mw, - ] - sankey.add( - flows=radiation, - # right(in), up(out), up(out), up(out) - orientations=[ - 0, - -1, - 1, - 1, - ], - trunklength=0.5, - pathlengths=[0.25, 0.25, 0.25, 0.25], - prior=2, # CHARGED PARTICLES - connect=(4, 0), # Charged P. - # labels=["Photons", "1st Wall", "Divertor", "H&CD"]) - labels=[None, None, None, None], - ) +class SuperSankey(Sankey): + """ + Originally from Bluemira + + A sub-class of the Sankey diagram class from matplotlib, which is capable + of connecting two blocks, instead of just one. This is done using a cute + sledgehammer approach, using optimisation. Basically, the Sankey object + is quite complex, and it makes it very hard to calculate the exact lengths + required to connect two sub-diagrams. + """ + + def add( + self, + patchlabel: str = "", + flows: Iterable[float] | None = None, + orientations: Iterable[float] | None = None, + labels: str | list[str | None] | None = "", + trunklength: float = 1.0, + pathlengths: float | list[float | None] = 0.25, + prior: int | None = None, + future: int | None = None, + connect: tuple[int, int] | list[tuple[int, int]] = (0, 0), + rotation: float = 0, + **kwargs, + ): + __doc__ = super().__doc__ # noqa: F841, A001 + # Here we first check if the "add" method has received arguments that + # the Sankey class can't handle. 
+ if future is None: + # There is only one connection, Sankey knows how to do this + super().add( + patchlabel, + flows, + orientations, + labels, + trunklength, + pathlengths, + prior, + connect, + rotation, + **kwargs, + ) + else: + # There are two connections, use new method + self._double_connect( + patchlabel, + flows, + orientations, + labels, + trunklength, + pathlengths, + prior, + future, + connect, + rotation, + **kwargs, + ) - if _ == 0 and sqrt(sum(radiation) ** 2) > 0.1: - print("RADIATION power balance =", sum(radiation)) - exit() - - if _ == 0: - check = sankey.finish() - diff3_1 = check[2].flows[4] + check[3].flows[0] - plt.close() - if diff3_1 > 0.1: - print("Photons [2][4] and [3][0] difference =", diff3_1) - exit() - - # -------------------------------------- DIVERTOR - 4 ------------------------------------- - - # Charged P., Neutrons, Photons, Coolant Pumping, Total Divertor - divertor = [ - p_plasma_separatrix_mw, - p_div_nuclear_heat_total_mw, - p_div_rad_total_mw, - p_div_coolant_pump_mw, - -p_div_heat_deposited_mw, - ] - sankey.add( - flows=divertor, - # down(in), up(in), down(in), up(in), right(out) - orientations=[-1, -1, -1, -1, 0], - trunklength=0.5, - pathlengths=[0.25, 0.25, 0.25, 0.25 - 0.5 * y_adj_2, 0.25], - prior=2, # CHARGED PARTICLES - connect=(2, 0), # Charged P. --> None - # labels=["Charged P.", "Neutrons", "Photons", "Coolant Pumping", "Divertor Power"]) - labels=[None, None, None, None, None], + def _double_connect( + self, + patchlabel: str, + flows: Iterable[float] | None, + orientations: Iterable[float] | None, + labels: str | list[str | None] | None, + trunklength: float, + pathlengths: list[float], + prior: int | None, + future: int | None, + connect: list[tuple[int, int]], + rotation: float, + **kwargs, + ): + """ + Handles two connections in a Sankey diagram. + + Parameters + ---------- + future: + The index of the diagram to connect to + connect: + The list of (int, int) connections. 
+ - connect[0] is a (prior, this) tuple indexing the flow of the + prior diagram and the flow of this diagram to connect. + - connect[1] is a (future, this) tuple indexing of the flow of the + future diagram and the flow of this diagram to connect. + + See Also + -------- + Sankey.add for a full description of the various args and kwargs + + """ + # Get the optimum deltas + dx, dy = self._opt_connect( + flows, orientations, prior, future, connect, trunklength=trunklength ) - - if _ == 0 and sqrt(sum(divertor) ** 2) > 0.1: - print("DIVERTOR power balance =", sum(divertor)) - exit() - - if _ == 0: - check = sankey.finish() - diff4_1 = check[1].flows[1] + check[4].flows[0] - diff4_2 = check[2].flows[3] + check[4].flows[3] - plt.close() - if diff4_1 > 0.1: - print("Neutrons [1][1] and [4][0] difference =", diff4_1) - exit() - if diff4_2 > 0.1: - print("Charged P. [2][3] and [4][3] difference =", diff4_2) - exit() - - # ---------------------------------------- 1ST WALL - 5 --------------------------------------- - - # Alphas, Neutrons, Photons, Coolant Pumping, Total 1st Wall - first_wall = [ - p_fw_alpha_mw, - p_fw_nuclear_heat_total_mw, - p_fw_rad_total_mw, - htpmwfw, - -p_fw_heat_deposited_mw, - ] - sankey.add( - flows=first_wall, - orientations=[0, -1, 1, -1, 0], - trunklength=0.5, - pathlengths=[0.25, 0.25, 0.25, 0.25, 0.25], - prior=1, - connect=(2, 1), - # labels=["Alphas", "Neutrons", "Radiation", "Coolant Pumping", "FW Power"]) - labels=[None, None, None, None, None], + # Replace + pathlengths[0] = dx + pathlengths[-1] = dy + self.add( + patchlabel=patchlabel, + labels=labels, + flows=flows, + orientations=orientations, + prior=prior, + connect=connect[0], + trunklength=trunklength, + pathlengths=pathlengths, + rotation=rotation, + facecolor=kwargs.get("facecolor"), ) - if _ == 0 and sqrt(sum(first_wall) ** 2) > 0.1: - print("FIRST_WALL power balance =", sum(first_wall)) - exit() - """# -------------------------------------- BLANKET - 6 
-------------------------------------- - - # Blanket - Energy mult., Energy Mult., pumping power, Blanket - BLANKET = [pnucemblkt, p_blkt_multiplication_mw, htpmwblkt, -p_blkt_heat_deposited_mw] - sankey.add(flows=BLANKET, - # left(in), down(in), down(in), right(out) - orientations=[0, -1, -1, 0], - trunklength=0.5, - pathlengths=[0.25, 0.25, 0.25, 0.25], - #prior=1, # NEUTRONICS - #connect=(1, 0), # Blanket --> None - labels=[None, "Energy Mult.", "Coolant Pumping", "Blanket"]) - - # Checking to see if the blanket components balance - if _ == 0: - if sqrt(sum(BLANKET)**2) > 0.1: - print("BLANKET power balance =", sum(BLANKET), "\n") - exit() - - # Check to see if connections balance - if _ == 0: - check = sankey.finish() - diff = check[1].flows[1]+check[3].flows[0] - if diff > 0.1: - print("The difference between [1][1] and [3][0] =", diff) - exit()""" - """# --------------------------------------- SHIELD - 7 -------------------------------------- - - # Neutrons, Coolant pumping, Total power - SHIELD = [p_shld_nuclear_heat_mw, p_shld_coolant_pump_mw, -p_shld_heat_deposited_mw] - sankey.add(flows=SHIELD, - orientations=[-1, -1, 1], - trunklength=0.5, - pathlengths=[0.25, 0.25 ,0.25], - #prior=2, - #connect=(5, 0), - labels=["Neutrons", "Coolant Pumping", "Shield Power"]) - - if _ == 0: - if sqrt(sum(SHIELD)**2) > 0.1: - print("SHIELD power balance =", sum(SHIELD)) - exit()""" - """# ------------------------------------ PRIMARY HEAT - 7 ----------------------------------- - - # 1st wall, Blanket, Shield, Divertor, Total thermal power - HEAT = [p_fw_heat_deposited_mw, p_blkt_heat_deposited_mw, p_shld_heat_deposited_mw, p_div_heat_deposited_mw, -p_plant_primary_heat_mw] - sankey.add(flows=HEAT, - orientations=[1, 0, -1, 1, 0], - trunklength=0.5, - pathlengths=[0.25, 0.25 ,0.25, 0.25, 0.25], - #prior=2, - #connect=(5, 0), - labels=["1st Wall", "Blanket", "Shield", "Divertor", "Total Power"]) - - if _ == 0: - if sqrt(sum(HEAT)**2) > 0.1: - print("PRIMARY power balance 
=", sum(HEAT)) - exit()""" - """# ------------------------------- ELECTRICITY CONVERSION - 8 ------------------------------ - - # Total thermal, Elctricty conversion loss, Gross Electricity - GROSS = [p_plant_primary_heat_mw, -pelectloss, -p_plant_electric_gross_mw] - sankey.add(flows=GROSS, - orientations=[0, -1, 0], - trunklength=0.5, - pathlengths=[0.25, 0.25 ,0.25], - #prior=2, - #connect=(5, 0), - labels=["Thermal Power", "Conversion loss", "Gross Electricity"]) - - if _ == 0: - if sqrt(sum(GROSS)**2) > 0.1: - print("GROSS power balance =", sum(GROSS)) - exit()""" - - # ------------------------------ RECIRCULATED ELECTRICITY - 9 ----------------------------- - """# ---------------------------------------- HCD - 11 ---------------------------------------- - - # HCD loss + injected, -injected, -HCD loss - HCD = [p_hcd_electric_loss_mw+p_hcd_injected_total_mw, -p_hcd_injected_total_mw, -p_hcd_electric_loss_mw] - assert(sum(HCD)**2 < 0.5) - sankey.add(flows=HCD, - # [down(in), up(out), down(out)] - orientations=[-1, 1, -1], - #prior=0, # PLASMA - #connect=(1, 1), # H&CD --> None - trunklength=0.5, - pathlengths=[0.25, 0.25, 0.25], - labels=['H&CD power', None, 'H&CD loss'])""" - - fig.tight_layout() - - if _ == 0: - plt.close() - - # Matching PLASMA and CHARGED PARTICLES 'Alphas' branches - # x_adj_1, y_adj_1 = diagrams[2].tips[1] - diagrams[0].tips[4] - # Matching CHARGED PARTICLES and DIVERTOR 'Charged P.' branches - # x_adj_2, y_adj_2 = diagrams[4].tips[3] - diagrams[2].tips[3] - # x_adj_3, y_adj_3 = diagrams[3].tips[3] - diagrams[4].tips[0] - - # --------------------------------------- Label Positioning --------------------------------------- + def _opt_connect( + self, + flows: Iterable[float] | None, + orient: Iterable[float] | None, + prior: int | None, + future: int | None, + connect: list[tuple[int, int]], + trunklength: float, + ) -> tuple[float, float]: + """ + Optimises the second connection between Sankey diagrams. 
+ + Returns + ------- + dx: + The x pathlength to use to match the tips + dy: + The y pathlength to use to match the tips + + Notes + ----- + This is because Sankey is very complicated, and makes it hard to work + out the positions of things prior to adding them to the diagrams. + Because we are bizarrely using a plotting function as a minimisation + objective, we need to make sure we clean the plot on every call. + """ + future_index, this_f_index = connect[1] + labels = [None] * len(flows) + pathlengths = [0.0] * len(flows) + + # Make a local copy of the Sankey.extent attribute to override any + # modifications during optimisation + extent = deepcopy(self.extent) + + def minimise_dxdy(x_opt): + """ + Minimisation function for the spatial difference between the target + tip and the actual tip. + + Parameters + ---------- + x_opt: array_like + The vector of d_x, d_y delta-vectors to match tip positions + + Returns + ------- + delta: float + The sum of the absolute differences + """ + tip2 = self.diagrams[future].tips[future_index] + pathlengths[0] = x_opt[0] + pathlengths[-1] = x_opt[1] + self.add( + trunklength=trunklength, + pathlengths=pathlengths, + flows=flows, + prior=prior, + connect=connect[0], + orientations=orient, + labels=labels, + facecolor="#00000000", + ) + new_tip = self.diagrams[-1].tips[this_f_index].copy() + # Clean sankey plot + self.diagrams.pop() + self.ax.patches[-1].remove() + return np.sum(np.abs(tip2 - new_tip)) - # Munipulating the positioning of the branch labels - # -ve to left and down; +ve to right and up - # pos[0] = x-axis; pos[1] = y-axis - """for d in diagrams: - y = 0 - for t in d.texts: - pos = tuple(np.ndarray.tolist(d.tips[y])) - t.set_position(pos) - if t == diagrams[0].texts[0]: # Fusion Power - t.set_horizontalalignment('right') - t.set_position((pos[0]-0.2,pos[1])) - if t == diagrams[0].texts[1]: # H&CD - t.set_horizontalalignment('right') - t.set_position((pos[0]-0.5*(p_hcd_injected_total_mw/totalplasma)-0.05,pos[1])) - 
if t == diagrams[0].texts[2]: # Ohmic - t.set_horizontalalignment('left') - t.set_position((pos[0]+0.5*(p_plasma_ohmic_mw/totalplasma)+0.05,pos[1])) - if t == diagrams[0].texts[3]: # Neutrons - t.set_horizontalalignment('right') - t.set_position((pos[0]-0.2,pos[1])) - if t == diagrams[0].texts[4]: # Charged Particles - t.set_horizontalalignment('right') - t.set_position((pos[0]-0.5*(p_non_alpha_charged_mw/totalplasma)-0.05,pos[1])) - if t == diagrams[0].texts[5]: # Alphas - t.set_horizontalalignment('left') - t.set_position((pos[0]+0.5*(p_alpha_total_mw/totalplasma)+0.05,pos[1]-0.1)) - if t == diagrams[1].texts[0]: # H&CD power - t.set_horizontalalignment('right') - t.set_position((pos[0]-0.5*((p_hcd_electric_loss_mw+p_hcd_injected_total_mw)/totalplasma)-0.05,pos[1])) - if t == diagrams[1].texts[2]: # H&CD losses - t.set_horizontalalignment('left') - t.set_position((pos[0]+(p_hcd_electric_loss_mw/totalplasma)+0.05,pos[1])) - if t == diagrams[2].texts[1]: # Energy Multiplication - t.set_horizontalalignment('center') - t.set_position((pos[0],pos[1]-0.2)) - if t == diagrams[2].texts[2]: # Blanket - t.set_horizontalalignment('right') - t.set_position((pos[0]-0.2,pos[1])) - if t == diagrams[2].texts[3]: # Divertor - t.set_horizontalalignment('right') - t.set_position((pos[0]-0.5*(p_div_nuclear_heat_total_mw/totalplasma)-0.1,pos[1])) - if t == diagrams[3].texts[2]: # Rad.FW - t.set_horizontalalignment('right') - t.set_position((pos[0],pos[1]+0.5*(p_fw_rad_total_mw/totalplasma)+0.15)) - if t == diagrams[3].texts[3]: # Charged P. - t.set_horizontalalignment('left') - t.set_position((pos[0]+0.5*((p_plasma_separatrix_mw+p_fw_alpha_mw)/totalplasma)+0.1,pos[1]+0.05)) - if t == diagrams[3].texts[4]: # Rad. Div. 
- t.set_horizontalalignment('right') - t.set_position((pos[0]-0.5*(p_div_rad_total_mw/totalplasma)-0.1,pos[1])) - y += 1""" + x0 = np.zeros(2) + result = minimize(minimise_dxdy, x0, method="SLSQP") + self.extent = extent # Finish clean-up + return result.x def plot_sankey( @@ -975,189 +694,190 @@ def plot_sankey( + p_cp_coolant_pump_elec_mw ) - # Initialising x and y variables for adjusting 'Plasma Heating' branch tip location - x_adj, y_adj = 0, 0 + # -------------------------------- Visual Settings ------------------------------------ - # Loop 1 to get 'Plasma Heating' branch tip coords; loop 2 to match 'PLASMA' branch - for _ in range(2): - # ------------------------------------ Visual Settings ------------------------------------ + plt.rcParams.update({"font.size": 9}) # Setting font size to 9 + fig = plt.figure() + ax = fig.add_subplot(1, 1, 1, xticks=[], yticks=[], frameon=False) + sankey = SuperSankey( + ax=ax, unit="MW", margin=0.0, format="%1.0f", scale=1.0 / (totalplasma) + ) + trunk = 0.7 + len1 = 0.5 + len2 = 0.8 + # --------------------------------------- PLASMA - 0 -------------------------------------- - plt.rcParams.update({"font.size": 9}) # Setting font size to 9 - fig = plt.figure() - ax = fig.add_subplot(1, 1, 1, xticks=[], yticks=[], frameon=False) - sankey = Sankey( - ax=ax, unit="MW", margin=0.0, format="%1.0f", scale=1.0 / (totalplasma) - ) + # Fusion power, Injected power + ohmic power, - total plasma power + plasma = [ + p_fusion_total_mw, + p_hcd_injected_total_mw + p_plasma_ohmic_mw, + -totalplasma, + ] + sankey.add( + flows=plasma, + orientations=[0, -1, 0], # [right(in), down(in), right(out)] + pathlengths=[ + len1, + len2, + -0.1 + len1, + ], # 'Plasma Heating' adjust + trunklength=trunk, + labels=["Fusion Power", None, "Plasma"], + ) - # --------------------------------------- PLASMA - 0 -------------------------------------- + # --------------------------------- ENERGY DEPOSITION - 1 --------------------------------- - # Fusion 
power, Injected power + ohmic power, - total plasma power - plasma = [ - p_fusion_total_mw, - p_hcd_injected_total_mw + p_plasma_ohmic_mw, - -totalplasma, - ] - sankey.add( - flows=plasma, - orientations=[0, -1, 0], # [right(in), down(in), right(out)] - pathlengths=[ - 0.5, - 0.8 + 0.5 * y_adj, - -0.1 + 0.5 * x_adj, - ], # 'Plasma Heating' adjust - labels=["Fusion Power", None, "Plasma"], + # Plasma power, - divertor deposited power, - blanket deposited power + deposition = [totalplasma, -totalblktetc - totaldivetc - totalcpetc] + # Check if difference >2 between plasma and divertor + blanket + if sqrt(sum(deposition) ** 2) > 2: + print( + "\ncomponents power balance difference =", + totalplasma - totaldivetc - totalblktetc - totalcpetc, ) + sankey.add( + flows=deposition, + orientations=[0, 0], # [right(in), up(in), right(out)] + prior=0, # PLASMA + connect=(2, 0), # Plasma --> None + pathlengths=[0.2, len2], # 'Plasma Heating' adjust + trunklength=trunk, + labels=[None, "Blanket/etc."], + ) - # --------------------------------- ENERGY DEPOSITION - 1 --------------------------------- + # -------------------------------------- BLANKET - 2 -------------------------------------- - # Plasma power, - divertor deposited power, - blanket deposited power - deposition = [totalplasma, -totalblktetc - totaldivetc - totalcpetc] - # Check if difference >2 between plasma and divertor + blanket - if _ == 1 and sqrt(sum(deposition) ** 2) > 2: - print( - "\ncomponents power balance difference =", - totalplasma - totaldivetc - totalblktetc - totalcpetc, - ) - sankey.add( - flows=deposition, - orientations=[0, 0], # [right(in), up(in), right(out)] - prior=0, # PLASMA - connect=(2, 0), # Plasma --> None - pathlengths=[0.2, 0.2 + 0.5 * x_adj], # 'Plasma Heating' adjust - labels=[None, "Blanket/etc."], + # Blanket deposited power, blanket energy multiplication, - primary heat + blanketsetc = [ + totalblktetc + totaldivetc + totalcpetc, + p_blkt_multiplication_mw, + -pthermmw_p - 
totaldivetc - totalcpetc - p_shld_nuclear_heat_mw, + ] + # Check if difference >2 between primary heat and blanket + blanket multiplication + if sqrt(sum(blanketsetc) ** 2) > 2: + print( + "blankets etc. power balance", + totalblktetc + p_blkt_multiplication_mw, + -pthermmw_p - p_shld_nuclear_heat_mw, ) + sankey.add( + flows=blanketsetc, + orientations=[0, -1, 0], # [right(in), down(in), right(out)] + prior=1, # DEPOSITION + connect=(1, 0), # Blanket/etc. --> None + pathlengths=[len1, len1 / 2, 0.0], + trunklength=trunk, + labels=[None, "Energy Mult.", "Primary Heat"], + ) - # -------------------------------------- BLANKET - 2 -------------------------------------- + # ------------------------------------- HEAT LOSS - 3 ------------------------------------- + + # Primary heat, -Gross electric power, -difference (loss) + primary = [ + pthermmw_p + totaldivetc + totalcpetc + p_shld_nuclear_heat_mw, + -p_plant_electric_gross_mw, + -pthermmw_p + + p_plant_electric_gross_mw + - totaldivetc + - totalcpetc + - p_shld_nuclear_heat_mw, + ] + sankey.add( + flows=primary, + orientations=[0, -1, 0], # [right(in), down(out), right(out)] + prior=2, # BLANKETSETC + connect=(2, 0), # Primary Heat --> None + pathlengths=[len2 / 4, len2, len1 / 2], + trunklength=trunk, + labels=[None, "Gross electric", "Losses"], + ) - # Blanket deposited power, blanket energy multiplication, - primary heat - blanketsetc = [ - totalblktetc + totaldivetc + totalcpetc, - p_blkt_multiplication_mw, - -pthermmw_p - totaldivetc - totalcpetc - p_shld_nuclear_heat_mw, + # ------------------------------------ ELECTRICITY - 4 ------------------------------------ + + # If net electric is +ve or -ve changes the flow organisation + if p_plant_electric_net_mw >= 0: # net electric is +ve + # Gross electric power, -net electric power, -recirculated power + net = [ + p_plant_electric_gross_mw, + -p_plant_electric_net_mw, + -p_plant_electric_recirc_mw, ] - # Check if difference >2 between primary heat and blanket + 
blanket multiplication - if _ == 1 and sqrt(sum(blanketsetc) ** 2) > 2: - print( - "blankets etc. power balance", - totalblktetc + p_blkt_multiplication_mw, - -pthermmw_p - p_shld_nuclear_heat_mw, - ) sankey.add( - flows=blanketsetc, - orientations=[0, -1, 0], # [right(in), down(in), right(out)] - prior=1, # DEPOSITION - connect=(1, 0), # Blanket/etc. --> None - pathlengths=[0.5, 0.25, 0.0], - labels=[None, "Energy Mult.", "Primary Heat"], + flows=net, + orientations=[0, 0, -1], # [down(in), down(out), left(out)] + prior=3, # PRIMARY + connect=(1, 0), # Gross electric --> None + pathlengths=[len2 / 4, len1 / 2, 3 * len1], + trunklength=trunk, + labels=[None, "Net elec.", "Recirc. Power"], ) - - # ------------------------------------- HEAT LOSS - 3 ------------------------------------- - - # Primary heat, -Gross electric power, -difference (loss) - primary = [ - pthermmw_p + totaldivetc + totalcpetc + p_shld_nuclear_heat_mw, - -p_plant_electric_gross_mw, - -pthermmw_p - + p_plant_electric_gross_mw - - totaldivetc - - totalcpetc - - p_shld_nuclear_heat_mw, + elif p_plant_electric_net_mw < 0: # net electric is -ve + # Gross electric power, -net electric power, -recirculated power + net = [ + -p_plant_electric_net_mw, + p_plant_electric_gross_mw, + -p_plant_electric_recirc_mw, ] sankey.add( - flows=primary, - orientations=[0, -1, 0], # [right(in), down(out), right(out)] - prior=2, # BLANKETSETC - connect=(2, 0), # Primary Heat --> None - pathlengths=[0.2, 0.7, 0.4], - labels=[None, "Gross electric", "Losses"], + flows=net, + orientations=[0, -1, 0], # [left(in), down(in), left(out)] + prior=3, # PRIMARY + connect=(1, 1), # Gross electric --> None + pathlengths=[len1 / 2, 2 * len1, len1], + trunklength=trunk, + labels=["Net elec.", None, "Recirc. 
Power"], ) - # ------------------------------------ ELECTRICITY - 4 ------------------------------------ - - # If net electric is +ve or -ve changes the flow organisation - if p_plant_electric_net_mw >= 0: # net electric is +ve - # Gross electric power, -net electric power, -recirculated power - net = [ - p_plant_electric_gross_mw, - -p_plant_electric_net_mw, - -p_plant_electric_recirc_mw, - ] - sankey.add( - flows=net, - orientations=[0, 0, -1], # [down(in), down(out), left(out)] - prior=3, # PRIMARY - connect=(1, 0), # Gross electric --> None - pathlengths=[0.1, 0.25, 1.5], - labels=[None, "Net elec.", "Recirc. Power"], - ) - elif p_plant_electric_net_mw < 0: # net electric is -ve - # Gross electric power, -net electric power, -recirculated power - net = [ - -p_plant_electric_net_mw, - p_plant_electric_gross_mw, - -p_plant_electric_recirc_mw, - ] - sankey.add( - flows=net, - orientations=[0, -1, 0], # [left(in), down(in), left(out)] - prior=3, # PRIMARY - connect=(1, 1), # Gross electric --> None - pathlengths=[0.25, 1.0, 0.5], - labels=["Net elec.", None, "Recirc. Power"], - ) + # -------------------------------- RECIRCULATING POWER - 5 -------------------------------- - # -------------------------------- RECIRCULATING POWER - 5 -------------------------------- - - # Recirculated power, -Core Systems, -Heating System - recirc = [ + # Recirculated power, -Core Systems, -Heating System + recirc = [ + p_plant_electric_recirc_mw, + -p_plant_core_systems_elec_mw - p_coolant_pump_elec_total_mw, + -p_hcd_electric_total_mw + p_cp_coolant_pump_elec_mw, + ] + # Check if difference >2 between recirculated power and the output sum + if sum(recirc) ** 2 > 2: + print( + "Recirc. Power Balance", p_plant_electric_recirc_mw, - -p_plant_core_systems_elec_mw - p_coolant_pump_elec_total_mw, - -p_hcd_electric_total_mw + p_cp_coolant_pump_elec_mw, - ] - # Check if difference >2 between recirculated power and the output sum - if sum(recirc) ** 2 > 2: - print( - "Recirc. 
Power Balance", - p_plant_electric_recirc_mw, - -p_plant_core_systems_elec_mw - + p_cp_coolant_pump_elec_mw - - p_hcd_electric_total_mw - - p_coolant_pump_elec_total_mw, - ) - sankey.add( - flows=recirc, - orientations=[0, 1, 0], # [left(in), down(out), left(out)] - prior=4, # NET - connect=(2, 0), # Recirc. Power --> None - pathlengths=[0.1, 0.25, 0.8], - labels=[None, "Core Systems", "Heating System"], + -p_plant_core_systems_elec_mw + + p_cp_coolant_pump_elec_mw + - p_hcd_electric_total_mw + - p_coolant_pump_elec_total_mw, ) + sankey.add( + flows=recirc, + orientations=[0, 1, 0], # [left(in), down(out), left(out)] + prior=4, # NET + connect=(2, 0), # Recirc. Power --> None + pathlengths=[0.1, len1 / 2, len2], + trunklength=trunk * 1.2, + labels=[None, "Core Systems", "Heating System"], + ) - # --------------------------------------- LOSSES - 6 -------------------------------------- - - # HCD: Heating system, -Plasma heating, -losses - hcd = [ - p_hcd_electric_total_mw - p_cp_coolant_pump_elec_mw, - -p_hcd_injected_total_mw, - -p_hcd_electric_total_mw - + p_hcd_injected_total_mw - + p_cp_coolant_pump_elec_mw, - ] - sankey.add( - flows=hcd, - orientations=[0, -1, 0], # [left(in), up(out), left(out)] - prior=5, # RECIRC - connect=(2, 0), # Heating System --> None - pathlengths=[0.5, 0.8 + 0.5 * y_adj, 0.4], # 'Plasma Heating' adjust - labels=[None, "Plasma Heating", "Losses"], - ) + # --------------------------------------- LOSSES - 6 -------------------------------------- - # Collecting Sankey diagram and applying a condensed layout - diagrams = sankey.finish() - fig.tight_layout() + # HCD: Heating system, -Plasma heating, -losses + hcd = [ + p_hcd_electric_total_mw - p_cp_coolant_pump_elec_mw, + -p_hcd_injected_total_mw, + -p_hcd_electric_total_mw + p_hcd_injected_total_mw + p_cp_coolant_pump_elec_mw, + ] + sankey.add( + flows=hcd, + orientations=[0, 0, -1], # [left(in), up(out), left(out)] + prior=5, # RECIRC + future=0, + connect=[(2, 0), (1, 2)], # Heating 
System --> None + pathlengths=[None, len1, None], # 'Plasma Heating' adjust + trunklength=trunk, + labels=[None, "Losses", "Plasma Heating"], + ) - # Difference in branch tip locations for 'Plasma Heating' - x_adj, y_adj = diagrams[0].tips[1] - diagrams[6].tips[1] + # Collecting Sankey diagram and applying a condensed layout + diagrams = sankey.finish() + fig.tight_layout() # --------------------------------------- Label Positioning --------------------------------------- @@ -1225,13 +945,13 @@ def plot_sankey( pos[0] + 0.15, pos[1] + 0.5 * (p_hcd_electric_total_mw / totalplasma) + 0.2, )) - if t == diagrams[6].texts[1]: # Plasma Heating + if t == diagrams[6].texts[2]: # Plasma Heating t.set_horizontalalignment("left") t.set_position(( pos[0] + 0.5 * (p_hcd_injected_total_mw / totalplasma) + 0.1, pos[1] - 0.05, )) - if t == diagrams[6].texts[2]: # Losses + if t == diagrams[6].texts[1]: # Losses t.set_horizontalalignment("left") t.set_position(( pos[0] + 0.15, From b219c5eab651483acd2151be09fa7022c97eb22d Mon Sep 17 00:00:00 2001 From: james <81617086+je-cook@users.noreply.github.com> Date: Fri, 27 Feb 2026 11:31:30 +0000 Subject: [PATCH 09/17] rebase fixes --- process/core/io/in_dat/cli.py | 4 ++-- process/core/io/in_dat/create.py | 4 ++-- process/core/io/mfile/cli.py | 6 +++--- process/core/io/mfile/mfile_to_csv.py | 9 ++------- process/core/io/plot/cli.py | 12 ++++++------ process/core/io/plot/costs/cli.py | 8 ++++---- process/core/io/plot/costs/costs_bar.py | 2 +- process/core/io/plot/costs/costs_pie.py | 4 ---- process/core/io/plot/plot_proc.py | 13 ++++--------- process/main.py | 10 +++++----- 10 files changed, 29 insertions(+), 43 deletions(-) diff --git a/process/core/io/in_dat/cli.py b/process/core/io/in_dat/cli.py index 8c744f2ff..6e842afd8 100644 --- a/process/core/io/in_dat/cli.py +++ b/process/core/io/in_dat/cli.py @@ -1,7 +1,7 @@ import click -from process.io.in_dat.create import write_indat -from process.io.tools import indat_opt, mfile_opt +from 
process.core.io.in_dat.create import write_indat +from process.core.io.tools import indat_opt, mfile_opt @click.command("indat", no_args_is_help=True) diff --git a/process/core/io/in_dat/create.py b/process/core/io/in_dat/create.py index 46e5ec40b..649fea292 100644 --- a/process/core/io/in_dat/create.py +++ b/process/core/io/in_dat/create.py @@ -7,8 +7,8 @@ import re -import process.io.mfile.mfile as mf -from process.io.in_dat.base import InDat +import process.core.io.mfile.mfile as mf +from process.core.io.in_dat.base import InDat def feasible_point(filename, position: int): diff --git a/process/core/io/mfile/cli.py b/process/core/io/mfile/cli.py index b950d2f9e..d0a5762ea 100644 --- a/process/core/io/mfile/cli.py +++ b/process/core/io/mfile/cli.py @@ -1,8 +1,8 @@ import click -from process.io.mfile.mfile_comparison import compare_mfiles -from process.io.mfile.mfile_to_csv import to_csv -from process.io.tools import mfile_arg, mfile_opt, save +from process.core.io.mfile.mfile_comparison import compare_mfiles +from process.core.io.mfile.mfile_to_csv import to_csv +from process.core.io.tools import mfile_arg, mfile_opt, save @click.group() diff --git a/process/core/io/mfile/mfile_to_csv.py b/process/core/io/mfile/mfile_to_csv.py index f43eb32f4..7bfc68c47 100644 --- a/process/core/io/mfile/mfile_to_csv.py +++ b/process/core/io/mfile/mfile_to_csv.py @@ -16,14 +16,9 @@ from collections.abc import Sequence from pathlib import Path, PurePath -<<<<<<<< HEAD:process/core/io/mfile_to_csv.py -# PROCESS-specific modules -from process.core.io.mfile import MFile -======== import numpy as np ->>>>>>>> 71bdc991 (Overhall CI):process/core/io/mfile/mfile_to_csv.py -from process.io.mfile.mfile import MFile +from process.core.io.mfile.mfile import MFile default_vars = ( "minmax", @@ -149,7 +144,7 @@ def write_to_csv(csv_outfile, output_data=None): output_data or [], fmt="%.5e", delimiter=",", - header=", ".join(["Description", "Varname", "Value"]), + header="Description, Varname, 
Value", footer="", comments="", ) diff --git a/process/core/io/plot/cli.py b/process/core/io/plot/cli.py index 46824e277..ed404a5b1 100644 --- a/process/core/io/plot/cli.py +++ b/process/core/io/plot/cli.py @@ -2,16 +2,16 @@ import click -from process.io.plot.plot_proc import setup_plot -from process.io.plot.plot_scans import plot_scan -from process.io.plot.plot_stress_tf import plot_stress -from process.io.plot.sankey import plot_sankey, plot_sankey_plotly -from process.io.tools import LazyGroup, mfile_arg, mfile_opt, split_callback +from process.core.io.plot.plot_proc import setup_plot +from process.core.io.plot.plot_scans import plot_scan +from process.core.io.plot.plot_stress_tf import plot_stress +from process.core.io.plot.sankey import plot_sankey, plot_sankey_plotly +from process.core.io.tools import LazyGroup, mfile_arg, mfile_opt, split_callback @click.group( cls=LazyGroup, - lazy_subcommands={"costs": "process.io.plot.costs.cli.costs"}, + lazy_subcommands={"costs": "process.core.io.plot.costs.cli.costs"}, ) def plot(): """Plotting utilities for PROCESS""" diff --git a/process/core/io/plot/costs/cli.py b/process/core/io/plot/costs/cli.py index f1ed3df54..235454d2b 100644 --- a/process/core/io/plot/costs/cli.py +++ b/process/core/io/plot/costs/cli.py @@ -2,10 +2,10 @@ import click -import process.io.mfile.mfile as mf -from process.io.plot.costs.costs_bar import cost_comp_1990, cost_comp_2014 -from process.io.plot.costs.costs_pie import new_cost_model, orig_cost_model -from process.io.tools import mfile_arg, save +import process.core.io.mfile.mfile as mf +from process.core.io.plot.costs.costs_bar import cost_comp_1990, cost_comp_2014 +from process.core.io.plot.costs.costs_pie import new_cost_model, orig_cost_model +from process.core.io.tools import mfile_arg, save save = save("Save figure") diff --git a/process/core/io/plot/costs/costs_bar.py b/process/core/io/plot/costs/costs_bar.py index 1e556653a..ed7d44506 100644 --- 
a/process/core/io/plot/costs/costs_bar.py +++ b/process/core/io/plot/costs/costs_bar.py @@ -10,7 +10,7 @@ import matplotlib.pyplot as plt import numpy as np -import process.io.mfile.mfile as mf +import process.core.io.mfile.mfile as mf def _format_fig(ax, fig, label, save, filename, index, inflate, ylabel_suffix, n_mfiles): diff --git a/process/core/io/plot/costs/costs_pie.py b/process/core/io/plot/costs/costs_pie.py index e6a187cc6..5366c2bb5 100644 --- a/process/core/io/plot/costs/costs_pie.py +++ b/process/core/io/plot/costs/costs_pie.py @@ -4,10 +4,6 @@ import matplotlib.pyplot as plt -<<<<<<<< HEAD:process/core/io/costs_pie.py -import process.core.io.mfile as mf -======== ->>>>>>>> 71bdc991 (Overhall CI):process/core/io/plot/costs/costs_pie.py def orig_cost_model(m_file, save): """Plot pie chart for the orginal 1990 cost model. diff --git a/process/core/io/plot/plot_proc.py b/process/core/io/plot/plot_proc.py index da1a9f5e1..00a22b023 100644 --- a/process/core/io/plot/plot_proc.py +++ b/process/core/io/plot/plot_proc.py @@ -1,11 +1,4 @@ -""" - -PROCESS plot_proc using process_io_lib functions and MFILE.DAT - -24/11/2021: Global dictionary variables moved within the functions - to avoid cyclic dependencies. This is because the dicts - generation script imports, and inspects, process. 
-""" +"""PROCESS plot_proc""" import json import os @@ -2821,7 +2814,9 @@ def plot_main_plasma_information( ) # ========================================= - with resources.path("process.core.io.plot.images", "neutron.png") as neutron_image_path: + with resources.path( + "process.core.io.plot.images", "neutron.png" + ) as neutron_image_path: neutron = mpimg.imread(neutron_image_path.open("rb")) new_ax = axis.inset_axes( [0.975, 0.75, 0.075, 0.075], transform=axis.transAxes, zorder=10 diff --git a/process/main.py b/process/main.py index 361a6c5ce..292995bf0 100644 --- a/process/main.py +++ b/process/main.py @@ -46,14 +46,14 @@ import click -import process +import process # noqa: F401 import process.core.init as init import process.data_structure as data_structure from process.core import constants from process.core.io import obsolete_vars as ov from process.core.io.mfile.mfile import mfile from process.core.io.plot import plot_proc -from process.core.io.plot.sankey import plot_plotly_sankey +from process.core.io.plot.sankey import plot_sankey_plotly from process.core.io.process_config import RunProcessConfig from process.core.io.process_funcs import ( check_input_error, @@ -125,9 +125,9 @@ @click.group( cls=LazyGroup, lazy_subcommands={ - "mfile": "process.io.mfile.cli.mfile", - "plot": "process.io.plot.cli.plot", - "indat": "process.io.in_dat.cli.new_indat", + "mfile": "process.core.io.mfile.cli.mfile", + "plot": "process.core.io.plot.cli.plot", + "indat": "process.core.io.in_dat.cli.new_indat", }, invoke_without_command=True, no_args_is_help=True, From 5ec0691f187bbc678d7870e99b1c53bc502dfffd Mon Sep 17 00:00:00 2001 From: james <81617086+je-cook@users.noreply.github.com> Date: Fri, 27 Feb 2026 11:54:35 +0000 Subject: [PATCH 10/17] path fixes --- process/core/io/plot/sankey.py | 14 ++++++-------- process/core/io/tools.py | 11 +++++++---- process/main.py | 2 +- 3 files changed, 14 insertions(+), 13 deletions(-) diff --git a/process/core/io/plot/sankey.py 
b/process/core/io/plot/sankey.py index 7cdb99975..0a7639772 100644 --- a/process/core/io/plot/sankey.py +++ b/process/core/io/plot/sankey.py @@ -22,7 +22,7 @@ PLOT_SANKEY = False -def plot_sankey_plotly(m_file): +def plot_sankey_plotly(m_file: Path): if not PLOT_SANKEY: print( "\nPlotly is not installed, unable to create sankey diagram!\n" @@ -33,7 +33,7 @@ def plot_sankey_plotly(m_file): return plotly(power_balance_sankey(m_file), m_file) -def power_balance_sankey(m_file): +def power_balance_sankey(m_file: Path): m_file = MFile(m_file) p_hcd_injected_total_mw = m_file.get("p_hcd_injected_total_mw", scan=-1) p_plasma_ohmic_mw = m_file.get("p_plasma_ohmic_mw", scan=-1) @@ -356,7 +356,7 @@ def power_balance_sankey(m_file): } -def plotly(sankey_dict, m_file): +def plotly(sankey_dict, m_file: Path): fig = go.Figure(data=[sankey_dict]) fig.update_layout({ @@ -366,11 +366,9 @@ def plotly(sankey_dict, m_file): "margin": {"l": 40, "r": 40, "t": 40, "b": 40}, }) # Strip 'MFILE' from the filename for the HTML output - html_output_path = ( - Path(m_file) - .with_stem(Path(m_file).stem.replace("MFILE", "plotly_sankey")) - .with_suffix(".html") - ) + html_output_path = m_file.with_stem( + m_file.stem.replace("MFILE", "plotly_sankey") + ).with_suffix(".html") fig.write_html(str(html_output_path)) print(f"Interactive Sankey diagram saved to {html_output_path}") return fig diff --git a/process/core/io/tools.py b/process/core/io/tools.py index 47699b565..200d7e471 100644 --- a/process/core/io/tools.py +++ b/process/core/io/tools.py @@ -1,4 +1,5 @@ import importlib +from pathlib import Path import click @@ -9,20 +10,22 @@ def mfile_opt(exists: bool = False): "--mfile", "mfile", default="MFILE.DAT", - type=click.Path(exists=exists), + type=click.Path(exists=exists, path_type=Path), help="The mfile to read", ) -mfile_arg = click.argument("mfiles", nargs=-1, type=click.Path(exists=True)) +mfile_arg = click.argument( + "mfiles", nargs=-1, type=click.Path(exists=True, path_type=Path) +) 
-def indat_opt(default="IN.DAT"): +def indat_opt(default="IN.DAT", exists=True): return click.option( "-i", "--input", "indat", - type=click.Path(exists=True), + type=click.Path(exists, path_type=Path), help="The path to the input file", default=default, ) diff --git a/process/main.py b/process/main.py index 292995bf0..43c190863 100644 --- a/process/main.py +++ b/process/main.py @@ -51,7 +51,7 @@ import process.data_structure as data_structure from process.core import constants from process.core.io import obsolete_vars as ov -from process.core.io.mfile.mfile import mfile +from process.core.io.mfile import mfile from process.core.io.plot import plot_proc from process.core.io.plot.sankey import plot_sankey_plotly from process.core.io.process_config import RunProcessConfig From d9a939b20698459b4f1bed83b3b5c49c91d13d7d Mon Sep 17 00:00:00 2001 From: james <81617086+je-cook@users.noreply.github.com> Date: Fri, 27 Feb 2026 11:56:55 +0000 Subject: [PATCH 11/17] add click dep --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 1ab12f00e..2a7df229d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,6 +18,7 @@ requires-python = ">=3.10" license = "MIT" dynamic = ["version"] dependencies = [ + "click>=8.1.2", "numpy>=1.23", "scipy>=1.10", "cvxpy~=1.7.3", From e6f7574f5334c53f69fc06cdf4a41f0de418c118 Mon Sep 17 00:00:00 2001 From: james <81617086+je-cook@users.noreply.github.com> Date: Fri, 27 Feb 2026 14:00:36 +0000 Subject: [PATCH 12/17] mfile cleanup --- process/core/io/mfile/cli.py | 24 +- process/core/io/mfile/mfile.py | 136 +++++-- process/core/io/mfile/mfile2dict.py | 389 ------------------- process/core/io/mfile/mfile_to_csv.py | 171 -------- process/core/io/mfile/mfile_to_csv_vars.json | 23 -- process/core/io/plot/costs/costs_pie.py | 4 +- process/core/io/plot/plot_proc.py | 177 ++++----- process/core/io/tools.py | 3 + process/main.py | 11 +- tests/unit/test_mfile.py | 30 ++ tests/unit/test_mfile2dict.py | 61 
--- 11 files changed, 242 insertions(+), 787 deletions(-) delete mode 100644 process/core/io/mfile/mfile2dict.py delete mode 100644 process/core/io/mfile/mfile_to_csv.py delete mode 100644 process/core/io/mfile/mfile_to_csv_vars.json delete mode 100644 tests/unit/test_mfile2dict.py diff --git a/process/core/io/mfile/cli.py b/process/core/io/mfile/cli.py index d0a5762ea..2918ee5bc 100644 --- a/process/core/io/mfile/cli.py +++ b/process/core/io/mfile/cli.py @@ -1,8 +1,10 @@ +import json + import click +from process.core.io.mfile import MFile from process.core.io.mfile.mfile_comparison import compare_mfiles -from process.core.io.mfile.mfile_to_csv import to_csv -from process.core.io.tools import mfile_arg, mfile_opt, save +from process.core.io.tools import mfile_arg, mfile_opt, save, scan_opt @click.group() @@ -16,17 +18,27 @@ def mfile(): "-v", "--variables", type=str, - help="Optional list of variables or file with list of variables to extract", + help="Optional list of variables or json file with list of variables to extract", ) @click.option( "-fmt", "--format", "format_", - type=click.Choice(["json", "csv", "toml", "yaml", "pickle"]), + type=click.Choice(["json", "csv", "toml"]), ) -def convert(mfile, variables, format_): +@scan_opt +@click.option("--verbose", is_flag=True) +def convert(mfile, variables, format_, scan, verbose): """Convert MFile to other formats.""" - to_csv(mfile, variables) + if variables.endswith(".json"): + with open(variables) as f: + variables = json.load(f)["variables"] + else: + variables = list(filter(None, variables.replace(" ", ":").split(":"))) + + getattr(MFile(mfile), f"to_{format_}")( + keys_to_write=variables, scan=scan, verbose=verbose + ) @mfile.command("compare", no_args_is_help=True) diff --git a/process/core/io/mfile/mfile.py b/process/core/io/mfile/mfile.py index e3885f228..7ad81ce18 100644 --- a/process/core/io/mfile/mfile.py +++ b/process/core/io/mfile/mfile.py @@ -27,6 +27,8 @@ from collections import OrderedDict from 
typing import Any +import numpy as np + logger = logging.getLogger(__name__) @@ -337,52 +339,118 @@ def add_to_mfile_variable(self, des, name, value, unit, flag, scan=None): self.data[var_key] = var self.data[var_key].set_scan(1, value) - def write_to_json(self, keys_to_write=None, scan=-1, verbose=False): + def to_dict(self, keys=None, scan: int | None = -1, verbose=False) -> dict: + """Convert MFile to dictionary + + Parameters + ---------- + keys : + keys to select + scan : + scan to select + verbose : + verbosity of output + """ + + if keys is None: + keys = self.data.keys() + + def _get_data(item, dat_key): + data = self.data[item].get_scan(dat_key) + des = self.data[item].var_description.replace("_", " ") + return {"value": data, "description": des} if verbose else data + + save_range = ( + range(1, self.data["rmajor"].get_number_of_scans() + 1) + if scan is None + else [scan] + ) + output = { + f"scan-{i + 1}": { + item: _get_data( + item, -1 if self.data[item].get_number_of_scans() == 1 else i + ) + for item in keys + } + for i in save_range + } + return ( + output[next(iter(output.keys()))] + if len(output.keys()) == 1 and scan is not None + else output + ) + + def to_json(self, keys_to_write=None, scan: int | None = -1, verbose=False): """Write MFILE object to JSON file Parameters ---------- keys_to_write : - (Default value = None) + keys to select scan : - (Default value = -1) + scan to select verbose : - (Default value = False) + verbosity of output """ - if keys_to_write is None: - keys_to_write = self.data.keys() - filename = f"{self.filename}.json" - dict_to_write = {} - - if scan == 0: - for i in range(self.data["rmajor"].get_number_of_scans()): - sub_dict = {} - for item in keys_to_write: - dat_key = -1 if self.data[item].get_number_of_scans() == 1 else i + 1 - data = self.data[item].get_scan(dat_key) - des = self.data[item].var_description.replace("_", " ") - entry = {"value": data, "description": des} if verbose else data - sub_dict[item] = entry 
- dict_to_write[f"scan-{i + 1}"] = sub_dict - else: - for item in keys_to_write: - # Initialize dat_key properly based on the number of scans - if self.data[item].get_number_of_scans() == 1: - dat_key = -1 - else: - dat_key = ( - scan if scan > 0 else 1 - ) # Default to scan 1 if not specified - data = self.data[item].get_scan(dat_key) - des = self.data[item].var_description.replace("_", " ") - entry = {"value": data, "description": des} if verbose else data - dict_to_write[item] = entry - with open(filename, "w") as fp: - json.dump(dict_to_write, fp, indent=4) + json.dump(self.to_dict(keys_to_write, scan, verbose), fp, indent=4) + + def to_toml(self, keys_to_write=None, scan: int | None = -1, verbose=False): + """Write MFILE object to JSON file + + Parameters + ---------- + keys_to_write : + keys to select + scan : + scan to select + verbose : + verbosity of output + """ + import toml + + with open(f"{self.filename}.toml", "w") as file: + toml.dump(self.to_dict(keys_to_write, scan, verbose), file) + + def to_csv(self, keys_to_write=None, scan=-1, verbose=False): + """Write to csv file. 
+ + Parameters + ---------- + args : string, list of tuples + input filename, variable data + csv_outfile : + + output_data : + (Default value = None) + """ + output_data = [] + if scan is None: + for scan_key, vals in self.to_dict( + keys_to_write, scan=scan, verbose=verbose + ).items(): + output_data.extend(( + (scan_key, "", ""), + ("Description", "Varname", "Value"), + )) + for k, v in vals.items(): + output_data.append((v["description"], k, v["value"])) + else: + output_data.append(("Description", "Varname", "Value")) + for k, v in self.to_dict(keys_to_write, scan=scan, verbose=verbose).items(): + output_data.append((v["description"], k, v["value"])) + np.savetxt( + f"{self.filename}.csv", + output_data or [], + fmt="%.5e", + delimiter=",", + header="PROCESS MFILE converted to csv", + footer="", + comments="", + ) def sort_value(value_words: list[str]) -> str | float: diff --git a/process/core/io/mfile/mfile2dict.py b/process/core/io/mfile/mfile2dict.py deleted file mode 100644 index 2a8f78048..000000000 --- a/process/core/io/mfile/mfile2dict.py +++ /dev/null @@ -1,389 +0,0 @@ -import logging -import os -import re -from collections import OrderedDict, abc -from pathlib import Path -from typing import Any - -MFILE_END = "# Copy of PROCESS Input Follows #" -VETO_LIST = [" # PROCESS found a feasible solution #"] -HEADER_MAPPING = {"Power Reactor Optimisation Code": "metadata"} - - -class MFILEParser(abc.MutableMapping): - """Parse an MFILE and extract output values.""" - - def __init__(self, input_mfile: str = ""): - self._input_file = input_mfile - self._mfile_data: OrderedDict = OrderedDict() - self._logger = logging.getLogger(self.__class__.__name__) - if self._input_file: - self.parse(self._input_file) - - def __iter__(self): - for group in self._mfile_data: - yield from self._mfile_data[group] - - def __len__(self): - return sum(len(self._mfile_data[g]) for g in self._mfile_data) - - def items(self): - for group in self._mfile_data: - for param, value in 
self._mfile_data[group].items(): - yield param, value["value"] - - def __getitem__(self, key): - for group in self._mfile_data: - if key in self._mfile_data[group]: - return self._mfile_data[group][key]["value"] - raise KeyError(f"No variable '{key}' found.") - - def get_info_dict(self): - """Get complete information dictionary. - - Returns - ------- - : - retrieve the full information dictionary containing values and - descriptions of extracted parameters - """ - return self._mfile_data - - def __delitem__(self, key): - for group in self._mfile_data: - if key in self._mfile_data[group]: - del self._mfile_data[group][key] - return - raise KeyError(f"No variable '{key}' found.") - - def __setitem__(self, key, value): - for group in self._mfile_data: - if key in self._mfile_data[group]: - self._mfile_data[group][key]["value"] = value - return - raise KeyError(f"No variable '{key}' found.") - - def get_parameter_value(self, param_name: str) -> Any: - for group in self._mfile_data: - if param_name in self._mfile_data[group]: - return self._mfile_data[group][param_name]["value"] - raise KeyError(f"No variable '{param_name}' found.") - - def _line_string_search(self, lines: list[str], search_str: str) -> list[int]: - """Search for substring in file lines. - - Parameters - ---------- - lines : - list of file lines to search - search_str: - search term to look for - - """ - return [i for i, line in enumerate(lines) if search_str in line] - - def _find_var_val_from_str(self, value_str: str) -> Any: - """Convert a string variable to float, int etc. 
- - This function parsers values given within the MFILE removing other - unneeded information such as the specific typing in PROCESS - - Parameters - ---------- - value_str : - value as a string - """ - for type_str in ["OP", "IP", "ITV"]: - value_str = value_str.replace(type_str, "") - try: - return int(value_str) - except ValueError: - pass - try: - return float(value_str) - except ValueError: - return value_str - - def _get_values(self, lines: list[str]) -> dict[str, Any]: - """Extracts value, description and variable name from MFILE lines. - - Parameters - ---------- - lines : - list of file lines to be parsed - """ - # Compile regex for converting underscores which are spaces into - # a space character - space_re = r"(\_{5,})" - var_re = r"(\([a-z0-9\-\+\*\/\_\%\]\[]+\))" - # TODO remove underscores - # Extract lines from the given line set that follow the variable - # statement convention of 'desc_______(varname)________ value' - lines_ = [line for line in lines if re.findall(var_re, line)] - - # Remove extra symbols such as quotation marks and split line into - # the three required components using regex - lines_ = [ - [ - i.replace('"', "").replace("`", "").strip() - for i in re.split(space_re, line) - if not (re.findall(space_re, i)) and i.strip() - ] - for line in lines_ - ] - - # If there are not three components in a given line, try splitting - # the components present by ' ' instead and append - for i, line in enumerate(lines_): - if len(line) != 3: - new_line = [] - for element in line: - if " " in element: - new_line += element.split(" ") - else: - new_line += element - lines_[i] = new_line[:3] - - # Use an ordered dictionary to match ordering in MFILE - vars_dict = OrderedDict() - - # Iterate through the resultant line sets and tidy them a little - # finally creating a dictionary entry for each with the required - # information - for line in lines_: - var_key = line[1][1:-1] - var_key = var_key.replace("%", ".") - if not var_key: - continue - value 
= line[2] - desc = line[0].replace("_-_", "-").replace("_", " ") - desc = desc.title().strip() - desc = desc.replace('"', "") - desc = re.sub(r"\s{2,}", " ", desc) - if var_key in vars_dict: - if not isinstance(vars_dict[var_key], list): - vars_dict[var_key]["value"] = [vars_dict[var_key]["value"]] - vars_dict[var_key]["value"].append(self._find_var_val_from_str(value)) - else: - vars_dict[var_key] = { - "description": desc, - "value": self._find_var_val_from_str(value), - } - - return vars_dict - - def parse(self, mfile_addr: str) -> dict: - """Parse an MFILE and extract output values. - - Parameters - ---------- - mfile_addr : - address of MFILE to parse - - Returns - ------- - : - dictionary of output values - - """ - if not os.path.exists(mfile_addr): - raise FileNotFoundError( - f"Could not open MFILE '{mfile_addr}', file does not exist." - ) - - self._logger.info("Parsing MFILE: %s", mfile_addr) - - with open(mfile_addr) as f: - lines = f.readlines() - - end_of_output = self._line_string_search(lines, MFILE_END)[0] - - self._logger.info("Extracting file headers") - header_indexes = [ - i for i, line in enumerate(lines) if line.strip() and i < end_of_output - ] - - header_indexes = [ - i - for i in header_indexes - if lines[i].strip()[0] == "#" and not any(k in lines[i] for k in VETO_LIST) - ] - - # Gets rid of multi-headers, taking the last one - header_indexes = [i for i in header_indexes if i + 1 not in header_indexes] - - self._logger.info("Retrieving output variable values") - # Iterate through the file headers processing the "block" between them - # extracting variable values. 
Where duplicate headers are found assume - # that a parameter sweep is occuring and append values in lists - for i in range(len(header_indexes) - 1): - key = lines[header_indexes[i]].replace("#", "").strip() - - new_vals = self._get_values( - lines[header_indexes[i] + 1 : header_indexes[i + 1]] - ) - - # The iscan variable is always present at start of sweep - # no matter what the first header in an iteration is - # need to move it into metadata - first_key = lines[header_indexes[0]] - check_iscan = self._mfile_data and "iscan" in new_vals - check_iscan = check_iscan and key != first_key - if check_iscan: - first_key = first_key.replace("#", "").strip() - iscan_var = self._mfile_data[first_key]["iscan"]["value"] - if not isinstance(iscan_var, list): - self._mfile_data[first_key]["iscan"]["value"] = [iscan_var] - self._mfile_data[first_key]["iscan"]["value"].append( - new_vals["iscan"]["value"] - ) - del new_vals["iscan"] - - # Add header to dictionary of not present - if key not in self._mfile_data: - self._mfile_data[key] = new_vals - - # If header already present, iterate through member parameters - # appending the new values to each - else: - for param, var_dict in self._mfile_data[key].items(): - if param not in new_vals: - self._logger.warning( - f"Expected parameter '{param}' in sweep, " - "but could not find entry" - " for this iteration" - ) - continue - value = new_vals[param]["value"] - if not isinstance(var_dict["value"], list): - self._mfile_data[key][param]["value"] = [ - self._mfile_data[key][param]["value"], - value, - ] - else: - # Need to check if the find variables function - # returned a single value for the parameter or multiple - # and handle the cases - if not isinstance(new_vals[param]["value"], list): - self._mfile_data[key][param]["value"].append(value) - else: - self._mfile_data[key][param]["value"] += value - - self._logger.info("Creating output dictionaries") - # Remove any cases where there are no parameters under a given header - 
self._mfile_data = {k: v for k, v in self._mfile_data.items() if v} - - # Use underscore keys and tidy them to be more computationally friendly - def _key_update(key): - key_ = key.lower() - key_ = key_.replace(" ", "_") - for sym in [":", "(", ")", "/"]: - key_ = key_.replace(sym, "") - return key_.replace("__", "_") - - # Apply header mappings and tidy headers - self._mfile_data = { - _key_update(k) if k not in HEADER_MAPPING else HEADER_MAPPING[k]: v - for k, v in self._mfile_data.items() - } - - if not self._mfile_data or len(self._mfile_data) == 0: - raise AssertionError("Failed to extract data from given MFILE") - - # Only run iscan check if iscan exists - try: - first_key = next(iter(self._mfile_data.keys())) - second_key = list(self._mfile_data.keys())[1] - second_key_fp = list(self._mfile_data[second_key])[8] - iscan_arr = self._mfile_data[first_key]["iscan"]["value"] - test_param = self._mfile_data[second_key][second_key_fp]["value"] - if len(test_param) != iscan_arr[-1]: - print(test_param) - raise AssertionError( - "Failed to retrieve all parameter sweep values, " - f"expected {iscan_arr[-1]} values for '{second_key}:{second_key_fp}' and got {len(test_param)}" - ) - except KeyError: - pass - - self._logger.info("Extraction completed successfully") - return self._mfile_data - - def write(self, output_filename: str): - """Write output to file. 
- - Parameters - ---------- - output_filename : str - path of output file, file type is determined from the type and can - be '.toml', '.yml', '.pckl', '.json' - """ - self._logger.info("Writing to output file '%s'", output_filename) - - suffix = os.path.splitext(output_filename)[1].lower() - - if suffix == ".toml": - self._logger.info("Output will be TOML file.") - try: - import tomlkit - except ImportError: - # If file suffix is TOML but TOMLKit is not installed - import toml - - print( - "WARNING: Python module 'tomlkit' not found, " - "file comments will not be written to created TOML file" - ) - with open(output_filename, "w") as file: - toml.dump(self._mfile_data, file) - exit(0) - - # If TOMLKit is present, write TOML file as normal but add in - # descriptions as docstrings instead and format - doc = tomlkit.document() - doc.add(tomlkit.comment("PROCESS Run Output")) - for group_name, data in self._mfile_data.items(): - new_dict = {} - for var_name, var_data in data.items(): - new_dict[var_name] = var_data["value"] - header = group_name.replace("_", " ").title() - ls = int((75 - len(header)) / 2) - rs = 75 - len(header) - ls - header = ls * "-" + " " + header + " " + rs * "-" - doc.add(tomlkit.comment(header)) - doc.add(group_name, new_dict) - doc.add(tomlkit.nl()) - doc.add(tomlkit.nl()) - - for group_name, data in self._mfile_data.items(): - for var_name in data: - doc[group_name][var_name].comment( - self._mfile_data[group_name][var_name]["description"] - ) - - Path(output_filename).write_text(tomlkit.dumps(doc)) - elif suffix == ".json": - # If file suffix is JSON - self._logger.info("Output will be JSON file.") - import json - - with open(output_filename, "w") as file: - json.dump(self._mfile_data, file) - elif suffix in [".yml", ".yaml"]: - self._logger.info("Output will be YAML file.") - # If file suffix is YAML - import yaml - - with open(output_filename, "w") as file: - yaml.dump(self._mfile_data, file) - elif suffix == ".pckl": - 
self._logger.info("Output will be Pickle file.") - # If file suffix is Pickle - import pickle - - with open(output_filename, "wb") as file: - pickle.dump(self._mfile_data, file) - else: - raise RuntimeError(f"Unrecognised file format '{suffix}'") - - self._logger.info("File was written successfully.") diff --git a/process/core/io/mfile/mfile_to_csv.py b/process/core/io/mfile/mfile_to_csv.py deleted file mode 100644 index 7bfc68c47..000000000 --- a/process/core/io/mfile/mfile_to_csv.py +++ /dev/null @@ -1,171 +0,0 @@ -""" -Code to read from a PROCESS MFILE and write values into a csv - -Input files: -mfile (default MFILE.DAT) as output from PROCESS -variable list (default mfile_to_csv_vars.json) as defined by user - -Instructions: -- command line call: python mfile_to_csv.py -f -v - -Output file: -.csv will be saved to the directory of the input file -""" - -import json -from collections.abc import Sequence -from pathlib import Path, PurePath - -import numpy as np - -from process.core.io.mfile.mfile import MFile - -default_vars = ( - "minmax", - "p_hcd_injected_max", - "p_plant_electric_net_required_mw", - "ripple_b_tf_plasma_edge_max", - "t_burn_min", - "alstroh", - "sig_tf_wp_max", - "dx_tf_turn_steel", - "f_j_cs_start_pulse_end_flat_top", - "alstroh", - "rmajor", - "dr_tf_inboard", - "dr_cs", - "c_tf_turn", - "dr_tf_wp_with_insulation", - "dr_cryostat", - "dr_shld_outboard", - "dz_divertor", - "rmajor", -) - - -def get_vars(vfile="mfile_to_csv_vars.json"): - """Returns variable names from identified file. - - Parameters - ---------- - args : string - input JSON filename - vfile : - (Default value = "mfile_to_csv_vars.json") - - Returns - ------- - list - variable names - """ - print("Fetching list of variables from", vfile) - - return json.loads(Path(vfile).read_text())["vars"] - - -def read_mfile(mfilename="MFILE.DAT", variables=None): - """Returns specified variable values from identified file. 
- - Parameters - ---------- - args : string, list - input filename, variable names - mfilename : - (Default value = "MFILE.DAT") - variables : - (Default value = None) - - Returns - ------- - list of tuples - variable descriptions, names, and values - """ - if variables is None: - variables = [] - print("Reading from MFILE:", mfilename) - - m_file = MFile(mfilename) - - output_vars = [] - - # for each variable named in the input varfile, get the description and data value - for var_name in variables: - if var_name not in m_file.data: - print(f"Variable '{var_name}' not in MFILE. Skipping and moving on...") - else: - # In case of a file containing multiple scans, (scan = -1) uses the last scan value - output_vars.append(( - m_file.data[var_name].var_description, - var_name, - m_file.get(var_name, scan=-1), - )) - - return output_vars - - -def get_savenamepath(mfilename="MFILE.DAT"): - """Returns path/filename.csv for file saving. - - Parameters - ---------- - args : string - input filename - mfilename : - (Default value = "MFILE.DAT") - - Returns - ------- - pathlib.PurePosixPath - output filename - """ - - # Either save it locally or output the csv file to the directory of the input file - dirname = Path.cwd() if mfilename == "MFILE.DAT" else PurePath(mfilename).parent - - csv_filename = PurePath(mfilename).stem - return PurePath(dirname, csv_filename + ".csv") - - -def write_to_csv(csv_outfile, output_data=None): - """Write to csv file. - - Parameters - ---------- - args : string, list of tuples - input filename, variable data - csv_outfile : - - output_data : - (Default value = None) - """ - print("Writing to csv file:", csv_outfile) - np.savetxt( - csv_outfile, - output_data or [], - fmt="%.5e", - delimiter=",", - header="Description, Varname, Value", - footer="", - comments="", - ) - - -def to_csv(mfile, variables: Sequence[str] | str = default_vars): - """Extract certain variables from an MFILE.DAT and output to CSV. 
- - Parameters - ---------- - mfile: - Mfile to convert - variables: - variable file with variables to extract - """ - write_to_csv( - get_savenamepath(mfile), - read_mfile( - mfile, get_vars(variables) if isinstance(variables, str) else variables - ), - ) - - # write final line to screen - print("Complete.") diff --git a/process/core/io/mfile/mfile_to_csv_vars.json b/process/core/io/mfile/mfile_to_csv_vars.json deleted file mode 100644 index e7e6849de..000000000 --- a/process/core/io/mfile/mfile_to_csv_vars.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "vars": [ - "minmax", - "p_hcd_injected_max", - "p_plant_electric_net_required_mw", - "ripple_b_tf_plasma_edge_max", - "t_burn_min", - "alstroh", - "sig_tf_wp_max", - "dx_tf_turn_steel", - "f_j_cs_start_pulse_end_flat_top", - "alstroh", - "rmajor", - "dr_tf_inboard", - "dr_cs", - "c_tf_turn", - "dr_tf_wp_with_insulation", - "dr_cryostat", - "dr_shld_outboard", - "dz_divertor", - "rmajor" - ] -} diff --git a/process/core/io/plot/costs/costs_pie.py b/process/core/io/plot/costs/costs_pie.py index 5366c2bb5..43dcaef63 100644 --- a/process/core/io/plot/costs/costs_pie.py +++ b/process/core/io/plot/costs/costs_pie.py @@ -5,7 +5,7 @@ import matplotlib.pyplot as plt -def orig_cost_model(m_file, save): +def cost_model_1990(m_file, save): """Plot pie chart for the orginal 1990 cost model. Two plots produced: (1) Breakdown of the direct costs and (2) Direct, indirect, etc. 
""" @@ -115,7 +115,7 @@ def orig_cost_model(m_file, save): plt.show() -def new_cost_model(m_file, save): +def cost_model_2014(m_file, save): """Plot pie chart for the new 2014 cost model.""" # Read Cost Values s09 = m_file.data["s09"].get_scan(-1) # Buildings diff --git a/process/core/io/plot/plot_proc.py b/process/core/io/plot/plot_proc.py index 00a22b023..1f260124c 100644 --- a/process/core/io/plot/plot_proc.py +++ b/process/core/io/plot/plot_proc.py @@ -20,11 +20,10 @@ from scipy.interpolate import interp1d import process.core.constants as constants -import process.core.io.mfile.mfile as mf import process.data_structure.pfcoil_variables as pfcoil_variables import process.models.physics.confinement_time as confine import process.models.tfcoil.superconducting as sctf -from process.core.io.mfile.mfile import MFileErrorClass +from process.core.io.mfile import MFile, MFileErrorClass from process.core.solver.objectives import OBJECTIVE_NAMES from process.data_structure import impurity_radiation_module, physics_variables from process.models.build import Build @@ -135,9 +134,7 @@ class RadialBuild: rtangle2 = 2 * rtangle -def plot_plasma( - axis: plt.Axes, mfile: mf.MFile, scan: int, colour_scheme: Literal[1, 2] -): +def plot_plasma(axis: plt.Axes, mfile: MFile, scan: int, colour_scheme: Literal[1, 2]): """Plots the plasma boundary arcs. Parameters @@ -203,7 +200,7 @@ def plot_plasma( axis.fill(pg.rs, pg.zs, color=PLASMA_COLOUR[colour_scheme - 1]) -def plot_centre_cross(axis: plt.Axes, mfile: mf.MFile, scan: int): +def plot_centre_cross(axis: plt.Axes, mfile: MFile, scan: int): """Function to plot centre cross on plot Parameters @@ -223,7 +220,7 @@ def plot_centre_cross(axis: plt.Axes, mfile: mf.MFile, scan: int): ) -def cumulative_radial_build(section, mfile: mf.MFile, scan: int): +def cumulative_radial_build(section, mfile: MFile, scan: int): """Function for calculating the cumulative radial build up to and including the given section. 
@@ -263,7 +260,7 @@ def cumulative_radial_build(section, mfile: mf.MFile, scan: int): return cumulative_build -def cumulative_radial_build2(section, mfile: mf.MFile, scan: int): +def cumulative_radial_build2(section, mfile: MFile, scan: int): """Function for calculating the cumulative radial build up to and including the given section. @@ -305,7 +302,7 @@ def cumulative_radial_build2(section, mfile: mf.MFile, scan: int): def poloidal_cross_section( axis: plt.Axes, - mfile: mf.MFile, + mfile: MFile, scan: int, demo_ranges: bool, radial_build: RadialBuild, @@ -357,7 +354,7 @@ def poloidal_cross_section( # --- -def plot_main_power_flow(axis: plt.Axes, mfile: mf.MFile, scan: int, fig: plt.Figure): +def plot_main_power_flow(axis: plt.Axes, mfile: MFile, scan: int, fig: plt.Figure): """Plots the main power flow diagram for the fusion reactor, including plasma, heating and current drive, first wall, blanket, vacuum vessel, divertor, coolant pumps, turbine, generator, and auxiliary systems. Annotates the diagram with power values and draws arrows to indicate power flows. @@ -2319,7 +2316,7 @@ def plot_main_power_flow(axis: plt.Axes, mfile: mf.MFile, scan: int, fig: plt.Fi def plot_main_plasma_information( axis: plt.Axes, - mfile: mf.MFile, + mfile: MFile, scan: int, colour_scheme: Literal[1, 2], fig: plt.Figure, @@ -2331,7 +2328,7 @@ def plot_main_plasma_information( ---------- axis : plt.Axes The matplotlib axis object to plot on. - mfile : mf.MFile + mfile : MFile The MFILE data object containing plasma parameters. scan : int The scan number to use for extracting data. 
@@ -3104,7 +3101,7 @@ def plot_main_plasma_information( ) -def plot_current_profiles_over_time(axis: plt.Axes, mfile: mf.MFile, scan: int): +def plot_current_profiles_over_time(axis: plt.Axes, mfile: MFile, scan: int): """Plots the current profiles over time for PF circuits, CS coil, and plasma.""" t_plant_pulse_coil_precharge = mfile.get("t_plant_pulse_coil_precharge", scan=scan) t_plant_pulse_plasma_current_ramp_up = mfile.get( @@ -3203,9 +3200,7 @@ def plot_current_profiles_over_time(axis: plt.Axes, mfile: mf.MFile, scan: int): axis.grid(True, linestyle="--", alpha=0.6) -def plot_system_power_profiles_over_time( - axis: plt.Axes, mfile: mf.MFile, scan: int, fig -): +def plot_system_power_profiles_over_time(axis: plt.Axes, mfile: MFile, scan: int, fig): """Plots the power profiles over time for various systems.""" t_precharge = mfile.get("t_plant_pulse_coil_precharge", scan=scan) @@ -3365,9 +3360,7 @@ def plot_system_power_profiles_over_time( ) -def plot_cryostat( - axis: plt.Axes, mfile: mf.MFile, scan: int, colour_scheme: Literal[1, 2] -): +def plot_cryostat(axis: plt.Axes, mfile: MFile, scan: int, colour_scheme: Literal[1, 2]): """Function to plot cryostat in poloidal cross-section""" rects = cryostat_geometry( @@ -3387,7 +3380,7 @@ def plot_cryostat( ) -def color_key(axis: plt.Axes, mfile: mf.MFile, scan: int, colour_scheme: Literal[1, 2]): +def color_key(axis: plt.Axes, mfile: MFile, scan: int, colour_scheme: Literal[1, 2]): """Function to plot the colour key""" axis.set_ylim([0, 10]) @@ -3452,7 +3445,7 @@ def secs_to_hms(s): def toroidal_cross_section( axis: plt.Axes, - mfile: mf.MFile, + mfile: MFile, scan: int, demo_ranges: bool, colour_scheme: Literal[1, 2], @@ -3716,7 +3709,7 @@ def arc_fill(axis: plt.Axes, r1, r2, color="pink"): axis.add_patch(patch) -def plot_n_profiles(prof, demo_ranges: bool, mfile: mf.MFile, scan: int): +def plot_n_profiles(prof, demo_ranges: bool, mfile: MFile, scan: int): """Function to plot density profile Parameters @@ -3725,7 
+3718,7 @@ def plot_n_profiles(prof, demo_ranges: bool, mfile: mf.MFile, scan: int): axis object to add plot to demo_ranges: bool : - mfile: mf.MFile : + mfile: MFile : scan: int : @@ -3941,14 +3934,14 @@ def plot_n_profiles(prof, demo_ranges: bool, mfile: mf.MFile, scan: int): # --- -def plot_jprofile(prof, mfile: mf.MFile, scan: int): +def plot_jprofile(prof, mfile: MFile, scan: int): """Function to plot density profile Parameters ---------- prof : axis object to add plot to - mfile: mf.MFile : + mfile: MFile : scan: int : @@ -4016,7 +4009,7 @@ def plot_jprofile(prof, mfile: mf.MFile, scan: int): prof.grid(True, which="both", linestyle="--", linewidth=0.5, alpha=0.2) -def plot_t_profiles(prof, demo_ranges: bool, mfile: mf.MFile, scan: int): +def plot_t_profiles(prof, demo_ranges: bool, mfile: MFile, scan: int): """Function to plot temperature profile Parameters @@ -4025,7 +4018,7 @@ def plot_t_profiles(prof, demo_ranges: bool, mfile: mf.MFile, scan: int): axis object to add plot to demo_ranges: bool : - mfile: mf.MFile : + mfile: MFile : scan: int : @@ -4137,7 +4130,7 @@ def plot_t_profiles(prof, demo_ranges: bool, mfile: mf.MFile, scan: int): # --- -def plot_qprofile(prof, demo_ranges: bool, mfile: mf.MFile, scan: int): +def plot_qprofile(prof, demo_ranges: bool, mfile: MFile, scan: int): """Function to plot q profile, formula taken from Nevins bootstrap model. Parameters @@ -4146,7 +4139,7 @@ def plot_qprofile(prof, demo_ranges: bool, mfile: mf.MFile, scan: int): axis object to add plot to demo_ranges: bool : - mfile: mf.MFile : + mfile: MFile : scan: int : @@ -4337,7 +4330,7 @@ def profiles_with_pedestal(mfile, scan: int): return rho, ne, te -def plot_radprofile(prof, mfile: mf.MFile, scan: int, impp, demo_ranges: bool): +def plot_radprofile(prof, mfile: MFile, scan: int, impp, demo_ranges: bool): """Function to plot radiation profile, formula taken from ???. 
Parameters @@ -4350,7 +4343,7 @@ def plot_radprofile(prof, mfile: mf.MFile, scan: int, impp, demo_ranges: bool): scan number to use impp : impurity path - mfile: mf.MFile : + mfile: MFile : scan: int : @@ -4585,7 +4578,7 @@ def plot_rad_contour(axis: "mpl.axes.Axes", mfile: "Any", scan: int, impp: str): def plot_vacuum_vessel_and_divertor( - axis, mfile: mf.MFile, scan, radial_build, colour_scheme + axis, mfile: MFile, scan, radial_build, colour_scheme ): """Function to plot vacuum vessel and divertor boxes @@ -4790,7 +4783,7 @@ def plot_vacuum_vessel_and_divertor( ) -def plot_shield(axis: plt.Axes, mfile: mf.MFile, scan: int, radial_build, colour_scheme): +def plot_shield(axis: plt.Axes, mfile: MFile, scan: int, radial_build, colour_scheme): """Function to plot shield Parameters @@ -4862,7 +4855,7 @@ def plot_shield(axis: plt.Axes, mfile: mf.MFile, scan: int, radial_build, colour ) -def plot_blanket(axis: plt.Axes, mfile: mf.MFile, scan, radial_build, colour_scheme): +def plot_blanket(axis: plt.Axes, mfile: MFile, scan, radial_build, colour_scheme): """Function to plot blanket Parameters @@ -4984,7 +4977,7 @@ def plot_blanket(axis: plt.Axes, mfile: mf.MFile, scan, radial_build, colour_sch ) -def plot_first_wall_top_down_cross_section(axis: plt.Axes, mfile: mf.MFile, scan: int): +def plot_first_wall_top_down_cross_section(axis: plt.Axes, mfile: MFile, scan: int): # Import required variables radius_fw_channel = mfile.get("radius_fw_channel", scan=scan) * 100 dr_fw_wall = mfile.get("dr_fw_wall", scan=scan) * 100 @@ -5095,7 +5088,7 @@ def plot_first_wall_top_down_cross_section(axis: plt.Axes, mfile: mf.MFile, scan axis.set_ylim([-1, 2 * (dr_fw_wall + radius_fw_channel) + 1]) -def plot_first_wall_poloidal_cross_section(axis: plt.Axes, mfile: mf.MFile, scan: int): +def plot_first_wall_poloidal_cross_section(axis: plt.Axes, mfile: MFile, scan: int): # Import required variables radius_fw_channel = mfile.get("radius_fw_channel", scan=scan) dr_fw_wall = 
mfile.get("dr_fw_wall", scan=scan) @@ -5216,9 +5209,7 @@ def plot_first_wall_poloidal_cross_section(axis: plt.Axes, mfile: mf.MFile, scan axis.set_ylim([-0.2, len_fw_channel + 0.2]) -def plot_firstwall( - axis: plt.Axes, mfile: mf.MFile, scan: int, radial_build, colour_scheme -): +def plot_firstwall(axis: plt.Axes, mfile: MFile, scan: int, radial_build, colour_scheme): """Function to plot first wall Parameters @@ -5324,9 +5315,7 @@ def plot_firstwall( ) -def plot_tf_coils( - axis: plt.Axes, mfile: mf.MFile, scan: int, colour_scheme: Literal[1, 2] -): +def plot_tf_coils(axis: plt.Axes, mfile: MFile, scan: int, colour_scheme: Literal[1, 2]): """Function to plot TF coils Parameters @@ -5435,7 +5424,7 @@ def plot_tf_coils( ) -def plot_superconducting_tf_wp(axis: plt.Axes, mfile: mf.MFile, scan: int, fig): +def plot_superconducting_tf_wp(axis: plt.Axes, mfile: MFile, scan: int, fig): """Plots inboard TF coil and winding pack. Parameters @@ -6133,7 +6122,7 @@ def plot_superconducting_tf_wp(axis: plt.Axes, mfile: mf.MFile, scan: int, fig): axis.legend(loc="upper left") -def plot_resistive_tf_wp(axis: plt.Axes, mfile: mf.MFile, scan: int, fig): +def plot_resistive_tf_wp(axis: plt.Axes, mfile: MFile, scan: int, fig): """Plots inboard TF coil and winding pack. Parameters @@ -6520,7 +6509,7 @@ def plot_resistive_tf_wp(axis: plt.Axes, mfile: mf.MFile, scan: int, fig): ) -def plot_resistive_tf_info(axis: plt.Axes, mfile: mf.MFile, scan: int, fig): +def plot_resistive_tf_info(axis: plt.Axes, mfile: MFile, scan: int, fig): # Add info about the steel casing surrounding the WP textstr_casing = ( f"$\\mathbf{{Casing:}}$\n \n" @@ -6641,7 +6630,7 @@ def plot_resistive_tf_info(axis: plt.Axes, mfile: mf.MFile, scan: int, fig): ) -def plot_tf_cable_in_conduit_turn(axis: plt.Axes, fig, mfile: mf.MFile, scan: int): +def plot_tf_cable_in_conduit_turn(axis: plt.Axes, fig, mfile: MFile, scan: int): """Plots inboard TF coil CICC individual turn structure. 
Parameters @@ -7227,7 +7216,7 @@ def _pack_strands_rectangular_with_obstacles( ) -def plot_cable_in_conduit_cable(axis: plt.Axes, fig, mfile: mf.MFile, scan: int): +def plot_cable_in_conduit_cable(axis: plt.Axes, fig, mfile: MFile, scan: int): """Plots TF coil CICC cable cross-section. Parameters @@ -7236,7 +7225,7 @@ def plot_cable_in_conduit_cable(axis: plt.Axes, fig, mfile: mf.MFile, scan: int) fig : - mfile: mf.MFile : + mfile: MFile : scan: int : @@ -7321,9 +7310,7 @@ def plot_cable_in_conduit_cable(axis: plt.Axes, fig, mfile: mf.MFile, scan: int) axis.set_ylabel("Y [mm]") -def plot_pf_coils( - axis: plt.Axes, mfile: mf.MFile, scan: int, colour_scheme: Literal[1, 2] -): +def plot_pf_coils(axis: plt.Axes, mfile: MFile, scan: int, colour_scheme: Literal[1, 2]): """Function to plot PF coils Parameters @@ -7417,7 +7404,7 @@ def plot_pf_coils( ) -def plot_info(axis: plt.Axes, data, mfile: mf.MFile, scan: int): +def plot_info(axis: plt.Axes, data, mfile: MFile, scan: int): """Function to plot data in written form on a matplotlib plot. 
Parameters @@ -7494,7 +7481,7 @@ def plot_info(axis: plt.Axes, data, mfile: mf.MFile, scan: int): ) -def plot_header(axis: plt.Axes, mfile: mf.MFile, scan: int): +def plot_header(axis: plt.Axes, mfile: MFile, scan: int): """Function to plot header info: date, rutitle etc Parameters @@ -7614,7 +7601,7 @@ def plot_header(axis: plt.Axes, mfile: mf.MFile, scan: int): plot_info(axis, data2, mfile, scan) -def plot_geometry_info(axis: plt.Axes, mfile: mf.MFile, scan: int): +def plot_geometry_info(axis: plt.Axes, mfile: MFile, scan: int): """Function to plot geometry info Parameters @@ -7663,7 +7650,7 @@ def plot_geometry_info(axis: plt.Axes, mfile: mf.MFile, scan: int): plot_info(axis, data, mfile, scan) -def plot_physics_info(axis: plt.Axes, mfile: mf.MFile, scan: int): +def plot_physics_info(axis: plt.Axes, mfile: MFile, scan: int): """Function to plot geometry info Parameters @@ -7740,7 +7727,7 @@ def plot_physics_info(axis: plt.Axes, mfile: mf.MFile, scan: int): plot_info(axis, data, mfile, scan) -def plot_magnetics_info(axis: plt.Axes, mfile: mf.MFile, scan: int): +def plot_magnetics_info(axis: plt.Axes, mfile: MFile, scan: int): """Function to plot magnet info Parameters @@ -7869,7 +7856,7 @@ def plot_magnetics_info(axis: plt.Axes, mfile: mf.MFile, scan: int): plot_info(axis, data, mfile, scan) -def plot_power_info(axis: plt.Axes, mfile: mf.MFile, scan: int): +def plot_power_info(axis: plt.Axes, mfile: MFile, scan: int): """Function to plot power info Parameters @@ -7961,7 +7948,7 @@ def plot_power_info(axis: plt.Axes, mfile: mf.MFile, scan: int): plot_info(axis, data, mfile, scan) -def plot_current_drive_info(axis: plt.Axes, mfile: mf.MFile, scan: int): +def plot_current_drive_info(axis: plt.Axes, mfile: MFile, scan: int): """Function to plot current drive info Parameters @@ -8222,7 +8209,7 @@ def plot_current_drive_info(axis: plt.Axes, mfile: mf.MFile, scan: int): plot_info(axis, data, mfile, scan) -def plot_bootstrap_comparison(axis: plt.Axes, mfile: mf.MFile, 
scan: int): +def plot_bootstrap_comparison(axis: plt.Axes, mfile: MFile, scan: int): """Function to plot a scatter box plot of bootstrap current fractions. Parameters @@ -8312,7 +8299,7 @@ def plot_bootstrap_comparison(axis: plt.Axes, mfile: mf.MFile, scan: int): axis.set_facecolor("#f0f0f0") -def plot_h_threshold_comparison(axis: plt.Axes, mfile: mf.MFile, scan: int, u_seed=None): +def plot_h_threshold_comparison(axis: plt.Axes, mfile: MFile, scan: int, u_seed=None): """Function to plot a scatter box plot of L-H threshold power comparisons. Parameters @@ -8447,7 +8434,7 @@ def plot_h_threshold_comparison(axis: plt.Axes, mfile: mf.MFile, scan: int, u_se def plot_confinement_time_comparison( - axis: plt.Axes, mfile: mf.MFile, scan: int, u_seed=None + axis: plt.Axes, mfile: MFile, scan: int, u_seed=None ): """Function to plot a scatter box plot of confinement time comparisons. @@ -8786,7 +8773,7 @@ def plot_confinement_time_comparison( axis.set_facecolor("#f0f0f0") -def plot_radial_build(axis: plt.Axes, mfile: mf.MFile, colour_scheme: Literal[1, 2]): +def plot_radial_build(axis: plt.Axes, mfile: MFile, colour_scheme: Literal[1, 2]): """Plots the radial build of a fusion device on the given matplotlib axis. This function visualizes the different layers/components of the machine's radial build @@ -8802,7 +8789,7 @@ def plot_radial_build(axis: plt.Axes, mfile: mf.MFile, colour_scheme: Literal[1, ---------- axis : matplotlib.axes.Axes The matplotlib axis on which to plot the radial build. - mfile : mf.MFile + mfile : MFile An object containing the machine build data, with required fields for each radial component and the "i_tf_inside_cs" flag. 
colour_scheme: @@ -8967,7 +8954,7 @@ def plot_radial_build(axis: plt.Axes, mfile: mf.MFile, colour_scheme: Literal[1, def plot_lower_vertical_build( - axis: plt.Axes, mfile: mf.MFile, colour_scheme: Literal[1, 2] + axis: plt.Axes, mfile: MFile, colour_scheme: Literal[1, 2] ): """Plots the lower vertical build of a fusion device on the given matplotlib axis. @@ -9073,7 +9060,7 @@ def plot_lower_vertical_build( def plot_upper_vertical_build( - axis: plt.Axes, mfile: mf.MFile, colour_scheme: Literal[1, 2] + axis: plt.Axes, mfile: MFile, colour_scheme: Literal[1, 2] ): """Plots the upper vertical build of a fusion device on the given matplotlib axis. @@ -9225,7 +9212,7 @@ def plot_upper_vertical_build( axis.title.set_text("Upper Vertical Build") -def plot_density_limit_comparison(axis: plt.Axes, mfile: mf.MFile, scan: int): +def plot_density_limit_comparison(axis: plt.Axes, mfile: MFile, scan: int): """Function to plot a scatter box plot of different density limit comparisons. Parameters @@ -9312,7 +9299,7 @@ def plot_density_limit_comparison(axis: plt.Axes, mfile: mf.MFile, scan: int): def plot_cs_coil_structure( - axis: plt.Axes, fig, mfile: mf.MFile, scan: int, colour_scheme=1 + axis: plt.Axes, fig, mfile: MFile, scan: int, colour_scheme=1 ): """Function to plot the coil structure of the CS. 
@@ -9517,7 +9504,7 @@ def plot_cs_coil_structure( axis.legend() -def plot_cs_turn_structure(axis: plt.Axes, fig, mfile: mf.MFile, scan: int): +def plot_cs_turn_structure(axis: plt.Axes, fig, mfile: MFile, scan: int): a_cs_turn = mfile.get("a_cs_turn", scan=scan) dz_cs_turn = mfile.get("dz_cs_turn", scan=scan) dr_cs_turn = mfile.get("dr_cs_turn", scan=scan) @@ -9622,7 +9609,7 @@ def plot_cs_turn_structure(axis: plt.Axes, fig, mfile: mf.MFile, scan: int): axis.grid(True, linestyle="--", alpha=0.3) -def plot_tf_coil_structure(axis: plt.Axes, mfile: mf.MFile, scan: int, colour_scheme=1): +def plot_tf_coil_structure(axis: plt.Axes, mfile: MFile, scan: int, colour_scheme=1): # Plot the TF coil poloidal cross-section plot_tf_coils(axis, mfile, scan, colour_scheme) @@ -10036,14 +10023,14 @@ def plot_tf_coil_structure(axis: plt.Axes, mfile: mf.MFile, scan: int, colour_sc axis.legend(labels, loc="upper center", bbox_to_anchor=(1.01, 0.85), ncol=1) -def plot_iteration_variables(axis: plt.Axes, m_file: mf.MFile, scan: int): +def plot_iteration_variables(axis: plt.Axes, m_file: MFile, scan: int): """Plot the iteration variables and where they lay in their bounds on a given axes Parameters ---------- axis: plt.Axes : - m_file: mf.MFile : + m_file: MFile : scan: int : @@ -10199,7 +10186,7 @@ def plot_iteration_variables(axis: plt.Axes, m_file: mf.MFile, scan: int): axis.legend(loc="upper left", bbox_to_anchor=(-0.15, 1.05), ncol=1) -def plot_tf_stress(axis: plt.Axes, mfile: mf.MFile): +def plot_tf_stress(axis: plt.Axes, mfile: MFile): """Function to plot the TF coil stress from the SIG_TF.json file. 
Input file: @@ -10209,7 +10196,7 @@ def plot_tf_stress(axis: plt.Axes, mfile: mf.MFile): ---------- axis: plt.Axes : - mfile: mf.MFile : + mfile: MFile : """ @@ -10811,7 +10798,7 @@ def plot_fw_90_deg_pipe_bend(ax, m_file, scan: int): ) -def plot_fusion_rate_profiles(axis: plt.Axes, fig, mfile: mf.MFile, scan: int): +def plot_fusion_rate_profiles(axis: plt.Axes, fig, mfile: MFile, scan: int): # Plot the fusion rate profiles on the given axis fusrat_plasma_dt_profile = [] fusrat_plasma_dd_triton_profile = [] @@ -11216,7 +11203,7 @@ def plot_fusion_rate_profiles(axis: plt.Axes, fig, mfile: mf.MFile, scan: int): def plot_cover_page( axis: plt.Axes, - mfile: mf.MFile, + mfile: MFile, scan: int, fig, radial_build: RadialBuild, @@ -11228,7 +11215,7 @@ def plot_cover_page( ---------- axis : plt.Axes The matplotlib axis object to plot on. - mfile : mf.MFile + mfile : MFile The MFILE data object containing run info. scan : int The scan number to use for extracting data. @@ -11379,7 +11366,7 @@ def plot_cover_page( inset_ax.axis("off") -def plot_plasma_pressure_profiles(axis: plt.Axes, mfile: mf.MFile, scan: int): +def plot_plasma_pressure_profiles(axis: plt.Axes, mfile: MFile, scan: int): # Plot the plasma pressure profiles on the given axis n_plasma_profile_elements = int(mfile.get("n_plasma_profile_elements", scan=scan)) @@ -11469,7 +11456,7 @@ def plot_plasma_pressure_profiles(axis: plt.Axes, mfile: mf.MFile, scan: int): ) -def plot_plasma_pressure_gradient_profiles(axis: plt.Axes, mfile: mf.MFile, scan: int): +def plot_plasma_pressure_gradient_profiles(axis: plt.Axes, mfile: MFile, scan: int): # Get the plasma pressure profiles n_plasma_profile_elements = int(mfile.get("n_plasma_profile_elements", scan=scan)) @@ -11517,7 +11504,7 @@ def plot_plasma_pressure_gradient_profiles(axis: plt.Axes, mfile: mf.MFile, scan axis.legend() -def plot_plasma_poloidal_pressure_contours(axis: plt.Axes, mfile: mf.MFile, scan: int): +def plot_plasma_poloidal_pressure_contours(axis: 
plt.Axes, mfile: MFile, scan: int): """Plot plasma poloidal pressure contours inside the plasma boundary. This function visualizes the poloidal pressure distribution inside the plasma boundary @@ -11528,7 +11515,7 @@ def plot_plasma_poloidal_pressure_contours(axis: plt.Axes, mfile: mf.MFile, scan ---------- axis : matplotlib.axes.Axes Matplotlib axis object to plot on. - mfile : mfile: mf.MFile + mfile : mfile: MFile MFILE data object containing plasma and geometry data. scan : int Scan number to use for extracting data. @@ -11594,7 +11581,7 @@ def plot_plasma_poloidal_pressure_contours(axis: plt.Axes, mfile: mf.MFile, scan ) -def interp1d_profile(profile, mfile: mf.MFile, scan: int): +def interp1d_profile(profile, mfile: MFile, scan: int): # Get plasma geometry and boundary pg = plasma_geometry( rmajor=mfile.get("rmajor", scan=scan), @@ -11816,7 +11803,7 @@ def reaction_plot_grid( def plot_fusion_rate_contours( fig1, fig2, - mfile: mf.MFile, + mfile: MFile, scan: int, ): fusrat_plasma_dt_profile = [] @@ -11878,7 +11865,7 @@ def plot_fusion_rate_contours( reaction_plot_grid(rminor, rmajor, kappa, r_grid, z_grid, dhe3_grid, dhe3_axes) -def plot_magnetic_fields_in_plasma(axis: plt.Axes, mfile: mf.MFile, scan: int): +def plot_magnetic_fields_in_plasma(axis: plt.Axes, mfile: MFile, scan: int): # Plot magnetic field profiles inside the plasma boundary n_plasma_profile_elements = int(mfile.get("n_plasma_profile_elements", scan=scan)) @@ -11958,7 +11945,7 @@ def plot_magnetic_fields_in_plasma(axis: plt.Axes, mfile: mf.MFile, scan: int): axis.set_xlim(rmajor - 1.25 * rminor, rmajor + 1.25 * rminor) -def plot_beta_profiles(axis: plt.Axes, mfile: mf.MFile, scan: int): +def plot_beta_profiles(axis: plt.Axes, mfile: MFile, scan: int): # Plot the beta profiles on the given axis n_plasma_profile_elements = int(mfile.get("n_plasma_profile_elements", scan=scan)) @@ -11994,7 +11981,7 @@ def plot_beta_profiles(axis: plt.Axes, mfile: mf.MFile, scan: int): axis.set_ylim(bottom=0.0) 
-def plot_plasma_outboard_toroidal_ripple_map(fig, mfile: mf.MFile, scan: int): +def plot_plasma_outboard_toroidal_ripple_map(fig, mfile: MFile, scan: int): r_tf_outboard_mid = mfile.get("r_tf_outboard_mid", scan=scan) n_tf_coils = mfile.get("n_tf_coils", scan=scan) rmajor = mfile.get("rmajor", scan=scan) @@ -12305,7 +12292,7 @@ def plot_plasma_outboard_toroidal_ripple_map(fig, mfile: mf.MFile, scan: int): fig.tight_layout() -def plot_plasma_effective_charge_profile(axis: plt.Axes, mfile: mf.MFile, scan: int): +def plot_plasma_effective_charge_profile(axis: plt.Axes, mfile: MFile, scan: int): n_plasma_profile_elements = int(mfile.get("n_plasma_profile_elements", scan=scan)) n_charge_plasma_effective_vol_avg = mfile.get( @@ -12340,7 +12327,7 @@ def plot_plasma_effective_charge_profile(axis: plt.Axes, mfile: mf.MFile, scan: axis.legend() -def plot_ion_charge_profile(axis: plt.Axes, mfile: mf.MFile, scan: int): +def plot_ion_charge_profile(axis: plt.Axes, mfile: MFile, scan: int): n_plasma_profile_elements = int(mfile.get("n_plasma_profile_elements", scan=scan)) # find impurity densities @@ -12409,7 +12396,7 @@ def plot_ion_charge_profile(axis: plt.Axes, mfile: mf.MFile, scan: int): axis.grid(which="both", linestyle="--", alpha=0.5) -def plot_ebw_ecrh_coupling_graph(axis: plt.Axes, mfile: mf.MFile, scan: int): +def plot_ebw_ecrh_coupling_graph(axis: plt.Axes, mfile: MFile, scan: int): # Plot EBW and ECRH coupling efficiency graph ebw = ElectronBernstein(plasma_profile=0) ecrg = ElectronCyclotron(plasma_profile=0) @@ -12493,7 +12480,7 @@ def plot_ebw_ecrh_coupling_graph(axis: plt.Axes, mfile: mf.MFile, scan: int): axis.minorticks_on() -def plot_debye_length_profile(axis: plt.Axes, mfile_data: mf.MFile, scan: int): +def plot_debye_length_profile(axis: plt.Axes, mfile_data: MFile, scan: int): """Plot the Debye length profile on the given axis. 
Parameters @@ -12735,14 +12722,14 @@ def plot_plasma_coloumb_logarithms(axis, mfile_data, scan): axis.legend() -def plot_equality_constraint_equations(axis: plt.Axes, m_file_data: mf.MFile, scan: int): +def plot_equality_constraint_equations(axis: plt.Axes, m_file_data: MFile, scan: int): """Plot the equality constraints for a solution and their normalised residuals Parameters ---------- axis: plt.Axes : - m_file_data: mf.MFile : + m_file_data: MFile : scan: int : @@ -12826,14 +12813,14 @@ def plot_equality_constraint_equations(axis: plt.Axes, m_file_data: mf.MFile, sc axis.legend() -def plot_inequality_constraint_equations(axis: plt.Axes, m_file: mf.MFile, scan: int): +def plot_inequality_constraint_equations(axis: plt.Axes, m_file: MFile, scan: int): """Plot the inequality constraints for a solution and where they lay within their bounds Parameters ---------- axis: plt.Axes : - m_file: mf.MFile : + m_file: MFile : scan: int : @@ -12999,7 +12986,7 @@ def plot_inequality_constraint_equations(axis: plt.Axes, m_file: mf.MFile, scan: def main_plot( figs: list[Axes], - m_file: mf.MFile, + m_file: MFile, scan: int, imp: str = "../data/lz_non_corona_14_elements/", demo_ranges: bool = False, @@ -13416,7 +13403,7 @@ def setup_plot( # run main_plot main_plot( pages, - mf.MFile(mfile) if mfile != "" else mf.MFile("MFILE.DAT"), + MFile(mfile) if mfile != "" else MFile("MFILE.DAT"), scan=scan or -1, demo_ranges=demo_ranges, colour_scheme=colour, diff --git a/process/core/io/tools.py b/process/core/io/tools.py index 200d7e471..c1b2745a1 100644 --- a/process/core/io/tools.py +++ b/process/core/io/tools.py @@ -39,6 +39,9 @@ def split_callback(ctx: click.Context, param, value: str | None) -> list[str] | return value.split(":") if isinstance(value, str) else value +scan_opt = click.option("--scan", type=int, help="Scan to select") + + ### Taken from click documentation class LazyGroup(click.Group): def __init__(self, *args, lazy_subcommands=None, **kwargs): diff --git 
a/process/main.py b/process/main.py index 43c190863..047b46e12 100644 --- a/process/main.py +++ b/process/main.py @@ -47,11 +47,10 @@ import click import process # noqa: F401 -import process.core.init as init -import process.data_structure as data_structure -from process.core import constants +from process import data_structure +from process.core import constants, init from process.core.io import obsolete_vars as ov -from process.core.io.mfile import mfile +from process.core.io.mfile import MFile from process.core.io.plot import plot_proc from process.core.io.plot.sankey import plot_sankey_plotly from process.core.io.process_config import RunProcessConfig @@ -215,9 +214,9 @@ def process_cli( if mfilejson: # Produce a json file containing mfile output, useful for VVUQ work. mfile_path = Path(mfile_path) - mfile_data = mfile.MFile(filename=mfile_path) + mfile_data = MFile(filename=mfile_path) mfile_data.open_mfile() - mfile_data.write_to_json() + mfile_data.to_json() if full_output: # Run all summary plotting scripts for the output diff --git a/tests/unit/test_mfile.py b/tests/unit/test_mfile.py index 006a75b4d..45a141873 100644 --- a/tests/unit/test_mfile.py +++ b/tests/unit/test_mfile.py @@ -1,6 +1,11 @@ +import json import shutil +import tempfile from pathlib import Path +import pytest + +from process.core.io.mfile import MFile from process.core.io.mfile_utils import get_mfile_initial_ixc_values @@ -24,3 +29,28 @@ def test_get_mfile_initial_ixc_values(input_file, tmp_path): # A default not provided in the MFile assert iteration_variable_names[-4] == "f_nd_alpha_electron" assert iteration_variable_values[-4] == 0.1 + + +@pytest.fixture(scope="module") +def read_mfile(): + """Read-in MFILE for testing. 
+ + :return: parsed mfile + :rtype: mfile2dict.MFILEParser + """ + data_path = Path(__file__).parent / "data" + + return MFile(data_path / "large_tokamak_MFILE.DAT") + + +@pytest.fixture(scope="module") +def temporary_dir(): + return tempfile.mkdtemp() + + +def test_write_json(read_mfile, temporary_dir): + json_f = Path(temporary_dir, "2017_baseline.json") + read_mfile.to_json(json_f) + assert json_f.is_file() + with open(json_f) as file: + assert json.load(file) diff --git a/tests/unit/test_mfile2dict.py b/tests/unit/test_mfile2dict.py deleted file mode 100644 index 08a4ed3f1..000000000 --- a/tests/unit/test_mfile2dict.py +++ /dev/null @@ -1,61 +0,0 @@ -import json -import os -import pickle -import tempfile -from pathlib import Path - -import pytest -import yaml - -from process.core.io import mfile2dict - - -@pytest.fixture(scope="module") -def read_mfile(): - """Read-in MFILE for testing. - - :return: parsed mfile - :rtype: mfile2dict.MFILEParser - """ - data_path = Path(__file__).parent / "data" - mfile_path = data_path / "large_tokamak_MFILE.DAT" - mfile_path_str = str(Path(mfile_path).resolve()) - - return mfile2dict.MFILEParser(mfile_path_str) - - -@pytest.fixture(scope="module") -def temporary_dir(): - return tempfile.mkdtemp() - - -def test_parser_succeed(read_mfile): - assert read_mfile._mfile_data # noqa: SLF001 - - -def test_value_read(read_mfile): - assert isinstance(read_mfile.get_parameter_value("lsa"), int) - - -def test_write_json(read_mfile, temporary_dir): - json_f = os.path.join(temporary_dir, "2017_baseline.json") - read_mfile.write(json_f) - assert os.path.exists(json_f) - with open(json_f) as file: - assert json.load(file) - - -def test_write_yaml(read_mfile, temporary_dir): - pckl_f = os.path.join(temporary_dir, "2017_baseline.yml") - read_mfile.write(pckl_f) - assert os.path.exists(pckl_f) - with open(pckl_f) as file: - assert yaml.load(file, Loader=yaml.BaseLoader) - - -def test_write_pickle(read_mfile, temporary_dir): - pckl_f = 
os.path.join(temporary_dir, "2017_baseline.pckl") - read_mfile.write(pckl_f) - assert os.path.exists(pckl_f) - with open(pckl_f, "rb") as file: - assert pickle.load(file) From a34478ba00105b0f2d14698164838e01796d056c Mon Sep 17 00:00:00 2001 From: james <81617086+je-cook@users.noreply.github.com> Date: Fri, 27 Feb 2026 14:03:51 +0000 Subject: [PATCH 13/17] import fixes --- process/core/io/plot/costs/cli.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/process/core/io/plot/costs/cli.py b/process/core/io/plot/costs/cli.py index 235454d2b..b0e9a4e7a 100644 --- a/process/core/io/plot/costs/cli.py +++ b/process/core/io/plot/costs/cli.py @@ -4,7 +4,7 @@ import process.core.io.mfile.mfile as mf from process.core.io.plot.costs.costs_bar import cost_comp_1990, cost_comp_2014 -from process.core.io.plot.costs.costs_pie import new_cost_model, orig_cost_model +from process.core.io.plot.costs.costs_pie import cost_model_1990, cost_model_2014 from process.core.io.tools import mfile_arg, save save = save("Save figure") @@ -25,9 +25,9 @@ def pie_plot(mfiles, save): # Check which cost model is being used if "c21" in m_file.data: - orig_cost_model(m_file, save) + cost_model_1990(m_file, save) elif "s01" in m_file.data: - new_cost_model(m_file, save) + cost_model_2014(m_file, save) else: print("ERROR: Cannot identify cost data, check MFILE!") From 8bf081567aa4c39a692b4d50e50231a8b7f05d9b Mon Sep 17 00:00:00 2001 From: james <81617086+je-cook@users.noreply.github.com> Date: Fri, 27 Feb 2026 14:05:57 +0000 Subject: [PATCH 14/17] inits --- process/core/io/in_dat/__init__.py | 0 process/core/io/mfile/__init__.py | 3 +++ process/core/io/plot/__init__.py | 0 process/core/io/plot/costs/__init__.py | 4 ++++ process/core/io/plot/images/__init__.py | 0 5 files changed, 7 insertions(+) create mode 100644 process/core/io/in_dat/__init__.py create mode 100644 process/core/io/mfile/__init__.py create mode 100644 process/core/io/plot/__init__.py create mode 100644 
process/core/io/plot/costs/__init__.py create mode 100644 process/core/io/plot/images/__init__.py diff --git a/process/core/io/in_dat/__init__.py b/process/core/io/in_dat/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/process/core/io/mfile/__init__.py b/process/core/io/mfile/__init__.py new file mode 100644 index 000000000..e18c7ef71 --- /dev/null +++ b/process/core/io/mfile/__init__.py @@ -0,0 +1,3 @@ +from process.core.io.mfile.mfile import MFile, MFileErrorClass + +__all__ = ["MFile", "MFileErrorClass"] diff --git a/process/core/io/plot/__init__.py b/process/core/io/plot/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/process/core/io/plot/costs/__init__.py b/process/core/io/plot/costs/__init__.py new file mode 100644 index 000000000..17a8b988a --- /dev/null +++ b/process/core/io/plot/costs/__init__.py @@ -0,0 +1,4 @@ +from process.core.io.plot.costs.costs_bar import cost_comp_1990, cost_comp_2014 +from process.core.io.plot.costs.costs_pie import cost_model_1990, cost_model_2014 + +__all__ = ["cost_comp_1990", "cost_comp_2014", "cost_model_1990", "cost_model_2014"] diff --git a/process/core/io/plot/images/__init__.py b/process/core/io/plot/images/__init__.py new file mode 100644 index 000000000..e69de29bb From e8424b4e435a912f4991d33697715bbfaa45cc26 Mon Sep 17 00:00:00 2001 From: james <81617086+je-cook@users.noreply.github.com> Date: Fri, 27 Feb 2026 14:20:54 +0000 Subject: [PATCH 15/17] some fixes --- process/core/io/in_dat/__init__.py | 3 + process/core/io/mfile/utils.py | 43 ++++++++ tests/integration/test_write_new_in_dat.py | 11 +- tests/regression/test_process_input_files.py | 15 +-- tests/unit/test_main.py | 106 +------------------ tests/unit/test_mfile.py | 2 +- 6 files changed, 65 insertions(+), 115 deletions(-) create mode 100644 process/core/io/mfile/utils.py diff --git a/process/core/io/in_dat/__init__.py b/process/core/io/in_dat/__init__.py index e69de29bb..a330dcf66 100644 --- 
a/process/core/io/in_dat/__init__.py +++ b/process/core/io/in_dat/__init__.py @@ -0,0 +1,3 @@ +from process.core.io.in_dat.base import InDat + +__all__ = ["InDat"] diff --git a/process/core/io/mfile/utils.py b/process/core/io/mfile/utils.py new file mode 100644 index 000000000..d525221db --- /dev/null +++ b/process/core/io/mfile/utils.py @@ -0,0 +1,43 @@ +import re +from pathlib import Path + +from process import data_structure +from process.core.solver import iteration_variables +from process.main import SingleRun + + +def get_mfile_initial_ixc_values(file_path: Path): + """Initialise the input file and obtain the initial values of the iteration variables + + Parameters + ---------- + file_path : + The path to the MFile to get the initial iteration variable values from. + + Notes + ----- + This method initialises a SingleRun. At present, this involves mutating the global + data structure so it is not safe to run this method during a PROCESS run. + """ + SingleRun(file_path.as_posix()) + iteration_variables.load_iteration_variables() + + iteration_variable_names = [] + iteration_variable_values = [] + + for i in range(data_structure.numerics.nvar): + ivar = data_structure.numerics.ixc[i].item() + + itv = iteration_variables.ITERATION_VARIABLES[ivar] + + iteration_variable_names.append(itv.name) + if array := re.match(r"(\w+)\(([0-9]+)\)", itv.name): + var_name = array.group(1) + index = array.group(2) + iteration_variable_values.append( + getattr(itv.module, var_name)[int(index) - 1] + ) + else: + iteration_variable_values.append(getattr(itv.module, itv.name)) + + return iteration_variable_names, iteration_variable_values diff --git a/tests/integration/test_write_new_in_dat.py b/tests/integration/test_write_new_in_dat.py index 724e39805..e525d5f35 100644 --- a/tests/integration/test_write_new_in_dat.py +++ b/tests/integration/test_write_new_in_dat.py @@ -1,9 +1,10 @@ """Integration tests for write_new_in_dat.py.""" +from click.testing import CliRunner from pytest 
import approx -from process.core.io import write_new_in_dat from process.core.io.in_dat import InDat +from process.core.io.in_dat.cli import new_indat from process.core.io.mfile import MFile @@ -24,10 +25,12 @@ def test_write_new_in_dat(temp_data, mfile_name): fimp13_exp = mfile.data["f_nd_impurity_electrons(13)"].get_scan(-1) # Write new IN.DAT then inspect value in new input file - write_new_in_dat.main( - args=["-f", str(mfile_path), "-i", str(in_dat_path), "-o", str(new_in_dat_path)] + runner = CliRunner() + runner.invoke( + new_indat, + args=["-f", str(mfile_path), "-i", str(in_dat_path), "-o", str(new_in_dat_path)], ) - in_dat = InDat(str(new_in_dat_path)) + in_dat = InDat(new_in_dat_path) te_obs = in_dat.data["temp_plasma_electron_vol_avg_kev"].get_value fimp13_obs = in_dat.data["f_nd_impurity_electrons"].get_value[12] diff --git a/tests/regression/test_process_input_files.py b/tests/regression/test_process_input_files.py index 416375984..18007facd 100644 --- a/tests/regression/test_process_input_files.py +++ b/tests/regression/test_process_input_files.py @@ -13,10 +13,11 @@ from pathlib import Path import pytest +from click.testing import CliRunner from regression_test_assets import RegressionTestAssetCollector from process.core.io.mfile import MFile -from process.main import main +from process.main import process_cli logger = logging.getLogger(__name__) @@ -60,12 +61,14 @@ def run(self, solver: str): logger.info( f"Running regression test {self.scenario_name} using input file {self.input_file}" ) - try: - main(["--input", str(self.input_file), "--solver", solver]) - except Exception as e: + runner = CliRunner() + result = runner.invoke( + process_cli, ["--input", str(self.input_file), "--solver", solver] + ) + if result.exit_code != 0: raise RuntimeError( - f"\033[1;101m An error occured while running PROCESS: {e}\033[0m" - ) from e + f"\033[1;101m An error occured while running PROCESS: {result.exception}\033[0m" + ) def compare( self, 
reference_mfile_location: Path, tolerance: float, opt_params_only: bool diff --git a/tests/unit/test_main.py b/tests/unit/test_main.py index ecc04b4ed..6e0c2e8a1 100644 --- a/tests/unit/test_main.py +++ b/tests/unit/test_main.py @@ -1,32 +1,12 @@ """Unit tests for the main.py module.""" -import argparse import shutil from pathlib import Path import pytest -from process import data_structure, main -from process.main import Process, SingleRun, VaryRun - - -def test_main(monkeypatch): - """Check that main() can run. - - Call the main function without any arguments. - :param monkeypatch: monkeypatch fixture - :type monkeypatch: object - """ - # Mock initialisation of the Process object - monkeypatch.setattr(Process, "__init__", mock_init) - main.main(args=[]) - # If args is None, then the argparse parser uses sys.argv (i.e. the - # command-line args) instead. When running from pytest, these are some - # pytest-specific arguments that we don't want going into the Process - # argparser. Hence explicitly setting args=[] ensures that the Process - # argparser gets an empty list (i.e. no arguments). This way it is - # possible to test command-line arguments from the test suite, as if the - # arguments are supplied on the command-line. +from process import data_structure +from process.main import SingleRun, VaryRun def mock_init(*args, **kwargs): @@ -38,88 +18,6 @@ def mock_init(*args, **kwargs): return -def mock_run(*args, **kwargs): - pass - - -@pytest.fixture -def process_obj(monkeypatch): - """Fixture to create a Process object. - - Returns a Process object with a mocked empty __init__ method; create the - object, but don't run the real __init__. 
- - :param monkeypatch: monkeypatch fixture - :type monkeypatch: object - :return: Process object - :rtype: object - """ - monkeypatch.setattr(main.Process, "__init__", mock_init) - # Mock the __init__ method of the Process class with mock_init - # Return the mocked Process object - return Process() - - -def test_process(process_obj): - """Test that Process objects can be created. - - Check the process_obj fixture can make an object of type Process. - :param process_obj: Process object - :type process_obj: object - """ - assert type(process_obj) is Process - - -def test_parse_args(process_obj, input_file): - """Test Process.parse_args() method. - - Check the input file path argument is being stored on the Process object. - :param process_obj: Process object - :type process_obj: object - :param input_file: fixture for input file - :type input_file: str - """ - # Run parse args method and check file path is stored - process_obj.parse_args(args=["-i", input_file]) - assert process_obj.args.input == input_file - - -def test_run_mode(process_obj, monkeypatch): - """Test the Process.run_mode() method. - - Check that VaryRun and SingleRun can be created based on CLI args. 
- :param process_obj: Process fixture - :type process_obj: Process - :param monkeypatch: monkeypatch fixture - :type monkeypatch: object - """ - # Mock the args attributes for --varyiterparams and --varyiterparamsconfig - monkeypatch.setattr(process_obj, "args", argparse.Namespace(), raising=False) - monkeypatch.setattr(process_obj.args, "varyiterparams", True, raising=False) - monkeypatch.setattr(process_obj.args, "version", False, raising=False) - monkeypatch.setattr(process_obj.args, "update_obsolete", False, raising=False) - - monkeypatch.setattr( - process_obj.args, "varyiterparamsconfig", "file.conf", raising=False - ) - monkeypatch.setattr(process_obj.args, "solver", "vmcon", raising=False) - - # Mock VaryRun() (don't want it to actually run), then assert run type is - # VaryRun - monkeypatch.setattr(VaryRun, "__init__", mock_init) - monkeypatch.setattr(VaryRun, "run", mock_run) - process_obj.run_mode() - assert isinstance(process_obj.run, VaryRun) - - # Similarly, assert SingleRun when an input file arg is provided - monkeypatch.setattr(process_obj.args, "varyiterparams", False) - monkeypatch.setattr(process_obj.args, "input", "aFile", raising=False) - monkeypatch.setattr(SingleRun, "__init__", mock_init) - monkeypatch.setattr(SingleRun, "run", mock_run) - process_obj.run_mode() - assert isinstance(process_obj.run, SingleRun) - - @pytest.fixture def single_run(monkeypatch, input_file, tmp_path): """Fixture for a SingleRun object. 
diff --git a/tests/unit/test_mfile.py b/tests/unit/test_mfile.py index 45a141873..96ee28f78 100644 --- a/tests/unit/test_mfile.py +++ b/tests/unit/test_mfile.py @@ -6,7 +6,7 @@ import pytest from process.core.io.mfile import MFile -from process.core.io.mfile_utils import get_mfile_initial_ixc_values +from process.core.io.mfile.utils import get_mfile_initial_ixc_values def test_get_mfile_initial_ixc_values(input_file, tmp_path): From 8cd75a1a1712c4736ee7581cf00889d093477998 Mon Sep 17 00:00:00 2001 From: james <81617086+je-cook@users.noreply.github.com> Date: Fri, 27 Feb 2026 14:32:00 +0000 Subject: [PATCH 16/17] fix unit tests --- process/core/io/mfile/mfile.py | 26 +++++++++++++++++--------- 1 file changed, 17 insertions(+), 9 deletions(-) diff --git a/process/core/io/mfile/mfile.py b/process/core/io/mfile/mfile.py index 7ad81ce18..5d9f7330e 100644 --- a/process/core/io/mfile/mfile.py +++ b/process/core/io/mfile/mfile.py @@ -25,6 +25,7 @@ import json import logging from collections import OrderedDict +from pathlib import Path from typing import Any import numpy as np @@ -380,7 +381,9 @@ def _get_data(item, dat_key): else output ) - def to_json(self, keys_to_write=None, scan: int | None = -1, verbose=False): + def to_json( + self, filename, keys_to_write=None, scan: int | None = -1, verbose=False + ): """Write MFILE object to JSON file Parameters @@ -392,13 +395,12 @@ def to_json(self, keys_to_write=None, scan: int | None = -1, verbose=False): verbose : verbosity of output """ - - filename = f"{self.filename}.json" - - with open(filename, "w") as fp: + with open(filename or f"{self.filename}.json", "w") as fp: json.dump(self.to_dict(keys_to_write, scan, verbose), fp, indent=4) - def to_toml(self, keys_to_write=None, scan: int | None = -1, verbose=False): + def to_toml( + self, filename, keys_to_write=None, scan: int | None = -1, verbose=False + ): """Write MFILE object to JSON file Parameters @@ -412,10 +414,16 @@ def to_toml(self, keys_to_write=None, scan: int | 
None = -1, verbose=False): """ import toml - with open(f"{self.filename}.toml", "w") as file: + with open(filename or f"{self.filename}.toml", "w") as file: toml.dump(self.to_dict(keys_to_write, scan, verbose), file) - def to_csv(self, keys_to_write=None, scan=-1, verbose=False): + def to_csv( + self, + filename: Path | None = None, + keys_to_write=None, + scan=-1, + verbose=False, + ): """Write to csv file. Parameters @@ -443,7 +451,7 @@ def to_csv(self, keys_to_write=None, scan=-1, verbose=False): for k, v in self.to_dict(keys_to_write, scan=scan, verbose=verbose).items(): output_data.append((v["description"], k, v["value"])) np.savetxt( - f"{self.filename}.csv", + filename or f"{self.filename}.csv", output_data or [], fmt="%.5e", delimiter=",", From ddbf2c14d0a5d2ae43a48653581a2aa0d9ba3c1a Mon Sep 17 00:00:00 2001 From: james <81617086+je-cook@users.noreply.github.com> Date: Fri, 27 Feb 2026 16:55:12 +0000 Subject: [PATCH 17/17] more fixes --- process/core/io/in_dat/__init__.py | 4 +-- process/core/io/mfile/mfile.py | 12 ++++++-- process/core/io/plot/cli.py | 20 ++++++------- process/core/io/plot/costs/costs_bar.py | 12 ++++---- process/core/io/plot/costs/costs_pie.py | 4 +-- process/core/io/plot/plot_proc.py | 19 +++++++------ process/core/io/tools.py | 2 +- process/main.py | 30 ++++++++++++-------- tests/conftest.py | 15 ++++++++++ tests/integration/test_costs_bar.py | 19 ------------- tests/integration/test_costs_pie.py | 19 ------------- tests/integration/test_main_int.py | 30 +++++++++++--------- tests/integration/test_mfile_to_csv.py | 20 ------------- tests/integration/test_plot_proc.py | 11 +++---- tests/integration/test_plot_sankey.py | 6 ++-- tests/integration/test_plot_scans.py | 26 ++++++++--------- tests/integration/test_plot_solutions.py | 2 +- tests/integration/test_plot_stress_tf.py | 6 ++-- tests/regression/test_process_input_files.py | 3 +- tests/unit/test_main.py | 13 +++++---- 20 files changed, 127 insertions(+), 146 deletions(-) delete mode 
100644 tests/integration/test_costs_bar.py delete mode 100644 tests/integration/test_costs_pie.py delete mode 100644 tests/integration/test_mfile_to_csv.py diff --git a/process/core/io/in_dat/__init__.py b/process/core/io/in_dat/__init__.py index a330dcf66..51687131e 100644 --- a/process/core/io/in_dat/__init__.py +++ b/process/core/io/in_dat/__init__.py @@ -1,3 +1,3 @@ -from process.core.io.in_dat.base import InDat +from process.core.io.in_dat.base import InDat, INVariable -__all__ = ["InDat"] +__all__ = ["INVariable", "InDat"] diff --git a/process/core/io/mfile/mfile.py b/process/core/io/mfile/mfile.py index 5d9f7330e..2ccbc55f7 100644 --- a/process/core/io/mfile/mfile.py +++ b/process/core/io/mfile/mfile.py @@ -382,7 +382,11 @@ def _get_data(item, dat_key): ) def to_json( - self, filename, keys_to_write=None, scan: int | None = -1, verbose=False + self, + filename: Path | None = None, + keys_to_write=None, + scan: int | None = -1, + verbose=False, ): """Write MFILE object to JSON file @@ -399,7 +403,11 @@ def to_json( json.dump(self.to_dict(keys_to_write, scan, verbose), fp, indent=4) def to_toml( - self, filename, keys_to_write=None, scan: int | None = -1, verbose=False + self, + filename: Path | None = None, + keys_to_write=None, + scan: int | None = -1, + verbose=False, ): """Write MFILE object to JSON file diff --git a/process/core/io/plot/cli.py b/process/core/io/plot/cli.py index ed404a5b1..24df75c81 100644 --- a/process/core/io/plot/cli.py +++ b/process/core/io/plot/cli.py @@ -61,7 +61,7 @@ def sankey(mfile, format_): "-o", "--outputdir", default=Path.cwd(), - type=click.Path(), + type=click.Path(path_type=Path), help="Output directory for plots, defaults to current working directory.", ) @click.option( @@ -176,7 +176,7 @@ def sankey(mfile, format_): @click.option( "-2DC", "--two-dimensional-contour", - "twoD_contour", + "twod_contour", is_flag=True, help=( "Option to plot 2D scans as a coloured contour plot instead of a line plot \n " @@ -218,22 +218,22 
@@ def plot_scans_cli( """Plot optimisation information""" return plot_scan( mfiles, - output_names, - output_names2, + list(filter(None, output_names)), + list(filter(None, output_names2)), outputdir, term_output, save_format, axis_font_size, axis_tick_size, x_axis_percent, - list(map(float, x_axis_max)), - list(map(float, x_axis_range)), + list(map(float, filter(None, x_axis_max))), + list(map(float, filter(None, x_axis_range))), y_axis_percent, y_axis_percent2, - list(map(float, y_axis_max)), - list(map(float, y_axis2_max)), - list(map(float, y_axis_range)), - list(map(float, y_axis_range2)), + list(map(float, filter(None, y_axis_max))), + list(map(float, filter(None, y_axis2_max))), + list(map(float, filter(None, y_axis_range))), + list(map(float, filter(None, y_axis_range2))), label_name, twod_contour, stack_plots, diff --git a/process/core/io/plot/costs/costs_bar.py b/process/core/io/plot/costs/costs_bar.py index ed7d44506..62118f2a6 100644 --- a/process/core/io/plot/costs/costs_bar.py +++ b/process/core/io/plot/costs/costs_bar.py @@ -32,7 +32,9 @@ def _format_fig(ax, fig, label, save, filename, index, inflate, ylabel_suffix, n plt.show() -def cost_comp_1990(mfile_list: list[mf.MFile], inflate: float, save: bool) -> None: +def cost_comp_1990( + mfile_list: list[mf.MFile], inflate: float = 1, save: bool = False +) -> None: """ Plot bar chart for the orginal 1990 cost model. Two plots produced: (1) Breakdown of the direct costs and (2) Direct, indirect, etc. 
@@ -103,7 +105,7 @@ def cost_comp_1990(mfile_list: list[mf.MFile], inflate: float, save: bool) -> No # Simplify grouping sizes = [ cost[2] + cost[5], - *itemgetter(0, 9, 3, 1, 7, 8, 6)(cost), + *itemgetter(0, 9, 3, 1, 7, 8, 10, 6)(cost), sum(itemgetter(4, 11, 12, 13)(cost)), ] @@ -114,15 +116,15 @@ def cost_comp_1990(mfile_list: list[mf.MFile], inflate: float, save: bool) -> No ax.bar(index + identity * bar_width, sizes, bar_width, label=item.filename) ax2.bar(index2 + identity * bar_width, sizes2, bar_width, label=item.filename) - for _ax, _fig, _lab, _save_name, _ind in zip( + for ax_, fig_, lab, save_name, ind in zip( [ax, ax2], [fig, fig2], labels, fnames, [index, index2], strict=True ): _format_fig( - _ax, _fig, _lab, save, _save_name, _ind, inflate, ylabel_suffix, n_mfiles + ax_, fig_, lab, save, save_name, ind, inflate, ylabel_suffix, n_mfiles ) -def cost_comp_2014(mfile_list: list[mf.MFile], inflate: float, save: bool): +def cost_comp_2014(mfile_list: list[mf.MFile], inflate: float = 1, save: bool = False): """Plot bar chart for the new 2014 cost model.""" variables = ( "s09", # Buildings diff --git a/process/core/io/plot/costs/costs_pie.py b/process/core/io/plot/costs/costs_pie.py index 43dcaef63..c5889bbb3 100644 --- a/process/core/io/plot/costs/costs_pie.py +++ b/process/core/io/plot/costs/costs_pie.py @@ -5,7 +5,7 @@ import matplotlib.pyplot as plt -def cost_model_1990(m_file, save): +def cost_model_1990(m_file, save: bool = False): """Plot pie chart for the orginal 1990 cost model. Two plots produced: (1) Breakdown of the direct costs and (2) Direct, indirect, etc. 
""" @@ -115,7 +115,7 @@ def cost_model_1990(m_file, save): plt.show() -def cost_model_2014(m_file, save): +def cost_model_2014(m_file, save: bool = False): """Plot pie chart for the new 2014 cost model.""" # Read Cost Values s09 = m_file.data["s09"].get_scan(-1) # Buildings diff --git a/process/core/io/plot/plot_proc.py b/process/core/io/plot/plot_proc.py index 1f260124c..83dc4c6bf 100644 --- a/process/core/io/plot/plot_proc.py +++ b/process/core/io/plot/plot_proc.py @@ -6,6 +6,7 @@ import textwrap from dataclasses import dataclass from importlib import resources +from pathlib import Path from typing import Any, Literal import matplotlib as mpl @@ -16,7 +17,7 @@ import numpy as np from matplotlib.axes import Axes from matplotlib.patches import Circle, Rectangle -from matplotlib.path import Path +from matplotlib.path import Path as mplPath from scipy.interpolate import interp1d import process.core.constants as constants @@ -3649,7 +3650,7 @@ def TF_outboard(axis: plt.Axes, item, n_tf_coils, r3, r4, w, facecolor): x4 = r3 * np.cos(ang) - dx y4 = r3 * np.sin(ang) + dy verts = [(x1, y1), (x2, y2), (x3, y3), (x4, y4), (x1, y1)] - path = Path(verts, closed=True) + path = mplPath(verts, closed=True) patch = patches.PathPatch(path, facecolor=facecolor, lw=0) axis.add_patch(patch) @@ -3704,7 +3705,7 @@ def arc_fill(axis: plt.Axes, r1, r2, color="pink"): verts.extend(list(zip(xs2, ys2, strict=False))) endpoint = [(r2, 0)] verts.extend(endpoint) - path = Path(verts, closed=True) + path = mplPath(verts, closed=True) patch = patches.PathPatch(path, facecolor=color, lw=0) axis.add_patch(patch) @@ -4600,7 +4601,7 @@ def plot_vacuum_vessel_and_divertor( upper = radial_build.upper lower = radial_build.lower - i_single_null = mfile.get("i_single_null", scan=scan) + i_single_null = int(mfile.get("i_single_null", scan=scan)) triang_95 = mfile.get("triang95", scan=scan) dz_divertor = mfile.get("dz_divertor", scan=scan) dz_xpoint_divertor = mfile.get("dz_xpoint_divertor", scan=scan) @@ 
-5409,7 +5410,7 @@ def plot_tf_coils(axis: plt.Axes, mfile: MFile, scan: int, colour_scheme: Litera ) for vert in verts: - path = Path(vert, closed=True) + path = mplPath(vert, closed=True) patch = patches.PathPatch(path, facecolor=colour, lw=0) axis.add_patch(patch) @@ -10242,7 +10243,9 @@ def plot_tf_stress(axis: plt.Axes, mfile: MFile): bound_vertical_strain = [] bound_radial_displacement = [] - with open(mfile.filename.replace("MFILE.DAT", "SIG_TF.json")) as f: + with open( + mfile.filename.with_name(mfile.filename.name.replace("MFILE.DAT", "SIG_TF.json")) + ) as f: sig_data = json.load(f) # Getting the data to be plotted @@ -13410,11 +13413,11 @@ def setup_plot( ) if output_format == "pdf": - with bpdf.PdfPages(mfile + "SUMMARY.pdf") as pdf: + with bpdf.PdfPages(mfile.with_name(mfile.name + "SUMMARY.pdf")) as pdf: for p in pages: pdf.savefig(p) elif output_format == "png": - folder = pathlib.Path(mfile.removesuffix(".DAT") + "_SUMMARY") + folder = pathlib.Path(mfile.with_name(mfile.stem + "_SUMMARY")) folder.mkdir(parents=True, exist_ok=True) for no, page in enumerate(pages): page.savefig(pathlib.Path(folder, f"page{no}.png"), format="png") diff --git a/process/core/io/tools.py b/process/core/io/tools.py index c1b2745a1..d2de97e27 100644 --- a/process/core/io/tools.py +++ b/process/core/io/tools.py @@ -36,7 +36,7 @@ def save(help_): def split_callback(ctx: click.Context, param, value: str | None) -> list[str] | None: # noqa: ARG001 - return value.split(":") if isinstance(value, str) else value + return value.replace(" ", ":").split(":") if isinstance(value, str) else value scan_opt = click.option("--scan", type=int, help="Scan to select") diff --git a/process/main.py b/process/main.py index 047b46e12..b9572d386 100644 --- a/process/main.py +++ b/process/main.py @@ -202,25 +202,25 @@ def process_cli( GitHub : https://github.com/ukaea/PROCESS """ if ctx.invoked_subcommand is None: - if indat is None: - raise click.BadParameter("IN.DAT not specified") if 
varyiterparams: runtype = VaryRun(config_file, solver) + mfile_path = runtype.config_file.parent / "MFILE.DAT" + elif indat is None: + raise click.BadParameter("IN.DAT not specified") else: runtype = SingleRun(indat, solver, update_obsolete=update_obsolete) + mfile_path = runtype.mfile_path runtype.run() if mfilejson: # Produce a json file containing mfile output, useful for VVUQ work. - mfile_path = Path(mfile_path) mfile_data = MFile(filename=mfile_path) mfile_data.open_mfile() mfile_data.to_json() if full_output: # Run all summary plotting scripts for the output - mfile_path = Path(mfile_path) if mfile_path.exists(): mfile_str = mfile_path.resolve().as_posix() print(f"Plotting mfile {mfile_str}") @@ -347,7 +347,12 @@ class SingleRun: """Perform a single run of PROCESS.""" def __init__( - self, input_file: str, solver: str = "vmcon", *, update_obsolete: bool = False + self, + input_file: Path | str, + solver: str = "vmcon", + *, + filepath_out: Path | str | None = None, + update_obsolete: bool = False, ): """Read input file and initialise variables. 
@@ -358,7 +363,8 @@ def __init__( solver: which solver to use, as specified in solver.py """ - self.input_file = input_file + self.input_file = Path(input_file) + self.filepath = Path(filepath_out or self.input_file.parent) self.validate_input(update_obsolete) self.init_module_vars() @@ -396,10 +402,10 @@ def set_input(self): """Validate and set the input file path.""" # Check input file ends in "IN.DAT", then save prefix # (the part before the IN.DAT) - if self.input_file[-6:] != "IN.DAT": + if not self.input_file.name.endswith("IN.DAT"): raise ValueError("Input filename must end in IN.DAT.") - self.filename_prefix = self.input_file[:-6] + self.filename_prefix = self.filepath / self.input_file.name[:-6] # Check input file exists (path specified as CLI argument) input_path = Path(self.input_file) @@ -415,19 +421,19 @@ def set_input(self): ) # Set the input file in the Fortran - data_structure.global_variables.fileprefix = str(self.input_path.resolve()) + data_structure.global_variables.fileprefix = self.input_path.resolve() def set_output(self): """Set the output file name. Set Path object on the Process object, and set the prefix in the Fortran. 
""" - self.output_path = Path(self.filename_prefix + "OUT.DAT") - data_structure.global_variables.output_prefix = self.filename_prefix + self.output_path = Path(self.filepath, self.filename_prefix.name + "OUT.DAT") + data_structure.global_variables.output_prefix = self.filename_prefix.as_posix() def set_mfile(self): """Set the mfile filename.""" - self.mfile_path = Path(self.filename_prefix + "MFILE.DAT") + self.mfile_path = Path(self.filepath, self.filename_prefix.name + "MFILE.DAT") def initialise(self): """Run the init module to call all initialisation routines.""" diff --git a/tests/conftest.py b/tests/conftest.py index af219229b..f6ce361a3 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -4,11 +4,13 @@ """ import os +import traceback import warnings import matplotlib as mpl import pytest from _pytest.fixtures import SubRequest +from click.testing import CliRunner from system_check import system_compatible from process import main @@ -230,3 +232,16 @@ def _plot_show_and_close_class(request): plt.close() else: yield + + +@pytest.fixture() +def cli_runner(): + def _cli_runner(command, args: list[str] | None = None, exit_code=0): + result = CliRunner().invoke(command, args=args or []) + + assert result.exit_code == 0, ( + f"{result.exception} {''.join(traceback.format_exception(result.exc_info[1]))}" + ) + return result + + return _cli_runner diff --git a/tests/integration/test_costs_bar.py b/tests/integration/test_costs_bar.py deleted file mode 100644 index b01ff67b1..000000000 --- a/tests/integration/test_costs_bar.py +++ /dev/null @@ -1,19 +0,0 @@ -"""Integration tests for costs_bar.py.""" - -from process.core.io import costs_bar - - -def test_input_file(temp_data_cwd, mfile_name): - """Run costs_bar on an input MFILE and check for a pdf output. 
- - :param temp_data: temporary data dir, which is also the cwd - :type temp_data: Path - :param mfile_name: name of the mfile in the data dir - :type mfile_name: str - """ - mfile = temp_data_cwd / mfile_name - mfile_str = str(mfile) - costs_bar.main(args=["-f", mfile_str, "-s"]) - - # Assert a pdf has been created - assert len(list(temp_data_cwd.glob("*.pdf"))) diff --git a/tests/integration/test_costs_pie.py b/tests/integration/test_costs_pie.py deleted file mode 100644 index 5f3ab05f7..000000000 --- a/tests/integration/test_costs_pie.py +++ /dev/null @@ -1,19 +0,0 @@ -"""Integration tests for costs_pie.py.""" - -from process.core.io import costs_pie - - -def test_input_file(temp_data_cwd, mfile_name): - """Run costs_pie on an input MFILE and check for a pdf output. - - :param temp_data: temporary data dir, which is also the cwd - :type temp_data: Path - :param mfile_name: name of the mfile in the data dir - :type mfile_name: str - """ - mfile = temp_data_cwd / mfile_name - mfile_str = str(mfile) - costs_pie.main(args=["-f", mfile_str, "-s"]) - - # Assert a pdf has been created - assert len(list(temp_data_cwd.glob("*.pdf"))) diff --git a/tests/integration/test_main_int.py b/tests/integration/test_main_int.py index 191413530..a82fbd4c2 100644 --- a/tests/integration/test_main_int.py +++ b/tests/integration/test_main_int.py @@ -3,10 +3,10 @@ import json from shutil import copy -from process import main +from process.main import process_cli -def test_single_run(temp_data): +def test_single_run(temp_data, cli_runner): """Test a SingleRun Process run with CLI args. This will just check that an exception isn't thrown. 
@@ -18,10 +18,10 @@ def test_single_run(temp_data): input_file = str(input_path.resolve()) # Run a SingleRun with an explicitly defined IN.DAT - main.main(args=["-i", input_file]) + cli_runner(process_cli, args=["-i", input_file]) -def test_single_run_cwd(temp_data_cwd): +def test_single_run_cwd(temp_data_cwd, cli_runner): """SingleRun without defining an input file. Try running without a defined input file (no args). This will look for @@ -32,10 +32,11 @@ def test_single_run_cwd(temp_data_cwd): # Copy input file to make a file named "IN.DAT" copy(temp_data_cwd / "large_tokamak_IN.DAT", temp_data_cwd / "IN.DAT") # Run: args must be emptylist; if None, argparse tries to use CLI args - main.main(args=[]) + result = cli_runner(process_cli, args=["-i", "IN.DAT"]) + assert result.exit_code == 0 -def test_vary_run(temp_data): +def test_vary_run(temp_data, cli_runner): """Test a VaryRun with CLI args. :param temp_data: temporary dir containing data files @@ -45,22 +46,23 @@ def test_vary_run(temp_data): # Chosen because it's the only VaryRun in the test suite, and is fast conf_path = temp_data / "run_process.conf" conf_file = str(conf_path.resolve()) - # Run a VaryRun with an explicit conf file name - main.main(args=["--varyiterparams", "--varyiterparamsconfig", conf_file]) + cli_runner( + process_cli, args=["--varyiterparams", "--varyiterparamsconfig", conf_file] + ) -def test_vary_run_cwd(temp_data_cwd): +def test_vary_run_cwd(temp_data_cwd, cli_runner): """Test VaryRun without explicitly defining the conf file name. This will look for a run_process.conf in the cwd. :param temp_data_cwd: temporary data dir, which is also the cwd :type temp_data_cwd: Path """ - main.main(args=["--varyiterparams"]) + cli_runner(process_cli, args=["--varyiterparams"]) -def test_plot_proc(temp_data, mfile_name): +def test_plot_proc(temp_data, mfile_name, cli_runner): """Run plot proc via CLI. 
Currently, Process needs to run on an input file, then it can run plot_proc @@ -75,13 +77,13 @@ def test_plot_proc(temp_data, mfile_name): input_file_str = str(input_file.resolve()) # Run on input, then plot custom mfile name - main.main(args=["-i", input_file_str, "--full-output"]) + cli_runner(process_cli, args=["-i", input_file_str, "--full-output"]) # Assert a pdf has been created assert len(list(temp_data.glob("*.pdf"))) -def test_single_run_with_mfilejson(temp_data): +def test_single_run_with_mfilejson(temp_data, cli_runner): """Test a SingleRun Process run with CLI args including --mfilejson. This will check that the process runs without throwing an exception @@ -98,7 +100,7 @@ def test_single_run_with_mfilejson(temp_data): mfile = str(mfile_path.resolve()) # Run a SingleRun with the --mfilejson flag. - main.main(args=["-i", input_file, "--mfilejson", "-m", mfile]) + cli_runner(process_cli, args=["-i", input_file, "--mfilejson", "-m", mfile]) # Assert that 'large_tokamak_eval.MFILE.DAT.json' has been produced in the temp_data directory. expected_json = temp_data / "large_tokamak_eval.MFILE.DAT.json" diff --git a/tests/integration/test_mfile_to_csv.py b/tests/integration/test_mfile_to_csv.py deleted file mode 100644 index 9c14ddef4..000000000 --- a/tests/integration/test_mfile_to_csv.py +++ /dev/null @@ -1,20 +0,0 @@ -"""Integration tests for mfile_to_csv.py.""" - -from process.core.io import mfile_to_csv - - -def test_mfile_to_csv(temp_data, mfile_name): - """Run mfile_to_csv via CLI on an MFILE and check for a CSV output. - - varlist.txt defines which variables to extract to CSV. 
- :param temp_data: temporary data dir - :type temp_data: Path - :param mfile_name: name of the mfile in the data dir - :type mfile_name: str - """ - mfile = temp_data / mfile_name - varlist = temp_data / "mfile_to_csv_varlist.json" - mfile_to_csv.main(args=["-f", str(mfile), "-v", str(varlist)]) - - # Assert a .csv has been produced - assert len(list(temp_data.glob("*.csv"))) diff --git a/tests/integration/test_plot_proc.py b/tests/integration/test_plot_proc.py index ce2af3b3a..a406ca0fe 100644 --- a/tests/integration/test_plot_proc.py +++ b/tests/integration/test_plot_proc.py @@ -2,10 +2,10 @@ from shutil import copy -from process.core.io import plot_proc +from process.core.io.plot.cli import plot_proc -def test_input_file(temp_data, mfile_name): +def test_input_file(temp_data, mfile_name, cli_runner): """Run plot_proc on an input MFILE and check for an output. :param temp_data: temporary data dir @@ -15,13 +15,14 @@ def test_input_file(temp_data, mfile_name): """ mfile = temp_data / mfile_name mfile_str = str(mfile) - plot_proc.main(args=["-f", mfile_str]) + mfile_str = str(mfile) + cli_runner(plot_proc, args=["-f", mfile_str]) # Assert a pdf has been created assert len(list(temp_data.glob("*.pdf"))) -def test_input_file_cwd(temp_data_cwd, mfile_name): +def test_input_file_cwd(temp_data_cwd, mfile_name, cli_runner): """Run plot_proc on an MFILE in the cwd. 
:param temp_data_cwd: temporary data dir, which is also the cwd @@ -33,7 +34,7 @@ def test_input_file_cwd(temp_data_cwd, mfile_name): copy(temp_data_cwd / mfile_name, temp_data_cwd / "MFILE.DAT") # Run plot_proc with no args, which will look for the default-named mfile - plot_proc.main(args=[]) + cli_runner(plot_proc, args=["-f", (temp_data_cwd / "MFILE.DAT").as_posix()]) # Assert a pdf has been created assert len(list(temp_data_cwd.glob("*.pdf"))) diff --git a/tests/integration/test_plot_sankey.py b/tests/integration/test_plot_sankey.py index 251b91d8d..032162185 100644 --- a/tests/integration/test_plot_sankey.py +++ b/tests/integration/test_plot_sankey.py @@ -1,9 +1,9 @@ """Integration tests for plot_sankey.py.""" -from process.core.io import plot_sankey +from process.core.io.plot.cli import sankey -def test_plot_sankey(temp_data_cwd, mfile_name): +def test_plot_sankey(temp_data_cwd, mfile_name, cli_runner): """Assert plot_sankey can make a pdf in the cwd from an mfile. :param temp_data_cwd: temp path to data dir, which is also the cwd @@ -13,7 +13,7 @@ def test_plot_sankey(temp_data_cwd, mfile_name): """ mfile_path = temp_data_cwd / mfile_name mfile_path_str = str(mfile_path) - plot_sankey.main(args=["-m", mfile_path_str]) + cli_runner(sankey, args=["-f", mfile_path_str]) # Assert a pdf has been created assert len(list(temp_data_cwd.glob("*.pdf"))) > 0 diff --git a/tests/integration/test_plot_scans.py b/tests/integration/test_plot_scans.py index 8d9a9c099..757152799 100644 --- a/tests/integration/test_plot_scans.py +++ b/tests/integration/test_plot_scans.py @@ -1,9 +1,9 @@ """Integration tests for plot_scans.py.""" -from process.core.io import plot_scans +from process.core.io.plot.cli import plot_scans_cli -def test_plot_scans(temp_data, scan_mfile_name): +def test_plot_scans(temp_data, scan_mfile_name, cli_runner): """Run plot_scans script on a scan MFILE.DAT and check for a PDF output. 
:param temp_data: temporary data dir @@ -13,21 +13,21 @@ def test_plot_scans(temp_data, scan_mfile_name): """ mfile = temp_data / scan_mfile_name - plot_scans.main( + cli_runner( + plot_scans_cli, args=[ - "-f", str(mfile), "-yv", "p_plant_electric_net_mw", "--outputdir", str(temp_data), - ] + ], ) assert len(list(temp_data.glob("*.pdf"))) -def test_plot_scans_stack(temp_data, scan_mfile_name): +def test_plot_scans_stack(temp_data, scan_mfile_name, cli_runner): """Run plot_scans script with stacked plots switch on a scan MFILE.DAT and check for a PDF output. :param temp_data: temporary data dir @@ -37,22 +37,22 @@ def test_plot_scans_stack(temp_data, scan_mfile_name): """ mfile = temp_data / scan_mfile_name - plot_scans.main( + cli_runner( + plot_scans_cli, args=[ - "-f", str(mfile), "-yv", "p_plant_electric_net_mw b_plasma_toroidal_on_axis rmajor", "-stc", "--outputdir", str(temp_data), - ] + ], ) assert len(list(temp_data.glob("*.pdf"))) -def test_plot_scans_2d_contour(temp_data, scan_2d_mfile_name): +def test_plot_scans_2d_contour(temp_data, scan_2d_mfile_name, cli_runner): """Run plot_scans script with 2D contour plot switch on a scan MFILE.DAT and check for a PDF output. 
:param temp_data: temporary data dir @@ -62,16 +62,16 @@ def test_plot_scans_2d_contour(temp_data, scan_2d_mfile_name): """ mfile = temp_data / scan_2d_mfile_name - plot_scans.main( + cli_runner( + plot_scans_cli, args=[ - "-f", str(mfile), "-yv", "beta_total_vol_avg", "-2DC", "--outputdir", str(temp_data), - ] + ], ) assert len(list(temp_data.glob("*.pdf"))) diff --git a/tests/integration/test_plot_solutions.py b/tests/integration/test_plot_solutions.py index 1a5ab97c3..eaa69ca20 100644 --- a/tests/integration/test_plot_solutions.py +++ b/tests/integration/test_plot_solutions.py @@ -4,7 +4,7 @@ import pytest -from process.core.io.plot_solutions import RunMetadata, plot_mfile_solutions +from process.core.io.plot.plot_solutions import RunMetadata, plot_mfile_solutions @pytest.fixture diff --git a/tests/integration/test_plot_stress_tf.py b/tests/integration/test_plot_stress_tf.py index eeb174488..7a6d0848f 100644 --- a/tests/integration/test_plot_stress_tf.py +++ b/tests/integration/test_plot_stress_tf.py @@ -1,9 +1,9 @@ """Integration tests for plot_stress_tf.py.""" -from process.core.io import plot_stress_tf +from process.core.io.plot.cli import plot_tf_stress -def test_input_file(temp_data_cwd): +def test_input_file(temp_data_cwd, cli_runner): """Run plot_stress_tf on an input MFILE and check for a pdf output. 
:param temp_data: temporary data dir, which is also the cwd @@ -11,7 +11,7 @@ def test_input_file(temp_data_cwd): """ mfile = temp_data_cwd / "SIG_TF.json" mfile_str = str(mfile) - plot_stress_tf.main(args=["-f", mfile_str]) + cli_runner(plot_tf_stress, args=["-f", mfile_str]) # Assert a pdf has been created assert len(list(temp_data_cwd.glob("*.pdf"))) diff --git a/tests/regression/test_process_input_files.py b/tests/regression/test_process_input_files.py index 18007facd..0faf48b47 100644 --- a/tests/regression/test_process_input_files.py +++ b/tests/regression/test_process_input_files.py @@ -9,6 +9,7 @@ import logging import re import shutil +import traceback from dataclasses import dataclass from pathlib import Path @@ -67,7 +68,7 @@ def run(self, solver: str): ) if result.exit_code != 0: raise RuntimeError( - f"\033[1;101m An error occured while running PROCESS: {result.exception}\033[0m" + f"An error occured while running PROCESS: {result.exception}{''.join(traceback.format_exception(result.exc_info[1]))}" ) def compare( diff --git a/tests/unit/test_main.py b/tests/unit/test_main.py index 6e0c2e8a1..9407b59a4 100644 --- a/tests/unit/test_main.py +++ b/tests/unit/test_main.py @@ -32,7 +32,8 @@ def single_run(monkeypatch, input_file, tmp_path): temp_input_file = shutil.copy(input_file, tmp_path / Path(input_file).name) - single_run.input_file = str(temp_input_file) + single_run.input_file = temp_input_file + single_run.filepath = tmp_path single_run.models = None single_run.set_filenames() single_run.initialise() @@ -58,10 +59,10 @@ def test_set_input(single_run, monkeypatch, input_file): :param input_file: fixture for input file :type input_file: str """ - expected = input_file + expected = Path(input_file) # Mock the input file path to isolate this test from the other Process # methods (don't have to run Process.parse_args() first to set up this way) - monkeypatch.setattr(single_run, "input_file", input_file, raising=False) + monkeypatch.setattr(single_run, 
"input_file", Path(input_file), raising=False) # Mocking undo trys to set the value as none @@ -82,7 +83,7 @@ def test_set_output(single_run, monkeypatch): # Expected output prefix expected = "output_prefix" # Mock self.filename_prefix on single_run with the value of expected - monkeypatch.setattr(single_run, "filename_prefix", expected, raising=False) + monkeypatch.setattr(single_run, "filename_prefix", Path(expected), raising=False) # Mocking undo trys to set the value as none # monkeypatch.setattr(data_structure.global_variables, "output_prefix", None) @@ -114,9 +115,9 @@ def test_set_mfile(single_run, monkeypatch): prefix = "test" expected = Path(prefix + "MFILE.DAT") # Mock filename_prefix and run - monkeypatch.setattr(single_run, "filename_prefix", prefix, raising=False) + monkeypatch.setattr(single_run, "filename_prefix", Path(prefix), raising=False) single_run.set_mfile() - assert single_run.mfile_path == expected + assert single_run.mfile_path.name == expected.name def test_finish(single_run, monkeypatch):