@@ -579,13 +579,33 @@ def reduce(self, spin_set_reflect, **reduction_options):
579 579        # reducers["dd"].reflected_beam.m_spec_sd
580 580        # reducers["dd"].direct_beam.m_spec
581 581        # reducers["dd"].direct_beam.m_spec_sd
582-
582+ # THIS IS WHERE THE MAGIC HAPPENS
583 583
584584 # once the wavelength spectra have been corrected/overwritten then the
585- # reflectivities need to be recalculated
586-
587- # now write out the corrected reflectivity files
588-
585+ # reflectivities need to be recalculated.
586+ # this doesn't correct the offspecular
587+        for reducer in reducers:
588+            reducer.y, reducer.y_err = EP.EPdiv(
589+                reducer.reflected_beam.m_spec,
590+                reducer.reflected_beam.m_spec_sd,
591+                reducer.direct_beam.m_spec,
592+                reducer.direct_beam.m_spec_sd,
593+            )
594+            # now write out the corrected reflectivity files
595+            fnames = []
596+            datasets = []
597+            datafilename = reducer.reflected_beam.datafilename
598+            datafilename = os.path.basename(datafilename.split(".nx.hdf")[0])
599+
600+            for i in range(np.size(reducer.y, 0)):
601+                data_tup = reducer.data(scanpoint=i)
602+                datasets.append(ReflectDataset(data_tup))
603+
604+            for i, dataset in enumerate(datasets):
605+                fname = f"{datafilename}_{i}.dat"
606+                fnames.append(fname)
607+                with open(fname, "wb") as f:
608+                    dataset.save(f)
589 609
590610
591 611 class SpatzReduce(ReflectReduce):
0 commit comments