Skip to content

Spectral fitting

spectral_fitting

FitSpectra

Class used for spectral fitting of multiple spectra with same model. This has been written for fitting spectra of galaxy clusters with/without background.

Source code in src/xifu_cluster_sim/spectral_fitting.py
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
class FitSpectra():
    """
    Class used for spectral fitting of multiple spectra with the same model.
    This has been written for fitting spectra of galaxy clusters with/without background.
    """
    def __init__(self,
                model_file,
                binning,
                background=False,
                E_min = 0.2,
                E_max = 12.,
                NXB_requirement = 8e-3,
                astro_bkg_model = None, 
                cosmic_bkg_model = None,
                xifu_config = None):
        """
        Initialize class

        Parameters:
            model_file (str): Path to .xcm file containing the model to use (with abundances, cosmo and cross-section)
            binning (binning): Binning instance (to iterate over spectra in each bin)
            background (bool): Whether to fit background components (NXB, and optionally AXB/CXB)
            E_min (float): Minimum energy of spectra to take into account
            E_max (float): Maximum energy of spectra to take into account
            NXB_requirement (float): Expected counts of NXB in counts/cm2/s/keV, as a requirement within X-IFU
            astro_bkg_model (str): Path to AXB model
            cosmic_bkg_model (str): Path to CXB model
            xifu_config (XIFU_Config): X-IFU Configuration instance; a fresh XIFU_Config() is built if None
        """
        # Build the default configuration here rather than in the signature: a
        # default of XIFU_Config() would be a single instance shared by every
        # call (mutable-default-argument pitfall).
        if xifu_config is None:
            xifu_config = XIFU_Config()

        self.model_file = model_file
        self.binning = binning
        self.E_min = E_min
        self.E_max = E_max
        self.background = background
        self.NXB_requirement = NXB_requirement
        self.astro_bkg_model = astro_bkg_model
        self.cosmic_bkg_model = cosmic_bkg_model
        # Pixel surface in cm^2 (config stores the pixel size in meters)
        self.xifu_pixel_surface_cm2 = (xifu_config.pixel_size_m*100)**2
        # Pixel angular size in arcmin
        self.xifu_pixel_size_arcmin = (xifu_config.pixsize_degree).to(units.arcmin).value

    def fit_spectrum(self,
                    pha_file,
                    arf_file,
                    pix_in_region,
                    log_fitting = True,
                    chatter = 10,
                    do_shakefit = True):
        """
        Fit single spectrum

        Parameters:
            pha_file (str): Path to spectrum as a .pha file
            arf_file (str): Path to arf used for spectrum
            pix_in_region (int): Number of pixels for which the spectrum has been extracted
            log_fitting (bool): Whether to log the fitting in a .log file
            chatter (int): Chatter level of xspec (0 = shut up)
            do_shakefit (bool): Whether to use the shakefit after fitting

        Returns:
            pars_dict (dict): Dictionary of best-fit values for free parameters of each model
        """

        # Set chatter and clear all xspec global state (chains, data, models)
        # left over from any previous fit in this process
        xs.Xset.chatter = chatter
        xs.AllChains.clear()
        xs.AllData.clear()
        xs.AllModels.clear()

        # Load model
        #xs.Xset.restore(self.model_file) #Doesnt work for this
        #m = xs.AllModels(1)
        m = load_model(self.model_file)

        # Setup logging (log file lives next to the spectrum); only derive the
        # log file name when logging is actually requested
        if log_fitting:
            logfile = pha_file.replace('.pha', '.log')
            xs.Xset.logChatter = 10
            xs.Xset.openLog(logfile)

        # Load spectrum and attach its ancillary response
        s = xs.Spectrum(pha_file)
        s.response.arf = arf_file

        # Option to run the background
        if self.background:

            ## NXB: modelled as a flat power law (index frozen to 0) on source 2

            # Value in cts/s/keV for a given pixel times the number of pixels
            NXB_level = self.NXB_requirement * self.xifu_pixel_surface_cm2 * pix_in_region
            s.multiresponse[1] = s.multiresponse[0].rmf
            m2 = xs.Model("pow", modName="nxb", sourceNum=2, setPars={1:0, 2:NXB_level})
            m2(1).frozen = True  # constant (photon index 0 -> flat)

            ## AXB on source 3
            if self.astro_bkg_model is not None:
                s.multiresponse[2] = s.multiresponse[0].rmf
                s.multiresponse[2].arf = s.multiresponse[0].arf

                axb_model, axb_pars, axb_frozen = load_model(self.astro_bkg_model, pars_only=True)
                m3 = xs.Model(axb_model, modName="axb", sourceNum=3, setPars=axb_pars)

                # Freeze parameters that were frozen in the model file
                for j in axb_frozen:
                    m3(j).frozen = True

                # Multiply the norm by the angular size of the region
                # NOTE(review): parameters 4 and 9 are assumed to be the norms of
                # the two AXB components -- confirm against the AXB .xcm model
                region_size_arcmin = self.xifu_pixel_size_arcmin**2 * pix_in_region
                m3.setPars({4: m3(4).values[0]*region_size_arcmin})
                m3.setPars({9: m3(9).values[0]*region_size_arcmin})

            ## CXB on source 4
            if self.cosmic_bkg_model is not None:
                s.multiresponse[3] = s.multiresponse[0].rmf
                s.multiresponse[3].arf = s.multiresponse[0].arf

                cxb_model, cxb_pars, cxb_frozen = load_model(self.cosmic_bkg_model, pars_only=True)
                m4 = xs.Model(cxb_model, modName="cxb", sourceNum=4, setPars=cxb_pars)

                # Freeze parameters that were frozen in the model file
                for j in cxb_frozen:
                    m4(j).frozen = True

                # Multiply the norm by the angular size of the region
                # NOTE(review): parameter 3 is assumed to be the CXB norm -- confirm
                region_size_arcmin = self.xifu_pixel_size_arcmin**2 * pix_in_region
                m4.setPars({3: m4(3).values[0]*region_size_arcmin})

        # Restrict the fit to [E_min, E_max] and drop bad channels
        xs.AllData.ignore('0.0-%.1f' % self.E_min)
        xs.AllData.ignore('%.1f-**' % self.E_max)

        xs.AllData.ignore('bad')
        xs.Fit.statMethod = "cstat"

        xs.Fit.nIterations = 1000
        xs.Fit.query = 'yes'

        # Run the fit several times, presumably to nudge it out of shallow
        # minima when a single pass stops early -- TODO confirm necessity
        xs.Fit.perform()
        xs.Fit.perform()
        xs.Fit.perform()

        # Call shakefit to avoid local minima in case error computation has been requested
        if do_shakefit:
            shakefit(1, m)

        # Fit statistic (cstat here, stored under the historical key 'chi2')
        chi2 = xs.Fit.statistic

        # Get fit results as a dictionary
        m.show()

        pars_dict = {}
        for k in range(1, 1+m.nParameters):
            if k == 1:
                # Parameter 1: value only, no error bounds stored
                pars_dict[m(k).name] = m(k).values[0]
            elif k == 6:
                # NOTE(review): parameter 6 is assumed to be the model norm,
                # converted from per-region to per-pixel -- confirm against model
                pars_dict[m(k).name] = m(k).values[0] / pix_in_region
                if do_shakefit:
                    pars_dict[m(k).name + '_lo'] = m(k).error[0] / pix_in_region
                    pars_dict[m(k).name + '_hi'] = m(k).error[1] / pix_in_region
            else:
                pars_dict[m(k).name] = m(k).values[0]
                if do_shakefit:
                    pars_dict[m(k).name + '_lo'] = m(k).error[0]
                    pars_dict[m(k).name + '_hi'] = m(k).error[1]
        pars_dict['chi2'] = chi2

        if self.background:
            # Convert background norms back to per-pixel / per-unit-area values
            pars_dict['nxb_norm'] = m2(2).values[0] / pix_in_region / self.xifu_pixel_surface_cm2

            if self.astro_bkg_model is not None:
                pars_dict['axb_norm1'] = m3(4).values[0] / pix_in_region / self.xifu_pixel_size_arcmin**2
                pars_dict['axb_norm2'] = m3(9).values[0] / pix_in_region / self.xifu_pixel_size_arcmin**2

            if self.cosmic_bkg_model is not None:
                pars_dict['cxb_norm'] = m4(3).values[0] / pix_in_region / self.xifu_pixel_size_arcmin**2

        # End logging
        if log_fitting:
            xs.Xset.closeLog()
        return pars_dict

    def fit_all_spectra(self,
                        spectra_path,
                        numproc = 1,
                        log_fitting = False,
                        chatter = 0,
                        save_fit_results = False,
                        do_shakefit = True):

        """
        Launch fitting over spectra from all bins of binning, in parallel

        Parameters:
            spectra_path (str): Path to spectra (assumes spectra/arfs have been saved as spec_k.pha / spec_k.arf for bin number k)
            numproc (int): Number of parallel processes
            log_fitting (bool): Whether to log the fitting
            chatter (int): Chatter level of xspec (0 = shut up)
            save_fit_results (bool): Whether to save the results of the fit as a .csv file
            do_shakefit (bool): Whether to use shakefit after fitting

        Returns:
            df_results (pandas.Dataframe): Pandas dataframe containing parameters in columns, and best fit values for each bin in rows
        """
        n_bins = self.binning.nb_bins
        # One (pha, arf, number-of-pixels) triple per bin, following the
        # spec_k.pha / spec_k.arf naming convention
        args_list = zip([spectra_path + 'spec_{}.pha'.format(k) for k in range(n_bins)],
                        [spectra_path + 'spec_{}.arf'.format(k) for k in range(n_bins)],
                        [len(self.binning.binning_dict[k][0]) for k in range(n_bins)])

        # Fan the per-bin fits out over `numproc` loky worker processes;
        # tqdm_notebook needs `total` explicitly since zip has no len()
        results = Parallel(n_jobs=numproc, 
                           backend="loky")(
                                delayed(self.fit_spectrum)(pha_file, 
                                                           arf_file,
                                                           nb_pix_in_bin,
                                                           log_fitting = log_fitting,
                                                           chatter = chatter,
                                                           do_shakefit = do_shakefit) for pha_file, arf_file, nb_pix_in_bin in tqdm_notebook(args_list, 
                                                                                                                                total = n_bins, 
                                                                                                                                desc = 'Fitting spectra'
                                                                                                                                )
                                )
        # Keep the raw per-bin dictionaries for later use in make_bestfit_maps
        self.best_fit_values = results

        # Convert to pandas dataframe, flattening any list/tuple value to its first element
        rows = [
                    {k: (v[0] if isinstance(v, (list, tuple)) else v) for k, v in d.items()}
                    for d in results
                ]

        df_results = pd.DataFrame(rows)

        # Save fit
        if save_fit_results:
            df_results.to_csv(spectra_path + 'fit_all_spectra_res.csv')

        return df_results


    def make_bestfit_maps(self,
                          maps_path,
                          cluster_redshift = None,
                          save_maps = False):
        """
        Make maps of best-fit parameters. 
        If saved, they are saved as 'output_maps.npz'

        Parameters:
            maps_path (str): Path to the directory to save the maps
            cluster_redshift (float): Redshift of the cluster (used for the conversion of best fit redshift to bulk motion in km/s)
            save_maps (bool): Whether to save the maps

        Returns:
            best_fit_norm (np.array): Map of best-fit norm
            best_fit_kT (np.array): Map of best-fit temperature
            best_fit_Z (np.array): Map of best-fit abundance
            best_fit_z (np.array): Map of best-fit redshift
            best_fit_v (np.array): Map of best-fit velocity as converted from redshift, from which cluster redshift is subtracted
            best_fit_broad (np.array): Map of best-fit velocity broadening

        Raises:
            ValueError: If cluster_redshift is not provided
        """
        # Fail fast before allocating/filling any map (the check previously ran
        # only after all the per-pixel assignments); use `is None`, not `== None`
        if cluster_redshift is None:
            raise ValueError("Need to provide redshift of cluster")

        best_fit_norm = np.zeros(self.binning.shape)
        best_fit_kT = np.zeros(self.binning.shape)
        best_fit_Z = np.zeros(self.binning.shape)
        best_fit_z = np.zeros(self.binning.shape)
        best_fit_broad = np.zeros(self.binning.shape)

        norm_best_fits = np.array([bestfit['norm'] for bestfit in self.best_fit_values])
        kT_best_fits = np.array([bestfit['kT'] for bestfit in self.best_fit_values])
        Z_best_fits = np.array([bestfit['Abundanc'] for bestfit in self.best_fit_values])
        z_best_fits = np.array([bestfit['Redshift'] for bestfit in self.best_fit_values])
        broad_best_fits = np.array([bestfit['Velocity'] for bestfit in self.best_fit_values])

        # Broadcast the per-bin best-fit values to per-pixel maps
        best_fit_norm[self.binning.X_pixels,
                      self.binning.Y_pixels] = norm_best_fits[self.binning.bin_num_pix]
        best_fit_kT[self.binning.X_pixels,
                    self.binning.Y_pixels] = kT_best_fits[self.binning.bin_num_pix]
        best_fit_Z[self.binning.X_pixels,
                   self.binning.Y_pixels] = Z_best_fits[self.binning.bin_num_pix]
        best_fit_z[self.binning.X_pixels,
                   self.binning.Y_pixels] = z_best_fits[self.binning.bin_num_pix]
        best_fit_broad[self.binning.X_pixels,
                       self.binning.Y_pixels] = broad_best_fits[self.binning.bin_num_pix]

        # Relativistic Doppler formula: convert fitted redshift relative to the
        # cluster redshift into a line-of-sight velocity in km/s
        num = (1+best_fit_z)**2 - (1+cluster_redshift)**2
        denom = (1+best_fit_z)**2 + (1+cluster_redshift)**2
        best_fit_v = const.c.to(units.km/units.s).value * num/denom

        # The norm was fitted per region: convert the norm map to per-pixel values
        nb_pixels_per_bin = np.array(
            [len(self.binning.binning_dict[k][0]
                                    ) for k in range(self.binning.nb_bins)])

        best_fit_norm[self.binning.X_pixels,
                      self.binning.Y_pixels] /= nb_pixels_per_bin[self.binning.bin_num_pix]

        if save_maps:
            np.savez_compressed(maps_path + 'output_maps.npz', 
                                output_norm = best_fit_norm,
                                output_kT = best_fit_kT,
                                output_Z = best_fit_Z,
                                output_z = best_fit_z,
                                output_v = best_fit_v,
                                output_broad = best_fit_broad)

        return best_fit_norm,best_fit_kT,best_fit_Z,best_fit_z,best_fit_v,best_fit_broad

__init__(model_file, binning, background=False, E_min=0.2, E_max=12.0, NXB_requirement=0.008, astro_bkg_model=None, cosmic_bkg_model=None, xifu_config=XIFU_Config())

Initialize class

Parameters:

Name Type Description Default
model_file str

Path to .xcm file containing the model to use (with abundances, cosmo and cross-section)

required
binning binning

Binning instance (to iterate over spectra in each bin)

required
E_min float

Minimum energy of spectra to take into account

0.2
E_max float

Maximum energy of spectra to take into account

12.0
NXB_requirement float

Expected counts of NXB in counts/cm2/s/keV, as a requirement within X-IFU

0.008
astro_bkg_model str

Path to AXB model

None
cosmic_bkg_model str

Path to CXB model

None
xifu_config XIFU_Config

X-IFU Configuration instance

XIFU_Config()
Source code in src/xifu_cluster_sim/spectral_fitting.py
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
def __init__(self,
            model_file,
            binning,
            background=False,
            E_min = 0.2,
            E_max = 12.,
            NXB_requirement = 8e-3,
            astro_bkg_model = None, 
            cosmic_bkg_model = None,
            xifu_config = XIFU_Config()):
    """
    Set up the fitter configuration.

    Parameters:
        model_file (str): Path to the .xcm file holding the model (abundances, cosmology, cross-sections included)
        binning (binning): Binning instance used to iterate over the per-bin spectra
        background (bool): Whether background components are fitted alongside the source
        E_min (float): Lower bound of the energy band used in the fit
        E_max (float): Upper bound of the energy band used in the fit
        NXB_requirement (float): Expected NXB level in counts/cm2/s/keV (X-IFU requirement)
        astro_bkg_model (str): Path to the AXB model file
        cosmic_bkg_model (str): Path to the CXB model file
        xifu_config (XIFU_Config): X-IFU configuration instance
    """
    # Fit configuration
    self.model_file = model_file
    self.binning = binning
    self.background = background
    self.E_min = E_min
    self.E_max = E_max
    self.NXB_requirement = NXB_requirement
    self.astro_bkg_model = astro_bkg_model
    self.cosmic_bkg_model = cosmic_bkg_model
    # Pixel geometry derived from the instrument configuration
    self.xifu_pixel_size_arcmin = (xifu_config.pixsize_degree).to(units.arcmin).value
    self.xifu_pixel_surface_cm2 = (xifu_config.pixel_size_m * 100) ** 2

fit_all_spectra(spectra_path, numproc=1, log_fitting=False, chatter=0, save_fit_results=False, do_shakefit=True)

Launch fitting over spectra from all bins of binning, in parallel

Parameters:

Name Type Description Default
spectra_path str

Path to spectra (assumes spectra/arfs have been saved as spec_k.pha / spec_k.arf for bin number k)

required
numproc int

Number of parallel processes

1
log_fitting bool

Whether to log the fitting

False
chatter int

Chatter level of xspec (0 = shut up)

0
save_fit_results bool

Whether to save the results of the fit as a .csv file

False
do_shakefit bool

Whether to use shakefit after fitting

True

Returns:

Name Type Description
df_results Dataframe

Pandas dataframe containing parameters in columns, and best fit values for each bin in rows

Source code in src/xifu_cluster_sim/spectral_fitting.py
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
def fit_all_spectra(self,
                    spectra_path,
                    numproc = 1,
                    log_fitting = False,
                    chatter = 0,
                    save_fit_results = False,
                    do_shakefit = True):

    """
    Launch fitting over spectra from all bins of binning, in parallel

    Parameters:
        spectra_path (str): Path to spectra (assumes spectra/arfs have been saved as spec_k.pha / spec_k.arf for bin number k)
        numproc (int): Number of parallel processes
        log_fitting (bool): Whether to log the fitting
        chatter (int): Chatter level of xspec (0 = shut up)
        save_fit_results (bool): Whether to save the results of the fit as a .csv file
        do_shakefit (bool): Whether to use shakefit after fitting

    Returns:
        df_results (pandas.Dataframe): Pandas dataframe containing parameters in columns, and best fit values for each bin in rows
    """
    n_bins = self.binning.nb_bins
    # One (pha, arf, number-of-pixels) triple per bin, following the
    # spec_k.pha / spec_k.arf naming convention documented above
    args_list = zip([spectra_path + 'spec_{}.pha'.format(k) for k in range(n_bins)],
                    [spectra_path + 'spec_{}.arf'.format(k) for k in range(n_bins)],
                    [len(self.binning.binning_dict[k][0]) for k in range(n_bins)])

    # Fan the per-bin fits out over `numproc` loky worker processes;
    # tqdm_notebook needs `total` explicitly since zip has no len()
    results = Parallel(n_jobs=numproc, 
                       backend="loky")(
                            delayed(self.fit_spectrum)(pha_file, 
                                                       arf_file,
                                                       nb_pix_in_bin,
                                                       log_fitting = log_fitting,
                                                       chatter = chatter,
                                                       do_shakefit = do_shakefit) for pha_file, arf_file, nb_pix_in_bin in tqdm_notebook(args_list, 
                                                                                                                           total = n_bins, 
                                                                                                                           desc = 'Fitting spectra'
                                                                                                                           )
                            )
    # Keep the raw per-bin dictionaries on the instance for later map making
    self.best_fit_values = results

    # Convert to pandas dataframe, flattening any list/tuple value to its first element
    rows = [
                {k: (v[0] if isinstance(v, (list, tuple)) else v) for k, v in d.items()}
                for d in results
            ]

    df_results = pd.DataFrame(rows)

    # Save fit
    if save_fit_results :
        df_results.to_csv(spectra_path + 'fit_all_spectra_res.csv')

    return df_results

fit_spectrum(pha_file, arf_file, pix_in_region, log_fitting=True, chatter=10, do_shakefit=True)

Fit single spectrum

Parameters:

Name Type Description Default
pha_file str

Path to spectrum as a .pha file

required
arf_file str

Path to arf used for spectrum

required
pix_in_region int

Number of pixels for which the spectrum has been extracted

required
log_fitting bool

Whether to log the fitting in a .log file

True
chatter int

Chatter level of xspec (0 = shut up)

10
do_shakefit bool

Whether to use the shakefit after fitting

True

Returns:

Name Type Description
pars_dict dict

Dictionary of best-fit values for free parameters of each model

Source code in src/xifu_cluster_sim/spectral_fitting.py
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
def fit_spectrum(self,
                pha_file,
                arf_file,
                pix_in_region,
                log_fitting = True,
                chatter = 10,
                do_shakefit = True):
    """
    Fit single spectrum

    Parameters:
        pha_file (str): Path to spectrum as a .pha file
        arf_file (str): Path to arf used for spectrum
        pix_in_region (int): Number of pixels for which the spectrum has been extracted
        log_fitting (bool): Whether to log the fitting in a .log file
        chatter (int): Chatter level of xspec (0 = shut up)
        do_shakefit (bool): Whether to use the shakefit after fitting

    Returns:
        pars_dict (dict): Dictionary of best-fit values for free parameters of each model
    """

    # Set chatter and clear all xspec global state (chains, data, models)
    # left over from any previous fit in this process
    xs.Xset.chatter = chatter
    xs.AllChains.clear()
    xs.AllData.clear()
    xs.AllModels.clear()

    # Load model
    #xs.Xset.restore(self.model_file) #Doesnt work for this
    #m = xs.AllModels(1)
    m = load_model(self.model_file)

    # Setup logging (log file lives next to the spectrum)
    logfile = pha_file.replace('.pha','.log')
    if log_fitting :
        xs.Xset.logChatter = 10
        xs.Xset.openLog(logfile)

    # Load spectrum and attach its ancillary response
    s = xs.Spectrum(pha_file)
    s.response.arf = arf_file

    # Option to run the background
    if self.background: 

        ## NXB: flat power law (index frozen to 0) fitted through source 2

        #Value in cts/s/keV for a given pixel times the number of pixels
        NXB_level = self.NXB_requirement * self.xifu_pixel_surface_cm2 * pix_in_region
        s.multiresponse[1]=s.multiresponse[0].rmf
        m2=xs.Model("pow", modName="nxb", sourceNum=2, setPars={1:0,2:NXB_level})
        m2(1).frozen=True #constant

        ## AXB, fitted through source 3
        if self.astro_bkg_model is not None:
            s.multiresponse[2]=s.multiresponse[0].rmf
            s.multiresponse[2].arf=s.multiresponse[0].arf

            axb_model, axb_pars, axb_frozen = load_model(self.astro_bkg_model, pars_only=True)
            m3=xs.Model(axb_model, modName="axb", sourceNum=3, setPars=axb_pars)

            # Freeze parameters that were frozen in the model file
            for j in axb_frozen: 
                m3(j).frozen=True 

            # Multiply the norm by the angular size of the region
            # NOTE(review): parameters 4 and 9 are assumed to be the norms of
            # the two AXB components -- confirm against the AXB .xcm model
            region_size_arcmin = self.xifu_pixel_size_arcmin**2 * pix_in_region
            m3.setPars({4:m3(4).values[0]*region_size_arcmin})
            m3.setPars({9:m3(9).values[0]*region_size_arcmin})

        ## CXB, fitted through source 4
        if self.cosmic_bkg_model is not None:
            s.multiresponse[3]=s.multiresponse[0].rmf
            s.multiresponse[3].arf=s.multiresponse[0].arf

            cxb_model, cxb_pars, cxb_frozen=load_model(self.cosmic_bkg_model, pars_only=True)
            m4=xs.Model(cxb_model, modName="cxb", sourceNum=4, setPars=cxb_pars)

            # Freeze parameters that were frozen in the model file
            for j in cxb_frozen: 
                m4(j).frozen=True

            # Multiply the norm by the angular size of the region
            # NOTE(review): parameter 3 is assumed to be the CXB norm -- confirm
            region_size_arcmin = self.xifu_pixel_size_arcmin**2 * pix_in_region
            m4.setPars({3:m4(3).values[0]*region_size_arcmin})

    # Restrict the fit to [E_min, E_max] and drop bad channels
    xs.AllData.ignore('0.0-%.1f' % self.E_min)
    xs.AllData.ignore('%.1f-**' % self.E_max)

    xs.AllData.ignore('bad')
    xs.Fit.statMethod = "cstat"

    xs.Fit.nIterations = 1000   
    xs.Fit.query = 'yes'

    # Run the fit several times, presumably to nudge it out of shallow minima
    # when a single pass stops early -- TODO confirm necessity
    xs.Fit.perform()
    xs.Fit.perform()
    xs.Fit.perform()

    # Call shakefit to avoid local minima in case error computation has been requested
    if do_shakefit:
        shakefit(1,m)

    # Fit statistic (cstat here, stored under the historical key 'chi2')
    chi2 = xs.Fit.statistic

    # Get fit results as a dictionary
    m.show()

    pars_dict = {}
    for k in range(1,1+m.nParameters):
        if k == 1:
            # Parameter 1: value only, no error bounds stored
            pars_dict[m(k).name] = m(k).values[0]
        elif k == 6 :
            # NOTE(review): parameter 6 is assumed to be the model norm,
            # converted from per-region to per-pixel -- confirm against model
            pars_dict[m(k).name] = m(k).values[0] / pix_in_region
            if do_shakefit:
                pars_dict[m(k).name + '_lo'] = m(k).error[0] / pix_in_region
                pars_dict[m(k).name + '_hi'] = m(k).error[1] / pix_in_region
        else :
            pars_dict[m(k).name] = m(k).values[0]
            if do_shakefit:
                pars_dict[m(k).name + '_lo'] = m(k).error[0]
                pars_dict[m(k).name + '_hi'] = m(k).error[1]
    pars_dict['chi2'] = chi2

    if self.background: 
        # Convert background norms back to per-pixel / per-unit-area values
        pars_dict['nxb_norm'] = m2(2).values[0] / pix_in_region / self.xifu_pixel_surface_cm2

        if self.astro_bkg_model is not None:
            pars_dict['axb_norm1'] = m3(4).values[0] / pix_in_region / self.xifu_pixel_size_arcmin**2
            pars_dict['axb_norm2'] = m3(9).values[0] / pix_in_region / self.xifu_pixel_size_arcmin**2

        if self.cosmic_bkg_model is not None:
            pars_dict['cxb_norm'] = m4(3).values[0] / pix_in_region / self.xifu_pixel_size_arcmin**2

    # End logging
    if log_fitting :
        xs.Xset.closeLog()  
    return pars_dict

make_bestfit_maps(maps_path, cluster_redshift=None, save_maps=False)

Make maps of best-fit parameters. If saved, they are saved as 'output_maps.npz'

Parameters:

Name Type Description Default
maps_path str

Path to the directory to save the maps

required
cluster_redshift float

Redshift of the cluster (used for the conversion of best fit redshift to bulk motion in km/s)

None
save_maps bool

Whether to save the maps

False

Returns:

Name Type Description
best_fit_norm array

Map of best-fit norm

best_fit_kT array

Map of best-fit temperature

best_fit_Z array

Map of best-fit abundance

best_fit_z array

Map of best-fit redshift

best_fit_v array

Map of best-fit velocity as converted from redshift, from which cluster redshift is subtracted

best_fit_broad array

Map of best-fit velocity broadening

Source code in src/xifu_cluster_sim/spectral_fitting.py
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
def make_bestfit_maps(self,
                      maps_path,
                      cluster_redshift = None,
                      save_maps = False):
    """
    Make maps of best-fit parameters. 
    If saved, they are saved as 'output_maps.npz'

    Parameters:
        maps_path (str): Path to the directory to save the maps
        cluster_redshift (float): Redshift of the cluster (used for the conversion of best fit redshift to bulk motion in km/s)
        save_maps (bool): Whether to save the maps

    Returns:
        best_fit_norm (np.array): Map of best-fit norm
        best_fit_kT (np.array): Map of best-fit temperature
        best_fit_Z (np.array): Map of best-fit abundance
        best_fit_z (np.array): Map of best-fit redshift
        best_fit_v (np.array): Map of best-fit velocity as converted from redshift, from which cluster redshift is subtracted
        best_fit_broad (np.array): Map of best-fit velocity broadening

    Raises:
        ValueError: If cluster_redshift is not provided
    """
    # Fail fast before allocating/filling any map (the check previously ran
    # only after all the per-pixel assignments); use `is None`, not `== None`
    if cluster_redshift is None:
        raise ValueError("Need to provide redshift of cluster")

    best_fit_norm = np.zeros(self.binning.shape)
    best_fit_kT = np.zeros(self.binning.shape)
    best_fit_Z = np.zeros(self.binning.shape)
    best_fit_z = np.zeros(self.binning.shape)
    best_fit_broad = np.zeros(self.binning.shape)

    norm_best_fits = np.array([bestfit['norm'] for bestfit in self.best_fit_values])
    kT_best_fits = np.array([bestfit['kT'] for bestfit in self.best_fit_values])
    Z_best_fits = np.array([bestfit['Abundanc'] for bestfit in self.best_fit_values])
    z_best_fits = np.array([bestfit['Redshift'] for bestfit in self.best_fit_values])
    broad_best_fits = np.array([bestfit['Velocity'] for bestfit in self.best_fit_values])

    # Broadcast the per-bin best-fit values to per-pixel maps
    best_fit_norm[self.binning.X_pixels,
                  self.binning.Y_pixels] = norm_best_fits[self.binning.bin_num_pix]
    best_fit_kT[self.binning.X_pixels,
                self.binning.Y_pixels] = kT_best_fits[self.binning.bin_num_pix]
    best_fit_Z[self.binning.X_pixels,
               self.binning.Y_pixels] = Z_best_fits[self.binning.bin_num_pix]
    best_fit_z[self.binning.X_pixels,
               self.binning.Y_pixels] = z_best_fits[self.binning.bin_num_pix]
    best_fit_broad[self.binning.X_pixels,
                   self.binning.Y_pixels] = broad_best_fits[self.binning.bin_num_pix]

    # Relativistic Doppler formula: convert fitted redshift relative to the
    # cluster redshift into a line-of-sight velocity in km/s
    num = (1+best_fit_z)**2 - (1+cluster_redshift)**2
    denom = (1+best_fit_z)**2 + (1+cluster_redshift)**2
    best_fit_v = const.c.to(units.km/units.s).value * num/denom

    # The norm was fitted per region: convert the norm map to per-pixel values
    nb_pixels_per_bin = np.array(
        [len(self.binning.binning_dict[k][0]
                                ) for k in range(self.binning.nb_bins)])

    best_fit_norm[self.binning.X_pixels,
                  self.binning.Y_pixels] /= nb_pixels_per_bin[self.binning.bin_num_pix]

    if save_maps:
        np.savez_compressed(maps_path + 'output_maps.npz', 
                            output_norm = best_fit_norm,
                            output_kT = best_fit_kT,
                            output_Z = best_fit_Z,
                            output_z = best_fit_z,
                            output_v = best_fit_v,
                            output_broad = best_fit_broad)

    return best_fit_norm,best_fit_kT,best_fit_Z,best_fit_z,best_fit_v,best_fit_broad

load_model(infile, pars_only=False)

Convenience function for loading an xspec model from a .xcm file

Parameters:

Name Type Description Default
infile str

Path to .xcm file

required
pars_only bool

Option to return parameters only

False

Returns:

Name Type Description
m Model

Loaded XSPEC Model

Source code in src/xifu_cluster_sim/spectral_fitting.py
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
def load_model(infile, pars_only=False):
    '''
    Convenience function for loading an xspec model from a .xcm file

    Parameters:
        infile (str): Path to .xcm file
        pars_only (bool): Option to return parameters only

    Returns:
        m (xspec.Model): Loaded XSPEC Model, when pars_only is False.
        Otherwise a (mod_name, Pars, Frozen) tuple: the model expression,
        the list of first parameter values, and the 1-based indices of the
        frozen parameters (negative delta in the .xcm file).
    '''
    # 'with' guarantees the file handle is closed even on error
    # (the previous implementation leaked it).
    with open(infile, 'r') as model_file:
        # Scan header lines until the 'model' line, applying any global
        # XSPEC settings (cosmo/abund/xsect) found on the way.
        # 'statistic', 'method', 'delta' and 'systematic' lines are skipped.
        # Always advancing 'line' (and stopping at EOF) avoids the infinite
        # loop the old code hit on unrecognized lines or truncated files.
        line = model_file.readline()
        while line and 'model' not in line:
            if 'cosmo' in line:
                setting = line.replace('\n', '').replace('cosmo ', '')
                xs.Xset.cosmo = ' '.join(setting.split(','))
            elif 'abund' in line:
                setting = line.replace('\n', '').replace('abund ', '')
                xs.Xset.abund = ' '.join(setting.split(','))
            elif 'xsect' in line:
                setting = line.replace('\n', '').replace('xsect ', '')
                xs.Xset.xsect = ' '.join(setting.split(','))
            line = model_file.readline()

        # Strip the 'model' keyword to keep only the model expression.
        line = line.replace('\n', '')
        line = line.replace('model', '')

        if not pars_only:
            m = xs.Model(line)
            nb_para = m.nParameters
            # Assign the parameters, one .xcm line per parameter.
            i = 1
            par_line = model_file.readline()
            while par_line and i <= nb_para:
                param_list = list(map(float, par_line.split()))
                m(i).values = param_list
                par_line = model_file.readline()
                i += 1
            return m

        else:
            mod_name = line
            Pars = []
            Frozen = []
            # Collect only the parameter value (first column); a negative
            # delta (second column) marks the parameter as frozen.
            i = 1
            par_line = model_file.readline()
            while par_line:
                param_list = list(map(float, par_line.split()))
                Pars.append(param_list[0])
                if param_list[1] < 0:
                    Frozen.append(i)
                par_line = model_file.readline()
                i += 1
            return mod_name, Pars, Frozen

shakefit(dchi, model)

Function to "shake" a fit. Can be used to get it out of local minima. Function that was introduced by Peille, Cucchetti during their work. Ideally, should not be used. The fit should either be instantiated to a true known minimum or a Bayesian approach should be used (e.g. MCMC, BXA, or others).

Source code in src/xifu_cluster_sim/spectral_fitting.py
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
def shakefit(dchi, model):
    """
    Function to "shake" a fit. Can be used to get it out of local minima.
    Function that was introduced by Peille, Cucchetti during their work.
    Ideally, should not be used. The fit should either be instantiated to a
    true known minimum or a Bayesian approach should be used (e.g. MCMC,
    BXA, or others).
    """
    n_pars = model.nParameters
    xs.Fit.nIterations = 100
    # XSPEC reports the error-run status as a 9-character flag string.
    # converged_re matches 'F' in the first slot (used here as the stop
    # condition); new_min_re matches 'T' in the second slot (presumably
    # "a new minimum was found" -- triggers widening of delchi).
    converged_re = re.compile(r'F........')
    new_min_re = re.compile(r'.T.......')

    for par_idx in range(1, n_pars + 1):
        if model(par_idx).frozen:
            continue
        delchi = dchi
        while True:
            error_command = 'stopat 100 0.01 max 20.0 %.2f %d' % (delchi, par_idx)
            xs.Fit.error(error_command)
            status = model(par_idx).error[2]
            done = converged_re.match(status) is not None
            if not done:
                # The error run did not converge: refit before retrying.
                xs.Fit.perform()
            if new_min_re.match(status):
                delchi *= 2
            if done:
                break