From 913f66fd8a8dd11ae15b49c9797f3da2e97d59bd Mon Sep 17 00:00:00 2001
From: Lukas Beiske
Date: Thu, 26 Sep 2024 15:01:16 +0200
Subject: [PATCH] Fix some code smells

---
 src/ctapipe/irf/tests/test_benchmarks.py | 20 ++++++++++----------
 src/ctapipe/irf/tests/test_binning.py    |  7 ++++---
 src/ctapipe/irf/tests/test_irfs.py       | 22 +++++++++++-----------
 src/ctapipe/irf/tests/test_select.py     |  4 ++--
 4 files changed, 27 insertions(+), 26 deletions(-)

diff --git a/src/ctapipe/irf/tests/test_benchmarks.py b/src/ctapipe/irf/tests/test_benchmarks.py
index 012f920d9fb..511a4015645 100644
--- a/src/ctapipe/irf/tests/test_benchmarks.py
+++ b/src/ctapipe/irf/tests/test_benchmarks.py
@@ -10,14 +10,14 @@ def test_make_2d_energy_bias_res(irf_events_table):
     from ctapipe.irf import EnergyBiasResolution2dMaker
 
-    biasResMkr = EnergyBiasResolution2dMaker(
+    bias_res_maker = EnergyBiasResolution2dMaker(
         fov_offset_n_bins=3,
         fov_offset_max=3 * u.deg,
         true_energy_n_bins_per_decade=7,
         true_energy_max=155 * u.TeV,
     )
 
-    bias_res_hdu = biasResMkr.make_bias_resolution_hdu(events=irf_events_table)
+    bias_res_hdu = bias_res_maker.make_bias_resolution_hdu(events=irf_events_table)
     # min 7 bins per decade between 0.015 TeV and 155 TeV -> 7 * 4 + 1 = 29 bins
     assert (
         bias_res_hdu.data["N_EVENTS"].shape
@@ -35,7 +35,7 @@ def test_make_2d_ang_res(irf_events_table):
     from ctapipe.irf import AngularResolution2dMaker
 
-    angResMkr = AngularResolution2dMaker(
+    ang_res_maker = AngularResolution2dMaker(
         fov_offset_n_bins=3,
         fov_offset_max=3 * u.deg,
         true_energy_n_bins_per_decade=7,
@@ -44,7 +44,7 @@ def test_make_2d_ang_res(irf_events_table):
         reco_energy_min=0.03 * u.TeV,
     )
 
-    ang_res_hdu = angResMkr.make_angular_resolution_hdu(events=irf_events_table)
+    ang_res_hdu = ang_res_maker.make_angular_resolution_hdu(events=irf_events_table)
     assert (
         ang_res_hdu.data["N_EVENTS"].shape
         == ang_res_hdu.data["ANGULAR_RESOLUTION"].shape
@@ -56,8 +56,8 @@ def test_make_2d_ang_res(irf_events_table):
         hi_vals=[3 * u.deg, 150 * u.TeV],
     )
 
-    angResMkr.use_true_energy = True
-    ang_res_hdu = angResMkr.make_angular_resolution_hdu(events=irf_events_table)
+    ang_res_maker.use_true_energy = True
+    ang_res_hdu = ang_res_maker.make_angular_resolution_hdu(events=irf_events_table)
     assert (
         ang_res_hdu.data["N_EVENTS"].shape
         == ang_res_hdu.data["ANGULAR_RESOLUTION"].shape
@@ -97,7 +97,7 @@ def test_make_2d_sensitivity(
         obs_time=u.Quantity(50, u.h),
     )
 
-    sensMkr = Sensitivity2dMaker(
+    sens_maker = Sensitivity2dMaker(
         fov_offset_n_bins=3,
         fov_offset_max=3 * u.deg,
         reco_energy_n_bins_per_decade=7,
@@ -107,11 +107,11 @@ def test_make_2d_sensitivity(
     # needs a theta cut atm.
     theta_cuts = QTable()
     theta_cuts["center"] = 0.5 * (
-        sensMkr.reco_energy_bins[:-1] + sensMkr.reco_energy_bins[1:]
+        sens_maker.reco_energy_bins[:-1] + sens_maker.reco_energy_bins[1:]
     )
-    theta_cuts["cut"] = sensMkr.fov_offset_max
+    theta_cuts["cut"] = sens_maker.fov_offset_max
 
-    sens_hdu = sensMkr.make_sensitivity_hdu(
+    sens_hdu = sens_maker.make_sensitivity_hdu(
         signal_events=gamma_events,
         background_events=proton_events,
         theta_cut=theta_cuts,
diff --git a/src/ctapipe/irf/tests/test_binning.py b/src/ctapipe/irf/tests/test_binning.py
index e1040f15058..4925f8855ca 100644
--- a/src/ctapipe/irf/tests/test_binning.py
+++ b/src/ctapipe/irf/tests/test_binning.py
@@ -9,6 +9,7 @@ def test_check_bins_in_range(tmp_path):
     from ctapipe.irf import ResultValidRange, check_bins_in_range
 
     valid_range = ResultValidRange(min=0.03 * u.TeV, max=200 * u.TeV)
+    errormessage = "Valid range for result is 0.03 to 200., got"
 
     # bins are in range
     bins = u.Quantity(np.logspace(-1, 2, 10), u.TeV)
@@ -16,17 +17,17 @@ def test_check_bins_in_range(tmp_path):
     # bins are too small
     bins = u.Quantity(np.logspace(-2, 2, 10), u.TeV)
-    with pytest.raises(ValueError, match="Valid range for"):
+    with pytest.raises(ValueError, match=errormessage):
         check_bins_in_range(bins, valid_range)
 
     # bins are too big
     bins = u.Quantity(np.logspace(-1, 3, 10), u.TeV)
-    with pytest.raises(ValueError, match="Valid range for"):
+    with pytest.raises(ValueError, match=errormessage):
         check_bins_in_range(bins, valid_range)
 
     # bins are too big and too small
     bins = u.Quantity(np.logspace(-2, 3, 10), u.TeV)
-    with pytest.raises(ValueError, match="Valid range for"):
+    with pytest.raises(ValueError, match=errormessage):
         check_bins_in_range(bins, valid_range)
 
     logger = logging.getLogger("ctapipe.irf.binning")
diff --git a/src/ctapipe/irf/tests/test_irfs.py b/src/ctapipe/irf/tests/test_irfs.py
index db6179f98e1..8da1f2dac2e 100644
--- a/src/ctapipe/irf/tests/test_irfs.py
+++ b/src/ctapipe/irf/tests/test_irfs.py
@@ -22,14 +22,14 @@ def _check_boundaries_in_hdu(
 def test_make_2d_bkg(irf_events_table):
     from ctapipe.irf import BackgroundRate2dMaker
 
-    bkgMkr = BackgroundRate2dMaker(
+    bkg_maker = BackgroundRate2dMaker(
         fov_offset_n_bins=3,
         fov_offset_max=3 * u.deg,
         reco_energy_n_bins_per_decade=7,
         reco_energy_max=155 * u.TeV,
     )
 
-    bkg_hdu = bkgMkr.make_bkg_hdu(events=irf_events_table, obs_time=1 * u.s)
+    bkg_hdu = bkg_maker.make_bkg_hdu(events=irf_events_table, obs_time=1 * u.s)
     # min 7 bins per decade between 0.015 TeV and 155 TeV -> 7 * 4 + 1 = 29 bins
     assert bkg_hdu.data["BKG"].shape == (1, 3, 29)
@@ -41,7 +41,7 @@ def test_make_2d_energy_migration(irf_events_table):
     from ctapipe.irf import EnergyDispersion2dMaker
 
-    migMkr = EnergyDispersion2dMaker(
+    edisp_maker = EnergyDispersion2dMaker(
         fov_offset_n_bins=3,
         fov_offset_max=3 * u.deg,
         true_energy_n_bins_per_decade=7,
@@ -50,12 +50,12 @@ def test_make_2d_energy_migration(irf_events_table):
         energy_migration_min=0.1,
         energy_migration_max=10,
     )
-    mig_hdu = migMkr.make_edisp_hdu(events=irf_events_table, point_like=False)
+    edisp_hdu = edisp_maker.make_edisp_hdu(events=irf_events_table, point_like=False)
     # min 7 bins per decade between 0.015 TeV and 155 TeV -> 7 * 4 + 1 = 29 bins
-    assert mig_hdu.data["MATRIX"].shape == (1, 3, 20, 29)
+    assert edisp_hdu.data["MATRIX"].shape == (1, 3, 20, 29)
 
     _check_boundaries_in_hdu(
-        mig_hdu,
+        edisp_hdu,
         lo_vals=[0 * u.deg, 0.015 * u.TeV, 0.1],
         hi_vals=[3 * u.deg, 155 * u.TeV, 10],
         colnames=["THETA", "ENERG", "MIGRA"],
@@ -65,7 +65,7 @@ def test_make_2d_energy_migration(irf_events_table):
 def test_make_2d_eff_area(irf_events_table):
     from ctapipe.irf import EffectiveArea2dMaker
 
-    effAreaMkr = EffectiveArea2dMaker(
+    eff_area_maker = EffectiveArea2dMaker(
         fov_offset_n_bins=3,
         fov_offset_max=3 * u.deg,
         true_energy_n_bins_per_decade=7,
@@ -80,7 +80,7 @@ def test_make_2d_eff_area(irf_events_table):
         viewcone_min=0 * u.deg,
         viewcone_max=10 * u.deg,
     )
-    eff_area_hdu = effAreaMkr.make_aeff_hdu(
+    eff_area_hdu = eff_area_maker.make_aeff_hdu(
         events=irf_events_table,
         point_like=False,
         signal_is_point_like=False,
@@ -96,7 +96,7 @@ def test_make_2d_eff_area(irf_events_table):
     )
 
     # point like data -> only 1 fov offset bin
-    eff_area_hdu = effAreaMkr.make_aeff_hdu(
+    eff_area_hdu = eff_area_maker.make_aeff_hdu(
         events=irf_events_table,
         point_like=False,
         signal_is_point_like=True,
@@ -108,7 +108,7 @@ def test_make_3d_psf(irf_events_table):
     from ctapipe.irf import Psf3dMaker
 
-    psfMkr = Psf3dMaker(
+    psf_maker = Psf3dMaker(
         fov_offset_n_bins=3,
         fov_offset_max=3 * u.deg,
         true_energy_n_bins_per_decade=7,
@@ -116,7 +116,7 @@ def test_make_3d_psf(irf_events_table):
         source_offset_n_bins=110,
         source_offset_max=2 * u.deg,
     )
-    psf_hdu = psfMkr.make_psf_hdu(events=irf_events_table)
+    psf_hdu = psf_maker.make_psf_hdu(events=irf_events_table)
     # min 7 bins per decade between 0.015 TeV and 155 TeV -> 7 * 4 + 1 = 29 bins
     assert psf_hdu.data["RPSF"].shape == (1, 110, 3, 29)
diff --git a/src/ctapipe/irf/tests/test_select.py b/src/ctapipe/irf/tests/test_select.py
index 963dd2c64ff..87aa35195a7 100644
--- a/src/ctapipe/irf/tests/test_select.py
+++ b/src/ctapipe/irf/tests/test_select.py
@@ -48,14 +48,14 @@ def test_normalise_column_names(dummy_table):
     for c in needed_cols:
         assert c in norm_table.colnames
 
-    # error if reco_{alt,az} is missing because of no-standard name
+    # error if reco_{alt,az} is missing because of non-standard name
     with pytest.raises(ValueError, match="No column corresponding"):
         epp = EventPreProcessor(
             energy_reconstructor="dummy",
             geometry_reconstructor="geom",
             gammaness_classifier="classifier",
         )
-        norm_table = epp.normalise_column_names(dummy_table)
+        _ = epp.normalise_column_names(dummy_table)
 
 
 def test_events_loader(gamma_diffuse_full_reco_file, irf_events_loader_test_config):