diff --git a/src/sparkx/Histogram.py b/src/sparkx/Histogram.py
index 600b9dc7..2d3ed76a 100755
--- a/src/sparkx/Histogram.py
+++ b/src/sparkx/Histogram.py
@@ -565,6 +565,9 @@ def average_weighted(self, weights):
         variance = np.average(
             (self.histograms_ - average) ** 2.0, axis=0, weights=weights
         )
+        # Ensure the result is a 2D array
+        if average.ndim == 1:
+            average = average.reshape(1, -1)
 
         self.histograms_ = average
         self.error_ = np.sqrt(variance)
@@ -574,6 +577,9 @@ def average_weighted(self, weights):
 
         self.histogram_raw_count_ = np.sum(self.histograms_raw_count_, axis=0)
         self.scaling_ = self.scaling_[0]
+        if self.scaling_.ndim == 1:
+            self.scaling_ = self.scaling_.reshape(1, -1)
+
         self.number_of_histograms_ = 1
         return self
 
@@ -607,6 +613,10 @@ def average_weighted_by_error(self):
         weights = 1 / self.error_**2
         average = np.average(self.histograms_, axis=0, weights=weights)
+        # Ensure the result is a 2D array
+        if average.ndim == 1:
+            average = average.reshape(1, -1)
+
         self.histograms_ = average
         self.error_ = np.sqrt(
             1.0 / np.sum(1.0 / np.square(self.error_), axis=0)
         )
@@ -617,6 +627,9 @@ def average_weighted_by_error(self):
 
         self.histogram_raw_count_ = np.sum(self.histograms_raw_count_, axis=0)
         self.scaling_ = self.scaling_[0]
+        if self.scaling_.ndim == 1:
+            self.scaling_ = self.scaling_.reshape(1, -1)
+
         self.number_of_histograms_ = 1
         return self
 
@@ -688,9 +701,15 @@ def scale_histogram(self, value):
 
             self.error_[-1] *= value
         elif isinstance(value, (list, np.ndarray)):
-            self.histograms_[-1] *= np.asarray(value)
-            self.scaling_[-1] *= np.asarray(value)
-            self.scaling_[-1] *= np.asarray(value)
+            if np.asarray(value).shape != self.histograms_[-1].shape:
+                raise ValueError(
+                    "The shape of the scaling factor array is not compatible with the histogram shape"
+                )
+
+            value_array = np.asarray(value)
+            self.histograms_[-1] *= value_array
+            self.scaling_[-1] *= value_array
+            self.scaling_[-1] *= value_array
 
     def set_error(self, own_error):
         """
diff --git a/src/sparkx/Particle.py b/src/sparkx/Particle.py
index b61f2e1e..0f5b7621 100755
--- a/src/sparkx/Particle.py
+++ b/src/sparkx/Particle.py
@@ -423,6 +423,7 @@ def __initialize_from_array(self, input_format, particle_array):
                     + " is not known by PDGID, charge could not be computed. Consider setting it by hand."
                 )
             else:
+                print(particle_array)
                 raise ValueError(
                     "The input file is corrupted! "
                     + "A line with wrong number of columns "
diff --git a/tests/test_Histogram.py b/tests/test_Histogram.py
index 88361833..975ff88c 100644
--- a/tests/test_Histogram.py
+++ b/tests/test_Histogram.py
@@ -315,7 +315,7 @@ def test_average():
         hist.error_, np.array([0, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.0])
     )
     assert isinstance(hist.scaling_, np.ndarray)
-    assert not any(isinstance(i, np.ndarray) for i in hist.scaling_)
+    assert all(isinstance(i, np.ndarray) for i in hist.scaling_)
     assert np.allclose(counts_summed, hist.histogram_raw_count_)
 
 
@@ -362,7 +362,7 @@ def test_average_weighted_by_error():
         hist.error_, np.array([0.89442719, 1.41421356, 2.12132034]), atol=0.01
     )
     assert isinstance(hist.scaling_, np.ndarray)
-    assert not any(isinstance(i, np.ndarray) for i in hist.scaling_)
+    assert all(isinstance(i, np.ndarray) for i in hist.scaling_)
    assert np.allclose(counts_summed, hist.histogram_raw_count_)
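
The shape handling in the hunks above can be illustrated with plain NumPy: `np.average(..., axis=0)` collapses the stack of histograms stored in `histograms_` to a 1D array, and the added `reshape(1, -1)` restores the `(1, n_bins)` layout that the rest of the class relies on, e.g. the `[-1]` indexing in `scale_histogram`. The snippet below is a minimal, self-contained sketch of that behaviour and of the new shape check; it uses bare NumPy arrays standing in for the class attributes and does not call the sparkx API.

```python
import numpy as np

# Three histograms with 4 bins each, plus per-histogram weights,
# mimicking the internal histograms_ stack of the Histogram class.
histograms = np.array([[1.0, 2.0, 3.0, 4.0],
                       [2.0, 2.0, 2.0, 2.0],
                       [0.0, 4.0, 6.0, 8.0]])
weights = np.array([1.0, 2.0, 1.0])

# np.average over axis=0 collapses the stack to a 1D array of bin contents.
average = np.average(histograms, axis=0, weights=weights)
variance = np.average((histograms - average) ** 2.0, axis=0, weights=weights)
print(average.shape)   # (4,)  -> 1D

# The patch restores the 2D (1, n_bins) layout so that code which accesses
# the last histogram via histograms_[-1] keeps working after averaging.
if average.ndim == 1:
    average = average.reshape(1, -1)
print(average.shape)   # (1, 4)

# The scale_histogram shape check added in the patch, in isolation:
value = [2.0, 2.0, 2.0, 2.0]
if np.asarray(value).shape != average[-1].shape:
    raise ValueError(
        "The shape of the scaling factor array is not compatible with the histogram shape"
    )
average[-1] *= np.asarray(value)
```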