Merge branch 'sparkx_devel' into sass/BulkObservables
NGoetz authored Dec 11, 2024
2 parents d5cda2f + 201792f commit 49ea2bb
Showing 28 changed files with 563 additions and 363 deletions.
43 changes: 22 additions & 21 deletions .github/workflows/code_formatting.yml
@@ -5,22 +5,20 @@ on:
branches:
- main
- sparkx_devel
types:
- closed # Trigger when the PR is closed (includes merging)

jobs:
code-formatting:
if: github.event.pull_request.merged == true
runs-on: ubuntu-latest

steps:
- name: Checkout repository
uses: actions/checkout@v2
- name: Checkout PR branch
uses: actions/checkout@v3
with:
ref: ${{ github.sha }} # Check out the exact commit of the merged PR
ref: ${{ github.head_ref }}
fetch-depth: 0 # Ensures full history is available

- name: Set up Python
uses: actions/setup-python@v2
uses: actions/setup-python@v4
with:
python-version: '3.9'

@@ -29,22 +27,25 @@ jobs:
python -m pip install --upgrade pip
pip install black==24.8.0
- name: Format code
- name: Run black to check formatting
id: black-check
run: |
black --line-length 80 src/sparkx tests/
black --check --line-length 80 src/sparkx tests/
continue-on-error: true

- name: Commit changes
run: |
git config --local user.name "GitHub Action"
git config --local user.email "[email protected]"
git add src/sparkx tests/
git commit -m "Automatically format code using black" || echo "No changes to commit"
- name: Capture formatting status
if: ${{ steps.black-check.outcome == 'failure' }}
run: echo "needs_formatting=true" >> $GITHUB_ENV

- name: Push changes
run: |
git push origin ${{ github.head_ref }}
- name: Format code with black
if: env.needs_formatting == 'true'
run: black --line-length 80 src/sparkx tests/

- name: Run format test again
- name: Push formatted changes
if: env.needs_formatting == 'true'
run: |
# Add your test commands here
black --check --line-length 80 src/sparkx tests/
git config --global user.name "github-actions[bot]"
git config --global user.email "github-actions[bot]@users.noreply.github.com"
git add src/sparkx tests/
git commit -m "Auto-format code with black"
git push origin ${{ github.head_ref }}
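
The reworked workflow above now runs black in check mode first, records whether reformatting is needed, and only reformats, commits, and pushes when the check fails. As a rough local equivalent (a minimal sketch, not part of the repository; the paths are taken from the workflow), the same check-then-format logic can be reproduced in Python, relying on the fact that black --check exits non-zero when files would be reformatted:

    import subprocess

    # Step 1: check-only run; black exits non-zero if any file needs changes.
    check = subprocess.run(
        ["black", "--check", "--line-length", "80", "src/sparkx", "tests/"]
    )

    # Step 2: reformat (and, in CI, commit and push) only when the check failed.
    if check.returncode != 0:
        subprocess.run(
            ["black", "--line-length", "80", "src/sparkx", "tests/"],
            check=True,
        )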
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -31,6 +31,7 @@ Date:
* Added support for the SMASH 3.2 feature of custom output format
* Add option to add two Oscar/Jetscape/ParticleObjectStorer instances while preserving the event order
* BulkObservables: Add a class for calculating spectra and integrated yields
* Oscar: Add function to extract the impact parameters

### Changed
* Particle: Rename several methods for a more intuitive naming scheme. Renamed methods are:
7 changes: 4 additions & 3 deletions pyproject.toml
@@ -6,9 +6,11 @@ build-backend = "setuptools.build_meta"
name = "sparkx"
version = "2.0.0"
authors = [
{ name="Lucas Constantin", email="[email protected]"},
{ name="Niklas Götz", email="[email protected]"},
{ name="Renata Krupczak", email="[email protected]"},
{ name="Hendrik Roch", email="[email protected]" },
{ name="Carl Rosenkvist", email="[email protected]"},
{ name="Nils Sass", email="[email protected]" }
]
description = "Software Package for Analyzing Relativistic Kinematics in Collision eXperiments"
@@ -25,11 +27,10 @@ classifiers = [
]

dependencies = [
"particle==0.23.0",
"particle>=0.23.0",
"numpy>=1.23.5",
"scipy>=1.10.1",
"abc-property==1.0",
"fastjet==3.4.1.3",
"fastjet>=3.4.2.1",
"matplotlib>=3.7.1"
]

7 changes: 3 additions & 4 deletions requirements.txt
@@ -1,9 +1,8 @@
# Main dependencies
particle==0.23.0
particle>=0.23.0
numpy>=1.23.5
scipy>=1.10.1
abc-property==1.0
fastjet==3.4.1.3
fastjet>=3.4.2.1
matplotlib>=3.7.1

# Documentation dependencies
@@ -15,7 +14,7 @@ setuptools>=68.0.0

# Development and testing dependencies
pytest>=7.4.3
black>=24.8.0
black==24.8.0

# Editable Sparkx install
-e .
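
The dependency changes above relax particle and fastjet to minimum versions while pinning black to 24.8.0, so local formatting matches the CI formatter. A quick way to check whether an existing environment already satisfies the new lower bounds (a sketch assuming the packaging library is importable; the version numbers are copied from the updated requirements):

    from importlib.metadata import version
    from packaging.version import Version

    # Lower bounds introduced by this commit.
    minimums = {"particle": "0.23.0", "fastjet": "3.4.2.1", "numpy": "1.23.5"}

    for package, minimum in minimums.items():
        installed = Version(version(package))
        satisfied = installed >= Version(minimum)
        print(f"{package}: installed {installed}, needs >= {minimum}, ok: {satisfied}")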
7 changes: 3 additions & 4 deletions setup.py
@@ -5,18 +5,17 @@
packages=find_packages(where='src'),
package_dir={'': 'src'},
install_requires=[
"particle==0.23.0",
"particle>=0.23.0",
"numpy>=1.23.5",
"scipy>=1.10.1",
"abc-property==1.0",
"fastjet==3.4.1.3",
"fastjet>=3.4.2.1",
"matplotlib>=3.7.1",
],
version='2.0.0',
description='Software Package for Analyzing Relativistic Kinematics in Collision eXperiments',
long_description=open("README.md").read(),
long_description_content_type="text/markdown",
author='Niklas Götz, Renata Krupczak, Hendrik Roch, Nils Sass',
author='Lucas Constantin, Niklas Götz, Renata Krupczak, Hendrik Roch, Carl Rosenkvist, Nils Sass',
author_email="[email protected], [email protected], [email protected]",
url="https://smash-transport.github.io/sparkx/",
download_url="https://github.com/smash-transport/sparkx",
58 changes: 34 additions & 24 deletions src/sparkx/BaseStorer.py
@@ -23,7 +23,7 @@ class BaseStorer(ABC):
----------
num_output_per_event_ : numpy.array
Array containing the event number and the number of particles in this
event as :code:`num_output_per_event_[event i][num_output in event i]`
event as :code:`num_output_per_event_[event i][num_output in event i]`
(updated when filters are applied)
num_events_ : int
Number of events contained in the Oscar object (updated when filters
@@ -105,11 +105,11 @@ def __init__(
self.particle_list_,
self.num_events_,
self.num_output_per_event_,
self.custom_attr_list
self.custom_attr_list,
) = self.loader_.load(**kwargs)
else:
raise ValueError("Loader has not been created properly")

def __add__(self, other: "BaseStorer") -> "BaseStorer":
"""
Adds two BaseStorer objects by combining their particle lists and updating num_output_per_event accordingly.
@@ -134,12 +134,14 @@ def __add__(self, other: "BaseStorer") -> "BaseStorer":
"""
if not isinstance(other, BaseStorer):
raise TypeError("Can only add BaseStorer objects")

# Ensure that both instances are of the same class
if type(self) is not type(other):
raise TypeError("Can only add objects of the same class")

combined_particle_list: list = self.particle_list_ + other.particle_list_
combined_particle_list: list = (
self.particle_list_ + other.particle_list_
)

# Ensure num_output_per_event_ is not None
if self.num_output_per_event_ is None:
@@ -156,18 +158,22 @@ def __add__(self, other: "BaseStorer") -> "BaseStorer":
)

# Adjust event_number for the parts that originally belonged to other
combined_num_output_per_event[self.num_events_:, 0] += self.num_events_
combined_num_output_per_event[self.num_events_ :, 0] += self.num_events_

combined_storer: BaseStorer = self.__class__.__new__(self.__class__)
combined_storer.__dict__.update(self.__dict__) # Inherit all properties from self
combined_storer.__dict__.update(
self.__dict__
) # Inherit all properties from self
combined_storer._update_after_merge(other)
combined_storer.particle_list_ = combined_particle_list
combined_storer.num_output_per_event_ = combined_num_output_per_event
combined_storer.num_events_ = self.num_events_ + other.num_events_
combined_storer.loader_ = None # Loader is not applicable for combined object
combined_storer.loader_ = (
None # Loader is not applicable for combined object
)

return combined_storer
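
The __add__ changes above are mostly black reformatting; the behaviour stays the same: the particle lists are concatenated and the event numbers of the second operand are shifted by the number of events in the first. A hedged usage sketch (the import path, file names, and event counts are assumptions, not taken from this commit):

    from sparkx.Oscar import Oscar  # any BaseStorer subclass supports '+'

    # Hypothetical OSCAR outputs with, say, 100 events each.
    run_a = Oscar("run_a/particle_lists.oscar")
    run_b = Oscar("run_b/particle_lists.oscar")

    combined = run_a + run_b
    # combined holds 200 events; the events coming from run_b keep their
    # particle lists but have their event numbers shifted by 100, so the
    # first column of num_output_per_event stays consecutive.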

@abstractmethod
def _update_after_merge(self, other: "BaseStorer") -> None:
"""
@@ -203,7 +209,7 @@ def num_output_per_event(self) -> Optional[np.ndarray]:
:code:`num_output_per_event[event_n, number_of_particles_in_event_n]`
:code:`num_output_per_event` is updated with every manipulation e.g.
:code:`num_output_per_event` is updated with every manipulation e.g.
after applying cuts.
Returns
@@ -336,7 +342,7 @@ def particle_species(
Returns
-------
self : BaseStorer object
Containing only particle species specified by :code:`pdg_list` for
Containing only particle species specified by :code:`pdg_list` for
every event
"""
self.particle_list_ = particle_species(self.particle_list_, pdg_list)
@@ -348,7 +354,7 @@ def remove_particle_species(
self, pdg_list: Union[int, Union[Tuple[int], List[int], np.ndarray]]
) -> "BaseStorer":
"""
Remove particle species from :code:`particle_list` by their PDG ID in
Remove particle species from :code:`particle_list` by their PDG ID in
every event.
Parameters
@@ -409,7 +415,7 @@ def lower_event_energy_cut(
Parameters
----------
minimum_event_energy : int or float
The minimum event energy threshold. Should be a positive integer or
The minimum event energy threshold. Should be a positive integer or
float.
Returns
@@ -421,10 +427,10 @@
Raises
------
TypeError
If the :code:`minimum_event_energy` parameter is not an integer or
If the :code:`minimum_event_energy` parameter is not an integer or
float.
ValueError
If the :code:`minimum_event_energy` parameter is less than or
If the :code:`minimum_event_energy` parameter is less than or
equal to 0.
"""
self.particle_list_ = lower_event_energy_cut(
@@ -502,7 +508,7 @@ def rapidity_cut(
cut_value : float
If a single value is passed, the cut is applied symmetrically
around 0.
For example, if :code:`cut_value = 1`, only particles with rapidity
For example, if :code:`cut_value = 1`, only particles with rapidity
in :code:`[-1.0, 1.0]` are kept.
cut_value : tuple
@@ -556,7 +562,7 @@
) -> "BaseStorer":
"""
Apply spacetime rapidity (space-time rapidity) cut to all events and
remove all particles with spacetime rapidity not complying with
remove all particles with spacetime rapidity not complying with
cut_value.
Parameters
Expand Down Expand Up @@ -587,7 +593,7 @@ def spacetime_rapidity_cut(

def multiplicity_cut(
self, cut_value_tuple: Tuple[Union[float, None], Union[float, None]]
) -> "BaseStorer":
) -> "BaseStorer":
"""
Apply multiplicity cut. Remove all events with a multiplicity not
complying with cut_value.
@@ -596,7 +602,7 @@ def multiplicity_cut(
----------
cut_value_tuple : tuple
Upper and lower bound for multiplicity. If the multiplicity of an event is
not in this range, the event is discarded. The range is inclusive on the
not in this range, the event is discarded. The range is inclusive on the
lower bound and exclusive on the upper bound.
Returns
@@ -634,7 +640,7 @@ def spacetime_cut(
Returns
-------
self : BaseStorer object
Containing only particles complying with the spacetime cut for all
Containing only particles complying with the spacetime cut for all
events
"""
self.particle_list_ = spacetime_cut(
@@ -662,7 +668,7 @@ def particle_status(
Returns
-------
self : BaseStorer object
Containing only hadrons with status specified by
Containing only hadrons with status specified by
:code:`status_list` for every event
"""
self.particle_list_ = particle_status(self.particle_list_, status_list)
@@ -848,9 +854,13 @@ def _update_num_output_per_event_after_filter(self) -> None:
self.num_output_per_event_[1] = len(self.particle_list_[0])
elif self.num_output_per_event_.ndim == 2:
# Handle the case where num_output_per_event_ is a two-dimensional array
updated_num_output_per_event : np.ndarray = np.ndarray((len(self.particle_list_),2), dtype=int)
updated_num_output_per_event: np.ndarray = np.ndarray(
(len(self.particle_list_), 2), dtype=int
)
for event in range(len(self.particle_list_)):
updated_num_output_per_event[event][0] = event + self.num_output_per_event_[0][0]
updated_num_output_per_event[event][0] = (
event + self.num_output_per_event_[0][0]
)
updated_num_output_per_event[event][1] = len(
self.particle_list_[event]
)
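
The _update_num_output_per_event_after_filter changes above are again mostly black reformatting; the bookkeeping itself is unchanged. As a small illustration with made-up multiplicities, the two-dimensional case rebuilds one [event_number, multiplicity] row per event from the surviving particle lists:

    import numpy as np

    # Hypothetical particle lists for three events after a filter was applied.
    particle_list = [["pi+"] * 120, ["pi-"] * 95, ["p"] * 160]
    first_event_number = 0  # corresponds to num_output_per_event_[0][0]

    updated = np.ndarray((len(particle_list), 2), dtype=int)
    for event in range(len(particle_list)):
        updated[event][0] = event + first_event_number
        updated[event][1] = len(particle_list[event])

    # updated rows: [0, 120], [1, 95], [2, 160]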
@@ -869,7 +879,7 @@ def print_particle_lists_to_file(self, output_file: str) -> None:
Prints the particle lists to a specified file.
This method should be implemented by subclasses to print the particle
lists to the specified output file. The method raises a
lists to the specified output file. The method raises a
:code:`NotImplementedError` if it is not overridden by a subclass.
Parameters