
Blacken.
DinoBektesevic committed Jan 10, 2024
1 parent 94a6bd9 commit 22ff1c0
Showing 16 changed files with 691 additions and 665 deletions.
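Per the commit title, the changes below are the output of running the Black code formatter over the repository. A sketch of the kind of invocation that produces such a commit (the exact Black version and any line-length override come from the project's own configuration, which is not shown here):

    pip install black
    python -m black src/kbmod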
1 change: 0 additions & 1 deletion src/kbmod/__init__.py
@@ -19,4 +19,3 @@
from .search import PSF, RawImage, LayeredImage, ImageStack, StackSearch
from .standardizers import Standardizer, StandardizerConfig
from .image_collection import ImageCollection

58 changes: 28 additions & 30 deletions src/kbmod/image_collection.py
@@ -21,7 +21,9 @@
from .analysis_utils import PostProcess


__all__ = ["ImageCollection", ]
__all__ = [
"ImageCollection",
]


class ImageCollection:
@@ -82,9 +84,9 @@ class ImageCollection:
when instantiated from a Table which does not have the required
columns, or has null-values in the required columns.
"""

required_metadata = ["location", "mjd", "ra", "dec"]
_supporting_metadata = ["std_name", "std_idx", "ext_idx", "wcs", "bbox",
"config"]
_supporting_metadata = ["std_name", "std_idx", "ext_idx", "wcs", "bbox", "config"]

########################
# CONSTRUCTORS
@@ -129,8 +131,7 @@ def _validate(self, metadata):
# check that standardizer to row lookup exists
missing_keys = [key for key in ["std_idx", "ext_idx"] if key not in cols]
if missing_keys:
return False, ("missing required standardizer-row lookup indices: "
f"{missing_keys}")
return False, ("missing required standardizer-row lookup indices: " f"{missing_keys}")

return True, ""

@@ -161,12 +162,9 @@ def __init__(self, metadata, standardizers=None):
if n_stds is None:
n_stds = len(np.unique(metadata["location"]))
self.data.meta["n_stds"] = n_stds
self._standardizers = np.full((n_stds, ), None)
self._standardizers = np.full((n_stds,), None)

self._userColumns = [
col for col in self.data.columns
if col not in self._supporting_metadata
]
self._userColumns = [col for col in self.data.columns if col not in self._supporting_metadata]

@classmethod
def read(cls, *args, format=None, units=None, descriptions=None, **kwargs):
@@ -183,12 +181,13 @@ def read(cls, *args, format=None, units=None, descriptions=None, **kwargs):
ic : `ImageCollection`
Image Collection
"""
metadata = Table.read(*args, format=format, units=units,
descriptions=descriptions, **kwargs)
metadata = Table.read(*args, format=format, units=units, descriptions=descriptions, **kwargs)
metadata["wcs"] = [WCS(w) for w in metadata["wcs"] if w is not None]
metadata["bbox"] = [json.loads(b) for b in metadata["bbox"]]
metadata["config"] = [json.loads(c) for c in metadata["config"]]
meta = json.loads(metadata.meta["comments"][0],)
meta = json.loads(
metadata.meta["comments"][0],
)
metadata.meta = meta
return cls(metadata)

@@ -226,7 +225,9 @@ def fromStandardizers(cls, standardizers, meta=None):
# a.fits ...1
# a.fits ...2
# a.fits ...3
unravelColumns = [key for key, val in stdMeta.items() if isiterable(val) and not isinstance(val, str)]
unravelColumns = [
key for key, val in stdMeta.items() if isiterable(val) and not isinstance(val, str)
]
for j, ext in enumerate(stdFits.processable):
row = {}
for key in stdMeta.keys():
@@ -272,10 +273,7 @@ def fromTargets(cls, tgts, force=None, config=None, **kwargs):
ValueError:
when location is not recognized as a file, directory or an URI
"""
standardizers = [
Standardizer.get(tgt, force=force, config=config, **kwargs)
for tgt in tgts
]
standardizers = [Standardizer.get(tgt, force=force, config=config, **kwargs) for tgt in tgts]
return cls.fromStandardizers(standardizers)

@classmethod
@@ -300,8 +298,7 @@ def fromDir(cls, dirpath, recursive=False, force=None, config=None, **kwargs):
Remaining kwargs, not listed here, are passed onwards to
the underlying `Standardizer`.
"""
fits_files = glob.glob(os.path.join(dirpath, "*fits*"),
recursive=recursive)
fits_files = glob.glob(os.path.join(dirpath, "*fits*"), recursive=recursive)
return cls.fromTargets(fits_files, force=force, config=config, **kwargs)

########################
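For orientation, a minimal usage sketch of the two constructors touched above; the paths are hypothetical and it is assumed the FITS files can be handled by one of the available Standardizer classes:

    from kbmod import ImageCollection

    # build a collection from every FITS file found in a directory ...
    ic = ImageCollection.fromDir("/path/to/fits_dir/")

    # ... or from an explicit list of targets
    ic = ImageCollection.fromTargets(["/path/to/img1.fits", "/path/to/img2.fits"])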
@@ -397,11 +394,10 @@ def get_standardizer(self, index, **kwargs):
self._standardizers[std_idx] = std_cls(**kwargs, **row)

# maybe a clever dataclass to shortcut the idx lookups on the user end?
return {"std": self._standardizers[std_idx],
"ext": self.data[index]["ext_idx"]}
return {"std": self._standardizers[std_idx], "ext": self.data[index]["ext_idx"]}

def get_standardizers(self, idxs, **kwargs):
""" Get the standardizers used to extract metadata of the selected
"""Get the standardizers used to extract metadata of the selected
rows.
Parameters
@@ -419,7 +415,9 @@ def get_standardizers(self, idxs, **kwargs):
the extension (``ext``) that maps to the given metadata row index.
"""
if isinstance(idxs, int):
return [self.get_standardizer(idxs, **kwargs), ]
return [
self.get_standardizer(idxs, **kwargs),
]
else:
return [self.get_standardizer(idx, **kwargs) for idx in idxs]
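As the return statements above show, each entry pairs the lazily instantiated standardizer with the extension index of the selected row. A sketch of how the result might be consumed, with illustrative row indices:

    for entry in ic.get_standardizers([0, 1, 2]):
        std = entry["std"]  # the Standardizer that produced the row
        ext = entry["ext"]  # index of the processable extension within it
        print(std, ext)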

@@ -463,10 +461,11 @@ def write(self, *args, format=None, serialize_method=None, **kwargs):
current_comments = tmpdata.meta.get("comments", None)
if current_comments is not None:
tmpdata.meta = {}
tmpdata.meta["comments"] = [stringified, ]
tmpdata.meta["comments"] = [
stringified,
]

tmpdata.write(*args, format=format, serialize_method=serialize_method,
**kwargs)
tmpdata.write(*args, format=format, serialize_method=serialize_method, **kwargs)
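A round-trip sketch pairing this write with the read classmethod reformatted earlier in the diff; the file name and the ECSV format are illustrative, and any astropy Table format that preserves the meta comments should behave the same way:

    ic.write("collection.ecsv", format="ascii.ecsv", overwrite=True)
    ic2 = ImageCollection.read("collection.ecsv", format="ascii.ecsv")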

def get_zero_shifted_times(self):
"""Returns a list of timestamps such that the first image
@@ -501,8 +500,7 @@ def toImageStack(self):
imageStack : `~kbmod.search.image_stack`
Image stack for processing with KBMOD.
"""
layeredImages = [img for std in self.standardizers
for img in std["std"].toLayeredImage()]
layeredImages = [img for std in self.standardizers for img in std["std"].toLayeredImage()]
return ImageStack(layeredImages)
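The stack produced here is what feeds the search step. A short sketch; StackSearch is re-exported from kbmod per the __init__.py hunk above, but passing the stack straight to its constructor is an assumption made for illustration:

    from kbmod import StackSearch

    stack = ic.toImageStack()
    times = ic.get_zero_shifted_times()  # timestamps relative to the first image
    search = StackSearch(stack)          # assumed constructor signature, consult the kbmod docs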

def _calc_suggested_angle(self, wcs, center_pixel=(1000, 2000), step=12):
Expand Down Expand Up @@ -579,7 +577,7 @@ def run(self, config):
# Compute the ecliptic angle for the images. Assume they are all the
# same size? Technically that is currently a requirement, although it's
# not explicit (can this be in C++ code?)
center_pixel = (imageStack.get_width()/2, imageStack.get_height()/2)
center_pixel = (imageStack.get_width() / 2, imageStack.get_height() / 2)
suggested_angle = self._calc_suggested_angle(self.wcs[0], center_pixel)

# Set up the post processing data structure.
