Skip to content

Commit

Permalink
add NoMountData to autofix list and add option to skip monotonically …
Browse files Browse the repository at this point in the history
…increasing time check
  • Loading branch information
kmharrington committed Jan 2, 2025
1 parent 6f04c1c commit 16a09f6
Show file tree
Hide file tree
Showing 3 changed files with 59 additions and 12 deletions.
41 changes: 31 additions & 10 deletions sotodlib/io/bookbinder.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,10 @@ class DuplicateAncillaryData(Exception):
"""Exception raised when we find the HK data has copies of the same timestamps"""
pass

class NonMonotonicAncillaryTimes(Exception):
    """Raised when housekeeping (HK) data contains timestamps that are not
    strictly monotonically increasing."""

class BookDirHasFiles(Exception):
    """Raised when a book directory already contains files."""
Expand Down Expand Up @@ -157,7 +161,7 @@ def process_frame(self, frame):
self.times.append(np.array(block.times) / core.G3Units.s)
self.data.append(block[self.field])

def finalize(self, drop_duplicates=False):
def finalize(self, drop_duplicates=False, require_monotonic_times=True):
"""Finalize data, and store in numpy array"""
self.times = np.hstack(self.times, dtype=np.float64)
self.data = np.hstack(self.data)
Expand All @@ -177,8 +181,14 @@ def finalize(self, drop_duplicates=False):
)
self.times = self.times[idxs]
self.data = self.data[idxs]
assert np.all(np.diff(self.times)>0), \
f"Times from {self.addr} are not increasing"
if not np.all(np.diff(self.times)>0):
bad = np.sum( np.diff(self.times) <= 0)
msg = f"Times from {self.addr} have {bad} samples that are " \
"not increasing"
if require_monotonic_times:
raise NonMonotonicAncillaryTimes(msg)
else:
log.warning(msg)

@dataclass
class HkData:
Expand Down Expand Up @@ -215,12 +225,15 @@ def process_frame(self, frame):
if isinstance(f, HkDataField):
f.process_frame(frame)

def finalize(self, drop_duplicates=True):
def finalize(self, drop_duplicates=True, require_monotonic_times=True):
"""Finalizes HkDatafields"""
for fld in fields(self):
f = getattr(self, fld.name)
if isinstance(f, HkDataField):
f.finalize(drop_duplicates=drop_duplicates)
f.finalize(
drop_duplicates=drop_duplicates,
require_monotonic_times=require_monotonic_times,
)

class AncilProcessor:
"""
Expand Down Expand Up @@ -258,7 +271,8 @@ class for what housekeeping fields are allowed. For example::
"""
def __init__(self, files, book_id, hk_fields: Dict,
drop_duplicates=False, require_hwp=True,
require_acu=True, log=None
require_acu=True, require_monotonic_times=True,
log=None
):
self.hkdata: HkData = HkData.from_dict(hk_fields)

Expand All @@ -270,7 +284,7 @@ def __init__(self, files, book_id, hk_fields: Dict,
self.drop_duplicates = drop_duplicates
self.require_hwp = require_hwp
self.require_acu = require_acu

self.require_monotonic_times = require_monotonic_times
if log is None:
self.log = logging.getLogger('bookbinder')
else:
Expand Down Expand Up @@ -335,7 +349,10 @@ def preprocess(self):
)
self.hkdata.hwp_freq = None

self.hkdata.finalize(drop_duplicates=self.drop_duplicates)
self.hkdata.finalize(
drop_duplicates=self.drop_duplicates,
require_monotonic_times=self.require_monotonic_times,
)
self.preprocessed = True

def bind(self, outdir, times, frame_idxs, file_idxs):
Expand Down Expand Up @@ -803,6 +820,8 @@ class BookBinder:
multiple copies of the same data
require_acu: bool, optional
if true, will throw error if we do not find Mount data
require_monotonic_times: bool, optional
        if true, will throw error if housekeeping timestamps are ever found to be non-increasing
require_hwp: bool, optional
if true, will throw error if we do not find HWP data
allow_bad_time: bool, optional
Expand All @@ -821,10 +840,11 @@ class BookBinder:
file_idxs : np.ndarray
Array of output file indices for all output frames in the book
"""
def __init__(self, book, obsdb, filedb, data_root, readout_ids, outdir, hk_fields,
max_samps_per_frame=50_000, max_file_size=1e9,
def __init__(self, book, obsdb, filedb, data_root, readout_ids,
outdir, hk_fields, max_samps_per_frame=50_000, max_file_size=1e9,
ignore_tags=False, ancil_drop_duplicates=False,
require_hwp=True, require_acu=True,
require_monotonic_times=True,
allow_bad_timing=False):
self.filedb = filedb
self.book = book
Expand Down Expand Up @@ -888,6 +908,7 @@ def __init__(self, book, obsdb, filedb, data_root, readout_ids, outdir, hk_field
drop_duplicates=ancil_drop_duplicates,
require_hwp=require_hwp,
require_acu=require_acu,
require_monotonic_times=require_monotonic_times,
)
self.streams = {}
for obs_id, files in filedb.items():
Expand Down
3 changes: 3 additions & 0 deletions sotodlib/io/imprinter.py
Original file line number Diff line number Diff line change
Expand Up @@ -658,6 +658,7 @@ def _get_binder_for_book(self,
allow_bad_timing=False,
require_hwp=True,
require_acu=True,
require_monotonic_times=True,
):
"""get the appropriate bookbinder for the book based on its type"""

Expand All @@ -683,6 +684,7 @@ def _get_binder_for_book(self,
allow_bad_timing=allow_bad_timing,
require_hwp=require_hwp,
require_acu=require_acu,
require_monotonic_times=require_monotonic_times,
)
return bookbinder

Expand Down Expand Up @@ -742,6 +744,7 @@ def bind_book(
allow_bad_timing=False,
require_hwp=True,
require_acu=True,
require_monotonic_times=True,
check_configs={}
):
"""Bind book using bookbinder
Expand Down
27 changes: 25 additions & 2 deletions sotodlib/io/imprinter_cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,11 +80,15 @@ def fix_single_book(imprint:Imprinter, book:Books):
)
require_acu = set_tag_and_validate("Require ACU data? ([y]/n)")
require_hwp = set_tag_and_validate("Require HWP data? ([y]/n)")
require_monotonic_times = set_tag_and_validate(
"Require Monotonic Housekeeping times? [y]/n"
)
imprint.bind_book(
book, ignore_tags=ignore_tags, ancil_drop_duplicates=ancil_drop_duplicates,
allow_bad_timing=allow_bad_timing,
require_acu=require_acu,
require_hwp=require_hwp,
require_monotonic_times=require_monotonic_times,
)
elif resp == 4:
utils.set_book_wont_bind(imprint, book)
Expand Down Expand Up @@ -144,7 +148,7 @@ def autofix_failed_books(
utils.set_book_rebind(imprint, book)
imprint.bind_book(book)
except Exception as e :
print(f"Book {book.bid} failed again!")
print(f"Book {book.bid} failed again!")
elif "DuplicateAncillaryData" in book.message:
print(f"Binding {book.bid} while fixing Duplicate Ancil Data")
try:
Expand Down Expand Up @@ -178,11 +182,30 @@ def autofix_failed_books(
if not test_mode:
utils.set_book_rebind(imprint, book)
imprint.bind_book(book, allow_bad_timing=True,)
except Exception as e:
except Exception:
print(f"Book {book.bid} failed again!")
book.message = book.message + \
' SECOND-FAIL. Tried with `allow_bad_timing=True`'
imprint.get_session().commit()
elif (
"NoMountData" in book.message
):
## ACU data was messed up somehow. this is ok for oper books
if book.type == 'obs':
print("Not autofixing obs books with bad mount data")
continue
elif book.type != 'oper':
raise ValueError(f"What book got me here? {book.bid}")
print(f"Binding {book.bid} without complete ACU data")
try:
if not test_mode:
utils.set_book_rebind(imprint, book)
imprint.bind_book(book, require_acu=False,)
except:
print(f"Book {book.bid} failed again!")
book.message = book.message + \
' SECOND-FAIL. Tried with `require_acu=False`'
imprint.get_session().commit()
elif 'MissingReadoutIDError' in book.message:
print(f"Book {book.bid} does not have readout ids, not binding")
if not test_mode:
Expand Down

0 comments on commit 16a09f6

Please sign in to comment.