Reduce memory footprint with an optimization in cache checking (#321)
jimmymathews authored May 28, 2024
1 parent f1ce820 commit 6bf1e02
Showing 2 changed files with 7 additions and 3 deletions.
2 changes: 2 additions & 0 deletions spatialprofilingtoolbox/ondemand/fast_cache_assessor.py
@@ -45,6 +45,8 @@ def block_until_available(self):
             else:
                 verbose=False
             up_to_date = self._cache_is_up_to_date(verbose=verbose)
+            if up_to_date:
+                break
             if verbose:
                 logger.debug('Waiting for cache to be available.')
             check_count += 1
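For context, the first hunk's change is the two added lines: the polling loop in block_until_available now breaks as soon as the cache check succeeds instead of continuing to wait. A minimal standalone sketch of that pattern follows (the loop scaffolding, the poll_interval and log_every parameters, and the injected cache_is_up_to_date callable are illustrative assumptions, not the module's actual code):

import logging
import time
from typing import Callable

logger = logging.getLogger(__name__)

def block_until_available(
    cache_is_up_to_date: Callable[..., bool],
    poll_interval: float = 1.0,
    log_every: int = 10,
) -> None:
    # Poll the cache check repeatedly, logging only every `log_every` checks.
    check_count = 0
    while True:
        verbose = check_count % log_every == 0
        if cache_is_up_to_date(verbose=verbose):
            break  # early exit corresponding to the lines added in this commit
        if verbose:
            logger.debug('Waiting for cache to be available.')
        check_count += 1
        time.sleep(poll_interval)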
5 changes: 5 additions & 3 deletions (second changed file)
@@ -93,10 +93,12 @@ def load_from_db(self, study: str | None = None) -> None:
                 SELECT specimen, blob_contents FROM ondemand_studies_index osi
                 WHERE osi.blob_type='centroids';
             ''', (study, ))
-            specimens_to_blobs = tuple(cursor.fetchall())
             self._studies[study] = {}
-            for _, blob in specimens_to_blobs:
-                obj = pickle.loads(blob)
+            while True:
+                row = cursor.fetchone()
+                if row is None:
+                    break
+                obj = pickle.loads(row[1])
                 for key, value in obj.items():
                     if not key in self._studies:
                         self._studies[study][key] = {}
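The second hunk is where the memory reduction named in the commit title comes from: instead of materializing the entire result set of centroid blobs with fetchall(), the loop pulls one row at a time with fetchone() and deserializes it immediately, so at most one raw pickled blob is held by the fetch step at any moment. A minimal before/after sketch of the pattern, using sqlite3 and hypothetical helper names (the real method merges each deserialized object into self._studies rather than returning a list):

import pickle
import sqlite3

def load_blobs_eagerly(cursor: sqlite3.Cursor) -> list:
    # Before: fetchall() buffers every (specimen, blob) row at once,
    # so all raw pickled blobs are alive in memory simultaneously.
    specimens_to_blobs = tuple(cursor.fetchall())
    return [pickle.loads(blob) for _, blob in specimens_to_blobs]

def load_blobs_streaming(cursor: sqlite3.Cursor) -> list:
    # After: fetchone() yields a single row per iteration; each raw blob can
    # be garbage-collected as soon as it has been deserialized.
    objects = []
    while True:
        row = cursor.fetchone()
        if row is None:
            break
        objects.append(pickle.loads(row[1]))
    return objects

How much this helps in practice depends on the database driver: some client libraries buffer the full result set at execute time regardless of fetch style, in which case a server-side cursor would be needed for the same effect.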
