Skip to content

Commit

Permalink
update docstring
Browse files Browse the repository at this point in the history
Signed-off-by: KumoLiu <[email protected]>
  • Loading branch information
KumoLiu committed Nov 10, 2023
1 parent 1bb1ae7 commit 520e2b3
Showing 1 changed file with 28 additions and 1 deletion.
29 changes: 28 additions & 1 deletion monai/data/grid_dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -196,6 +196,25 @@ class GridPatchDataset(IterableDataset):
see also: :py:class:`monai.data.PatchIter` or :py:class:`monai.data.PatchIterd`.
transform: a callable data transform operates on the patches.
with_coordinates: whether to yield the coordinates of each patch, default to `True`.
cache: whether to use cache mechanism, default to `False`.
see also: :py:class:`monai.data.CacheDataset`.
cache_num: number of items to be cached. Default is `sys.maxsize`.
will take the minimum of (cache_num, data_length x cache_rate, data_length).
cache_rate: percentage of cached data in total, default is 1.0 (cache all).
will take the minimum of (cache_num, data_length x cache_rate, data_length).
num_workers: the number of worker threads if computing cache in the initialization.
If num_workers is None then the number returned by os.cpu_count() is used.
If a value less than 1 is specified, 1 will be used instead.
progress: whether to display a progress bar.
copy_cache: whether to `deepcopy` the cache content before applying the random transforms,
default to `True`. if the random transforms don't modify the cached content
(for example, randomly crop from the cached image and deepcopy the crop region)
or if every cache item is only used once in a `multi-processing` environment,
may set `copy_cache=False` for better performance.
as_contiguous: whether to convert the cached NumPy array or PyTorch tensor to be contiguous.
it may help improve the performance of following logic.
hash_func: a callable to compute hash from data items to be cached.
defaults to `monai.data.utils.pickle_hashing`.
"""

Expand Down Expand Up @@ -305,6 +324,14 @@ def _load_cache_item(self, idx: int):
return patch_cache

def _generate_patches(self, src, **apply_args):
"""
Yield patches optionally post-processed by transform.
Args:
src: an iterable of image patches.
apply_args: other args for `self.patch_transform`.
"""
for patch, *others in src:
out_patch = patch
if self.patch_transform is not None:
Expand Down Expand Up @@ -343,7 +370,7 @@ def __iter__(self):

class PatchDataset(IterableDataset):
"""
returns a patch from an image dataset.
Yields patches from data read from an image dataset.
The patches are generated by a user-specified callable `patch_func`,
and are optionally post-processed by `transform`.
For example, to generate random patch samples from an image dataset:
Expand Down

0 comments on commit 520e2b3

Please sign in to comment.