From de6fb383f340f9563570aa8804366174020b0f3d Mon Sep 17 00:00:00 2001 From: Vibhu Jawa Date: Mon, 5 Aug 2024 11:12:14 -0700 Subject: [PATCH] Skip loading model if it's already fitted Signed-off-by: Vibhu Jawa --- crossfit/backend/torch/loader.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/crossfit/backend/torch/loader.py b/crossfit/backend/torch/loader.py index ce48589..3514df2 100644 --- a/crossfit/backend/torch/loader.py +++ b/crossfit/backend/torch/loader.py @@ -75,7 +75,6 @@ def __next__(self): batch = {key: val[self.current_idx : end] for key, val in self.tensor_dict.items()} if self.max_seq_len is not None: - # TODO: Check this if self.padding_side == "right": batch = {key: val[:, : self.max_seq_len] for key, val in batch.items()} else: @@ -162,10 +161,6 @@ def __next__(self): for key, val in self.tensor_dict.items() if key not in self.to_ignore } - # TODO: Fix max_length - if self.max_seq_len is None: - self.max_seq_len = self.model.max_seq_length() - batch = clip_tokens( token_o=batch, max_length=self.max_seq_len,