Commit c529ced: merge_main
elephaint committed Nov 19, 2024
2 parents ddc617f + 642ced4
Showing 73 changed files with 284 additions and 51 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/python-publish.yml
@@ -33,7 +33,7 @@ jobs:
- name: Build package
run: python -m build
- name: Publish package
- uses: pypa/gh-action-pypi-publish@fb13cb306901256ace3dab689990e13a5550ffaa # v1.11.0
+ uses: pypa/gh-action-pypi-publish@15c56dba361d8335944d31a2ecd17d700fc7bcbc # v1.12.2
with:
user: __token__
password: ${{ secrets.PYPI_API_TOKEN }}
14 changes: 13 additions & 1 deletion nbs/common.base_model.ipynb
@@ -498,13 +498,25 @@
" datamodule_constructor = TimeSeriesDataModule\n",
" else:\n",
" datamodule_constructor = _DistributedTimeSeriesDataModule\n",
" \n",
" dataloader_kwargs = self.dataloader_kwargs if self.dataloader_kwargs is not None else {}\n",
" \n",
" if self.num_workers_loader != 0: # value is not at its default\n",
" warnings.warn(\n",
" \"The `num_workers_loader` argument is deprecated and will be removed in a future version. \"\n",
" \"Please provide num_workers through `dataloader_kwargs`, e.g. \"\n",
" f\"`dataloader_kwargs={{'num_workers': {self.num_workers_loader}}}`\",\n",
" category=FutureWarning,\n",
" )\n",
" dataloader_kwargs['num_workers'] = self.num_workers_loader\n",
"\n",
" datamodule = datamodule_constructor(\n",
" dataset=dataset, \n",
" batch_size=batch_size,\n",
" valid_batch_size=valid_batch_size,\n",
" num_workers=self.num_workers_loader,\n",
" drop_last=self.drop_last_loader,\n",
" shuffle_train=shuffle_train,\n",
" **dataloader_kwargs\n",
" )\n",
"\n",
" if self.val_check_steps > self.max_steps:\n",
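What this change does in practice: `num_workers_loader` still works but now emits a `FutureWarning`, and its value is folded into `dataloader_kwargs` before the datamodule is built. A minimal migration sketch, assuming the same two arguments are exposed on concrete model classes such as `NHITS` (values are illustrative):

```python
from neuralforecast.models import NHITS

# Deprecated: still accepted, but warns and is folded into dataloader_kwargs.
model_old = NHITS(h=12, input_size=24, num_workers_loader=4)

# Preferred: pass DataLoader options through dataloader_kwargs directly.
model_new = NHITS(h=12, input_size=24, dataloader_kwargs={"num_workers": 4})
```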
19 changes: 15 additions & 4 deletions nbs/core.ipynb
@@ -1595,15 +1595,26 @@
" except FileNotFoundError:\n",
" raise Exception('No configuration found in directory.')\n",
"\n",
" # in 1.6.4, `local_scaler_type` / `scalers_` lived on the dataset.\n",
" # in order to preserve backwards-compatibility, we check to see if these are found on the dataset\n",
" # in case they cannot be found in `config_dict`\n",
" default_scalar_type = getattr(dataset, \"local_scaler_type\", None)\n",
" default_scalars_ = getattr(dataset, \"scalers_\", None)\n",
"\n",
" # Create NeuralForecast object\n",
" neuralforecast = NeuralForecast(\n",
" models=models,\n",
" freq=config_dict['freq'],\n",
" local_scaler_type=config_dict['local_scaler_type'],\n",
" local_scaler_type=config_dict.get(\"local_scaler_type\", default_scalar_type),\n",
" )\n",
"\n",
" for attr in ['id_col', 'time_col', 'target_col']:\n",
" setattr(neuralforecast, attr, config_dict[attr])\n",
" attr_to_default = {\n",
" \"id_col\": \"unique_id\",\n",
" \"time_col\": \"ds\",\n",
" \"target_col\": \"y\"\n",
" }\n",
" for attr, default in attr_to_default.items():\n",
" setattr(neuralforecast, attr, config_dict.get(attr, default))\n",
" # only restore attribute if available\n",
" for attr in ['prediction_intervals', '_cs_df']:\n",
" if attr in config_dict.keys():\n",
@@ -1624,7 +1635,7 @@
" # Fitted flag\n",
" neuralforecast._fitted = config_dict['_fitted']\n",
"\n",
" neuralforecast.scalers_ = config_dict['scalers_']\n",
" neuralforecast.scalers_ = config_dict.get(\"scalers_\", default_scalars_)\n",
"\n",
" return neuralforecast\n",
" \n",
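The `config_dict.get(..., default)` calls above implement a three-level fallback for older checkpoints: prefer the saved config, then the attribute that version 1.6.4 stored on the dataset, then a hard default. A standalone sketch of the same pattern (the helper name is illustrative, not part of this commit):

```python
def restore_with_fallback(config_dict: dict, dataset):
    # Newer checkpoints store these keys in config_dict; <=1.6.4 stored them
    # on the dataset, so getattr supplies the second-level fallback.
    scaler_type = config_dict.get(
        "local_scaler_type", getattr(dataset, "local_scaler_type", None)
    )
    scalers_ = config_dict.get("scalers_", getattr(dataset, "scalers_", None))
    # Column names fall back to the library-wide defaults.
    id_col = config_dict.get("id_col", "unique_id")
    time_col = config_dict.get("time_col", "ds")
    target_col = config_dict.get("target_col", "y")
    return scaler_type, scalers_, id_col, time_col, target_col
```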
9 changes: 7 additions & 2 deletions nbs/docs/tutorials/18_adding_models.ipynb
@@ -269,7 +269,6 @@
" step_size: int = 1,\n",
" scaler_type: str = 'identity',\n",
" random_seed: int = 1,\n",
" num_workers_loader: int = 0,\n",
" drop_last_loader: bool = False,\n",
" **trainer_kwargs):\n",
" # Inherit BaseWindows class\n",
@@ -415,7 +414,13 @@
]
}
],
"metadata": {},
"metadata": {
"kernelspec": {
"display_name": "python3",
"language": "python",
"name": "python3"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
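The tutorial model simply drops `num_workers_loader` from its signature here. Judging by the pattern applied to the built-in models below, a custom model written against this version would accept `dataloader_kwargs` and forward it to the base class instead; a sketch under that assumption (arguments abbreviated, class name illustrative):

```python
from neuralforecast.common._base_windows import BaseWindows

class MyModel(BaseWindows):  # inherits BaseWindows, as in the tutorial
    def __init__(self,
                 h,
                 input_size,
                 step_size: int = 1,
                 scaler_type: str = 'identity',
                 random_seed: int = 1,
                 drop_last_loader: bool = False,
                 dataloader_kwargs=None,  # replaces num_workers_loader
                 **trainer_kwargs):
        super().__init__(h=h,
                         input_size=input_size,
                         step_size=step_size,
                         scaler_type=scaler_type,
                         random_seed=random_seed,
                         drop_last_loader=drop_last_loader,
                         dataloader_kwargs=dataloader_kwargs,
                         **trainer_kwargs)
```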
3 changes: 3 additions & 0 deletions nbs/models.autoformer.ipynb
@@ -466,6 +466,7 @@
" `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
" `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
" `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>\n",
"\n",
"\t*References*<br>\n",
@@ -516,6 +517,7 @@
" optimizer_kwargs = None,\n",
" lr_scheduler = None,\n",
" lr_scheduler_kwargs = None,\n",
" dataloader_kwargs=None,\n",
" **trainer_kwargs):\n",
" super(Autoformer, self).__init__(h=h,\n",
" input_size=input_size,\n",
@@ -544,6 +546,7 @@
" optimizer_kwargs=optimizer_kwargs,\n",
" lr_scheduler=lr_scheduler,\n",
" lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
" **trainer_kwargs)\n",
"\n",
" # Architecture\n",
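Every model notebook in this commit receives the same three-line change: a `dataloader_kwargs` entry in the docstring, a `None` default in the constructor, and a pass-through to the base class. Usage is therefore uniform across models; an illustrative sketch with `Autoformer` (hyperparameter values are arbitrary):

```python
from neuralforecast import NeuralForecast
from neuralforecast.models import Autoformer

# Options here are handed to the underlying PyTorch DataLoader via the
# TimeSeriesDataModule, so any torch.utils.data.DataLoader kwarg applies.
model = Autoformer(
    h=12,
    input_size=24,
    max_steps=100,
    dataloader_kwargs={"num_workers": 2, "pin_memory": True},
)
nf = NeuralForecast(models=[model], freq="M")
```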
3 changes: 3 additions & 0 deletions nbs/models.bitcn.ipynb
@@ -185,6 +185,7 @@
" `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
" `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
" `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br> \n",
"\n",
" **References**<br> \n",
@@ -228,6 +229,7 @@
" optimizer_kwargs = None,\n",
" lr_scheduler = None,\n",
" lr_scheduler_kwargs = None,\n",
" dataloader_kwargs=None,\n",
" **trainer_kwargs):\n",
" super(BiTCN, self).__init__(\n",
" h=h,\n",
@@ -257,6 +259,7 @@
" optimizer_kwargs=optimizer_kwargs,\n",
" lr_scheduler=lr_scheduler,\n",
" lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
" **trainer_kwargs\n",
" )\n",
"\n",
3 changes: 3 additions & 0 deletions nbs/models.deepar.ipynb
@@ -186,6 +186,7 @@
" `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
" `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
" `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br> \n",
"\n",
" **References**<br>\n",
@@ -234,6 +235,7 @@
" optimizer_kwargs = None,\n",
" lr_scheduler = None,\n",
" lr_scheduler_kwargs = None,\n",
" dataloader_kwargs = None,\n",
" **trainer_kwargs):\n",
"\n",
" if exclude_insample_y:\n",
@@ -267,6 +269,7 @@
" optimizer_kwargs=optimizer_kwargs,\n",
" lr_scheduler=lr_scheduler,\n",
" lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
" **trainer_kwargs)\n",
"\n",
" self.n_samples = trajectory_samples\n",
3 changes: 3 additions & 0 deletions nbs/models.deepnpts.ipynb
@@ -127,6 +127,7 @@
" `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
" `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
" `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br> \n",
"\n",
" **References**<br>\n",
@@ -172,6 +173,7 @@
" optimizer_kwargs = None,\n",
" lr_scheduler = None,\n",
" lr_scheduler_kwargs = None,\n",
" dataloader_kwargs = None,\n",
" **trainer_kwargs):\n",
"\n",
" if exclude_insample_y:\n",
@@ -211,6 +213,7 @@
" optimizer_kwargs=optimizer_kwargs,\n",
" lr_scheduler=lr_scheduler,\n",
" lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
" **trainer_kwargs)\n",
"\n",
" self.h = h\n",
3 changes: 3 additions & 0 deletions nbs/models.dilated_rnn.ipynb
@@ -406,6 +406,7 @@
" `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
" `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
" `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br> \n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br> \n",
" \"\"\"\n",
" # Class attributes\n",
@@ -450,6 +451,7 @@
" optimizer_kwargs = None,\n",
" lr_scheduler = None,\n",
" lr_scheduler_kwargs = None,\n",
" dataloader_kwargs = None,\n",
" **trainer_kwargs):\n",
" super(DilatedRNN, self).__init__(\n",
" h=h,\n",
@@ -479,6 +481,7 @@
" optimizer_kwargs=optimizer_kwargs,\n",
" lr_scheduler=lr_scheduler,\n",
" lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
" **trainer_kwargs\n",
" )\n",
"\n",
3 changes: 3 additions & 0 deletions nbs/models.dlinear.ipynb
@@ -169,6 +169,7 @@
" `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
" `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
" `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>\n",
"\n",
"\t*References*<br>\n",
@@ -210,6 +211,7 @@
" optimizer_kwargs = None,\n",
" lr_scheduler = None,\n",
" lr_scheduler_kwargs = None,\n",
" dataloader_kwargs=None,\n",
" **trainer_kwargs):\n",
" super(DLinear, self).__init__(h=h,\n",
" input_size=input_size,\n",
@@ -238,6 +240,7 @@
" optimizer_kwargs=optimizer_kwargs,\n",
" lr_scheduler=lr_scheduler,\n",
" lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
" **trainer_kwargs)\n",
" \n",
" # Architecture\n",
5 changes: 4 additions & 1 deletion nbs/models.fedformer.ipynb
@@ -469,6 +469,7 @@
" `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
" `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
" `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>\n",
"\n",
" \"\"\"\n",
@@ -518,6 +519,7 @@
" optimizer_kwargs=None,\n",
" lr_scheduler = None,\n",
" lr_scheduler_kwargs = None,\n",
" dataloader_kwargs = None,\n",
" **trainer_kwargs):\n",
" super(FEDformer, self).__init__(h=h,\n",
" input_size=input_size,\n",
@@ -544,7 +546,8 @@
" optimizer=optimizer,\n",
" optimizer_kwargs=optimizer_kwargs,\n",
" lr_scheduler=lr_scheduler,\n",
" lr_scheduler_kwargs=lr_scheduler_kwargs, \n",
" lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs, \n",
" **trainer_kwargs)\n",
" # Architecture\n",
" self.label_len = int(np.ceil(input_size * decoder_input_size_multiplier))\n",
3 changes: 3 additions & 0 deletions nbs/models.gru.ipynb
@@ -132,6 +132,7 @@
" `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
" `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
" `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br> \n",
" \"\"\"\n",
" # Class attributes\n",
@@ -179,6 +180,7 @@
" optimizer_kwargs = None,\n",
" lr_scheduler = None,\n",
" lr_scheduler_kwargs = None,\n",
" dataloader_kwargs = None,\n",
" **trainer_kwargs):\n",
" \n",
" self.RECURRENT = recurrent\n",
@@ -211,6 +213,7 @@
" optimizer_kwargs=optimizer_kwargs,\n",
" lr_scheduler=lr_scheduler,\n",
" lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
" **trainer_kwargs\n",
" )\n",
"\n",
3 changes: 3 additions & 0 deletions nbs/models.informer.ipynb
@@ -313,6 +313,7 @@
" `optimizer_kwargs`: dict, optional, list of parameters used by the user specified `optimizer`.<br>\n",
" `lr_scheduler`: Subclass of 'torch.optim.lr_scheduler.LRScheduler', optional, user specified lr_scheduler instead of the default choice (StepLR).<br>\n",
" `lr_scheduler_kwargs`: dict, optional, list of parameters used by the user specified `lr_scheduler`.<br>\n",
" `dataloader_kwargs`: dict, optional, list of parameters passed into the PyTorch Lightning dataloader by the `TimeSeriesDataLoader`. <br>\n",
" `**trainer_kwargs`: int, keyword trainer arguments inherited from [PyTorch Lighning's trainer](https://pytorch-lightning.readthedocs.io/en/stable/api/pytorch_lightning.trainer.trainer.Trainer.html?highlight=trainer).<br>\n",
"\n",
"\t*References*<br>\n",
@@ -363,6 +364,7 @@
" optimizer_kwargs = None,\n",
" lr_scheduler = None,\n",
" lr_scheduler_kwargs = None,\n",
" dataloader_kwargs = None,\n",
" **trainer_kwargs):\n",
" super(Informer, self).__init__(h=h,\n",
" input_size=input_size,\n",
@@ -391,6 +393,7 @@
" optimizer_kwargs=optimizer_kwargs,\n",
" lr_scheduler=lr_scheduler,\n",
" lr_scheduler_kwargs=lr_scheduler_kwargs,\n",
" dataloader_kwargs=dataloader_kwargs,\n",
" **trainer_kwargs)\n",
"\n",
" # Architecture\n",