Skip to content

Commit

Permalink
Fix all docstrings for docs (#97)
Browse files Browse the repository at this point in the history
  • Loading branch information
alan-cooney authored Nov 23, 2023
1 parent c16afdb commit c962143
Show file tree
Hide file tree
Showing 30 changed files with 241 additions and 107 deletions.
2 changes: 2 additions & 0 deletions .github/workflows/checks.yml
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,8 @@ jobs:
run: poetry run pyright
- name: Ruff lint
run: poetry run ruff check . --output-format=github
- name: Docstrings lint
run: poetry run pydoclint .
- name: Ruff format
run: poetry run ruff format . --check
- name: Pytest
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/gh-pages.yml
Original file line number Diff line number Diff line change
Expand Up @@ -27,10 +27,10 @@ jobs:
cache: "poetry"
- name: Install poe
run: pip install poethepoet
- name: Install mkdocs
run: pip install mkdocs
- name: Install dependencies
run: poetry install --with docs
- name: Generate docs
run: poe gen-docs
- name: Build Docs
run: poe make-docs
- name: Upload Docs Artifact
Expand Down
1 change: 1 addition & 0 deletions .vscode/cspell.json
Original file line number Diff line number Diff line change
Expand Up @@ -87,6 +87,7 @@
"typecheck",
"ultralow",
"uncopyrighted",
"ungraphed",
"unsqueeze",
"venv",
"virtualenv",
Expand Down
17 changes: 8 additions & 9 deletions docs/gen_ref_pages.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ def is_source_file(file: Path) -> bool:
"""Check if the provided file is a source file for Sparse Encoder.
Args:
file (Path): The file path to check.
file: The file path to check.
Returns:
bool: True if the file is a source file, False otherwise.
Expand All @@ -34,11 +34,10 @@ def process_path(path: Path) -> tuple[Path, Path, Path]:
"""Process the given path for documentation generation.
Args:
path (Path): The file path to process.
path: The file path to process.
Returns:
tuple[Path, Path, Path]: A tuple containing module path, documentation path,
and full documentation path.
A tuple containing module path, documentation path, and full documentation path.
"""
module_path = path.relative_to(PROJECT_ROOT).with_suffix("")
doc_path = path.relative_to(PROJECT_ROOT).with_suffix(".md")
Expand All @@ -56,9 +55,9 @@ def generate_documentation(path: Path, module_path: Path, full_doc_path: Path) -
"""Generate documentation for the given source file.
Args:
path (Path): The source file path.
module_path (Path): The module path.
full_doc_path (Path): The full documentation file path.
path: The source file path.
module_path: The module path.
full_doc_path: The full documentation file path.
"""
if module_path.name == "__main__":
return
Expand All @@ -77,8 +76,8 @@ def generate_nav_file(nav: mkdocs_gen_files.nav.Nav, reference_dir: Path) -> Non
"""Generate the navigation file for the documentation.
Args:
nav (mkdocs_gen_files.Nav): The navigation object.
reference_dir (Path): The directory to write the navigation file.
nav: The navigation object.
reference_dir: The directory to write the navigation file.
"""
with mkdocs_gen_files.open(reference_dir / "SUMMARY.md", "w") as nav_file:
nav_file.writelines(nav.build_literate_nav())
Expand Down
62 changes: 46 additions & 16 deletions poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

20 changes: 17 additions & 3 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,8 @@
jupyter=">=1"
plotly=">=5"
poethepoet=">=0.24.2"
pyright=">=1.1.334"
pydoclint="^0.3.8"
pyright=">=1.1.337"
pytest=">=7"
pytest-cov=">=4"
pytest-timeout=">=2.2.0"
Expand All @@ -42,14 +43,14 @@
mkdocs-section-index=">=0.3.8"
mkdocstrings={extras=["python"], version=">=0.24.0"}
mkdocstrings-python=">=1.7.3"
mknotebooks="^0.8.0"
pytkdocs-tweaks=">=0.0.7"
mknotebooks = "^0.8.0"

[tool.poe.tasks]
[tool.poe.tasks.check]
help="All checks"
ignore_fail=false
sequence=["check-lock", "format", "lint", "test", "typecheck"]
sequence=["check-lock", "docstring-lint", "format", "lint", "test", "typecheck"]

[tool.poe.tasks.format]
cmd="ruff format ."
Expand All @@ -59,6 +60,10 @@
cmd="ruff check . --fix"
help="Lint (with autofix)"

[tool.poe.tasks.docstring-lint]
cmd="pydoclint ."
help="Lint docstrings"

[tool.poe.tasks.ruff]
help=" [alias for lint && format]"
ignore_fail=false
Expand Down Expand Up @@ -95,6 +100,7 @@
[tool.poe.tasks.gen-docs]
help="Cleans out the automatically generated docs."
script="docs.gen_ref_pages:run"

[tool.poe.tasks.make-docs]
cmd="mkdocs build"
help="Generates our docs"
Expand Down Expand Up @@ -225,3 +231,11 @@
strictListInference=true
strictParameterNoneValue=true
strictSetInference=true

[tool.pydoclint]
allow-init-docstring=true
arg-type-hints-in-docstring=false
check-return-types=false
check-yield-types=false
exclude='\.venv'
style="google"
Original file line number Diff line number Diff line change
Expand Up @@ -68,5 +68,7 @@ def resample_dead_neurons(
autoencoder: Sparse autoencoder model.
loss_fn: Loss function.
train_batch_size: Train batch size (also used for resampling).
Returns:
Indices of dead neurons, and the updates for the encoder and decoder weights and biases.
"""
raise NotImplementedError
Original file line number Diff line number Diff line change
Expand Up @@ -96,6 +96,9 @@ def compute_loss_and_get_activations(
Returns:
A tuple containing the loss per item, and all input activations.
Raises:
ValueError: If the number of items in the store is less than the number of inputs.
"""
with torch.no_grad():
loss_batches: list[TrainBatchStatistic] = []
Expand Down Expand Up @@ -274,6 +277,9 @@ def resample_dead_neurons(
autoencoder: Sparse autoencoder model.
loss_fn: Loss function.
train_batch_size: Train batch size (also used for resampling).
Returns:
Indices of dead neurons, and the updates for the encoder and decoder weights and biases.
"""
with torch.no_grad():
dead_neuron_indices = self.get_dead_neuron_indices(neuron_activity)
Expand Down
5 changes: 0 additions & 5 deletions sparse_autoencoder/activation_store/base_store.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,27 +51,22 @@ class ActivationStore(Dataset[InputOutputActivationVector], ABC):
@abstractmethod
def append(self, item: InputOutputActivationVector) -> Future | None:
"""Add a Single Item to the Store."""
raise NotImplementedError

@abstractmethod
def extend(self, batch: InputOutputActivationBatch) -> Future | None:
"""Add a Batch to the Store."""
raise NotImplementedError

@abstractmethod
def empty(self) -> None:
"""Empty the Store."""
raise NotImplementedError

@abstractmethod
def __len__(self) -> int:
"""Get the Length of the Store."""
raise NotImplementedError

@abstractmethod
def __getitem__(self, index: int) -> InputOutputActivationVector:
"""Get an Item from the Store."""
raise NotImplementedError

def shuffle(self) -> None:
"""Optional shuffle method."""
Expand Down
9 changes: 6 additions & 3 deletions sparse_autoencoder/activation_store/disk_store.py
Original file line number Diff line number Diff line change
Expand Up @@ -251,9 +251,12 @@ def __len__(self) -> int:
"""Length Dunder Method.
Example:
>>> store = DiskActivationStore(max_cache_size=1, empty_dir=True)
>>> print(len(store))
0
>>> store = DiskActivationStore(max_cache_size=1, empty_dir=True)
>>> print(len(store))
0
Returns:
The number of activation vectors in the dataset.
"""
# Calculate the length if not cached
if self._disk_n_activation_vectors.value == -1:
Expand Down
Loading

0 comments on commit c962143

Please sign in to comment.