Skip to content

Commit

Permalink
Merge pull request #354 from AllenInstitute/dev
Browse files Browse the repository at this point in the history
Dev
  • Loading branch information
rcpeene authored Mar 1, 2024
2 parents 8c21351 + 9d0731a commit 6386700
Show file tree
Hide file tree
Showing 48 changed files with 1,130 additions and 1,165 deletions.
8 changes: 8 additions & 0 deletions .codespellrc
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
[codespell]
# Ref: https://github.com/codespell-project/codespell#using-a-config-file
skip = .git,.codespellrc
check-hidden = true
# Ignore embedded or returned images in ipynb and some acronyms
ignore-regex = ((^\s*"image/\S+": "|<img src='data:image/\S+;base64,).*|\bSOM\b)
#ignore-regex = image/\S+.*
ignore-words-list = nin
23 changes: 23 additions & 0 deletions .github/workflows/codespell.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
# Codespell configuration is within .codespellrc
---
name: Codespell

on:
push:
branches: [main]
pull_request:
branches: [main]

permissions:
contents: read

jobs:
codespell:
name: Check for spelling errors
runs-on: ubuntu-latest

steps:
- name: Checkout
uses: actions/checkout@v4
- name: Codespell
uses: codespell-project/actions-codespell@v2
17 changes: 17 additions & 0 deletions data/contributors.csv
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
Name,ORCID,Institution,Email,Github,Funding,Role
Katrina Ager,,Arizona State University,[email protected],https://github.com/katrinaager,,"Processing"
Ahad Bawany,,Allen Institute,[email protected],https://github.com/Ahad-Allen,,"Processing"
Corbett Bennett,0009-0001-2847-7754,Allen Institute,[email protected],https://github.com/corbennett,,"Processing"
Benjamin Dichter,0000-0001-5725-6910,CatalystNeuro,[email protected],https://github.com/bendichter,,"Conceptualization"
Satrajit Ghosh,0000-0002-5312-6729,Massachusetts Institute of Technology,[email protected],https://github.com/satra,NIH R24MH117295,"Conceptualization"
Colleen J. Gillon,0000-0002-2253-7816,Imperial College London,[email protected],https://github.com/colleenjg,,"Processing"
Carly Kiselycznyk,,Allen Institute,[email protected],,,"Management"
Jerome Lecoq,0000-0002-0131-0938,Allen Institute,[email protected],https://github.com/jeromelecoq,,"Conceptualization, Management"
Mackenzie Mathis,0000-0001-7368-4456,Swiss Federal Institute of Technology in Lausanne,[email protected],https://github.com/MMathisLab,,"Review"
NIH,,,,,,"Funding"
R. Carter Peene,0009-0000-6660-2264,Allen Institute,[email protected],rcpeene,,"Conceptualization, Processing"
Jason Pina,0000-0003-1385-8762,York University,[email protected],https://github.com/jayepi,,"Conceptualization"
Hyeyoung Shin,,Seoul National University,[email protected],https://github.com/hs13,,"Conceptualization"
Josh Siegle,,Allen Institute,[email protected],https://github.com/jsiegle,NIH U24,"Processing"
Jacob Westerberg,0000-0001-5331-8707,Netherlands Institute for Neuroscience,[email protected],https://github.com/jakewesterberg,,"Conceptualization"
Alex Williams,0000-0001-5853-103X,New York University,[email protected],https://github.com/ahwillia,,"Review"
File renamed without changes.
2 changes: 1 addition & 1 deletion data/ecephys_schema.csv
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,7 @@ electrodes,Electrode IDs,array,nwb.electrodes.id,IDs column of the electrodes ta
electrodes,Electrode X Coordinates,array,nwb.electrodes.x,CCF X coordinates column of the electrodes table. Represents the X location of electrodes along a probe.
electrodes,Electrode Y Coordinates,array,nwb.electrodes.y,CCF Y coordinates column of the electrodes table. Represents the Y location of electrodes along a probe.
electrodes,Electrode Z Coordinates,array,nwb.electrodes.z,CCF Z coordinates column of the electrodes table. Represents the Z location of electrodes along a probe.
electrodes,Impedance,array,nwb.electrodes.imp,Impedence column of the electrodes table. Unused in our data.
electrodes,Impedance,array,nwb.electrodes.imp,Impedance column of the electrodes table. Unused in our data.
electrodes,Electrode Locations,array,nwb.electrodes.location,Brain region acronyms column of the electrodes table
electrodes,Electrode Filters,array,nwb.electrodes.filtering,Column of the electrodes table describing the list of filters applied to each electrode.
electrodes,Electrode Groups,array,nwb.electrodes.group,References to group object column of the electrodes table
Expand Down
2 changes: 1 addition & 1 deletion databook_utils/cred_assign_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -142,7 +142,7 @@ def create_roi_mask_contours(df_row, sess_idx=0, cw=1):
Returns:
- roi_masks (2D array):
ROI mask contour image (hei x wid), overlayed for all ROIs,
ROI mask contour image (hei x wid), overlaid for all ROIs,
with 1s where mask contours are present, and 0s elsewhere.
"""

Expand Down
14 changes: 5 additions & 9 deletions databook_utils/dandi_utils.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@

import h5py
import os
import remfile

from random import randint

from dandi import download
from dandi import dandiapi
from fsspec.implementations.cached import CachingFileSystem
from fsspec import filesystem
from pynwb import NWBHDF5IO


Expand Down Expand Up @@ -49,11 +49,7 @@ def dandi_stream_open(dandiset_id, dandi_filepath, dandi_api_key=None):
base_url = file.client.session.head(file.base_download_url)
file_url = base_url.headers["Location"]

fs = CachingFileSystem(
fs=filesystem("http")
)

f = fs.open(file_url, "rb")
file = h5py.File(f)
io = NWBHDF5IO(file=file, mode='r', load_namespaces=True)
rem_file = remfile.File(file_url)
h5py_file = h5py.File(rem_file, "r")
io = NWBHDF5IO(file=h5py_file, mode="r", load_namespaces=True)
return io
66 changes: 59 additions & 7 deletions databook_utils/insert_authors_version.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import os
import csv
import subprocess


from docutils import nodes
from docutils.parsers.rst import Directive

Expand All @@ -14,30 +15,30 @@ def setstring_to_set(setstring):
# possible todo: replace this with ast.eval_literal
def dictstring_to_dict(dictstring):
dictstring = dictstring.replace(", ", ",")
dictstring = dictstring.replace(": ", ":")
dictstring = dictstring.replace("-- ", "--")
dictlist = dictstring.split(",")

this_dict = {}
for elem in dictlist:
try:
key, val = elem.split(":")
key, val = elem.split("--")
except:
raise ValueError("Aliases should be formatted in key value pairs, delimited by a colon")
if key not in this_dict:
this_dict[key] = val
return this_dict


class AuthorsList(Directive):
class Committers(Directive):

optional_arguments = 3
option_spec = {"blacklist": setstring_to_set, "additional_authors": setstring_to_set, "aliases": dictstring_to_dict}

def run(self):
blacklist= self.options.get("blacklist", set())
blacklist= self.options.get("blacklist", "blah")
additional_authors = self.options.get("additional_authors", set())
aliases = self.options.get("aliases", {})

log = subprocess.Popen(["git", "log"], stdout=subprocess.PIPE, text=True)
shortlog = subprocess.check_output(["git", "shortlog", "-sn"], stdin=log.stdout, encoding="utf8")
print(shortlog)
Expand All @@ -54,6 +55,28 @@ def run(self):
return [emphasis_node]


class Authors(Directive):
    """Render an emphasized, comma-separated list of contributor names.

    Reads ``./data/contributors.csv`` (relative to the build working
    directory) and lists the names whose "Role" column contains the
    optional ``role`` option. With no ``role`` given, every contributor
    is included.
    """

    optional_arguments = 1
    option_spec = {"role": str}

    def run(self):
        # Empty string means "no filter": include every contributor.
        selected_role = self.options.get("role", "")

        # Use a context manager so the CSV handle is closed instead of leaked;
        # newline="" is the documented way to open files for the csv module.
        with open("./data/contributors.csv", newline="") as csv_file:
            table = list(csv.reader(csv_file))

        role_idx = table[0].index("Role")
        name_idx = table[0].index("Name")

        authors = []
        for contributor in table[1:]:
            # The Role cell is a comma-separated list, e.g. "Processing, Review".
            contributor_roles = contributor[role_idx].split(", ")
            if selected_role == "" or selected_role in contributor_roles:
                authors.append(contributor[name_idx])

        emphasis_node = nodes.emphasis(text=", ".join(authors))
        return [emphasis_node]


class VersionNumber(Directive):

optional_arguments = 1
Expand All @@ -74,9 +97,38 @@ def run(self):
return [paragraph_node]


class AuthorsIndex(Directive):
    """Build a "Contributors" section from the contributors CSV.

    Each contributor becomes its own sub-section: the name (first column)
    in bold, followed by a line block listing every remaining non-empty
    field (ORCID, institution, email, ...).
    """

    def run(self):
        # Use a context manager so the CSV handle is closed instead of leaked;
        # newline="" is the documented way to open files for the csv module.
        with open("./data/contributors.csv", newline="") as csv_file:
            table = list(csv.reader(csv_file))

        section = nodes.section(ids=["contributorsblock"])
        section += nodes.title("", "Contributors")

        # table[0] is the header row; every remaining row is one contributor.
        for properties in table[1:]:
            entry = nodes.section(ids=["contributorentry"])

            name = properties[0]
            entry.append(nodes.strong(text=name))

            line_block = nodes.line_block()
            # "field" avoids shadowing the builtin `property`.
            for field in properties[1:]:
                if field != "":
                    line_block.append(nodes.line(text=field))
            # Trailing blank line visually separates entries.
            line_block.append(nodes.line(text=""))

            entry.append(line_block)
            section.append(entry)

        return [section]


def setup(app):
app.add_directive("authors", AuthorsList)
app.add_directive("committers", Committers)
app.add_directive("version", VersionNumber)
app.add_directive("authors", Authors)
app.add_directive("authors_index", AuthorsIndex)

return {
'version': '0.1',
Expand Down
1 change: 1 addition & 0 deletions docs/_config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ sphinx:
extra_extensions:
- insert_authors_version
- sphinx.ext.intersphinx

repository:
url: https://github.com/AllenInstitute/openscope_databook
path_to_book: docs
Expand Down
6 changes: 4 additions & 2 deletions docs/_toc.yml
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,9 @@ parts:
- file: methods/jupyter_book.md
- file: methods/github.md
- file: methods/environments.md
- caption: Bibliography and FAQ
- caption: Appendix
chapters:
- file: contribution.md
- file: bibliography.md
- file: FAQ.md
- file: authors_index.md
- file: FAQ.md
3 changes: 3 additions & 0 deletions docs/authors_index.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@

```{authors_index}
```
2 changes: 1 addition & 1 deletion docs/basics/download_nwb.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
"outputs": [],
"source": [
"try:\n",
" from dandi_utils import dandi_stream_open\n",
" from databook_utils.dandi_utils import dandi_stream_open\n",
"except:\n",
" !git clone https://github.com/AllenInstitute/openscope_databook.git\n",
" %cd openscope_databook\n",
Expand Down
Loading

0 comments on commit 6386700

Please sign in to comment.