Fix typos (#608)
* Fix typos

* Create .codespellrc
ehennestad authored Nov 1, 2024
1 parent 70fcace commit 23ff9fb
Showing 14 changed files with 20 additions and 17 deletions.
2 changes: 1 addition & 1 deletion +contrib/+tdt/TDTbin2mat.m
@@ -253,7 +253,7 @@
% the selected block. Each file starts with a 1024 byte boolean channel
% map indicating which channel's sort codes have been saved in the file.
% Following this map, is a sort code field that maps 1:1 with the event
-% ID for a given block. The event ID is essentially the Nth occurance of
+% ID for a given block. The event ID is essentially the Nth occurrence of
% an event on the entire TSQ file.

% look for the exact one
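The sort-code layout described in this comment lends itself to a direct binary read. A minimal sketch, assuming a file path in `sortFileName` and one `uint8` code per event (both assumptions, not taken from TDTbin2mat):

```matlab
% Read a sort-code file with the layout described above (illustrative only).
fid = fopen(sortFileName, 'rb');                 % sortFileName is hypothetical
channelMap = fread(fid, 1024, 'uint8=>logical'); % 1024-byte boolean channel map
sortCodes  = fread(fid, Inf, 'uint8');           % one code per event ID in the TSQ file
fclose(fid);
```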
2 changes: 1 addition & 1 deletion +io/createParsedType.m
@@ -29,7 +29,7 @@

[warningMessage, warningID] = lastwarn();

-% Handle any warnings if they occured.
+% Handle any warnings if they occurred.
if ~isempty(warningMessage)
if strcmp( warningID, 'NWB:CheckUnset:InvalidProperties' )

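The surrounding code follows the standard MATLAB `lastwarn` idiom: clear the warning state, run the call that may warn, then inspect what was raised. A minimal sketch (the parsing call is a hypothetical stand-in):

```matlab
lastwarn('');              % reset warning state before the call
value = someParsingCall(); % hypothetical call that may emit a warning
[warningMessage, warningID] = lastwarn();
if ~isempty(warningMessage) && strcmp(warningID, 'NWB:CheckUnset:InvalidProperties')
    % handle or suppress this specific warning here
end
```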
2 changes: 1 addition & 1 deletion +io/mapData2H5.m
@@ -56,7 +56,7 @@
%% Do Data Conversions
switch class(data)
case {'types.untyped.RegionView' 'types.untyped.ObjectView'}
-%will throw errors if refdata DNE. Caught at NWBData level.
+%will throw errors if refdata DNE (does not exist). Caught at NWBData level.
data = io.getRefData(fid, data);
case 'logical'
% encode as int8 values.
2 changes: 1 addition & 1 deletion +misc/parseSkipInvalidName.m
@@ -1,5 +1,5 @@
function parseSkipInvalidName(parser, keywordArguments)
-%PARSESKIPINVALIDNAME as parse() but without constraing on valid property names.
+%PARSESKIPINVALIDNAME as parse() but without constraining on valid property names.

validArgFlags = false(size(keywordArguments));
for i = 1:2:length(keywordArguments)
2 changes: 1 addition & 1 deletion +misc/str2validName.m
@@ -1,7 +1,7 @@
function valid = str2validName(propname, prefix)
% STR2VALIDNAME
% Converts the property name into a valid matlab property name.
-% propname: the offending propery name
+% propname: the offending property name
% prefix: optional prefix to use instead of the ambiguous "dyn"
if ~iscell(propname) && isvarname(propname)
valid = propname;
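For comparison, MATLAB's built-ins express the same validate-or-normalize idea, though str2validName's exact normalization may differ (a sketch, not the function's actual output):

```matlab
isvarname('start_time')              % true  -> name would be returned unchanged
isvarname('2photon')                 % false -> name needs normalization
matlab.lang.makeValidName('2photon') % built-in normalization, yields 'x2photon'
```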
4 changes: 2 additions & 2 deletions +tests/+unit/PynwbTutorialTest.m
@@ -25,7 +25,7 @@
'object_id.py', ... % Does not export nwb file
'plot_configurator.py', ... % Does not export nwb file
'brain_observatory.py', ... % Requires allen sdk
-'extensions.py'}; % Discrepency between tutorial and schema: https://github.com/NeurodataWithoutBorders/pynwb/issues/1952
+'extensions.py'}; % Discrepancy between tutorial and schema: https://github.com/NeurodataWithoutBorders/pynwb/issues/1952

% SkippedFiles - Name of exported nwb files to skip reading with matnwb
SkippedFiles = {'family_nwb_file_0.nwb'} % requires family driver from h5py
@@ -208,7 +208,7 @@ function installPythonDependencies(testCase)

function pynwbFolder = downloadPynwb()
githubUrl = 'https://github.com/NeurodataWithoutBorders/pynwb/archive/refs/heads/master.zip';
-pynwbFolder = downloadZippedGithubRepo(githubUrl, '.'); % Download in current direcory
+pynwbFolder = downloadZippedGithubRepo(githubUrl, '.'); % Download in current directory
end

function repoFolder = downloadZippedGithubRepo(githubUrl, targetFolder)
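The helper's body is truncated in this hunk; one plausible shape for it, using `websave` and `unzip` (a sketch under assumptions, not the actual implementation):

```matlab
function repoFolder = downloadZippedGithubRepo(githubUrl, targetFolder)
    % Download the archive and extract it into targetFolder. GitHub zips
    % contain a single top-level folder (e.g. 'pynwb-master'); the first
    % extracted entry is assumed here to expose that folder's path.
    zipFile    = websave(fullfile(targetFolder, 'repo.zip'), githubUrl);
    fileNames  = unzip(zipFile, targetFolder); % paths of extracted entries
    repoFolder = fileNames{1};
end
```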
2 changes: 1 addition & 1 deletion +tests/+util/verifyContainerEqual.m
@@ -35,7 +35,7 @@ function verifyContainerEqual(testCase, actual, expected, ignoreList)
tests.util.verifyContainerEqual(testCase, actualValue.value, expectedValue.value);
elseif isdatetime(expectedValue)...
|| (iscell(expectedValue) && all(cellfun('isclass', expectedValue, 'datetime')))
-% linux MATLAB doesn't appear to propery compare datetimes whereas
+% linux MATLAB doesn't appear to properly compare datetimes whereas
% Windows MATLAB does. This is a workaround to get tests to work
% while getting close enough to exact date representation.
actualValue = types.util.checkDtype(prop, 'datetime', actualValue);
2 changes: 1 addition & 1 deletion +types/+untyped/@DataStub/DataStub.m
@@ -118,7 +118,7 @@
elseif length(varargin) == 1
% note: you cannot leverage subsref here because when
% load() is called, it's calling the builtin version of
-% subsref, which apparantly poisons all calls in load() to
+% subsref, which apparently poisons all calls in load() to
% use builtin subsref. We use the internal load_mat_style
% to workaround this.
data = obj.load_mat_style(varargin{1});
4 changes: 2 additions & 2 deletions +types/+untyped/DataPipe.m
@@ -11,7 +11,7 @@
% DATAPIPE(..., 'maxSize', MAXSIZE) Sets the maximum size of the HDF5
% Dataset. To append data later, use the MAXSIZE of the full
% dataset. Inf on any axis will allow the Dataset to grow without
-% limit in that dimension. If not provided, MAXSIZE is infered from
+% limit in that dimension. If not provided, MAXSIZE is inferred from
% the DATA. An error is thrown if neither MAXSIZE nor DATA is provided.
%
% DATAPIPE(..., 'axis', AXIS) Set which dimension axis to increment when
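A usage sketch of the MAXSIZE and AXIS behavior documented above (sizes and variable names are illustrative):

```matlab
data = rand(100, 1000);
pipe = types.untyped.DataPipe( ...
    'data',    data, ...
    'maxSize', [100 Inf], ... % rows fixed; columns may grow without limit
    'axis',    2);            % later appends extend dimension 2
% after export, further columns could be appended, e.g.:
% pipe.append(rand(100, 500));
```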
@@ -114,7 +114,7 @@
warning('NWB:DataPipe:UnusedArguments',...
['Other keyword arguments were added along with a valid '...
'filename and path. Since the filename and path are valid, the '...
-'following extra properties will be superceded by the '...
+'following extra properties will be superseded by the '...
'configuration on file:\n%s'],...
strjoin(formatted, newline));
end
2 changes: 1 addition & 1 deletion +util/loadEventAlignedSpikeTimes.m
@@ -4,7 +4,7 @@
% ST = LOADEVENTALIGNEDTIMESERIESDATA(NWB, UNIT_ID, EVENT_TIMES) returns
% a cell array containing the spike times relative to the timestamps contained
% in the EVENT_TIMES array. Optional arguments control the size of the
-% temporal widnow within which spike times are included.
+% temporal window within which spike times are included.
% OPTIONAL KEYWORD ARGUMENTS
% 'before_time' - specifies the time, in seconds, before the event for
% the inclusion of spike times. Defaults to 1.
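A hypothetical call, assuming an open NwbFile object `nwb`, unit ID 5, and a vector of event timestamps:

```matlab
eventTimes = [12.5 40.2 87.9]; % event timestamps in seconds (illustrative)
spikeTimes = util.loadEventAlignedSpikeTimes(nwb, 5, eventTimes, ...
    'before_time', 0.5);       % include spikes from 0.5 s before each event
% spikeTimes is a cell array of event-relative spike times, one cell per event
```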
3 changes: 3 additions & 0 deletions .codespellrc
@@ -0,0 +1,3 @@
+[codespell]
+skip = *.html,*logo_matnwb.svg,*fastsearch.m,*.yaml,*UpdateThirdPartyFromUpstream.sh,*testResults.xml
+ignore-words-list = DNE,nd,whos
2 changes: 1 addition & 1 deletion README.md
@@ -155,7 +155,7 @@ NWB files use the HDF5 format to store data. There are two main differences betw
The NWB schema has regular updates and is open to addition of new types along with modification of previously defined types. As such, certain type presumptions made by MatNWB may be invalidated in the future from a NWB schema. Furthermore, new types may require implementations that will be missing in MatNWB until patched in.

For those planning on using matnwb alongside pynwb, please keep the following in mind:
-- MatNWB is dependent on the schema, which may not necessary correspond with your PyNWB schema version. Please consider overwriting the contents within MatNWB's **~/schema/core** directory with the generating PyNWB's **src/pynwb/data directory** and running generateCore to ensure compatibilty between systems.
+- MatNWB is dependent on the schema, which may not necessary correspond with your PyNWB schema version. Please consider overwriting the contents within MatNWB's **~/schema/core** directory with the generating PyNWB's **src/pynwb/data directory** and running generateCore to ensure compatibility between systems.

The `master` branch in this repository is considered perpetually unstable. If you desire Matnwb's full functionality (full round-trip with nwb data), please consider downloading the more stable releases in the Releases tab. Most releases will coincide with nwb-schema releases and guarantee compatibility of new features introduced with the schema release along with backwards compatibility with all previous nwb-schema releases.

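The schema-sync step suggested in that bullet might look like this in practice (a sketch; `pynwbDir` and `matnwbDir` are assumed paths to local checkouts):

```matlab
copyfile(fullfile(pynwbDir, 'src', 'pynwb', 'data'), ...
         fullfile(matnwbDir, 'schema', 'core'));
generateCore(); % regenerate MatNWB classes from the copied core schema
```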
4 changes: 2 additions & 2 deletions tutorials/convertTrials.m
@@ -349,7 +349,7 @@
% Though TimeIntervals is a subclass of the DynamicTable type, we opt for
% populating the Dynamic Table data by column instead of using `addRow`
% here because of how the data is formatted. DynamicTable is flexible
-% enough to accomodate both styles of data conversion.
+% enough to accommodate both styles of data conversion.
trials_epoch = types.core.TimeIntervals(...
'colnames', {'start_time'}, ...
'description', 'trial data and properties', ...
@@ -393,7 +393,7 @@
% ('unitx' where 'x' is some unit ID).

%%
-% Trial IDs, wherever they are used, are placed in a relevent |control| property in the
+% Trial IDs, wherever they are used, are placed in a relevant |control| property in the
% data object and will indicate what data is associated with what trial as
% defined in |trials|'s |id| column.

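For contrast with the column-wise population shown above, the `addRow` style mentioned in that comment would fill the same table one trial at a time (a sketch; times are illustrative):

```matlab
trials_epoch.addRow('start_time', 0.0); % one call per trial row
trials_epoch.addRow('start_time', 1.5);
```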
4 changes: 2 additions & 2 deletions tutorials/dataPipe.m
@@ -19,7 +19,7 @@
DataPipe = types.untyped.DataPipe('data', DataToCompress);

%%
-% This is the most basic way to acheive compression, and all of the
+% This is the most basic way to achieve compression, and all of the
% optimization decisions are automatically determined by MatNWB.
%% Background
% HDF5 has built-in ability to compress and decompress individual datasets.
@@ -98,7 +98,7 @@
% resulting file size of 1.1MB. The chunk size was chosen such that it
% spans each individual row of the matrix.
%
-% Use the combination of arugments that fit your need.
+% Use the combination of arguments that fit your need.
% When dealing with large datasets, you may want to use iterative write to
% ensure that you stay within the bounds of your system memory and use
% chunking and compression to optimize storage, read and write of the data.
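Combining those arguments might look like the following (a sketch; the chunk shape spanning one row follows the description above):

```matlab
pipe = types.untyped.DataPipe( ...
    'data',             DataToCompress, ...
    'chunkSize',        [1 size(DataToCompress, 2)], ... % one row per chunk
    'compressionLevel', 3);                              % gzip level (1-9)
```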
