diff --git a/+misc/getTutorialNwbFilePath.m b/+misc/getTutorialNwbFilePath.m
new file mode 100644
index 00000000..34af1c7c
--- /dev/null
+++ b/+misc/getTutorialNwbFilePath.m
@@ -0,0 +1,35 @@
+function nwbFilePath = getTutorialNwbFilePath(filename, options)
+% getTutorialNwbFilePath - Get a filepath for saving a tutorial nwb file.
+%
+%   nwbFilePath = getTutorialNwbFilePath(filename) creates an absolute
+%   filepath for saving a tutorial nwb file given a filename.
+%   By default, the file is saved in <matnwb>/tutorials/tutorial_nwb_files
+
+    arguments
+        filename char
+        options.ExportLocation (1,1) string ...
+            {mustBeMember(options.ExportLocation, ["default", "workdir"])} = "default"
+    end
+
+    % Check if this function is called from the testing framework. If yes,
+    % ensure the file is saved to the current working directory.
+    callingStackTrace = dbstack();
+    if numel(callingStackTrace) >= 4 && ...
+            strcmp(callingStackTrace(4).name, 'TutorialTest.testTutorial')
+        options.ExportLocation = "workdir";
+    end
+
+    if options.ExportLocation == "default"
+        saveFolder = fullfile(misc.getMatnwbDir, 'tutorials', 'tutorial_nwb_files');
+    elseif options.ExportLocation == "workdir"
+        saveFolder = pwd;
+    end
+
+    if ~isfolder(saveFolder); mkdir(saveFolder); end
+
+    if ~endsWith(filename, '.nwb')
+        filename = [filename, '.nwb'];
+    end
+
+    nwbFilePath = fullfile(saveFolder, filename);
+end
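For context, a minimal usage sketch of the helper above (the tutorial name 'intro_tutorial' is hypothetical; the two ExportLocation values are the ones the arguments block accepts):

    % Default: resolves to <matnwb>/tutorials/tutorial_nwb_files/intro_tutorial.nwb
    nwbFilePath = misc.getTutorialNwbFilePath('intro_tutorial');

    % Save into the current working directory instead
    % (this is what the test framework forces via the dbstack check):
    nwbFilePath = misc.getTutorialNwbFilePath('intro_tutorial', 'ExportLocation', 'workdir');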
diff --git a/+tests/+unit/PynwbTutorialTest.m b/+tests/+unit/PynwbTutorialTest.m
new file mode 100644
index 00000000..946661be
--- /dev/null
+++ b/+tests/+unit/PynwbTutorialTest.m
@@ -0,0 +1,292 @@
+classdef PynwbTutorialTest < matlab.unittest.TestCase
+% PynwbTutorialTest - Unit test for testing the pynwb tutorials.
+%
+%   This test runs most pynwb tutorial files (skipping tutorials with
+%   external dependencies). If a tutorial creates nwb file(s), the test
+%   will also try to open these with matnwb.
+%
+%   See also tests.util.getPythonPath
+
+    properties
+        MatNwbDirectory
+        PyNwbDirectory
+    end
+
+    properties (TestParameter)
+        % tutorialFile - A cell array where each cell is the name of a
+        % tutorial file. testTutorial will run on each file individually
+        tutorialFile = listTutorialFiles();
+    end
+
+    properties (Constant)
+        % SkippedTutorials - Tutorials from pynwb to skip
+        SkippedTutorials = {...
+            'streaming.py', ...         % Requires an HDF5 library built with the ROS3 driver enabled, which is not a given
+            'object_id.py', ...         % Does not export nwb file
+            'plot_configurator.py', ... % Does not export nwb file
+            'brain_observatory.py', ... % Requires the Allen SDK
+            'extensions.py'};           % Discrepancy between tutorial and schema: https://github.com/NeurodataWithoutBorders/pynwb/issues/1952
+
+        % SkippedFiles - Name of exported nwb files to skip reading with matnwb
+        SkippedFiles = {'family_nwb_file_0.nwb'} % requires family driver from h5py
+
+        % PythonDependencies - Package dependencies for running pynwb tutorials
+        PythonDependencies = {'hdmf-zarr', 'dataframe-image', 'matplotlib', 'dandi'}
+    end
+
+    properties (Access = private)
+        PythonEnvironment % Stores the value of the environment variable
+                          % "PYTHONPATH" to restore when the test is finished.
+    end
+
+    methods (TestClassSetup)
+        function setupClass(testCase)
+            % Get the root path of the matnwb repository
+            rootPath = getMatNwbRootDirectory();
+            testCase.MatNwbDirectory = rootPath;
+
+            % Use a fixture to add the folder to the search path
+            testCase.applyFixture(matlab.unittest.fixtures.PathFixture(rootPath));
+
+            nwbClearGenerated() % Clear the generated schema classes
+
+            % Use a fixture to create a temporary working directory
+            testCase.applyFixture(matlab.unittest.fixtures.WorkingFolderFixture);
+
+            % Download pynwb in the current (temp) directory and cd into pynwb
+            testCase.PyNwbDirectory = downloadPynwb();
+            cd( testCase.PyNwbDirectory )
+
+            testCase.createVirtualPythonEnvironment()
+            testCase.installPythonDependencies()
+
+            % Add site-packages to python path
+            testCase.PythonEnvironment = getenv('PYTHONPATH');
+            L = dir('temp_venv/lib/python*/site-*'); % Find the site-packages folder
+            pythonPath = fullfile(L.folder, L.name);
+            setenv('PYTHONPATH', pythonPath)
+        end
+    end
+
+    methods (TestClassTeardown)
+        function tearDownClass(testCase)
+            % Restore environment variable
+            setenv('PYTHONPATH', testCase.PythonEnvironment);
+        end
+    end
+
+    methods (TestMethodSetup)
+        function setupMethod(testCase) %#ok
+            % pass
+        end
+    end
+
+    methods (TestMethodTeardown)
+        function teardownMethod(testCase) %#ok
+            % Clear/delete all nwb files
+            L = dir('*.nwb');
+            for i = 1:numel(L)
+                delete(fullfile(L(i).folder, L(i).name))
+            end
+
+            % Consider whether to also run nwbClearGenerated here
+        end
+    end
+
+    methods (Test)
+        function testTutorial(testCase, tutorialFile)
+            pythonPath = tests.util.getPythonPath();
+
+            cmd = sprintf('"%s" %s', pythonPath, tutorialFile );
+            [status, cmdout] = system(cmd);
+
+            if status ~= 0
+                if contains( cmdout, "ModuleNotFoundError: No module named 'hdf5plugin'" )
+                    % Ignore; hdf5plugin is an optional dependency
+                else
+                    [~, tutorialName] = fileparts(tutorialFile);
+                    error('Failed to run python tutorial named "%s" with error:\n %s', tutorialName, cmdout)
+                end
+            end
+
+            testCase.testReadTutorialNwbFileWithMatNwb()
+        end
+    end
+
+    methods
+        function testReadTutorialNwbFileWithMatNwb(testCase)
+
+            % Retrieve all files generated by the tutorial
+            nwbListing = dir('*.nwb');
+
+            for i = 1:numel(nwbListing)
+                nwbFilename = nwbListing(i).name;
+                if any(strcmp(nwbFilename, tests.unit.PynwbTutorialTest.SkippedFiles))
+                    continue
+                end
+
+                try
+                    % NB: Need to specify savedir as the current directory (.) in
+                    % order to generate schema classes in the working directory for the test
+                    nwbFile = nwbRead(nwbFilename, 'savedir', '.'); %#ok<NASGU>
+                catch ME
+                    testCase.verifyFail(sprintf('Failed to read file %s with error: %s', nwbFilename, ME.message));
+                end
+            end
+        end
+    end
+
+    methods (Access = private) % Utility functions
+        function createVirtualPythonEnvironment(testCase)
+            % Todo: Consider using py.* instead:
+            %   py.venv.create('.', with_pip=true)
+
+            pythonPath = tests.util.getPythonPath();
+            cmd = sprintf("%s -m venv ./temp_venv", pythonPath );
+            [status, cmdout] = system(cmd);
+
+            if status ~= 0
+                error("Failed to create virtual python environment with error:\n%s", cmdout)
+            end
+
+            % Activate virtual python environment. Note: activation does not
+            % persist across separate system() calls, so subsequent commands
+            % reference executables inside temp_venv by explicit path.
+            if isunix
+                system('source ./temp_venv/bin/activate');
+            elseif ispc
+                system('temp_venv\Scripts\activate')
+            end
+        end
+
+        function installPythonDependencies(testCase)
+            % Install python dependencies. The pip path assumes a POSIX
+            % venv layout (on Windows it would be temp_venv\Scripts\pip).
+            pipExecutable = './temp_venv/bin/pip3';
+            for i = 1:numel(testCase.PythonDependencies)
+                iName = testCase.PythonDependencies{i};
+                installCmdStr = sprintf('%s install %s', pipExecutable, iName);
+                evalc( "system(installCmdStr)" ); % Install without command window output
+            end
+        end
+    end
+end
+
+function tutorialNames = listTutorialFiles()
+% listTutorialFiles - List names of all tutorial files (excluding skipped files)
+
+    % Note: Without a token, github api requests are limited to 60 per
+    % hour. listFilesInRepo will make 4 requests per call
+    if isenv('GITHUB_TOKEN')
+        token = getenv('GITHUB_TOKEN');
+    else
+        token = '';
+    end
+
+    allFilePaths = listFilesInRepo(...
+        'NeurodataWithoutBorders', 'pynwb', 'docs/gallery/', token);
+
+    % Exclude files that are not .py files.
+    [~, fileNames, fileExt] = fileparts(allFilePaths);
+    keep = strcmp(fileExt, '.py');
+    allFilePaths = allFilePaths(keep);
+
+    % Exclude skipped files.
+    fileNames = strcat(fileNames(keep), '.py');
+    [~, iA] = setdiff(fileNames, tests.unit.PynwbTutorialTest.SkippedTutorials, 'stable');
+    tutorialNames = allFilePaths(iA);
+end
+
+function folderPath = getMatNwbRootDirectory()
+    folderPath = fileparts(fileparts(fileparts(mfilename('fullpath'))));
+end
+
+function pynwbFolder = downloadPynwb()
+    githubUrl = 'https://github.com/NeurodataWithoutBorders/pynwb/archive/refs/heads/master.zip';
+    pynwbFolder = downloadZippedGithubRepo(githubUrl, '.'); % Download in current directory
+end
+
+function repoFolder = downloadZippedGithubRepo(githubUrl, targetFolder)
+%downloadZippedGithubRepo - Download and unzip a GitHub repository into a target folder
+
+    % Create a temporary path for storing the downloaded file.
+    [~, ~, fileType] = fileparts(githubUrl);
+    tempFilepath = [tempname, fileType];
+
+    % Download the file containing the repository (websave errors on failure)
+    tempFilepath = websave(tempFilepath, githubUrl);
+    fileCleanupObj = onCleanup( @() delete(tempFilepath) );
+
+    fileNames = unzip(tempFilepath, targetFolder);
+
+    % Delete the temp zip file
+    clear fileCleanupObj
+
+    repoFolder = fullfile(targetFolder, fileNames{1});
+end
+
+function allFiles = listFilesInRepo(owner, repo, path, token)
+% listFilesInRepo - List all files in a GitHub repository, including subfolders.
+%   Inputs:
+%       - owner: GitHub username or organization name
+%       - repo: Repository name
+%       - path: Folder path in the repository (use '' for root)
+%       - token: Personal Access Token for GitHub API (use '' for public repos)
+%   Outputs:
+%       - allFiles: Cell array of file paths
+
+    if nargin < 3
+        path = '';
+    end
+    if nargin < 4
+        token = '';
+    end
+
+    % Construct the API URL
+    url = ['https://api.github.com/repos/' owner '/' repo '/contents/' path];
+
+    % Set up HTTP headers, including authentication if provided
+    headers = matlab.net.http.HeaderField.empty;
+    if ~isempty(token)
+        headers(end+1) = matlab.net.http.HeaderField('Authorization', ['token ' token]);
+    end
+    headers(end+1) = matlab.net.http.HeaderField('Accept', 'application/vnd.github.v3+json');
+
+    % Send the HTTP GET request
+    request = matlab.net.http.RequestMessage('GET', headers);
+    response = request.send(url);
+
+    % Check if the request was successful
+    if response.StatusCode == matlab.net.http.StatusCode.OK
+        contents = response.Body.Data;
+    else
+        error('Failed to fetch data: %s', char(response.StatusLine));
+    end
+
+    % Initialize the output
+    allFiles = {};
+
+    % Process the contents
+    for i = 1:numel(contents)
+        item = contents(i);
+        if strcmp(item.type, 'file')
+            % If it's a file, add its path to the list
+            allFiles{end+1} = item.path; %#ok<AGROW>
+        elseif strcmp(item.type, 'dir')
+            % If it's a directory, recursively fetch its contents
+            subfolderFiles = listFilesInRepo(owner, repo, item.path, token);
+            allFiles = [allFiles, subfolderFiles]; %#ok<AGROW>
+        end
+    end
+end
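A sketch of how the parameterized test class above might be run locally, assuming the matnwb root folder is the current directory (CI invokes nwbtest instead, as shown in the azure-pipelines.yml change below):

    % Build and run the suite; one test point is generated per tutorial file.
    suite = matlab.unittest.TestSuite.fromClass(?tests.unit.PynwbTutorialTest);
    results = suite.run();
    disp(table(results))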
diff --git a/+tests/+unit/TutorialTest.m b/+tests/+unit/TutorialTest.m
new file mode 100644
index 00000000..15ea7e33
--- /dev/null
+++ b/+tests/+unit/TutorialTest.m
@@ -0,0 +1,141 @@
+classdef TutorialTest < matlab.unittest.TestCase
+% TutorialTest - Unit test for testing the matnwb tutorials.
+%
+%   This test runs most matnwb tutorial files (skipping tutorials with
+%   external dependencies). If a tutorial creates an nwb file, the test
+%   will also try to open it with pynwb.
+%
+%   Note:
+%       - Requires MATLAB XXXX to run py.* commands.
+%       - pynwb must be installed in the python environment returned by
+%         pyenv()
+
+    properties
+        MatNwbDirectory
+    end
+
+    properties (TestParameter)
+        % tutorialFile - A cell array where each cell is the name of a
+        % tutorial file. testTutorial will run on each file individually
+        tutorialFile = listTutorialFiles();
+    end
+
+    properties (Constant)
+        SkippedTutorials = {...
+            'basicUsage.mlx', ...  % depends on external data
+            'convertTrials.m', ... % depends on basicUsage output
+            'formatStruct.m', ...  % Actually a utility script, not a tutorial
+            'read_demo.mlx'};      % depends on external data
+
+        % SkippedFiles - Name of exported nwb files to skip reading with pynwb
+        SkippedFiles = {'testFileWithDataPipes.nwb'} % does not produce a valid nwb file
+    end
+
+    methods (TestClassSetup)
+        function setupClass(testCase)
+            % Get the root path of the matnwb repository
+            rootPath = getMatNwbRootDirectory();
+            tutorialsFolder = fullfile(rootPath, 'tutorials');
+
+            testCase.MatNwbDirectory = rootPath;
+
+            % Use a fixture to add the folders to the search path
+            testCase.applyFixture(matlab.unittest.fixtures.PathFixture(rootPath));
+            testCase.applyFixture(matlab.unittest.fixtures.PathFixture(tutorialsFolder));
+
+            % Note: The following does not seem to work on the Azure
+            % pipeline. Kept for reference.
+
+            % % % Make sure pynwb is installed in MATLAB's Python Environment
+            % % args = py.list({py.sys.executable, "-m", "pip", "install", "pynwb"});
+            % % py.subprocess.check_call(args);
+            % %
+            % % % Add pynwb to MATLAB's python environment path
+            % % pynwbPath = getenv('PYNWB_PATH');
+            % % if count(py.sys.path, pynwbPath) == 0
+            % %     insert(py.sys.path, int32(0), pynwbPath);
+            % % end
+
+            % Alternative: Use python script for reading file with pynwb
+            setenv('PYTHONPATH', fileparts(mfilename('fullpath')));
+
+            nwbClearGenerated()
+        end
+    end
+
+    methods (TestClassTeardown)
+        function tearDownClass(testCase) %#ok
+            %generateCore()
+        end
+    end
+
+    methods (TestMethodSetup)
+        function setupMethod(testCase)
+            testCase.applyFixture(matlab.unittest.fixtures.WorkingFolderFixture);
+            generateCore('savedir', '.');
+        end
+    end
+
+    methods (Test)
+        function testTutorial(testCase, tutorialFile) %#ok
+            run(tutorialFile)
+            testCase.testReadTutorialNwbFileWithPynwb()
+        end
+    end
+
+    methods
+        function testReadTutorialNwbFileWithPynwb(testCase)
+
+            % Retrieve all files generated by tutorial
+            nwbListing = dir('*.nwb');
+
+            for i = 1:numel(nwbListing)
+                nwbFilename = nwbListing(i).name;
+                if any(strcmp(nwbFilename, tests.unit.TutorialTest.SkippedFiles))
+                    continue
+                end
+
+                try
+                    io = py.pynwb.NWBHDF5IO(nwbFilename);
+                    nwbObject = io.read();
+                    testCase.verifyNotEmpty(nwbObject, 'The NWB file should not be empty.');
+                    io.close()
+
+                catch ME
+                    if strcmp(ME.identifier, 'MATLAB:undefinedVarOrClass') && ...
+                            contains(ME.message, 'py.pynwb.NWBHDF5IO')
+
+                        % Fall back to a python script if pynwb is not
+                        % available through MATLAB's python environment.
+                        pythonExecutable = tests.util.getPythonPath();
+                        cmd = sprintf('"%s" -B -m read_nwbfile_with_pynwb %s',...
+                            pythonExecutable, nwbFilename);
+
+                        status = system(cmd);
+                        if status ~= 0
+                            error('Failed to read NWB file "%s" using pynwb', nwbFilename)
+                        end
+                    else
+                        rethrow(ME)
+                    end
+                end
+            end
+        end
+    end
+end
+
+function tutorialNames = listTutorialFiles()
+% listTutorialFiles - List names of all tutorial files (excluding skipped files)
+    rootPath = getMatNwbRootDirectory();
+    L = dir(fullfile(rootPath, 'tutorials'));
+    L( [L.isdir] ) = []; % Ignore folders
+    tutorialNames = setdiff({L.name}, tests.unit.TutorialTest.SkippedTutorials);
+end
+
+function folderPath = getMatNwbRootDirectory()
+    folderPath = fileparts(fileparts(fileparts(mfilename('fullpath'))));
+end
diff --git a/+tests/+unit/read_nwbfile_with_pynwb.py b/+tests/+unit/read_nwbfile_with_pynwb.py
new file mode 100644
index 00000000..546e353d
--- /dev/null
+++ b/+tests/+unit/read_nwbfile_with_pynwb.py
@@ -0,0 +1,17 @@
+import sys
+from pynwb import NWBHDF5IO
+
+def pynwbread():
+    if len(sys.argv) > 1:
+        # Take the first input argument
+        nwb_file_path = sys.argv[1]
+        print(f"Reading file '{nwb_file_path}' with pynwb.")
+
+        with NWBHDF5IO(nwb_file_path, "r") as io:
+            read_nwbfile = io.read()
+
+    else:
+        raise Exception("No filepath was provided")
+
+if __name__ == "__main__":
+    pynwbread()
\ No newline at end of file
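For reference, a minimal sketch of the script's fallback invocation from MATLAB, outside the test class. The file name is hypothetical, and the PYTHONPATH value mirrors what setupClass sets so that `-m read_nwbfile_with_pynwb` resolves:

    % Make read_nwbfile_with_pynwb.py importable as a module, then run it.
    matnwbRoot = pwd;  % assumption: the current folder is the matnwb root
    setenv('PYTHONPATH', fullfile(matnwbRoot, '+tests', '+unit'));
    pythonExecutable = tests.util.getPythonPath();
    status = system(sprintf('"%s" -B -m read_nwbfile_with_pynwb "%s"', ...
        pythonExecutable, 'myTutorialFile.nwb'));
    assert(status == 0, 'pynwb failed to read the file')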
diff --git a/+tests/+unit/tutorialTest.m b/+tests/+unit/tutorialTest.m
deleted file mode 100644
index 5ec7699e..00000000
--- a/+tests/+unit/tutorialTest.m
+++ /dev/null
@@ -1,37 +0,0 @@
-function tests = tutorialTest()
-tests = functiontests(localfunctions);
-end
-
-function setupOnce(testCase)
-rootPath = fullfile(fileparts(mfilename('fullpath')), '..', '..');
-testCase.applyFixture(matlab.unittest.fixtures.PathFixture(rootPath));
-tutorialPath = fullfile(rootPath, 'tutorials');
-addpath(tutorialPath);
-testCase.TestData.listing = dir(tutorialPath);
-end
-
-function setup(testCase)
-testCase.applyFixture(matlab.unittest.fixtures.WorkingFolderFixture);
-generateCore('savedir', '.');
-rehash();
-end
-
-function testTutorials(testCase)
-skippedTutorials = {...
-    'basicUsage.mlx', ... % depends on external data
-    'convertTrials.m', ... % depends on basicUsage output
-    'formatStruct.m', ... % Actually a utility script, not a tutorial
-    'read_demo.mlx'}; % depends on external data
-for i = 1:length(testCase.TestData.listing)
-    listing = testCase.TestData.listing(i);
-    if listing.isdir || any(strcmp(skippedTutorials, listing.name))
-        continue;
-    end
-    try
-        run(listing.name);
-    catch ME
-        error('NWB:Test:Tutorial', ...
-            'Error while running test `%s`. Full error message:\n\n%s', listing.name, getReport(ME));
-    end
-end
-end
\ No newline at end of file
diff --git a/+tests/+util/getPythonPath.m b/+tests/+util/getPythonPath.m
new file mode 100644
index 00000000..3a43dc0a
--- /dev/null
+++ b/+tests/+util/getPythonPath.m
@@ -0,0 +1,14 @@
+function pythonPath = getPythonPath()
+    envPath = fullfile('+tests', 'env.mat');
+
+    if 2 == exist(envPath, 'file')
+        Env = load(envPath, '-mat');
+        if isfield(Env, 'pythonPath')
+            pythonPath = Env.pythonPath;
+        else
+            pythonPath = fullfile(Env.pythonDir, 'python');
+        end
+    else
+        pythonPath = 'python';
+    end
+end
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 1cb8537d..33b11d33 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -21,6 +21,7 @@ steps:
       pip install pynwb
       pip install hdf5plugin
       echo "##vso[task.setvariable variable=plugin_path]$(python -c "import hdf5plugin; print(hdf5plugin.PLUGINS_PATH)")"
+      echo "##vso[task.setvariable variable=pynwb_path]$(python -c "import pynwb; print(pynwb.__path__[0])")"
     displayName: 'Install PyNWB'
 
 - task: RunMATLABCommand@0
@@ -28,6 +29,7 @@ steps:
     command: "results = assertSuccess(nwbtest); assert(~isempty(results), 'No tests ran');"
     env:
       HDF5_PLUGIN_PATH: $(plugin_path)
+      PYNWB_PATH: $(pynwb_path)
 
 - task: PublishTestResults@2
   condition: succeededOrFailed()
diff --git a/tutorials/behavior.mlx b/tutorials/behavior.mlx
index bd1c4eda..33fdc5cd 100644
Binary files a/tutorials/behavior.mlx and b/tutorials/behavior.mlx differ
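A sketch of how a specific interpreter can be pinned for tests.util.getPythonPath above; the file location and field names follow the function, while the interpreter path itself is an assumption to adjust per system:

    % Persist a Python interpreter path for the test utilities to pick up.
    pythonPath = '/usr/local/bin/python3';  % assumption: adjust for your system
    save(fullfile('+tests', 'env.mat'), 'pythonPath');
    % Without env.mat, getPythonPath falls back to the 'python' on the system path.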
diff --git a/tutorials/dataPipe.m b/tutorials/dataPipe.m
index 0603482d..3f834248 100644
--- a/tutorials/dataPipe.m
+++ b/tutorials/dataPipe.m
@@ -62,14 +62,14 @@
 % compression by manually specifying the chunk size using _chunkSize_ argument.
 %
 % We can demonstrate the benefit of chunking by exploring the following
-% scenario. The following code utilizes DataPipe’s default chunk size:
+% scenario. The following code utilizes DataPipe's default chunk size:
 %
 
 fData = randi(250, 100, 1000); % Create fake data
 
 % create an nwb structure with required fields
 nwb = NwbFile( ...
-    'session_start_time', '2020-01-01 00:00:00', ...
+    'session_start_time', datetime('2020-01-01 00:00:00', 'TimeZone', 'local'), ...
     'identifier', 'ident1', ...
     'session_description', 'DataPipeTutorial');
 
@@ -117,7 +117,7 @@
 
 % create an nwb structure with required fields
 nwb=NwbFile( ...
-    'session_start_time', '2020-01-01 00:00:00', ...
+    'session_start_time', datetime('2020-01-01 00:00:00', 'TimeZone', 'local'), ...
     'identifier', 'ident1', ...
     'session_description', 'DataPipeTutorial');
 
@@ -163,7 +163,7 @@
 
 %assign data without compression
 nwb=NwbFile(...
-    'session_start_time', '2020-01-01 00:00:00', ...
+    'session_start_time', datetime(2020, 1, 1, 0, 0, 0, 'TimeZone', 'local'), ...
     'identifier','ident1', ...
     'session_description', 'DataPipeTutorial');
diff --git a/tutorials/dimensionMapNoDataPipes.mlx b/tutorials/dimensionMapNoDataPipes.mlx
index 443846d6..630ffdc5 100644
Binary files a/tutorials/dimensionMapNoDataPipes.mlx and b/tutorials/dimensionMapNoDataPipes.mlx differ
diff --git a/tutorials/dimensionMapWithDataPipes.mlx b/tutorials/dimensionMapWithDataPipes.mlx
index 4aa7aee3..f24f6505 100644
Binary files a/tutorials/dimensionMapWithDataPipes.mlx and b/tutorials/dimensionMapWithDataPipes.mlx differ
diff --git a/tutorials/dynamic_tables.mlx b/tutorials/dynamic_tables.mlx
index 58d471c8..48c94846 100644
Binary files a/tutorials/dynamic_tables.mlx and b/tutorials/dynamic_tables.mlx differ
diff --git a/tutorials/dynamically_loaded_filters.mlx b/tutorials/dynamically_loaded_filters.mlx
index cfc9d923..a921b4ee 100644
Binary files a/tutorials/dynamically_loaded_filters.mlx and b/tutorials/dynamically_loaded_filters.mlx differ
diff --git a/tutorials/html/dataPipe.html b/tutorials/html/dataPipe.html
index 39c1232c..1d404b4f 100644
--- a/tutorials/html/dataPipe.html
+++ b/tutorials/html/dataPipe.html
[Regenerated HTML export of the dataPipe tutorial. The rendered text mirrors the dataPipe.m changes above (datetime-based session_start_time values, straight apostrophes); the rest of the diff is HTML template and styling churn from re-export.]
diff --git a/tutorials/html/dimensionMapNoDataPipes.html b/tutorials/html/dimensionMapNoDataPipes.html
index f7b7932c..34e8d692 100644
--- a/tutorials/html/dimensionMapNoDataPipes.html
+++ b/tutorials/html/dimensionMapNoDataPipes.html
[Regenerated HTML export of the dimensionMapNoDataPipes tutorial. Besides switching session_start_time to datetime('2022-01-01 00:00:00', 'TimeZone', 'local') and defining the id column as an explicit int64 ElementIdentifiers column, the new export adds an introduction:

"This tutorial demonstrates how the dimensions of a MATLAB array map onto a dataset in HDF5. There are two main differences between the way MATLAB and HDF5 represent dimensions:
  1. C-ordering vs F-ordering: HDF5 is C-ordered, which means it stores data in a rows-first pattern, whereas MATLAB is F-ordered, storing data in the reverse pattern, with the last dimension of the array stored consecutively. The result is that the data in HDF5 is effectively the transpose of the array in MATLAB.
  2. 1D data (i.e., vectors): HDF5 can store 1-D arrays, but in MATLAB the lowest dimensionality of an array is 2-D.
Due to differences in how MATLAB and HDF5 represent data, the dimensions of datasets are flipped when writing to/from file in MatNWB. Additionally, MATLAB represents 1D vectors in a 2D format, either as row vectors or column vectors, whereas HDF5 treats vectors as truly 1D. Consequently, when a 1D dataset from HDF5 is loaded into MATLAB, it is always represented as a column vector. To avoid unintentional changes in data dimensions, it is therefore recommended to avoid writing row vectors into an NWB file for 1D datasets. Contrast this tutorial with the dimensionMapWithDataPipes tutorial, which illustrates how vectors are represented differently when using DataPipe objects within VectorData objects."]
diff --git a/tutorials/html/dimensionMapWithDataPipes.html b/tutorials/html/dimensionMapWithDataPipes.html
index 13527243..fb17536d 100644
--- a/tutorials/html/dimensionMapWithDataPipes.html
+++ b/tutorials/html/dimensionMapWithDataPipes.html
[Regenerated HTML export of the dimensionMapWithDataPipes tutorial. Besides the datetime-based session_start_time and an int64(0:9) id column, the new export adds an introduction:

"This tutorial is easier to follow if you have already looked at the dimensionMapNoDataPipes tutorial, or if you compare the two side by side. The key difference when using DataPipe instead of VectorData is that 1D data can be represented in HDF5 as 2D, thus allowing you to write either row or column vectors. This is made possible by the maxSize property of the DataPipe class, which lets you specify a maximum size for each dimension. By setting maxSize to [1, N] or [N, 1], vectors in HDF5 are represented as 2D arrays, just like in MATLAB. The flipping of dimension order still applies, so a row vector in MATLAB becomes a column vector in HDF5 and vice versa. Please note: the tutorial mixes row and column vectors and does not produce a valid dynamic table; it is only meant to showcase how data map onto HDF5 datasets when using DataPipe objects."]
diff --git a/tutorials/html/dynamic_tables.html b/tutorials/html/dynamic_tables.html
index 3eaab62e..5d46a4fd 100644
--- a/tutorials/html/dynamic_tables.html
+++ b/tutorials/html/dynamic_tables.html
[Regenerated HTML export of the dynamic_tables tutorial. The rendered tutorial text (constructing DynamicTable objects, adding rows and columns, enumerated and ragged array columns, DynamicTableRegion references, conversion to MATLAB tables, and DataPipe-based expandable and multidimensional columns) is unchanged in substance; the diff is re-export churn.]
'stop_time', 30, ...
'randomvalues', rand(3,2,3), ...
'id', 2 ...
);
read_file.intervals_trials.addRow( ...
'start_time', 4, ...
'stop_time', 40, ...
'randomvalues', rand(3,2,8), ...
'id', 3 ...
);
 

Learn More!

Python Tutorial

+

Adding rows

You can add rows to an existing DynamicTable using the object's addRow method. One way to use this method is to pass in the names of columns as parameter names, each followed by the element to append. The data type of each appended element must match the data type of the existing elements in that column.
my_table.addRow('col1', 3, 'col2', {'c'}, 'id', 2);

Adding columns

You can add new columns to an existing DynamicTable object using the addColumn method. One way to use this method is to pass in the name of each new column followed by the corresponding VectorData object. The height of the new columns must match the height of the table.
col3 = types.hdmf_common.VectorData('description', 'column #3', ...
'data', [100; 200; 300]);
col4 = types.hdmf_common.VectorData('description', 'column #4', ...
'data', {'a1'; 'b2'; 'c3'});
 
my_table.addColumn('col3', col3, 'col4', col4);

Enumerated (categorical) data

EnumData is a special type of column for storing an enumerated data type. Each unique value is stored only once, and the data references those values by index. This is more efficient than storing the same value many times, and it has the advantage of communicating to downstream tools that the data is categorical in nature.

Warning Regarding EnumData

EnumData is currently an experimental feature and as such should not be used in a production environment.
CellTypeElements = types.hdmf_common.VectorData(...
'description', 'fixed set of elements referenced by cell_type' ...
, 'data', {'aa', 'bb', 'cc'} ... % the enumerated elements
);
CellType = types.hdmf_experimental.EnumData( ...
'description', 'this column holds categorical variables' ... % properties derived from VectorData
, 'data', [0, 1, 2, 1, 0] ... % zero-indexed offset to elements.
, 'elements', types.untyped.ObjectView(CellTypeElements) ...
);
 
MyTable = types.hdmf_common.DynamicTable('description', 'an example table');
MyTable.vectordata.set('cell_type_elements', CellTypeElements); % the *_elements format is required for compatibility with pynwb
MyTable.addColumn('cell_type', CellType);
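As an illustrative aside (not part of the original tutorial), the categorical labels can be recovered by indexing the elements with the zero-based data values:
% Recover the labels referenced by the EnumData column. The data values are
% zero-based offsets into CellTypeElements, so add 1 for MATLAB indexing.
cellTypeLabels = CellTypeElements.data(CellType.data + 1); % {'aa','bb','cc','bb','aa'}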

Ragged array columns

A table column with a different number of elements for each row is called a "ragged array column." To define a table with a ragged array column, pass both the VectorData and the corresponding VectorIndex as columns of the DynamicTable object. The VectorData column contains the data values, while the VectorIndex column indicates how those values are arranged across rows. By convention, the VectorIndex object corresponding to a particular column must have the same name as that column with the '_index' suffix appended.
Below, the VectorIndex values indicate that the 1st through 3rd elements of the VectorData belong to the first row and that the 4th element belongs to the second row. The resulting table has the cell {'1a'; '1b'; '1c'} in the first row and the cell {'2a'} in the second row.
 
col1 = types.hdmf_common.VectorData( ...
'description', 'column #1', ...
'data', {'1a'; '1b'; '1c'; '2a'} ...
);
 
col1_index = types.hdmf_common.VectorIndex( ...
'description', 'column #1 index', ...
'target',types.untyped.ObjectView(col1), ... % object view of target column
'data', [3; 4] ...
);
 
table_ragged_col = types.hdmf_common.DynamicTable( ...
'description', 'an example table', ...
'colnames', {'col1'}, ...
'col1', col1, ...
'col1_index', col1_index, ...
'id', types.hdmf_common.ElementIdentifiers('data', [0; 1]) ... % 0-indexed, for compatibility with Python
);
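To make the indexing concrete, here is an illustrative decode (an aside, assuming HDMF's cumulative-index convention):
% Each VectorIndex entry is the cumulative end position of its row, so row k
% spans data(index(k-1)+1 : index(k)) of the target column.
row2 = col1.data(col1_index.data(1)+1 : col1_index.data(2)); % -> {'2a'}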

Adding ragged array rows

You can add a new row to a ragged array column with addRow. Under the hood, the addRow method appends the appropriate value to the VectorIndex column to maintain proper formatting.
table_ragged_col.addRow('col1', {'3a'; '3b'; '3c'}, 'id', 2);
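As a quick check (illustrative, not in the original tutorial), the appended row can be read back with getRow:
% The new third row should contain the three elements that were just appended.
table_ragged_col.getRow(3) % expected col1 value: {'3a'; '3b'; '3c'}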

Accessing row elements

You can access entire rows of a DynamicTable object by calling its getRow method. You can supply either an individual row number or an array of row numbers.
my_table.getRow(1)
ans = 1×4 table
         col1    col2    col3    col4
    1     1      'a'     100     'a1'
If you want to access values for just a subset of columns, pass in the 'columns' argument along with a cell array of the desired column names.
my_table.getRow(1:3, 'columns', {'col1'})
ans = 3×1 table
         col1
    1     1
    2     2
    3     3
You can also access specific rows by their corresponding row IDs, if they have been defined, by setting the 'useId' parameter to true.
my_table.getRow(1, 'useId', true)
ans = 1×4 table
         col1    col2    col3    col4
    1     2      'b'     200     'b2'
For ragged array columns, the getRow method returns a cell array with a different number of elements for each row.
table_ragged_col.getRow(1:2)
ans = 2×1 table
         col1
    1    [{'1a'}; {'1b'}; {'1c'}]
    2    1×1 cell

Accessing column elements

To access all rows of a particular column, use the get method on the vectordata property of the DynamicTable object.
 
my_table.vectordata.get('col2').data
ans = 3×1 cell
'a'
'b'
'c'

Referencing rows of other tables

You can create a column that references rows of other tables by adding a DynamicTableRegion object as a column of a DynamicTable. This is analogous to a foreign key in a relational database. The DynamicTableRegion class takes an ObjectView object as an argument; ObjectView objects create links that reference other objects.
dtr_col = types.hdmf_common.DynamicTableRegion( ...
'description', 'references multiple rows of earlier table', ...
'data', [0; 1; 1; 0], ... % 0-indexed
'table',types.untyped.ObjectView(my_table) ... % object view of target table
);
 
data_col = types.hdmf_common.VectorData( ...
'description', 'data column', ...
'data', {'a'; 'b'; 'c'; 'd'} ...
);
 
dtr_table = types.hdmf_common.DynamicTable( ...
'description', 'test table with DynamicTableRegion', ...
'colnames', {'dtr_col','data_col'}, ...
'dtr_col', dtr_col, ...
'data_col',data_col, ...
'id',types.hdmf_common.ElementIdentifiers('data', [0; 1; 2; 3]) ...
);
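As an illustrative aside (not part of the original tutorial), a DynamicTableRegion entry can be dereferenced manually back to a row of the target table:
% Region data is 0-indexed, so add 1 before calling getRow on the target table.
referencedRow = my_table.getRow(dtr_col.data(1) + 1); % the row that the first entry points to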

Converting a DynamicTable to a MATLAB table

You can convert a DynamicTable object to a MATLAB table by making use of the object's toTable method. This is a useful way to view the whole table in a human-readable format.
my_table.toTable()
ans = 3×5 table
         id    col1    col2    col3    col4
    1    0      1      'a'     100     'a1'
    2    1      2      'b'     200     'b2'
    3    2      3      'c'     300     'c3'
When the DynamicTable object contains a column that references other tables, you can pass in a Boolean indicating whether to include just the row indices of the referenced table. Passing in false includes the referenced rows as nested tables.
dtr_table.toTable(false)
ans = 4×3 table
         id    dtr_col      data_col
    1    0     1×4 table    'a'
    2    1     1×4 table    'b'
    3    2     1×4 table    'c'
    4    3     1×4 table    'd'

Creating an expandable table

When using the default HDF5 backend, each column of these tables is an HDF5 dataset, which by default has a fixed size. This means that once a file is written, it is not possible to add a new row. If you want to be able to save the file, load it, and then add more rows to the table, you must set this up when you create the VectorData and ElementIdentifiers columns of a DynamicTable. Specifically, you must wrap the column data in a DataPipe object. The DataPipe class takes maxSize and axis as arguments, indicating the maximum desired size of each axis and the axis along which to append, respectively. For example, a DataPipe object with a maxSize of [Inf, 1] indicates that the number of rows may grow indefinitely. In contrast, a maxSize of [8, 1] would allow the column to grow to a maximum height of 8.
% create NwbFile object with required fields
file = NwbFile( ...
'session_start_time', datetime('2021-01-01 00:00:00', 'TimeZone', 'local'), ...
'identifier', 'ident1', ...
'session_description', 'ExpandableTableTutorial' ...
);
 
% create VectorData objects with DataPipe objects
start_time_exp = types.hdmf_common.VectorData( ...
'description', 'start times column', ...
'data', types.untyped.DataPipe( ...
'data', [1, 2], ... % data must be numerical
'maxSize', Inf ...
) ...
);
 
stop_time_exp = types.hdmf_common.VectorData( ...
'description', 'stop times column', ...
'data', types.untyped.DataPipe( ...
'data', [2, 3], ... % data must be numerical
'maxSize', Inf ...
) ...
);
 
random_exp = types.hdmf_common.VectorData( ...
'description', 'random data column', ...
'data', types.untyped.DataPipe( ...
'data', rand(5, 2), ... % data must be numerical
'maxSize', [5, Inf], ...
'axis', 2 ...
) ...
);
 
ids_exp = types.hdmf_common.ElementIdentifiers( ...
'data', types.untyped.DataPipe( ...
'data', int32([0; 1]), ... % data must be numerical
'maxSize', Inf ...
) ...
);
% create expandable table
colnames = {'start_time', 'stop_time', 'randomvalues'};
file.intervals_trials = types.core.TimeIntervals( ...
'description', 'test expandable dynamic table', ...
'colnames', colnames, ...
'start_time', start_time_exp, ...
'stop_time', stop_time_exp, ...
'randomvalues', random_exp, ...
'id', ids_exp ...
);
% export file
nwbExport(file, 'expandableTableTestFile.nwb');
Now you can read in the file, add more rows, and save it again:
readFile = nwbRead('expandableTableTestFile.nwb', 'ignorecache');
readFile.intervals_trials.addRow( ...
'start_time', 3, ...
'stop_time', 4, ...
'randomvalues', rand(5,1), ...
'id', 2 ...
)
nwbExport(readFile, 'expandableTableTestFile.nwb');
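As an aside (illustrative, not part of the original tutorial), data read back from disk is loaded lazily; calling load() on a column's data brings it into memory as a MATLAB array:
% Load the expandable column from the file-backed table into memory.
randomValues = readFile.intervals_trials.vectordata.get('randomvalues').data.load();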
Note: DataPipe objects change how the dimensions of each column's dataset map onto the shape of the corresponding HDF5 dataset. See the README for more details.

Multidimensional Columns

The order of dimensions of multidimensional columns in MatNWB is reversed relative to the Python HDMF package (see the README for a detailed explanation). Therefore, the height of a multidimensional column belonging to a DynamicTable object is defined by the size of its last dimension. A valid DynamicTable must have matching heights across all of its columns.

Constructing multidimensional columns

% Define 1D column
simple_col = types.hdmf_common.VectorData( ...
'description', '1D column',...
'data', rand(10,1) ...
);
% Define ND column
multi_col = types.hdmf_common.VectorData( ...
'description', 'multidimensional column',...
'data', rand(3,2,10) ...
);
% construct table
multi_dim_table = types.hdmf_common.DynamicTable( ...
'description','test table', ...
'colnames', {'simple','multi'}, ...
'simple', simple_col, ...
'multi', multi_col, ...
'id', types.hdmf_common.ElementIdentifiers('data', (0:9)') ... % 0-indexed, for compatibility with Python
);
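As an illustrative check (not part of the original tutorial), the two columns above have matching heights because the size of the last dimension of the multidimensional column equals the length of the 1D column:
% Both columns must have a height of 10 for the table to be valid.
assert(size(multi_col.data, 3) == numel(simple_col.data))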
 

Multidimensional ragged array columns

DynamicTable objects with multidimensional ragged array columns can be constructed by passing in the corresponding VectorIndex column:
% Define column with data
multi_ragged_col = types.hdmf_common.VectorData( ...
'description', 'multidimensional ragged array column',...
'data', rand(2,3,5) ...
);
% Define column with VectorIndex
multi_ragged_index = types.hdmf_common.VectorIndex( ...
'description', 'index to multi_ragged_col', ...
'target', types.untyped.ObjectView(multi_ragged_col),'data', [2; 3; 5] ...
);
 
multi_ragged_table = types.hdmf_common.DynamicTable( ...
'description','test table', ...
'colnames', {'multi_ragged'}, ...
'multi_ragged', multi_ragged_col, ...
'multi_ragged_index', multi_ragged_index, ...
'id', types.hdmf_common.ElementIdentifiers('data', [0; 1; 2]) ... % 0-indexed, for compatibility with Python
);
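To make the layout concrete, here is an illustrative decode (an aside, assuming the cumulative-index convention): with index data [2; 3; 5], row 1 spans slices 1:2 along the last dimension, row 2 spans slice 3, and row 3 spans slices 4:5.
% Extract the first row's data: slices 1 through 2 along the last axis.
row1_data = multi_ragged_col.data(:, :, 1:2); % 2×3×2 array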

Adding rows to multidimensional array columns

DynamicTable objects with multidimensional array columns can also be constructed by adding a single row at a time. This approach requires DataPipe objects, because MATLAB does not preserve trailing singleton dimensions for arrays with more than two dimensions. The code block below demonstrates how to build a DynamicTable object with a multidimensional ragged array column in this manner.
% Create file
file = NwbFile( ...
'session_start_time', datetime('2021-01-01 00:00:00', 'TimeZone', 'local'), ...
'identifier', 'ident1', ...
'session_description', 'test_file' ...
);
 
% Define Vector Data Objects with first row of table
start_time_exp = types.hdmf_common.VectorData( ...
'description', 'start times column', ...
'data', types.untyped.DataPipe( ...
'data', 1, ...
'maxSize', Inf ...
) ...
);
stop_time_exp = types.hdmf_common.VectorData( ...
'description', 'stop times column', ...
'data', types.untyped.DataPipe( ...
'data', 10, ...
'maxSize', Inf ...
) ...
);
random_exp = types.hdmf_common.VectorData( ...
'description', 'random data column', ...
'data', types.untyped.DataPipe( ...
'data', rand(3,2,5), ... % random data
'maxSize', [3, 2, Inf], ...
'axis', 3 ...
) ...
);
random_exp_index = types.hdmf_common.VectorIndex( ...
'description', 'index to random data column', ...
'target',types.untyped.ObjectView(random_exp), ...
'data', types.untyped.DataPipe( ...
'data', uint64(5), ...
'maxSize', Inf ...
) ...
);
ids_exp = types.hdmf_common.ElementIdentifiers( ...
'data', types.untyped.DataPipe( ...
'data', int64(0), ... % data must be numerical
'maxSize', Inf ...
) ...
);
% Create expandable table
colnames = {'start_time', 'stop_time', 'randomvalues'};
file.intervals_trials = types.core.TimeIntervals( ...
'description', 'test expandable dynamic table', ...
'colnames', colnames, ...
'start_time', start_time_exp, ...
'stop_time', stop_time_exp, ...
'randomvalues', random_exp, ...
'randomvalues_index', random_exp_index, ...
'id', ids_exp ...
);
% Export file
nwbExport(file, 'multiRaggedExpandableTableTest.nwb');
% Read in file
read_file = nwbRead('multiRaggedExpandableTableTest.nwb', 'ignorecache');
% add individual rows
read_file.intervals_trials.addRow( ...
'start_time', 2, ...
'stop_time', 20, ...
'randomvalues', rand(3,2,6), ...
'id', 1 ...
);
read_file.intervals_trials.addRow( ...
'start_time', 3, ...
'stop_time', 30, ...
'randomvalues', rand(3,2,3), ...
'id', 2 ...
);
read_file.intervals_trials.addRow( ...
'start_time', 4, ...
'stop_time', 40, ...
'randomvalues', rand(3,2,8), ...
'id', 3 ...
);
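As an illustrative follow-up (mirroring the earlier expandable-table example; not part of the original tutorial), the grown table can be re-exported and read back to confirm the new rows were persisted:
% Re-export the grown table, then re-read and inspect the row ids (expect 0 through 3).
nwbExport(read_file, 'multiRaggedExpandableTableTest.nwb');
verify_file = nwbRead('multiRaggedExpandableTableTest.nwb', 'ignorecache');
disp(verify_file.intervals_trials.id.data.load())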
 

Learn More!

Python Tutorial