diff --git a/.github/workflows/phantoms.yaml b/.github/workflows/phantoms.yaml index 39452265..053b7e5f 100644 --- a/.github/workflows/phantoms.yaml +++ b/.github/workflows/phantoms.yaml @@ -67,6 +67,18 @@ jobs: brew install dcm2niix fi + - name: Debug + uses: mxschmitt/action-tmate@v3 + if: ${{ github.event_name == 'workflow_dispatch' && inputs.debug_enabled }} + timeout-minutes: 15 + with: + limit-access-to-actor: true + + - name: Check dcm2niix is installed and on path + if: runner.os != 'Windows' + run: + dcm2niix -h + - name: Install dcm2niix windows if: runner.os == 'Windows' run: | diff --git a/.gitignore b/.gitignore index a3fb25a1..f145782a 100644 --- a/.gitignore +++ b/.gitignore @@ -58,6 +58,12 @@ D:\\BIDS* *OpenNeuroPET-Phantoms* **PHANTOMS.zip ecat_validation/ECAT7_multiframe.v +variables.env +ecat_testing/matlab/ +ecat_testing/steps/ +ecat_testing/**/*.nii* +*osem* +*fbp* + +matlab/SiemensHRRTparameters.txt -ecat_testing/ -SiemensHRRTparameters.txt \ No newline at end of file diff --git a/CITATION.cff b/CITATION.cff index 0f29efc0..f1bfda08 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -31,11 +31,17 @@ authors: - family-names: "Ganz-Benjaminsen" given-names: "Melanie" orcid: "https://orcid.org/0000-0002-9120-8098" +- family-names: "Bilgel" + given-names: "Murat" + orcid: "https://orcid.org/0000-0001-5042-7422" +- family-names: "Eierud" + given-names: "Cyrus" + orcid: "https://orcid.org/0000-0002-9942-676X" - family-names: "Pernet" given-names: "Cyril" orcid: "https://orcid.org/0000-0003-4010-4632" title: "PET2BIDS" -version: 1.0.0 +version: 1.3.20240502 url: "https://github.com/openneuropet/PET2BIDS" date-released: 2022-10-01 license: MIT diff --git a/JOSS_metadata/persons_info.txt b/JOSS_metadata/persons_info.txt index eeb76abb..9045d569 100644 --- a/JOSS_metadata/persons_info.txt +++ b/JOSS_metadata/persons_info.txt @@ -21,3 +21,5 @@ 0000-0002-9120-8098 Melanie Ganz-Benjaminse Department of Computer Science, University of Copenhagen, Copenhagen, Denmark 0000-0003-4010-4632 Cyril Pernet Neurobiology Research Unit, Rigshospitalet, Copenhagen, Denmark + +0000-0001-5042-7422 Murat Bilgel National Institute on Aging Intramural Research Program, Baltimore, MD, USA diff --git a/contributors.md b/contributors.md index 3c2da6d7..5ec83807 100644 --- a/contributors.md +++ b/contributors.md @@ -3,7 +3,7 @@ The following individuals have contributed to the PET2BIDS project (in alphabetical order). If you contributed and some icons needs to be added or your name is not listed, please add it. -Murat Bilgel 💻 🐛 💡 +Murat Bilgel 💻 🐛 💡 Anthony Galassi 💻 📖 💬 🎨 💡 ⚠️ Melanie Ganz-Benjaminsen 🔍 💬 🤔 📋 Gabriel Gonzalez-Escamilla 💻 ⚠️ 🐛 👀 diff --git a/ecat_testing/ints/README.md b/ecat_testing/ints/README.md new file mode 100644 index 00000000..acdf0891 --- /dev/null +++ b/ecat_testing/ints/README.md @@ -0,0 +1,60 @@ +# matlab_numpy_ints + +This is a very simple example of testing matlab vs python reading and writing of ecat files scaling up to a more +complex set of testing that reads real ecat data and writes it to niftis. + +## Dead Simple Testing / Proofing + +Then simplest test cases that use "imaging" data made of small 2 x 1 arrays of integer data can found in: + +`ints.m` and `ints.py` + +## Outline of Testing on Real Data (e.g. steps required to read and write ecat into Nifti) + +A part of this debugging process is to capture the state of inputs and outputs through each iteration of the python +and matlab code. 
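+
+To make the "capture at each step" idea concrete, the sketch below shows the kind of dump the debugging
+helpers produce when the `ECAT_SAVE_STEPS=1` environment variable described below is set: for a 4D volume,
+write the middle 2D slice of the first, middle, and last frames to tab-separated text. The actual helper used
+in this PR is `first_middle_last_frames_to_text` (see `read_matlab_nii.py`); the function name, synthetic
+array, and output directory in this sketch are illustrative placeholders only.
+
+```python
+import os
+import pathlib
+import numpy
+
+steps_dir = pathlib.Path("steps")
+
+
+def dump_first_middle_last(four_d_array, output_folder, step_name):
+    """Save the middle 2D slice of the first, middle, and last frames as TSV files."""
+    frames = [0, four_d_array.shape[-1] // 2, four_d_array.shape[-1] - 1]
+    for f in frames:
+        # take the middle slice along the third (z) axis of frame f
+        middle_slice = four_d_array[:, :, four_d_array.shape[2] // 2, f]
+        numpy.savetxt(output_folder / f"{step_name}_frame_{f}.tsv",
+                      middle_slice, delimiter="\t", fmt="%s")
+
+
+if os.environ.get("ECAT_SAVE_STEPS") == "1":
+    steps_dir.mkdir(exist_ok=True)
+    img = numpy.zeros((4, 4, 4, 3), dtype=numpy.int16)  # placeholder for real ECAT image data
+    dump_first_middle_last(img, steps_dir, "4_read_img_ecat_python")
+```
+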
+These outputs are cataloged and saved in this directory (`ecat_testing/ints/`) within the subdirectory
+`steps/`. The naming, order, and description of each step are laid out in the table below for easy reference. At each
+of these steps the outputs are saved in the `steps/` directory with the file stem (and the python or matlab code used to
+make them) given in the table. These temporary files are written if and only if the environment variable
+`ECAT_SAVE_STEPS=1` is set.
+
+| Step | Description                                            | Matlab         | Python         | FileStem                     |
+|------|--------------------------------------------------------|----------------|----------------|------------------------------|
+| 1    | Read main header                                       | `read_ECAT7.m` | `read_ecat.py` | `1_read_mh_ecat*`            |
+| 2    | Read subheaders                                        | `read_ECAT7.m` | `read_ecat.py` | `2_read_sh_ecat*`            |
+| 3    | Determine file/data type                               | `read_ECAT7.m` | `read_ecat.py` | `3_determine_data_type*`     |
+| 4    | Read image data                                        | `read_ECAT7.m` | `read_ecat.py` | `4_read_img_ecat*`           |
+| 5    | Scale if calibrated                                    | `read_ECAT7.m` | `read_ecat.py` | `5_scale_img_ecat*`          |
+| 6    | Pass data to ecat2nii                                  | `ecat2nii.m`   | `ecat2nii.py`  | `6_ecat2nii*`                |
+| 7    | Flip ECAT data into NIfTI space                        | `ecat2nii.m`   | `ecat2nii.py`  | `7_flip_ecat2nii*`           |
+| 8    | Rescale to 16 bits                                     | `ecat2nii.m`   | `ecat2nii.py`  | `8_rescale_to_16_ecat2nii*`  |
+| 9    | Calibration units scaling                              | `ecat2nii.m`   | `ecat2nii.py`  | `9_scal_cal_units_ecat2nii*` |
+| 10   | Save to NIfTI                                          | `ecat2nii.m`   | `ecat2nii.py`  | `10_save_nii_ecat2nii*`      |
+| 11   | Check that values are recorded as they should be       | `ecat2nii.m`   | `ecat2nii.py`  | `11_read_saved_nii*`         |
+
+1. Read main header: the first step in reading the ecat file. The main header contains information about the
+   file, the subheaders, and the image data; it is saved as JSON for comparison.
+2. Read subheaders: the subheaders contain information about the image data; they are saved as JSON for comparison
+   as well.
+3. Determine file/data type: determine the type of data in the image data. This is saved as a JSON (or possibly a
+   single-line text file) specifying the datatype and endianness.
+4. Read image data: the final step in reading the ecat file. The image data is read in and examined at
+   3 different time points (if available), e.g. the first frame, the middle frame, and the final frame. Only a single 2D
+   slice is saved from each of these time points, taken from the "middle" of its 3D volume. We are
+   only attempting to compare whether python and matlab have done a decent job of reading the data as recorded in the file.
+5. Repeat step 4, but scale the data if it should be scaled.
+6. Save objects for comparison (as best as one can) before they are passed to the ecat2nii function. This includes
+   the main header, subheaders, and image data.
+7. Return the transformed data to NIfTI space from ecat. This follows the 3 flip-dimension steps performed across
+   the 3D image data. This output uses the same frames as steps 4 and 5.
+8. Rescale the data to 16 bits: this should only occur if the data is 12 bit, as is sometimes the case with ecat data.
+   Note that attempting these rescaling steps in Python starts to produce wildly different values than matlab. It is
+   most likely not necessary to do this step, as the data is handled in numpy, but that remains to be confirmed.
+9. 
Calibration Units: (Search for 'calibration_units == 1' to locate this in code) Here we can potentially alter the + data again by scaling, rounding, and converting from int to float. As in steps 4 and 5 we will save the first, + middle, and last frames of the data as 2D slices in the middle of their respective 3D volumes for comparison. +10. Save to nifti: the data is saved to a nifti file and the output is saved for comparison as .nii files named in line + with the FileStem column above. +11. Additional Steps: TBD diff --git a/ecat_testing/ints/check_ecat_read.py b/ecat_testing/ints/check_ecat_read.py new file mode 100644 index 00000000..bceb9924 --- /dev/null +++ b/ecat_testing/ints/check_ecat_read.py @@ -0,0 +1,14 @@ +import numpy +import nibabel +import dotenv +import os + + + +# load the environment variables +dotenv.load_dotenv('variables.env') + +# get the path to the input ecat file from the environment variables +ecat_file = os.getenv('WELL_BEHAVED_ECAT_FILE') +wustl_fbp_ecat=os.getenv('WUSTL_FBP_ECAT') +wustl_osem_ecat=os.getenv('WUSTL_OSEM_ECAT') \ No newline at end of file diff --git a/ecat_testing/ints/ints.m b/ecat_testing/ints/ints.m new file mode 100644 index 00000000..0f15b1f9 --- /dev/null +++ b/ecat_testing/ints/ints.m @@ -0,0 +1,55 @@ +env = loadenv('variables.env'); +ecat_file=env('WELL_BEHAVED_ECAT_FILE'); +wustl_fbp_ecat=env('WUSTL_FBP_ECAT'); +wustl_osem_ecat=env('WUSTL_OSEM_ECAT'); + +% convert the following ints to bytes and write them to a file +ints_to_write = [-32768, 32767] + + +% use the same format as the python version of this code +destination_file = 'matlab_sample_bytes.bytes' + +% write all the ints as bytes to the file +fid = fopen(destination_file, 'w'); +disp("writing ints_to_write to sample file"); +disp(ints_to_write); +fwrite(fid, ints_to_write, 'int16', 'b'); +%for i = 1:length(ints_to_write) +% fwrite(fid, typecast(ints_to_write(i), 'int16'), 'int16', 'b'); +%end +fclose(fid); + +% read out the bytes as written to the sample file +fid = fopen('matlab_sample_bytes.bytes', 'r'); +bytes = fread(fid); +disp("bytes written from matlab in matlab_sample_bytes.byte and read with fread (no arguments)"); +disp(bytes); +fclose(fid); + +% read in bytes from python, at least I have a good idea of what that is written as +fid = fopen('python_sample_bytes.bytes', 'r'); +python_bytes = fread(fid); +disp("bytes written from python in python_sample_bytes.byte and read with fread (no arguments)"); +disp(python_bytes); + +various_data_types = {'int16=>int16', 'int16', 'uint16'}; + +disp("now we open the matlab file and read the bytes using multiple arguments for fread"); + +% read in the bytes as int16 + +calibration_factor = 0.7203709074867248; + +for t = 1:length(various_data_types) + % oddly enough varying the second argument to fread doesn't seem to change the output + fid = fopen(destination_file, 'r', 'ieee-be'); + various_bytes = fread(fid, various_data_types{t}); + disp(['datatype used for reading in bytes: ', various_data_types{t}]); + disp(various_bytes); + disp(various_data_types{t}); + fclose(fid); + + % scale the data to see what happens + scaled_bytes = calibration_factor * various_bytes +end diff --git a/ecat_testing/ints/ints.py b/ecat_testing/ints/ints.py new file mode 100644 index 00000000..15b419fa --- /dev/null +++ b/ecat_testing/ints/ints.py @@ -0,0 +1,97 @@ +import numpy +import nibabel +import dotenv +import os + +# load the environment variables +dotenv.load_dotenv('variables.env') + +# get the path to the input ecat file from the environment 
variables +ecat_file = os.getenv('WELL_BEHAVED_ECAT_FILE') +wustl_fbp_ecat=os.getenv('WUSTL_FBP_ECAT') +wustl_osem_ecat=os.getenv('WUSTL_OSEM_ECAT') + + +def create_sample_bytes(destination_file='python_sample_bytes.bytes'): + """ + Create a file with two 16-bit integers, one positive and one negative. + + :param destination_file: _description_, defaults to 'sample_bytes.bytes' + :type destination_file: str, optional + """ + bytes_to_write = [-32768, 32767] + with open(destination_file, 'wb') as file: + byte_order = 'big' + signed = True + for b in bytes_to_write: + print(f"Writing {b} to {destination_file}, byte order: {byte_order}, signed: {signed}") + file.write(b.to_bytes(2, byteorder=byte_order, signed=True)) + + +def read_bytes(file_name='python_sample_bytes.bytes'): + """ + Open a byte file and read in the bytes without any extra fluff. + + :param file_name: the file to read bytes from, defaults to 'sample_bytes.bytes' + :type file_name: str, optional + :return: the bytes read from the file + :rtype: bytes + """ + with open(file_name, 'rb') as file: + bytes_read = file.read() + print(f"Read these bytes from {file_name}: {bytes_read}") + return bytes_read + + +# create the bytes to be read +create_sample_bytes() +# read the bytes +read_nums = read_bytes() + +# these are some of the datatypes we wish to test with numpy +# i2 = integer 16 signed, >i2 = integer 16 big endian signed, H = integer 16 unsigned +dtypes = ['i2', '>i2', 'H'] + +# create a dictionary to hold the arrays we create with numpy +arrays_by_dtype = {} + +# iterate through the datatypes and create arrays with numpy +for d in dtypes: + numpy_int16 = numpy.frombuffer(read_nums, dtype=d) + print(f"Reading bytes with numpy given datatype: {d}\nArray is listing this as : {numpy_int16.dtype} {numpy_int16}") + arrays_by_dtype[d] = numpy_int16 + +print(f"Arrays by dtype: {arrays_by_dtype}") + +# next we go through the same steps that the ecat converter does but in miniature since a 1x2 array is easier for us +# hairless apes to comprehend than a near as enough to make no difference N dimensional array + +# scale it the calibration factor we have been dealing with in these wustl ecats is 0.7203709074867248 +calibration_factor = 0.7203709074867248 + +scaled_arrays = {} + +for k, v in arrays_by_dtype.items(): + # try recasting the scaled array after the multiplication + scaled_arrays[k] = v * calibration_factor + +print(f"These are the arrays after being scaled by {calibration_factor}: {scaled_arrays}") + +# write these out to nifti's +for k, v in scaled_arrays.items(): + nifti = nibabel.Nifti1Image(v, affine=numpy.eye(4)) + input_data_type = v.dtype + nibabel_data_type = nifti.get_data_dtype() + print(f"Input data type: {input_data_type}, Nibabel data type: {nibabel_data_type}") + nibabel.save(nifti, f"nibabel_{k}.nii.gz") + print(f"Saved array to numpy_{k}.nii.gz") + print(f"loading that array results in the following: {nibabel.load(f'nibabel_{k}.nii.gz').get_fdata()}") + +# what happens if we don't scale the arrays and write them to nifti +for k, v in arrays_by_dtype.items(): + input_data_type = v.dtype + nibabel_data_type = nifti.get_data_dtype() + print(f"Input data type: {input_data_type}, Nibabel data type: {nibabel_data_type}") + nibabel.save(nifti, f"nibabel_{k}_unscaled.nii.gz") + print(f"Saved array to numpy_{k}_unscaled.nii.gz") + print(f"loading that array results in the following: {nibabel.load(f'nibabel_{k}_unscaled.nii.gz').get_fdata()}") diff --git a/ecat_testing/ints/matlab_sample_bytes.bytes 
b/ecat_testing/ints/matlab_sample_bytes.bytes new file mode 100644 index 00000000..68756cd4 Binary files /dev/null and b/ecat_testing/ints/matlab_sample_bytes.bytes differ diff --git a/ecat_testing/ints/nii_tool.m b/ecat_testing/ints/nii_tool.m new file mode 100755 index 00000000..c0b4540e --- /dev/null +++ b/ecat_testing/ints/nii_tool.m @@ -0,0 +1,1249 @@ +function varargout = nii_tool(cmd, varargin) +% +% Copyright (c) 2016, Xiangrui Li https://github.com/xiangruili/dicm2nii +% BSD-2-Clause License :: commit Mar 8, 2021 +% Basic function to create, load and save NIfTI file. +% +% :format: - rst = nii_tool('cmd', para); +% +% % .. note:: To list all command, type 'nii_tool ?' +% +% To get help information for each command, include '?' in cmd, for example: +% nii_tool init? +% nii_tool('init?') +% +% Here is a list of all command: +% +% nii_tool('default', 'version', 1, 'rgb_dim', 1); +% nii = nii_tool('init', img); +% nii = nii_tool('update', nii, mat); +% nii_tool('save', nii, filename, force_3D); +% hdr = nii_tool('hdr', filename); +% img = nii_tool('img', filename_or_hdr); +% ext = nii_tool('ext', filename_or_hdr); +% nii = nii_tool('load', filename_or_hdr); +% nii = nii_tool('cat3D', filenames); +% nii_tool('RGBStyle', 'afni'); +% +% Detail for each command is described below. +% +% oldVal = nii_tool('default', 'version', 1, 'rgb_dim', 1); +% oldVal = nii_tool('default', struct('version', 1, 'rgb_dim', 1)); +% +% - Set/query default NIfTI version and/or rgb_dim. To check the setting, run +% nii_tool('default') without other input. The input for 'default' command can +% be either a struct with fields of 'version' and/or 'rgb_dim', or +% parameter/value pairs. See nii_tool('RGBstyle') for meaning of rgb_dim. +% +% Note that the setting will be saved for future use. If one wants to change the +% settting temporarily, it is better to return the oldVal, and to restore it +% after done: +% +% oldVal = nii_tool('default', 'version', 2); % set version 2 as default +% % 'init' and 'save' NIfTI using above version +% nii_tool('default', 'version', oldVal); % restore default setting +% +% The default version setting affects 'init' command only. If you 'load' a NIfTI +% file, modify it, and then 'save' it, the version will be the same as the +% original file, unless it is changed explicitly (see help for 'save' command). +% All 'load' command ('load', 'hdr', 'ext', 'img') will read any version +% correctly, regardless of version setting. +% +% nii = nii_tool('init', img, RGB_dim); +% +% - Initialize nii struct based on img, normally 3D or 4D array. Most fields in +% the returned nii.hdr contain default values, and need to be updated based on +% dicom or other information. Important ones include pixdim, s/qform_code and +% related parameters. +% +% The NIfTI datatype will depend on data type of img. Most Matlab data types are +% supported, including 8/16/32/64 bit signed and unsigned integers, single and +% double floating numbers. Single/double complex and logical array are also +% supported. +% +% The optional third input, RGB_dim, is needed only if img contains RGB/RGBA +% data. It specifies which dimension in img encodes RGB or RGBA. In other words, +% if a non-empty RGB_dim is provided, img will be interpreted as RGB/RGBA data. +% +% Another way to signify RGB/RGBA data is to permute color dim to 8th-dim of img +% (RGB_dim of 8 can be omitted then). Since NIfTI img can have up to 7 dim, +% nii_tool chooses to store RGB/RGBA in 8th dim. 
Although this looks lengthy +% (4th to 7th dim are often all ones), nii_tool can deal with up to 7 dim +% without causing any confusion. This is why the returned nii.img always stores +% RGB in 8th dim. +% +% +% nii = nii_tool('update', nii, mat); +% +% - Update nii.hdr according to nii.img. This is useful if one changes nii.img +% type or dimension. The 'save' command calls this internally, so it is not +% necessary to call this before 'save'. A useful case to call 'update' is that +% one likes to use nii struct without saving it to a file, and 'update' will +% make nii.hdr.dim and others correct. +% +% If the 3rd input, new transformation matrix is provided, it will be set as the +% sform transformation matrix. +% +% +% hdr = nii_tool('hdr', filename); +% +% - Return hdr struct of the provided NIfTI file. This is useful to check NIfTI +% hdr, and it is much faster than 'load', especially for .gz file. +% +% +% img = nii_tool('img', filename_or_hdr); +% +% - Return image data in a NIfTI file. The second input can be NIfTI file name, +% or hdr struct returned by nii_tool('hdr', filename). +% +% +% ext = nii_tool('ext', filename_or_hdr); +% +% - Return NIfTI extension in a NIfTI file. The second input can be NIfTI file +% name, or hdr struct returned by nii_tool('hdr', filename). The returned ext +% will have field 'edata_decoded' if 'ecode' is of known type, such as dicom +% (2), text (4 or 6) or Matlab (40). +% +% Here is an example to add data in myFile.mat as extension to nii struct, which +% can be from 'init' or 'load': +% +% fid = fopen('myFile.mat'); % open the MAT file +% myEdata = fread(fid, inf, '*uint8'); % load all bytes as byte column +% fclose(fid); +% len = int32(numel(myEdata)); % number of bytes in int32 +% myEdata = [typecast(len, 'uint8')'; myEdata]; % include len in myEdata +% nii.ext.ecode = 40; % 40 for Matlab extension +% nii.ext.edata = myEdata; % myEdata must be uint8 array +% +% nii_tool will take care of rest when you 'save' nii to a file. +% +% In case a NIfTI ext causes problem (for example, some FSL builds have problem +% in reading NIfTI img with ecode>30), one can remove the ext easily: +% +% nii = nii_tool('load', 'file_with_ext.nii'); % load the file with ext +% nii.ext = []; % or nii = rmfield(nii, 'ext'); % remove ext +% nii_tool('save', nii, 'file_without_ext.nii'); % save it +% +% +% nii = nii_tool('load', filename_or_hdr); +% +% - Load NIfTI file into nii struct. The returned struct includes NIfTI 'hdr' +% and 'img', as well as 'ext' if the file contains NIfTI extension. +% +% nii_tool returns nii.img with the same data type as stored in the file, while +% numeric values in hdr are in double precision for convenience. +% +% +% nii_tool('save', nii, filename, force_3D); +% +% - Save struct nii into filename. The format of the file is determined by the +% file extension, such as .img, .nii, .img.gz, .nii.gz etc. If filename is not +% provided, nii.hdr.file_name must contain a file name. Note that 'save' command +% always overwrites file in case of name conflict. +% +% If filename has no extension, '.nii' will be used as default. +% +% If the 4th input, force_3D, is true (default false), the output file will be +% 3D only, which means multiple volume data will be split into multiple files. +% This is the format SPM likes. You can use this command to convert 4D into 3D +% by 'load' a 4D file, then 'save' it as 3D files. The 3D file names will have +% 5-digit like '_00001' appended to indicate volume index. 
+% +% The NIfTI version can be set by nii_tool('default'). One can override the +% default version by specifying it in nii.hdr.version. To convert between +% versions, load a NIfTI file, specify new version, and save it. For example: +% +% nii = nii_tool('load', 'file_nifti1.nii'); % load version 1 file +% nii.hdr.version = 2; % force to NIfTI-2 +% nii_tool('save', nii, 'file_nifti2.nii'); % save as version 2 file +% +% Following example shows how to change data type of a nii file: +% nii = nii_tool('load', 'file_int16.nii'); % load int16 type file +% nii.img = single(nii.img); % change data type to single/float32 +% nii_tool('save', nii, 'file_float.nii'); % nii_tool will take care of hdr +% +% +% nii = nii_tool('cat3D', files); +% +% - Concatenate SPM 3D files into a 4D dataset. The input 'files' can be cellstr +% with file names, or char with wildcards (* or ?). If it is cellstr, the volume +% order in the 4D data corresponds to those files. If wildcards are used, the +% volume order is based on alphabetical order of file names. +% +% Note that the files to be concatenated must have the same datatype, dim, voxel +% size, scaling slope and intercept, transformation matrix, etc. This is +% normally true if files are for the same dicom series. +% +% Following example shows how to convert a series of 3D files into a 4D file: +% +% nii = nii_tool('cat3D', './data/fSubj2-0003*.nii'); % load files for series 3 +% nii_tool('save', nii, './data/fSubj2-0003_4D.nii'); % save as a 4D file +% +% +% oldStyle = nii_tool('RGBStyle', 'afni'); +% +% - Set/query the method to read/save RGB or RGBA NIfTI file. The default method +% can be set by nii_tool('default', 'rgb_dim', dimN), where dimN can be 1, 3 or +% 4, or 'afni', 'mricron' or 'fsl', as explained below. +% +% The default is 'afni' style (or 1), which is defined by NIfTI standard, but is +% not well supported by fslview till v5.0.8 or mricron till v20140804. +% +% If the second input is set to 'mricron' (or 3), nii_tool will save file using +% the old RGB fashion (dim 3 for RGB). This works for mricron v20140804 or +% earlier. +% +% If the second input is set to 'fsl' (or 4), nii_tool will save RGB or RGBA +% layer into 4th dimension, and the file is not encoded as RGB data, but as +% normal 4D NIfTI. This violates the NIfTI rule, but it seems it is the only way +% to work for fslview (at least till fsl v5.0.8). +% +% If no new style (second input) is provided, it means to query the current +% style (one of 'afni', 'mricron' and 'fsl'). +% +% The GUI method to convert between different RGB style can be found in +% nii_viewer. Following shows how to convert other style into fsl style: +% +% nii_tool('RGBStyle', 'afni'); % we are loading afni style RGB +% nii = nii_tool('load', 'afni_style.nii'); % load RGB file +% nii_tool('RGBStyle', 'fsl'); % switch to fsl style for later save +% nii_tool('save', nii, 'fslRGB.nii'); % fsl can read it as RGB +% +% Note that, if one wants to convert fsl style (non-RGB file by NIfTI standard) +% to other styles, an extra step is needed to change the RGB dim from 4th to 8th +% dim before 'save': +% +% nii = nii_tool('load', 'fslStyleFile.nii'); % it is normal NIfTI +% nii.img = permute(nii.img, [1:3 5:8 4]); % force it to be RGB data +% nii_tool('RGBStyle', 'afni'); % switch to NIfTI RGB style if needed +% nii_tool('save', nii, 'afni_RGB.nii'); % now AFNI can read it as RGB +% +% Also note that the setting by nii_tool('RGBStyle') is effective only for +% current Matlab session. 
If one clears all or starts a new Matlab session, the +% default style by nii_tool('default') will take effect. +% +% See also NII_VIEWER, NII_XFORM, DICM2NII + +% More information for NIfTI format: +% Official NIfTI website: http://nifti.nimh.nih.gov/ +% Another excellent site: http://brainder.org/2012/09/23/the-nifti-file-format/ + +% History (yymmdd) +% 150109 Write it based on Jimmy Shen's NIfTI tool (xiangrui.li@gmail.com) +% 150202 Include renamed pigz files for Windows +% 150203 Fix closeFile and deleteTmpFile order +% 150205 Add hdr.machine: needed for .img fopen +% 150208 Add 4th input for 'save', allowing to save SPM 3D files +% 150210 Add 'cat3D' to load SPM 3D files +% 150226 Assign all 8 char for 'magic' (version 2 needs it) +% 150321 swapbytes(nByte) for ecode=40 with big endian +% 150401 Add 'default' to set/query version and rgb_dim default setting +% 150514 read_ext: decode txt edata by dicm2nii.m +% 150517 func_handle: provide a way to use gunzipOS etc from outside +% 150617 auto detect rgb_dim 1&3 for 'load' etc using ChrisR method +% 151025 Change subfunc img2datatype as 'update' for outside access +% 151109 Include dd.exe from WinAVR-20100110 for partial gz unzip +% 151205 Partial gunzip: fix fname with space & unknown pigz | dd error. +% 151222 Take care of img for intent_code 2003/2004: anyone uses it? +% 160110 Use matlab pref method to replace para file. +% 160120 check_gzip: use "" for included pigz; ignore dd error if err is false. +% 160326 fix setpref for older Octave: set each parameter separately. +% 160531 fopen uses 'W' for 'w': performance benefit according to Yair. +% 160701 subFuncHelp: bug fix for mfile case. +% 161018 gunzipOS: use unique name for outName, to avoid problem with parfor. +% 161025 Make included linux pigz executible; fix "dd" for windows. +% 161031 gunzip_mem(), nii_bytes() for hdr/ext read: read uint8 then parse; +% Replace hdr.machine with hdr.swap_endian. +% 170212 Extract decode_ext() from 'ext' cmd so call it in 'update' cmd. +% 170215 gunzipOS: use -c > rather than copyfile for performance. +% 170322 gzipOS: stop using background gz to avoid file not exist error. +% 170410 read_img(): turn off auto RGB dim detection, and use rgb_dim. +% 170714 'save': force to version 2 if img dim exceeds 2^15-1. +% 170716 Add functionSignatures.json file for tab auto-completion. +% 171031 'LocalFunc' makes eaiser to call local functions. +% 171206 Allow file name ext other than .nii, .hdr, .img. +% 180104 check_gzip: add /usr/local/bin to PATH for unix if needed. +% 180119 use jsystem for better speed. +% 180710 bug fix for cal_max/cal_min in 'update'. +% 210302 take care of unicode char in hdr (Thx Yong). 
+ +persistent C para; % C columns: name, length, format, value, offset +if isempty(C) + [C, para] = niiHeader; + if exist('OCTAVE_VERSION', 'builtin') + warning('off', 'Octave:fopen-mode'); % avoid 'W' warning + more off; + end +end + +if ~ischar(cmd) + error('Provide a string command as the first input for nii_tool'); +end +if any(cmd=='?'), subFuncHelp(mfilename, cmd); return; end + +if strcmpi(cmd, 'init') + if nargin<2, error('nii_tool(''%s'') needs second input', cmd); end + nii.hdr = cell2struct(C(:,4), C(:,1)); + nii.img = varargin{1}; + if numel(size(nii.img))>8 + error('NIfTI img can have up to 7 dimension'); + end + if nargin>2 + i = varargin{2}; + if i<0 || i>8 || mod(i,1)>0, error('Invalid RGB_dim number'); end + nii.img = permute(nii.img, [1:i-1 i+1:8 i]); % RGB to dim8 + end + nii.hdr.file_name = inputname(2); + if isempty(nii.hdr.file_name), nii.hdr.file_name = 'tmpNII'; end + varargout{1} = nii_tool('update', nii); % set datatype etc + +elseif strcmpi(cmd, 'save') + if nargin<2, error('nii_tool(''%s'') needs second input', cmd); end + nii = varargin{1}; + if ~isstruct(nii) || ~isfield(nii, 'hdr') || ~isfield(nii, 'img') + error(['nii_tool(''save'') needs a struct from nii_tool(''init'')' ... + ' or nii_tool(''load'') as the second input']); + end + + % Check file name to save + if nargin>2 + fname = varargin{2}; + if ~ischar(fname), error('Invalid name for NIfTI file: %s', fname); end + elseif isfield(nii.hdr, 'file_name') + fname = nii.hdr.file_name; + else + error('Provide a valid file name as the third input'); + end + if ~ispc && strncmp(fname, '~/', 2) % matlab may err with this abbrevation + fname = [getenv('HOME') fname(2:end)]; + end + [pth, fname, fext] = fileparts(fname); + do_gzip = strcmpi(fext, '.gz'); + if do_gzip + [~, fname, fext] = fileparts(fname); % get .nii .img .hdr + end + if isempty(fext), fext = '.nii'; end % default single .nii file + fname = fullfile(pth, fname); % without file ext + if nargout, varargout{1} = []; end + isNii = strcmpi(fext, '.nii'); % will use .img/.hdr if not .nii + + % Deal with NIfTI version and sizeof_hdr + niiVer = para.version; + if isfield(nii.hdr, 'version'), niiVer = nii.hdr.version; end + if niiVer<2 && any(nii.hdr.dim(2:end) > 32767), niiVer = 2; end + + if niiVer == 1 + nii.hdr.sizeof_hdr = 348; % in case it was loaded from other version + elseif niiVer == 2 + nii.hdr.sizeof_hdr = 540; % version 2 + else + error('Unsupported NIfTI version: %g', niiVer); + end + + if niiVer ~= para.version + C0 = niiHeader(niiVer); + else + C0 = C; + end + + % Update datatype/bitpix/dim in case nii.img is changed + [nii, fmt] = nii_tool('update', nii); + + % This 'if' block: lazy implementation to split into 3D SPM files + if nargin>3 && ~isempty(varargin{3}) && varargin{3} && nii.hdr.dim(5)>1 + if do_gzip, fext = [fext '.gz']; end + nii0 = nii; + for i = 1:nii.hdr.dim(5) + fname0 = sprintf('%s_%05g%s', fname, i, fext); + nii0.img = nii.img(:,:,:,i,:,:,:,:); % one vol + if i==1 && isfield(nii, 'ext'), nii0.ext = nii.ext; + elseif i==2 && isfield(nii0, 'ext'), nii0 = rmfield(nii0, 'ext'); + end + nii_tool('save', nii0, fname0); + end + return; + end + + % re-arrange img for special datatype: RGB/RGBA/Complex. 
+ if any(nii.hdr.datatype == [128 511 2304]) % RGB or RGBA + if para.rgb_dim == 1 % AFNI style + nii.img = permute(nii.img, [8 1:7]); + elseif para.rgb_dim == 3 % old mricron style + nii.img = permute(nii.img, [1 2 8 3:7]); + elseif para.rgb_dim == 4 % for fslview + nii.img = permute(nii.img, [1:3 8 4:7]); % violate nii rule + dim = size(nii.img); + if numel(dim)>6 % dim7 is not 1 + i = find(dim(5:7)==1, 1, 'last') + 4; + nii.img = permute(nii.img, [1:i-1 i+1:8 i]); + end + nii = nii_tool('update', nii); % changed to non-RGB datatype + end + elseif any(nii.hdr.datatype == [32 1792]) % complex single/double + nii.img = [real(nii.img(:))'; imag(nii.img(:))']; + end + + % Check nii extension: update esize to x16 + nExt = 0; esize = 0; + nii.hdr.extension = [0 0 0 0]; % no nii ext + if isfield(nii, 'ext') && isstruct(nii.ext) ... + && isfield(nii.ext(1), 'edata') && ~isempty(nii.ext(1).edata) + nExt = numel(nii.ext); + nii.hdr.extension = [1 0 0 0]; % there is nii ext + for i = 1:nExt + if ~isfield(nii.ext(i), 'ecode') || ~isfield(nii.ext(i), 'edata') + error('NIfTI header ext struct must have ecode and edata'); + end + + n0 = numel(nii.ext(i).edata) + 8; % 8 byte for esize and ecode + n1 = ceil(n0/16) * 16; % esize: multiple of 16 + nii.ext(i).esize = n1; + nii.ext(i).edata(end+(1:n1-n0)) = 0; % pad zeros + esize = esize + n1; + end + end + + % Set magic, vox_offset, and open file for .nii or .hdr + if isNii + % version 1 will take only the first 4 + nii.hdr.magic = sprintf('n+%g%s', niiVer, char([0 13 10 26 10])); + nii.hdr.vox_offset = nii.hdr.sizeof_hdr + 4 + esize; + fid = fopen(strcat(fname, fext), 'W'); + else + nii.hdr.magic = sprintf('ni%g%s', niiVer, char([0 13 10 26 10])); + nii.hdr.vox_offset = 0; + fid = fopen(strcat(fname, '.hdr'), 'W'); + end + + % Write nii hdr + for i = 1:size(C0,1) + if isfield(nii.hdr, C0{i,1}) + val = nii.hdr.(C0{i,1}); + else % niiVer=2 omit some fields, also take care of other cases + val = C0{i,4}; + end + fmt0 = C0{i,3}; + if strcmp(C0{i,3}, 'char') + if ~ischar(val), val = char(val); end % avoid val=[] error etc + val = unicode2native(val); % may have more bytes than numel(val) + fmt0 = 'uint8'; + end + n = numel(val); + len = C0{i,2}; + if n>len + val(len+1:n) = []; % remove extra, normally for char + elseif n0, nByte = hdr.vox_offset + 64; % .nii arbituary +64 + else, nByte = inf; + end + b = nii_bytes(fname, nByte); + varargout{1} = read_ext(b, hdr); + end + if nargout>1, varargout{2} = hdr; end + +elseif strcmpi(cmd, 'RGBStyle') + styles = {'afni' '' 'mricron' 'fsl'}; + curStyle = styles{para.rgb_dim}; + if nargin<2, varargout{1} = curStyle; return; end % query only + irgb = varargin{1}; + if isempty(irgb), irgb = 1; end % default as 'afni' + if ischar(irgb) + if strncmpi(irgb, 'fsl', 3), irgb = 4; + elseif strncmpi(irgb, 'mricron', 4), irgb = 3; + else, irgb = 1; + end + end + if ~any(irgb == [1 3 4]) + error('nii_tool(''RGBStyle'') can have 1, 3, or 4 as second input'); + end + if nargout, varargout{1} = curStyle; end % return old one + para.rgb_dim = irgb; % no save to pref + +elseif strcmpi(cmd, 'cat3D') + if nargin<2, error('nii_tool(''%s'') needs second input', cmd); end + fnames = varargin{1}; + if ischar(fnames) % guess it is like run1*.nii + f = dir(fnames); + f = sort({f.name}); + fnames = strcat([fileparts(fnames) '/'], f); + end + + n = numel(fnames); + if n<2 || (~iscellstr(fnames) && (exist('strings', 'builtin') && ~isstring(fnames))) + error('Invalid input for nii_tool(''cat3D''): %s', varargin{1}); + end + + nii = nii_tool('load', 
fnames{1}); % all for first file + nii.img(:,:,:,2:n) = 0; % pre-allocate + % For now, omit all consistence check between files + for i = 2:n, nii.img(:,:,:,i) = nii_tool('img', fnames{i}); end + varargout{1} = nii_tool('update', nii); % update dim + +elseif strcmpi(cmd, 'default') + flds = {'version' 'rgb_dim'}; % may add more in the future + pf = getpref('nii_tool_para'); + for i = 1:numel(flds), val.(flds{i}) = pf.(flds{i}); end + if nargin<2, varargout{1} = val; return; end % query only + if nargout, varargout{1} = val; end % return old val + in2 = varargin; + if ~isstruct(in2), in2 = struct(in2{:}); end + nam = fieldnames(in2); + for i = 1:numel(nam) + ind = strcmpi(nam{i}, flds); + if isempty(ind), continue; end + para.(flds{ind}) = in2.(nam{i}); + setpref('nii_tool_para', flds{ind}, in2.(nam{i})); + end + if val.version ~= para.version, C = niiHeader(para.version); end + +elseif strcmpi(cmd, 'update') % old img2datatype subfunction + if nargin<2, error('nii_tool(''%s'') needs second input', cmd); end + nii = varargin{1}; + if ~isstruct(nii) || ~isfield(nii, 'hdr') || ~isfield(nii, 'img') + error(['nii_tool(''update'') needs a struct from nii_tool(''init'')' ... + ' or nii_tool(''load'') as the second input']); + end + + dim = size(nii.img); + ndim = numel(dim); + dim(ndim+1:7) = 1; + + if nargin>2 % set new sform mat + R = varargin{2}; + if size(R,2)~=4, error('Invalid matrix dimension.'); end + nii.hdr.srow_x = R(1,:); + nii.hdr.srow_y = R(2,:); + nii.hdr.srow_z = R(3,:); + end + + if ndim == 8 % RGB/RGBA data. Change img type to uint8/single if needed + valpix = dim(8); + if valpix == 4 % RGBA + typ = 'RGBA'; % error info only + nii.img = uint8(nii.img); % NIfTI only support uint8 for RGBA + elseif valpix == 3 % RGB, must be single or uint8 + typ = 'RGB'; + if max(nii.img(:))>1, nii.img = uint8(nii.img); + else, nii.img = single(nii.img); + end + else + error('Color dimension must have length of 3 for RGB or 4 for RGBA'); + end + + dim(8) = []; % remove color-dim so numel(dim)=7 for nii.hdr + ndim = find(dim>1, 1, 'last'); % update it + elseif isreal(nii.img) + typ = 'real'; + valpix = 1; + else + typ = 'complex'; + valpix = 2; + end + + if islogical(nii.img), imgFmt = 'ubit1'; + else, imgFmt = class(nii.img); + end + ind = find(strcmp(para.format, imgFmt) & para.valpix==valpix); + + if isempty(ind) % only RGB and complex can have this problem + error('nii_tool does not support %s image of ''%s'' type', typ, imgFmt); + elseif numel(ind)>1 % unlikely + error('Non-unique datatype found for %s image of ''%s'' type', typ, imgFmt); + end + + fmt = para.format{ind}; + nii.hdr.datatype = para.datatype(ind); + nii.hdr.bitpix = para.bitpix(ind); + nii.hdr.dim = [ndim dim]; + + mx = double(max(nii.img(:))); + mn = double(min(nii.img(:))); + if nii.hdr.cal_min>mx || nii.hdr.cal_max1, varargout{2} = fmt; end +elseif strcmp(cmd, 'func_handle') % make a local function avail to outside + varargout{1} = str2func(varargin{1}); +elseif strcmp(cmd, 'LocalFunc') % call local function from outside + [varargout{1:nargout}] = feval(varargin{:}); +else + error('Invalid command for nii_tool: %s', cmd); +end +% End of main function + +%% Subfunction: all nii header in the order in NIfTI-1/2 file +function [C, para] = niiHeader(niiVer) +pf = getpref('nii_tool_para'); +if isempty(pf) + setpref('nii_tool_para', 'version', 1); + setpref('nii_tool_para', 'rgb_dim', 1); + pf = getpref('nii_tool_para'); +end +if nargin<1 || isempty(niiVer), niiVer = pf.version; end + +if niiVer == 1 + C = { + % name len format 
value offset + 'sizeof_hdr' 1 'int32' 348 0 + 'data_type' 10 'char' '' 4 + 'db_name' 18 'char' '' 14 + 'extents' 1 'int32' 16384 32 + 'session_error' 1 'int16' 0 36 + 'regular' 1 'char' 'r' 38 + 'dim_info' 1 'uint8' 0 39 + 'dim' 8 'int16' ones(1,8) 40 + 'intent_p1' 1 'single' 0 56 + 'intent_p2' 1 'single' 0 60 + 'intent_p3' 1 'single' 0 64 + 'intent_code' 1 'int16' 0 68 + 'datatype' 1 'int16' 0 70 + 'bitpix' 1 'int16' 0 72 + 'slice_start' 1 'int16' 0 74 + 'pixdim' 8 'single' zeros(1,8) 76 + 'vox_offset' 1 'single' 352 108 + 'scl_slope' 1 'single' 1 112 + 'scl_inter' 1 'single' 0 116 + 'slice_end' 1 'int16' 0 120 + 'slice_code' 1 'uint8' 0 122 + 'xyzt_units' 1 'uint8' 0 123 + 'cal_max' 1 'single' 0 124 + 'cal_min' 1 'single' 0 128 + 'slice_duration' 1 'single' 0 132 + 'toffset' 1 'single' 0 136 + 'glmax' 1 'int32' 0 140 + 'glmin' 1 'int32' 0 144 + 'descrip' 80 'char' '' 148 + 'aux_file' 24 'char' '' 228 + 'qform_code' 1 'int16' 0 252 + 'sform_code' 1 'int16' 0 254 + 'quatern_b' 1 'single' 0 256 + 'quatern_c' 1 'single' 0 260 + 'quatern_d' 1 'single' 0 264 + 'qoffset_x' 1 'single' 0 268 + 'qoffset_y' 1 'single' 0 272 + 'qoffset_z' 1 'single' 0 276 + 'srow_x' 4 'single' [1 0 0 0] 280 + 'srow_y' 4 'single' [0 1 0 0] 296 + 'srow_z' 4 'single' [0 0 1 0] 312 + 'intent_name' 16 'char' '' 328 + 'magic' 4 'char' 'n+1' 344 + 'extension' 4 'uint8' [0 0 0 0] 348 + }; + +elseif niiVer == 2 + C = { + 'sizeof_hdr' 1 'int32' 540 0 + 'magic' 8 'char' 'n+2' 4 + 'datatype' 1 'int16' 0 12 + 'bitpix' 1 'int16' 0 14 + 'dim' 8 'int64' ones(1,8) 16 + 'intent_p1' 1 'double' 0 80 + 'intent_p2' 1 'double' 0 88 + 'intent_p3' 1 'double' 0 96 + 'pixdim' 8 'double' zeros(1,8) 104 + 'vox_offset' 1 'int64' 544 168 + 'scl_slope' 1 'double' 1 176 + 'scl_inter' 1 'double' 0 184 + 'cal_max' 1 'double' 0 192 + 'cal_min' 1 'double' 0 200 + 'slice_duration' 1 'double' 0 208 + 'toffset' 1 'double' 0 216 + 'slice_start' 1 'int64' 0 224 + 'slice_end' 1 'int64' 0 232 + 'descrip' 80 'char' '' 240 + 'aux_file' 24 'char' '' 320 + 'qform_code' 1 'int32' 0 344 + 'sform_code' 1 'int32' 0 348 + 'quatern_b' 1 'double' 0 352 + 'quatern_c' 1 'double' 0 360 + 'quatern_d' 1 'double' 0 368 + 'qoffset_x' 1 'double' 0 376 + 'qoffset_y' 1 'double' 0 384 + 'qoffset_z' 1 'double' 0 392 + 'srow_x' 4 'double' [1 0 0 0] 400 + 'srow_y' 4 'double' [0 1 0 0] 432 + 'srow_z' 4 'double' [0 0 1 0] 464 + 'slice_code' 1 'int32' 0 496 + 'xyzt_units' 1 'int32' 0 500 + 'intent_code' 1 'int32' 0 504 + 'intent_name' 16 'char' '' 508 + 'dim_info' 1 'uint8' 0 524 + 'unused_str' 15 'char' '' 525 + 'extension' 4 'uint8' [0 0 0 0] 540 + }; +else + error('Nifti version %g is not supported', niiVer); +end +if nargout<2, return; end + +% class datatype bitpix valpix +D = { + 'ubit1' 1 1 1 % neither mricron nor fsl support this + 'uint8' 2 8 1 + 'int16' 4 16 1 + 'int32' 8 32 1 + 'single' 16 32 1 + 'single' 32 64 2 % complex + 'double' 64 64 1 + 'uint8' 128 24 3 % RGB + 'int8' 256 8 1 + 'single' 511 96 3 % RGB, not in NIfTI standard? + 'uint16' 512 16 1 + 'uint32' 768 32 1 + 'int64' 1024 64 1 + 'uint64' 1280 64 1 +% 'float128' 1536 128 1 % long double, for 22nd century? 
+ 'double' 1792 128 2 % complex +% 'float128' 2048 256 2 % long double complex + 'uint8' 2304 32 4 % RGBA + }; + +para.format = D(:,1)'; +para.datatype = [D{:,2}]; +para.bitpix = [D{:,3}]; +para.valpix = [D{:,4}]; +para.rgb_dim = pf.rgb_dim; % dim of RGB/RGBA in NIfTI FILE +para.version = niiVer; + +%% Subfunction: use pigz or system gzip if available (faster) +function gzipOS(fname) +persistent cmd; % command to gzip +if isempty(cmd) + cmd = check_gzip('gzip'); + if ischar(cmd) + cmd = @(nam){cmd '-nf' nam}; + elseif islogical(cmd) && ~cmd + fprintf(2, ['None of system pigz, gzip or Matlab gzip available. ' ... + 'Files are not compressed into gz.\n']); + end +end + +if islogical(cmd) + if cmd, gzip(fname); deleteFile(fname); end + return; +end + +[err, str] = jsystem(cmd(fname)); +if err && ~exist(strcat(fname, '.gz'), 'file') + try + gzip(fname); deleteFile(fname); + catch + fprintf(2, 'Error during compression: %s\n', str); + end +end + +%% Deal with pigz/gzip on path or in nii_tool folder, and matlab gzip/gunzip +function cmd = check_gzip(gz_unzip) +m_dir = fileparts(mfilename('fullpath')); +% away from pwd, so use OS pigz if both exist. Avoid error if pwd changed later +if strcmpi(pwd, m_dir), cd ..; clnObj = onCleanup(@() cd(m_dir)); end +if isunix + pth1 = getenv('PATH'); + if isempty(strfind(pth1, '/usr/local/bin')) + pth1 = [pth1 ':/usr/local/bin']; + setenv('PATH', pth1); + end +end + +% first, try system pigz +[err, ~] = jsystem({'pigz' '-V'}); +if ~err, cmd = 'pigz'; return; end + +% next, try pigz included with nii_tool +cmd = [m_dir '/pigz']; +if ismac % pigz for mac is not included in the package + if strcmp(gz_unzip, 'gzip') + fprintf(2, [' Please install pigz for fast compression: ' ... + 'http://macappstore.org/pigz/\n']); + end +elseif isunix % linux + [st, val] = fileattrib(cmd); + if st && ~val.UserExecute, fileattrib(cmd, '+x'); end +end + +[err, ~] = jsystem({cmd '-V'}); +if ~err, return; end + +% Third, try system gzip/gunzip +[err, ~] = jsystem({gz_unzip '-V'}); % gzip/gunzip on system path? 
+if ~err, cmd = gz_unzip; return; end + +% Lastly, use Matlab gzip/gunzip if java avail +cmd = usejava('jvm'); + +%% check dd command, return empty if not available +function dd = check_dd +m_dir = fileparts(mfilename('fullpath')); +if strcmpi(pwd, m_dir), cd ..; clnObj = onCleanup(@() cd(m_dir)); end +[err, ~] = jsystem({'dd' '--version'}); +if ~err, dd = 'dd'; return; end % dd with linix/mac, and maybe windows + +if ispc % rename it as exe + dd = [m_dir '\dd']; + [err, ~] = jsystem({dd '--version'}); + if ~err, return; end +end +dd = ''; + +%% Try to use in order of pigz, system gunzip, then matlab gunzip +function outName = gunzipOS(fname, nByte) +persistent cmd dd pth uid; % command to run gupzip, dd tool, and temp_path +if isempty(cmd) + cmd = check_gzip('gunzip'); % gzip -dc has problem in PC + if ischar(cmd) + cmd = @(nam)sprintf('"%s" -nfdc "%s" ', cmd, nam); % f for overwrite + elseif islogical(cmd) && ~cmd + cmd = []; + error('None of system pigz, gunzip or Matlab gunzip is available'); + end + dd = check_dd; + if ~isempty(dd) + dd = @(n,out)sprintf('| "%s" count=%g of="%s"', dd, ceil(n/512), out); + end + + if ispc % matlab tempdir could be slow due to cd in and out + pth = getenv('TEMP'); + if isempty(pth), pth = pwd; end + else + pth = getenv('TMP'); + if isempty(pth), pth = getenv('TMPDIR'); end + if isempty(pth), pth = '/tmp'; end % last resort + end + uid = @()sprintf('_%s_%03x', datestr(now, 'yymmddHHMMSSfff'), randi(999)); +end + +fname = char(fname); +if islogical(cmd) + outName = gunzip(fname, pth); + outName = outName{1}; + return; +end + +[~, outName, ext] = fileparts(fname); +if strcmpi(ext, '.gz') % likely always true + [~, outName, ext1] = fileparts(outName); + outName = [outName uid() ext1]; +else + outName = [outName uid()]; +end +outName = fullfile(pth, outName); +if ~isempty(dd) && nargin>1 && ~isinf(nByte) % unzip only part of data + try + [err, ~] = system([cmd(fname) dd(nByte, outName)]); + if err==0, return; end + end +end + +[err, str] = system([cmd(fname) '> "' outName '"']); +% [err, str] = jsystem({'pigz' '-nfdc' fname '>' outName}); +if err + try + outName = gunzip(fname, pth); + catch + error('Error during gunzip:\n%s', str); + end +end + +%% cast bytes into a type, swapbytes if needed +function out = cast_swap(b, typ, swap) +out = typecast(b, typ); +if swap, out = swapbytes(out); end +out = double(out); % for convenience + +%% subfunction: read hdr +function hdr = read_hdr(b, C, fname) +n = typecast(b(1:4), 'int32'); +if n==348, niiVer = 1; swap = false; +elseif n==540, niiVer = 2; swap = false; +else + n = swapbytes(n); + if n==348, niiVer = 1; swap = true; + elseif n==540, niiVer = 2; swap = true; + else, error('Not valid NIfTI file: %s', fname); + end +end + +if niiVer>1, C = niiHeader(niiVer); end % C defaults for version 1 +for i = 1:size(C,1) + try a = b(C{i,5}+1 : C{i+1,5}); + catch + if C{i,5}==numel(b), a = []; + else, a = b(C{i,5} + (1:C{i,2})); % last item extension is in bytes + end + end + if strcmp(C{i,3}, 'char') + a = deblank(native2unicode(a)); + else + a = cast_swap(a, C{i,3}, swap); + a = double(a); + end + hdr.(C{i,1}) = a; +end + +hdr.version = niiVer; % for 'save', unless user asks to change +hdr.swap_endian = swap; +hdr.file_name = fname; + +%% subfunction: read ext, and decode it if known ecode +function ext = read_ext(b, hdr) +ext = []; % avoid error if no ext but hdr.extension(1) was set +nEnd = hdr.vox_offset; +if nEnd == 0, nEnd = numel(b); end % .hdr file + +swap = hdr.swap_endian; +j = hdr.sizeof_hdr + 4; % 4 for 
hdr.extension +while j < nEnd + esize = cast_swap(b(j+(1:4)), 'int32', swap); j = j+4; % x16 + if isempty(esize) || mod(esize,16), return; end % just to be safe + i = numel(ext) + 1; + ext(i).esize = esize; %#ok<*AGROW> + ext(i).ecode = cast_swap(b(j+(1:4)), 'int32', swap); j = j+4; + ext(i).edata = b(j+(1:esize-8))'; % -8 for esize & ecode + j = j + esize - 8; +end +ext = decode_ext(ext, swap); + +%% subfunction +function ext = decode_ext(ext, swap) +% Decode edata if we know ecode +for i = 1:numel(ext) + if isfield(ext(i), 'edata_decoded'), continue; end % done + if ext(i).ecode == 40 % Matlab: any kind of matlab variable + nByte = cast_swap(ext(i).edata(1:4), 'int32', swap); % MAT data + tmp = [tempname '.mat']; % temp MAT file to save edata + fid1 = fopen(tmp, 'W'); + fwrite(fid1, ext(i).edata(5:nByte+4)); % exclude padded zeros + fclose(fid1); + deleteMat = onCleanup(@() deleteFile(tmp)); % delete temp file after done + ext(i).edata_decoded = load(tmp); % load into struct + elseif any(ext(i).ecode == [4 6 32 44]) % 4 AFNI, 6 plain text, 32 CIfTI, 44 MRS (json) + str = char(ext(i).edata(:)'); + if isempty(strfind(str, 'dicm2nii.m')) + ext(i).edata_decoded = deblank(str); + else % created by dicm2nii.m + ss = struct; + ind = strfind(str, [';' char([0 10])]); % strsplit error in Octave + ind = [-2 ind]; % -2+3=1: start of first para + for k = 1:numel(ind)-1 + a = str(ind(k)+3 : ind(k+1)); + a(a==0) = []; % to be safe. strtrim wont remove null + a = strtrim(a); + if isempty(a), continue; end + try + eval(['ss.' a]); % put all into struct + catch + try + a = regexp(a, '(.*?)\s*=\s*(.*?);', 'tokens', 'once'); + ss.(a{1}) = a{2}; + catch me + fprintf(2, '%s\n', me.message); + fprintf(2, 'Unrecognized text: %s\n', a); + end + end + end + flds = fieldnames(ss); % make all vector column + for k = 1:numel(flds) + val = ss.(flds{k}); + if isnumeric(val) && isrow(val), ss.(flds{k}) = val'; end + end + ext(i).edata_decoded = ss; + end + elseif ext(i).ecode == 2 % dicom + tmp = [tempname '.dcm']; + fid1 = fopen(tmp, 'W'); + fwrite(fid1, ext(i).edata); + fclose(fid1); + deleteDcm = onCleanup(@() deleteFile(tmp)); + ext(i).edata_decoded = dicm_hdr(tmp); + end +end + +%% subfunction: read img +% memory gunzip may be slow and error for large img, so use file unzip +function img = read_img(hdr, para) +ind = para.datatype == hdr.datatype; +if ~any(ind) + error('Datatype %g is not supported by nii_tool.', hdr.datatype); +end + +dim = hdr.dim(2:8); +dim(hdr.dim(1)+1:7) = 1; % avoid some error in file +dim(dim<1) = 1; +valpix = para.valpix(ind); + +fname = nii_name(hdr.file_name, '.img'); % in case of .hdr/.img pair +fid = fopen(fname); +sig = fread(fid, 2, '*uint8')'; +if isequal(sig, [31 139]) % .gz + fclose(fid); + fname = gunzipOS(fname); + cln = onCleanup(@() deleteFile(fname)); % delete gunzipped file + fid = fopen(fname); +end + +% if ~exist('cln', 'var') && valpix==1 && ~hdr.swap_endian +% m = memmapfile(fname, 'Offset', hdr.vox_offset, ... 
+% 'Format', {para.format{ind}, dim, 'img'}); +% nii = m.Data; +% return; +% end + +if hdr.swap_endian % switch between LE and BE + [~, ~, ed] = fopen(fid); % default endian: almost always ieee-le + fclose(fid); + if isempty(strfind(ed, '-le')), ed = strrep(ed, '-be', '-le'); %#ok<*STREMP> + else, ed = strrep(ed, '-le', '-be'); + end + fid = fopen(fname, 'r', ed); % re-open with changed endian +end + +fseek(fid, hdr.vox_offset, 'bof'); +img = fread(fid, prod(dim)*valpix, ['*' para.format{ind}]); % * to keep original class +fclose(fid); + +if any(hdr.datatype == [128 511 2304]) % RGB or RGBA +% a = reshape(single(img), valpix, n); % assume rgbrgbrgb... +% d1 = abs(a - a(:,[2:end 1])); % how similar are voxels to their neighbor +% a = reshape(a, prod(dim(1:2)), valpix*prod(dim(3:7))); % rr...rgg...gbb...b +% d2 = abs(a - a([2:end 1],:)); +% j = (sum(d1(:))>sum(d2(:)))*2 + 1; % 1 for afni, 3 for mricron + j = para.rgb_dim; % auto detection may get wrong for noisy background + dim = [dim(1:j-1) valpix dim(j:7)]; % length=8 now + img = reshape(img, dim); + img = permute(img, [1:j-1 j+1:8 j]); % put RGB(A) to dim8 +elseif any(hdr.datatype == [32 1792]) % complex single/double + img = reshape(img, [2 dim]); + img = complex(permute(img(1,:,:,:,:,:,:,:), [2:8 1]), ... % real + permute(img(2,:,:,:,:,:,:,:), [2:8 1])); % imag +else % all others: valpix=1 + if hdr.datatype==1, img = logical(img); end + img = reshape(img, dim); +end + +% RGB triplet in 5th dim OR RGBA quadruplet in 5th dim +c = hdr.intent_code; +if (c == 2003 && dim(5) == 3) || (c == 2004 && dim(5) == 4) + img = permute(img, [1:4 6:8 5]); +end + +%% Return requested fname with ext, useful for .hdr and .img files +function fname = nii_name(fname, ext) +if strcmpi(ext, '.img') + i = regexpi(fname, '.hdr(.gz)?$'); + if ~isempty(i), fname(i(end)+(0:3)) = ext; end +elseif strcmpi(ext, '.hdr') + i = regexpi(fname, '.img(.gz)?$'); + if ~isempty(i), fname(i(end)+(0:3)) = ext; end +end + +%% Read NIfTI file as bytes, gunzip if needed, but ignore endian +function [b, fname] = nii_bytes(fname, nByte) +if nargin<2, nByte = inf; end +[fid, err] = fopen(fname); % system default endian +if fid<1, error([err ': ' fname]); end +b = fread(fid, nByte, '*uint8')'; +fname = fopen(fid); +fclose(fid); +if isequal(b(1:2), [31 139]) % gz, tgz file + b = gunzip_mem(b, fname, nByte)'; +end + +%% subfunction: get help for a command +function subFuncHelp(mfile, cmd) +str = fileread(which(mfile)); +i = regexp(str, '\n\s*%', 'once'); % start of 1st % line +str = regexp(str(i:end), '.*?(?=\n\s*[^%])', 'match', 'once'); % help text +str = regexprep(str, '\r?\n\s*%', '\n'); % remove '\r' and leading % + +dashes = regexp(str, '\n\s*-{1,4}\s+') + 1; % lines starting with 1 to 4 - +if isempty(dashes), disp(str); return; end % Show all help text + +prgrfs = regexp(str, '(\n\s*){2,}'); % blank lines +nTopic = numel(dashes); +topics = ones(1, nTopic+1); +for i = 1:nTopic + ind = regexpi(str(1:dashes(i)), [mfile '\s*\(']); % syntax before ' - ' + if isempty(ind), continue; end % no syntax before ' - ', assume start with 1 + ind = find(prgrfs < ind(end), 1, 'last'); % previous paragraph + if isempty(ind), continue; end + topics(i) = prgrfs(ind) + 1; % start of this topic +end +topics(end) = numel(str); % end of last topic + +cmd = strrep(cmd, '?', ''); % remove ? 
in case it is in subcmd +if isempty(cmd) % help for main function + disp(str(1:topics(1))); % subfunction list before first topic + return; +end + +expr = [mfile '\s*\(\s*''' cmd '''']; +for i = 1:nTopic + if isempty(regexpi(str(topics(i):dashes(i)), expr, 'once')), continue; end + disp(str(topics(i):topics(i+1))); + return; +end + +fprintf(2, ' Unknown command for %s: %s\n', mfile, cmd); % no cmd found + +%% gunzip bytes in memory if possible. 2nd/3rd input for fallback file gunzip +% Trick: try-block avoid error for partial file unzip. +function bytes = gunzip_mem(gz_bytes, fname, nByte) +bytes = []; +try + bais = java.io.ByteArrayInputStream(gz_bytes); + try, gzis = java.util.zip.GZIPInputStream(bais); %#ok<*NOCOM> + catch, try, gzis = java.util.zip.InflaterInputStream(bais); catch me; end + end + buff = java.io.ByteArrayOutputStream; + try org.apache.commons.io.IOUtils.copy(gzis, buff); catch me; end + gzis.close; + bytes = typecast(buff.toByteArray, 'uint8'); + if isempty(bytes), error(me.message); end +catch + if nargin<3 || isempty(nByte), nByte = inf; end + if nargin<2 || isempty(fname) + fname = [tempname '.gz']; % temp gz file + fid = fopen(fname, 'W'); + if fid<0, return; end + cln = onCleanup(@() deleteFile(fname)); % delete temp gz file + fwrite(fid, gz_bytes, 'uint8'); + fclose(fid); + end + + try %#ok<*TRYNC> + fname = gunzipOS(fname, nByte); + fid = fopen(fname); + bytes = fread(fid, nByte, '*uint8'); + fclose(fid); + deleteFile(fname); % unzipped file + end +end + +%% Delete file in background +function deleteFile(fname) +if ispc, system(['start "" /B del "' fname '"']); +else, system(['rm "' fname '" &']); +end + +%% faster than system: based on https://github.com/avivrosenberg/matlab-jsystem +function [err, out] = jsystem(cmd) +% cmd is cell str, no quotation marks needed for file names with space. +cmd = cellstr(cmd); +try + pb = java.lang.ProcessBuilder(cmd); + pb.redirectErrorStream(true); % ErrorStream to InputStream + process = pb.start(); + scanner = java.util.Scanner(process.getInputStream).useDelimiter('\A'); + if scanner.hasNext(), out = char(scanner.next()); else, out = ''; end + err = process.exitValue; % err = process.waitFor() may hang + if err, error('java.lang.ProcessBuilder error'); end +catch % fallback to system() if java fails like for Octave + cmd = regexprep(cmd, '.+? 
.+', '"$0"'); % double quotes if with middle space + [err, out] = system(sprintf('%s ', cmd{:}, '2>&1')); +end + +%% Return true if input is char or single string (R2016b+) +function tf = ischar(A) +tf = builtin('ischar', A); +if tf, return; end +if exist('strings', 'builtin'), tf = isstring(A) && numel(A)==1; end +%% \ No newline at end of file diff --git a/ecat_testing/ints/python_sample_bytes.bytes b/ecat_testing/ints/python_sample_bytes.bytes new file mode 100644 index 00000000..68756cd4 Binary files /dev/null and b/ecat_testing/ints/python_sample_bytes.bytes differ diff --git a/ecat_testing/ints/template.variables.env b/ecat_testing/ints/template.variables.env new file mode 100644 index 00000000..a5a3f304 --- /dev/null +++ b/ecat_testing/ints/template.variables.env @@ -0,0 +1,4 @@ +# place paths to real files here and rename this file to variables.env +WELL_BEHAVED_ECAT_FILE= +WUSTL_FBP_ECAT= +WUSTL_OSEM_ECAT= \ No newline at end of file diff --git a/ecat_testing/read_matlab_nii.py b/ecat_testing/read_matlab_nii.py new file mode 100644 index 00000000..0de93b83 --- /dev/null +++ b/ecat_testing/read_matlab_nii.py @@ -0,0 +1,42 @@ +import nibabel +import pathlib +import numpy + +steps_dir = pathlib.Path(__file__).parent / 'steps' + +def first_middle_last_frames_to_text(four_d_array_like_object, output_folder, step_name='_step_name_'): + frames = [0, four_d_array_like_object.shape[-1] // 2, four_d_array_like_object.shape[-1] - 1] + frames_to_record = [] + for f in frames: + frames_to_record.append(four_d_array_like_object[:, :, :, f]) + + # now collect a single 2d slice from the "middle" of the 3d frames in frames_to_record + for index, frame in enumerate(frames_to_record): + numpy.savetxt(output_folder / f"{step_name}_frame_{frames[index]}.tsv", + frames_to_record[index][:, :, frames_to_record[index].shape[2] // 2], + delimiter="\t", fmt='%s') + +path_to_matlab_nii = pathlib.Path('matlab_nii.nii') +path_to_python_nii = pathlib.Path('python_nii.nii.gz') + + +python_nii = nibabel.load(path_to_python_nii) +matlab_nii = nibabel.load(path_to_matlab_nii) + +python_data = python_nii.get_fdata() +matlab_data = matlab_nii.get_fdata() + +# compare the two arrays in each +print(numpy.allclose(python_data, matlab_data, rtol=0.01)) + + +# subtract the two arrays +diff = python_data - matlab_data +print(f"difference max and min: {diff.max()}, {diff.min()}") +print(f"mean difference: {np.sqrt(np.mean(diff ** 2))}") + +# save diff as nii +diff_nii = python_nii.__class__(diff, python_nii.affine, python_nii.header) +nibabel.save(diff_nii, steps_dir / '12_diff_between_written_niis.nii.gz') + +first_middle_last_frames_to_text(diff, steps_dir, step_name='13_diff_between_written_niis') \ No newline at end of file diff --git a/ecat_testing/test_nii_vs_ecat.m b/ecat_testing/test_nii_vs_ecat.m new file mode 100644 index 00000000..dab8da71 --- /dev/null +++ b/ecat_testing/test_nii_vs_ecat.m @@ -0,0 +1,22 @@ +[mh,sh,data] = readECAT7; % read in p9816fvat1_osem.ecat +nii = MRIread('p9816fvat1_osem.nii'); + +figure; imagesc(rot90(fliplr(squeeze(data{10}(:,:,40))))) +figure; imagesc(squeeze(nii.vol(:,:,40,10))) + +ecat = rot90(fliplr(squeeze(data{10}(:,:,40)))); +turku = squeeze(nii.vol(:,:,40,10)); + +val_ecat = ecat(10,6); +val_turku = turku(10,6); + +scale_factor = sh{10}.scale_factor; +calibration_factor = mh.ecat_calibration_factor; + +cf = scale_factor*calibration_factor; + +print(val_ecatcf,val_turku) + +vinci = [12055.6,50513.8,22912.1]; +figure; plot(turku(24:26,65),vinci,'.') +mdl = 
fitlm(vinci,turku(24:26,65)); \ No newline at end of file diff --git a/matlab/ecat2nii.m b/matlab/ecat2nii.m index e58025d9..a780f21f 100644 --- a/matlab/ecat2nii.m +++ b/matlab/ecat2nii.m @@ -48,6 +48,16 @@ gz = true; % compress nifti savemat = false; % save ecat data as .mat +%% debebugging +% ------------ +ecat_save_steps = getenv('ECAT_SAVE_STEPS'); +ecat_save_steps_dir = mfilename('fullpath'); +if contains(ecat_save_steps_dir, 'LiveEditorEvaluationHelper') + ecat_save_steps_dir = matlab.desktop.editor.getActiveFilename; +end +parts = strsplit(ecat_save_steps_dir, filesep); +ecat_save_steps_dir = strjoin([parts(1:end-2), 'ecat_testing', 'steps'], filesep); + %% check inputs % ------------ @@ -143,7 +153,10 @@ end pet_file = [pet_file ext]; - [mh,sh] = readECAT7([pet_path filesep pet_file]); % loading the whole file here and iterating to flipdim below only minuimally improves time (0.6sec on NRU server) + [mh,sh,data] = readECAT7([pet_path filesep pet_file]); % loading the whole file here and iterating to flipdim below only minuimally improves time (0.6sec on NRU server) + if (ecat_save_steps == '1') + first_middle_last_frames_to_text_cell(data,ecat_save_steps_dir, '6_ecat2nii_matlab'); + end if sh{1}.data_type ~= 6 error('Conversion for 16 bit data only (type 6 in ecat file) - error loading ecat file'); end @@ -166,7 +179,13 @@ Randoms(i) = 0; end end - + + % save debugging steps 6 and 7 + if (ecat_save_steps == '1') + first_middle_last_frames_to_text_cell(data,ecat_save_steps_dir, '6_ecat2nii_matlab'); + first_middle_last_frames_to_text(img_temp,ecat_save_steps_dir, '7_flip_ecat2nii_matlab'); + end + % rescale to 16 bits rg = max(img_temp(:))-min(img_temp(:)); if rg ~= 32767 % same as range(img_temp(:)) but no need of stats toolbox @@ -178,8 +197,22 @@ img_temp = img_temp/MinImg*(-32768); Sca = Sca*MinImg/(-32768); end + + % save scaling factor to file located at ecat_save_steps_dir/8.5_sca_matlab.txt + fid = fopen([ecat_save_steps_dir filesep '8.5_sca_matlab.txt'],'w'); + fprintf(fid,'Scaling factor: %10e\n',Sca); + x = mh.ecat_calibration_factor * Sca; + fprintf(fid,'Scaling factor * ECAT Cal Factor: %10.10f\n',x); + fclose(fid); end - + + + + % save debugging step 8 - rescale to 16 bits + if (ecat_save_steps == '1') + first_middle_last_frames_to_text(img_temp,ecat_save_steps_dir, '8_rescale_to_16_ecat2nii_matlab'); + end + % check names if ~exist('FileListOut','var') [~,pet_filename] = fileparts(pet_file); @@ -215,7 +248,7 @@ end % save raw data - if savemat + if savemat or ecat_save_steps == '1' if mh.calibration_units == 1 % see line 337 ecat = img_temp.*Sca; else @@ -349,6 +382,9 @@ if mh.calibration_units == 1 % do calibrate img_temp = single(round(img_temp).*(Sca*mh.ecat_calibration_factor)); % scale and dose calibrated + if ecat_save_steps == '1' + first_middle_last_frames_to_text(img_temp,ecat_save_steps_dir, '9_scal_cal_units_ecat2nii_matlab'); + end warning('it looks like the source data are not dose calibrated - ecat2nii is thus scaling the data') else % do not calibrate img_temp = single(round(img_temp).*Sca); % just the 16 bit scaling, img_temp is already dose calibrated @@ -428,9 +464,19 @@ if gz fnm = [fnm '.gz']; %#ok<*AGROW> end - + + + if ecat_save_steps == '1' + first_middle_last_frames_to_text(img_temp, ecat_save_steps_dir, '10_save_nii_cat2nii_matlab'); + end nii_tool('save', nii, fnm); FileListOut{j} = fnm; + + if ecat_save_steps == '1' + % open the nifti file just written and check to see if the written values differ from the original + nii = nii_tool('load', 
fnm); + first_middle_last_frames_to_text(nii.img, ecat_save_steps_dir, '11_read_saved_nii_matlab'); + end % optionally one can use niftiwrite from the Image Processing Toolbox % warning different versions of matlab may provide different nifti results @@ -445,7 +491,7 @@ % end catch conversionerr - FileListOut{j} = sprintf('%s failed to convert:%s',FileListIn{j},conversionerr.message); + FileListOut{j} = sprintf('%s failed to convert:%s',FileListIn{j},conversionerr.message, conversionerr.stack.line); end if exist('newfile','var') % i.e. decompresed .nii.gz diff --git a/matlab/first_middle_last_frames_to_text.m b/matlab/first_middle_last_frames_to_text.m new file mode 100644 index 00000000..18a3b01d --- /dev/null +++ b/matlab/first_middle_last_frames_to_text.m @@ -0,0 +1,32 @@ +function [output_folder, step_name] = first_middle_last_frames_to_text(four_d_array_like_object,output_folder, step_name) +%takes a times series of 3d images and writes out sub sections of that time +%series as 2D frames. Frames are labled as zero indexed to aligne with +%python conventions. Up to 3 frames are selected from the time series +%corresponding to the first, middle, and last frames. +% four_d_array_like_object: input time series +% output_folder: path to write the selected 2D frames to +% step_name: name to apply to output files +% +output_folder = output_folder; +step_name = step_name; +data = four_d_array_like_object; +data_size = size(data); + +% get first, middle, and last frame of data +frames = [ 1, floor(data_size(4)/2) + 1, data_size(4)] +frames_to_record = cell(length(frames)); +for i = 1:length(frames) + frame_number = frames(i); + frame_to_slice = data(:,:,:,frame_number); + size_of_frame_to_slice = size(frame_to_slice) + middle_of_frame = int16(size_of_frame_to_slice(3)/2); + slice = data(:,:,middle_of_frame,frame_number); + frames_to_record{i} = slice; +end + +for i = 1:length(frames_to_record) + frame = frames_to_record{i}; + frame_string = string(frames(i) - 1); + filename = strcat(output_folder, filesep, step_name, '_frame_', frame_string, '.tsv'); + writematrix(frame, filename, 'Delimiter', 'tab', 'FileType', 'text'); +end diff --git a/matlab/first_middle_last_frames_to_text_cell.m b/matlab/first_middle_last_frames_to_text_cell.m new file mode 100644 index 00000000..cc17dd5d --- /dev/null +++ b/matlab/first_middle_last_frames_to_text_cell.m @@ -0,0 +1,39 @@ +function [output_folder, step_name] = first_middle_last_frames_to_text(four_d_array_like_object,output_folder, step_name) +%takes a times series of 3d images and writes out sub sections of that time +%series as 2D frames. Frames are labled as zero indexed to aligne with +%python conventions. Up to 3 frames are selected from the time series +%corresponding to the first, middle, and last frames. 
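+% A hedged usage sketch (the file name below is illustrative only); readECAT7
+% returns the frame data as a cell array, which is what this _cell variant expects:
+%   [mh, sh, data] = readECAT7('scan.v');
+%   first_middle_last_frames_to_text_cell(data, fullfile(pwd, 'steps'), '4_read_img_ecat_matlab');
+%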
+% four_d_array_like_object: input time series +% output_folder: path to write the selected 2D frames to +% step_name: name to apply to output files +% +output_folder = output_folder; +step_name = step_name; +data = four_d_array_like_object; + +% get first, middle, and last frame of data +frames = [ 1, floor(length(data)/2) + 1, length(data)]; +frames_to_record = cell(length(frames)); +for i = 1:length(frames) + frame_number = frames(i); + try + frame_to_slice = data{frame_number}; + catch + frame_to_slice = data(frame_number); + end + size_of_frame_to_slice = size(frame_to_slice); + middle_of_frame = int16(size_of_frame_to_slice(3)/2); + try + slice = data{frame_number}(:,:,middle_of_frame); + catch + slice = data(:,:,middle_of_frame,frame_number); + end + frames_to_record{i} = slice; +end + +for i = 1:length(frames_to_record) + frame = frames_to_record{i}; + frame_string = string(frames(i) - 1); + filename = strcat(output_folder, filesep, step_name, '_frame_', frame_string, '.tsv'); + writematrix(frame, filename, 'Delimiter', 'tab', 'FileType', 'text'); +end diff --git a/matlab/readECAT7.m b/matlab/readECAT7.m index 51ff3e3c..fe365e44 100644 --- a/matlab/readECAT7.m +++ b/matlab/readECAT7.m @@ -55,6 +55,8 @@ fs = []; end +%ecat_save_steps = '1' + if length(fs) == 0 origpwd = pwd; if length(lastpath_) > 0; cd(lastpath_); end @@ -77,7 +79,8 @@ end % open file as ieee big-endian -fid = fopen(fs,'r','ieee-be'); +file_endianess = 'ieee-be'; +fid = fopen(fs,'r', file_endianess); if (fid < 0) error('readECAT: Problem opening file %s', fs); end @@ -86,6 +89,23 @@ % Read in the main header info mh = readMainheader(fid); +ecat_save_steps = getenv('ECAT_SAVE_STEPS'); +ecat_save_steps_dir = mfilename('fullpath'); +if contains(ecat_save_steps_dir, 'LiveEditorEvaluationHelper') + ecat_save_steps_dir = matlab.desktop.editor.getActiveFilename; +end +parts = strsplit(ecat_save_steps_dir, filesep); +ecat_save_steps_dir = strjoin([parts(1:end-2), 'ecat_testing', 'steps'], filesep); + +% save main header output to json +if (ecat_save_steps == '1') + % save main header + main_header_json = (jsonencode(mh, PrettyPrint=true)); + step_1_file = fopen([ecat_save_steps_dir filesep '1_read_mh_ecat_matlab.json'], 'w'); + fprintf(step_1_file, '%s', main_header_json); + fclose(step_1_file); +end + if (nargout == 1) return end @@ -129,6 +149,7 @@ sh = cell(nmat,1); data = cell(nmat,1); +pixel_data_type = ''; % select proper sh routine switch mh.file_type @@ -173,15 +194,19 @@ switch sh{m}.data_type case 5 % IEEE float (32 bit) data{m} = fread(fid, [sz(1)*sz(2) sz(3)],'float32'); + pixel_data_type = 'float32'; case 6 % SUN int16 if (matlabVersion(1) < 6) warning('old matlab version, using int16 to read') data{m} = int16(fread(fid, [sz(1)*sz(2) sz(3)],'int16')); + pixel_data_type = 'int16'; else data{m} = fread(fid, [sz(1)*sz(2) sz(3)],'int16=>int16'); + pixel_data_type = 'int16=>int16'; end otherwise warning('readECAT7: unrecognized data type'); + pixel_data_type = 'unrecognized data type'; end % other sh{m}.data_type: 2 = VAX int16, 3 = VAX int32, 4 = VAX F-float (32 bit), 7 = SUN int32 @@ -196,6 +221,31 @@ end % loop over matrices fclose(fid); +if (ecat_save_steps == '1') + % save subheaders header + sub_header_json = (jsonencode(sh, PrettyPrint=true)); + step_2_file = fopen([ecat_save_steps_dir filesep '1_read_sh_ecat_matlab.json'], 'w'); + fprintf(step_2_file, '%s', sub_header_json); + fclose(step_2_file); + + % write out endianess and datatype of the pixel data matrix + step_3_struct = {}; + x.data_type = 
class(data{1}); + x.endianess = file_endianess; + step_3_json = (jsonencode(x, PrettyPrint=true)); + step_3_file = fopen([ecat_save_steps_dir filesep '3_determine_data_type_matlab.json'], 'w'); + fprintf(step_3_file, '%s', step_3_json); + fclose(step_3_file); + + first_middle_last_frames_to_text_cell(data, ecat_save_steps_dir, '4_read_img_ecat_matlab') + + % scale if calibrated + % data has already been saved in the previous step (step 4) but we go ahead and do this step once more because the + % numbering system that's been imposed upon us to test these outputs + if calibrated ~0 + first_middle_last_frames_to_text_cell(data, ecat_save_steps_dir, '5_scale_img_ecat_matlab') + end +end return diff --git a/pypet2bids/poetry.lock b/pypet2bids/poetry.lock index c7405fdc..f0e7fba1 100644 --- a/pypet2bids/poetry.lock +++ b/pypet2bids/poetry.lock @@ -207,13 +207,13 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.2.0" +version = "1.2.1" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, - {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, + {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, + {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, ] [package.extras] @@ -221,13 +221,13 @@ test = ["pytest (>=6)"] [[package]] name = "idna" -version = "3.6" +version = "3.7" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, ] [[package]] @@ -243,13 +243,13 @@ files = [ [[package]] name = "importlib-metadata" -version = "7.0.2" +version = "7.1.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.0.2-py3-none-any.whl", hash = "sha256:f4bc4c0c070c490abf4ce96d715f68e95923320370efb66143df00199bb6c100"}, - {file = "importlib_metadata-7.0.2.tar.gz", hash = "sha256:198f568f3230878cb1b44fbd7975f87906c22336dba2e4a7f05278c281fbd792"}, + {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, + {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, ] [package.dependencies] @@ -258,17 +258,17 @@ zipp = ">=0.5" [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] +testing = ["flufl.flake8", 
"importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] [[package]] name = "importlib-resources" -version = "6.3.2" +version = "6.4.0" description = "Read resources from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_resources-6.3.2-py3-none-any.whl", hash = "sha256:f41f4098b16cd140a97d256137cfd943d958219007990b2afb00439fc623f580"}, - {file = "importlib_resources-6.3.2.tar.gz", hash = "sha256:963eb79649252b0160c1afcfe5a1d3fe3ad66edd0a8b114beacffb70c0674223"}, + {file = "importlib_resources-6.4.0-py3-none-any.whl", hash = "sha256:50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c"}, + {file = "importlib_resources-6.4.0.tar.gz", hash = "sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145"}, ] [package.dependencies] @@ -276,7 +276,7 @@ zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["jaraco.collections", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] +testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] [[package]] name = "iniconfig" @@ -308,13 +308,13 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "joblib" -version = "1.3.2" +version = "1.4.0" description = "Lightweight pipelining with Python functions" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "joblib-1.3.2-py3-none-any.whl", hash = "sha256:ef4331c65f239985f3f2220ecc87db222f08fd22097a3dd5698f693875f8cbb9"}, - {file = "joblib-1.3.2.tar.gz", hash = "sha256:92f865e621e17784e7955080b6d042489e3b8e294949cc44c6eac304f59772b1"}, + {file = "joblib-1.4.0-py3-none-any.whl", hash = "sha256:42942470d4062537be4d54c83511186da1fc14ba354961a2114da91efa9a4ed7"}, + {file = "joblib-1.4.0.tar.gz", hash = "sha256:1eb0dc091919cd384490de890cb5dfd538410a6d4b3b54eef09fb8c50b409b1c"}, ] [[package]] @@ -578,13 +578,13 @@ tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} [[package]] name = "pluggy" -version = "1.4.0" +version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] @@ -655,13 +655,13 @@ hook-testing = ["execnet (>=1.5.0)", "psutil", "pytest (>=2.7.3)"] [[package]] name = "pyinstaller-hooks-contrib" -version = "2024.3" +version = "2024.5" description = "Community maintained hooks for PyInstaller" optional = false python-versions = ">=3.7" files = [ - {file = "pyinstaller-hooks-contrib-2024.3.tar.gz", hash = 
"sha256:d18657c29267c63563a96b8fc78db6ba9ae40af6702acb2f8c871df12c75b60b"}, - {file = "pyinstaller_hooks_contrib-2024.3-py2.py3-none-any.whl", hash = "sha256:6701752d525e1f4eda1eaec2c2affc206171e15c7a4e188a152fcf3ed3308024"}, + {file = "pyinstaller_hooks_contrib-2024.5-py2.py3-none-any.whl", hash = "sha256:0852249b7fb1e9394f8f22af2c22fa5294c2c0366157969f98c96df62410c4c6"}, + {file = "pyinstaller_hooks_contrib-2024.5.tar.gz", hash = "sha256:aa5dee25ea7ca317ad46fa16b5afc8dba3b0e43f2847e498930138885efd3cab"}, ] [package.dependencies] @@ -789,56 +789,56 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "scipy" -version = "1.10.1" +version = "1.9.3" description = "Fundamental algorithms for scientific computing in Python" optional = false -python-versions = "<3.12,>=3.8" +python-versions = ">=3.8" files = [ - {file = "scipy-1.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e7354fd7527a4b0377ce55f286805b34e8c54b91be865bac273f527e1b839019"}, - {file = "scipy-1.10.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:4b3f429188c66603a1a5c549fb414e4d3bdc2a24792e061ffbd607d3d75fd84e"}, - {file = "scipy-1.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1553b5dcddd64ba9a0d95355e63fe6c3fc303a8fd77c7bc91e77d61363f7433f"}, - {file = "scipy-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c0ff64b06b10e35215abce517252b375e580a6125fd5fdf6421b98efbefb2d2"}, - {file = "scipy-1.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:fae8a7b898c42dffe3f7361c40d5952b6bf32d10c4569098d276b4c547905ee1"}, - {file = "scipy-1.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f1564ea217e82c1bbe75ddf7285ba0709ecd503f048cb1236ae9995f64217bd"}, - {file = "scipy-1.10.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:d925fa1c81b772882aa55bcc10bf88324dadb66ff85d548c71515f6689c6dac5"}, - {file = "scipy-1.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaea0a6be54462ec027de54fca511540980d1e9eea68b2d5c1dbfe084797be35"}, - {file = "scipy-1.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15a35c4242ec5f292c3dd364a7c71a61be87a3d4ddcc693372813c0b73c9af1d"}, - {file = "scipy-1.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:43b8e0bcb877faf0abfb613d51026cd5cc78918e9530e375727bf0625c82788f"}, - {file = "scipy-1.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5678f88c68ea866ed9ebe3a989091088553ba12c6090244fdae3e467b1139c35"}, - {file = "scipy-1.10.1-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:39becb03541f9e58243f4197584286e339029e8908c46f7221abeea4b749fa88"}, - {file = "scipy-1.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bce5869c8d68cf383ce240e44c1d9ae7c06078a9396df68ce88a1230f93a30c1"}, - {file = "scipy-1.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07c3457ce0b3ad5124f98a86533106b643dd811dd61b548e78cf4c8786652f6f"}, - {file = "scipy-1.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:049a8bbf0ad95277ffba9b3b7d23e5369cc39e66406d60422c8cfef40ccc8415"}, - {file = "scipy-1.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cd9f1027ff30d90618914a64ca9b1a77a431159df0e2a195d8a9e8a04c78abf9"}, - {file = "scipy-1.10.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:79c8e5a6c6ffaf3a2262ef1be1e108a035cf4f05c14df56057b64acc5bebffb6"}, - {file = "scipy-1.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51af417a000d2dbe1ec6c372dfe688e041a7084da4fdd350aeb139bd3fb55353"}, - 
{file = "scipy-1.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b4735d6c28aad3cdcf52117e0e91d6b39acd4272f3f5cd9907c24ee931ad601"}, - {file = "scipy-1.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ff7f37b1bf4417baca958d254e8e2875d0cc23aaadbe65b3d5b3077b0eb23ea"}, - {file = "scipy-1.10.1.tar.gz", hash = "sha256:2cf9dfb80a7b4589ba4c40ce7588986d6d5cebc5457cad2c2880f6bc2d42f3a5"}, + {file = "scipy-1.9.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1884b66a54887e21addf9c16fb588720a8309a57b2e258ae1c7986d4444d3bc0"}, + {file = "scipy-1.9.3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:83b89e9586c62e787f5012e8475fbb12185bafb996a03257e9675cd73d3736dd"}, + {file = "scipy-1.9.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a72d885fa44247f92743fc20732ae55564ff2a519e8302fb7e18717c5355a8b"}, + {file = "scipy-1.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d01e1dd7b15bd2449c8bfc6b7cc67d630700ed655654f0dfcf121600bad205c9"}, + {file = "scipy-1.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:68239b6aa6f9c593da8be1509a05cb7f9efe98b80f43a5861cd24c7557e98523"}, + {file = "scipy-1.9.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b41bc822679ad1c9a5f023bc93f6d0543129ca0f37c1ce294dd9d386f0a21096"}, + {file = "scipy-1.9.3-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:90453d2b93ea82a9f434e4e1cba043e779ff67b92f7a0e85d05d286a3625df3c"}, + {file = "scipy-1.9.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83c06e62a390a9167da60bedd4575a14c1f58ca9dfde59830fc42e5197283dab"}, + {file = "scipy-1.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abaf921531b5aeaafced90157db505e10345e45038c39e5d9b6c7922d68085cb"}, + {file = "scipy-1.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:06d2e1b4c491dc7d8eacea139a1b0b295f74e1a1a0f704c375028f8320d16e31"}, + {file = "scipy-1.9.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5a04cd7d0d3eff6ea4719371cbc44df31411862b9646db617c99718ff68d4840"}, + {file = "scipy-1.9.3-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:545c83ffb518094d8c9d83cce216c0c32f8c04aaf28b92cc8283eda0685162d5"}, + {file = "scipy-1.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d54222d7a3ba6022fdf5773931b5d7c56efe41ede7f7128c7b1637700409108"}, + {file = "scipy-1.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cff3a5295234037e39500d35316a4c5794739433528310e117b8a9a0c76d20fc"}, + {file = "scipy-1.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:2318bef588acc7a574f5bfdff9c172d0b1bf2c8143d9582e05f878e580a3781e"}, + {file = "scipy-1.9.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d644a64e174c16cb4b2e41dfea6af722053e83d066da7343f333a54dae9bc31c"}, + {file = "scipy-1.9.3-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:da8245491d73ed0a994ed9c2e380fd058ce2fa8a18da204681f2fe1f57f98f95"}, + {file = "scipy-1.9.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4db5b30849606a95dcf519763dd3ab6fe9bd91df49eba517359e450a7d80ce2e"}, + {file = "scipy-1.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c68db6b290cbd4049012990d7fe71a2abd9ffbe82c0056ebe0f01df8be5436b0"}, + {file = "scipy-1.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:5b88e6d91ad9d59478fafe92a7c757d00c59e3bdc3331be8ada76a4f8d683f58"}, + {file = "scipy-1.9.3.tar.gz", hash = "sha256:fbc5c05c85c1a02be77b1ff591087c83bc44579c6d2bd9fb798bb64ea5e1a027"}, ] [package.dependencies] 
-numpy = ">=1.19.5,<1.27.0" +numpy = ">=1.18.5,<1.26.0" [package.extras] -dev = ["click", "doit (>=0.36.0)", "flake8", "mypy", "pycodestyle", "pydevtool", "rich-click", "typing_extensions"] -doc = ["matplotlib (>2)", "numpydoc", "pydata-sphinx-theme (==0.9.0)", "sphinx (!=4.1.0)", "sphinx-design (>=0.2.0)"] -test = ["asv", "gmpy2", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] +dev = ["flake8", "mypy", "pycodestyle", "typing_extensions"] +doc = ["matplotlib (>2)", "numpydoc", "pydata-sphinx-theme (==0.9.0)", "sphinx (!=4.1.0)", "sphinx-panels (>=0.5.2)", "sphinx-tabs"] +test = ["asv", "gmpy2", "mpmath", "pytest", "pytest-cov", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] [[package]] name = "setuptools" -version = "69.2.0" +version = "69.5.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, - {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, + {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, + {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] @@ -1107,5 +1107,5 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" -python-versions = ">=3.8,<=3.11" -content-hash = 
"e11469c3f7c69bb302f1303d8ba5eb39ac46d6de982f19feb10767a3fd484f1c" +python-versions = ">=3.8,<=3.12" +content-hash = "d5e5bf221e17dd550fa3411f510dc5df546aac9dd2b97a40e98fd2cd8fe834c2" diff --git a/pypet2bids/pypet2bids/dcm2niix4pet.py b/pypet2bids/pypet2bids/dcm2niix4pet.py index a3b3956d..d68a3c98 100644 --- a/pypet2bids/pypet2bids/dcm2niix4pet.py +++ b/pypet2bids/pypet2bids/dcm2niix4pet.py @@ -1063,7 +1063,9 @@ def cli(): help="Provide a reconstruction method to be used in the output file name", ) parser.add_argument( - "--version", '-v', action='version', + "--version", + "-v", + action="version", version=f"{helper_functions.get_version()}", ) diff --git a/pypet2bids/pypet2bids/ecat.py b/pypet2bids/pypet2bids/ecat.py index e3b36dec..a900dbd1 100644 --- a/pypet2bids/pypet2bids/ecat.py +++ b/pypet2bids/pypet2bids/ecat.py @@ -290,7 +290,7 @@ def populate_sidecar(self, **kwargs): "ISOTOPE_NAME", None ) self.sidecar_template["PharmaceuticalName"] = self.ecat_header.get( - "RADIOPHARAMCEUTICAL", None + "RADIOPHARMACEUTICAL", None ) # collect frame time start and populate various subheader fields @@ -340,9 +340,19 @@ def populate_sidecar(self, **kwargs): # collect dose start time dose_start_time = self.ecat_header.get("DOSE_START_TIME", None) - if dose_start_time: - parsed_dose_time = parse_this_date(dose_start_time) - self.sidecar_template["PharmaceuticalDoseTime"] = parsed_dose_time + if dose_start_time is not None: + if ( + dose_start_time != 0 + and dose_start_time != "0" + and dose_start_time != 0.0 + ): + dose_start_time = parse_this_date(dose_start_time) + parsed_dose_time = parse_this_date(dose_start_time) + self.sidecar_template["PharmaceuticalDoseTime"] = parsed_dose_time + self.sidecar_template["InjectionStart"] = parsed_dose_time + else: + self.sidecar_template["PharmaceuticalDoseTime"] = int(dose_start_time) + self.sidecar_template["InjectionStart"] = int(dose_start_time) # if decay correction exists mark decay correction boolean as true if len(self.decay_factors) > 0: @@ -397,6 +407,22 @@ def populate_sidecar(self, **kwargs): "AcquisitionTime" ] + # set scan start and pharmaceutical dose time relative to time zero. + times_make_relative = ["ScanStart", "PharmaceuticalDoseTime", "InjectionStart"] + time_zero_datetime = datetime.datetime.strptime( + self.sidecar_template.get("TimeZero"), "%H:%M:%S" + ) + for t in times_make_relative: + t_value = self.sidecar_template.get(t) + # sometimes we start with 0 or time in seconds, we first check for this + try: + int(t_value) + self.sidecar_template[t] = t_value + except ValueError: + t_datetime = datetime.datetime.strptime(t_value, "%H:%M:%S") + time_diff = t_datetime - time_zero_datetime + self.sidecar_template[t] = time_diff.total_seconds() + # clear any nulls from json sidecar and replace with none's self.sidecar_template = helper_functions.replace_nones(self.sidecar_template) @@ -414,7 +440,7 @@ def prune_sidecar(self): full_fields = list(self.sidecar_template) exclude_list = [] for field, value in self.sidecar_template.items(): - if value: + if value is not None and value != "": # check to make sure value isn't a list of null types # e.g. if value = [None, None, None] we don't want to include it. 
if type(value) is list: diff --git a/pypet2bids/pypet2bids/ecat2nii.py b/pypet2bids/pypet2bids/ecat2nii.py index 0448f565..fa187088 100644 --- a/pypet2bids/pypet2bids/ecat2nii.py +++ b/pypet2bids/pypet2bids/ecat2nii.py @@ -10,7 +10,10 @@ import nibabel import numpy import pathlib -from pypet2bids.read_ecat import read_ecat +from pypet2bids.read_ecat import ( + read_ecat, + code_dir, +) # we use code_dir for ecat debugging import os import pickle import logging @@ -21,7 +24,15 @@ import pypet2bids.helper_functions as helper_functions -# logger = helper_functions.logger('pypet2bids') +# debug variable +# save steps for debugging, for mor infor see ecat_testing/README.md +ecat_save_steps = os.environ.get("ECAT_SAVE_STEPS", 0) +if ecat_save_steps == "1": + # check to see if the code directory is available, if it's not create it and + # the steps dir to save outputs created if ecat_save_steps is set to 1 + steps_dir = code_dir.parent / "ecat_testing" / "steps" + if not steps_dir.is_dir(): + os.makedirs(code_dir.parent / "ecat_testing" / "steps", exist_ok=True) logger = logging.getLogger("pypet2bids") @@ -97,11 +108,39 @@ def ecat2nii( f"type(ecat_pixel_data)={type(ecat_pixel_data)} instead." ) + # debug step #6 view data as passed to ecat2nii method + if ecat_save_steps == "1": + # collect only the first, middle, and last frames from pixel_data_matrix_4d as first, middle, and last + # frames are typically the most interesting + frames = [0, len(data) // 2, -1] + frames_to_record = [] + for f in frames: + frames_to_record.append(data[:, :, :, f]) + + # now collect a single 2d slice from the "middle" of the 3d frames in frames_to_record + slice_to_record = [] + for index, frame in enumerate(frames_to_record): + numpy.savetxt( + steps_dir / f"6_ecat2nii_python_{index}.tsv", + frames_to_record[index][:, :, frames_to_record[index].shape[2] // 2], + delimiter="\t", + fmt="%s", + ) + + helper_functions.first_middle_last_frames_to_text( + four_d_array_like_object=data, + output_folder=steps_dir, + step_name="6_ecat2nii_python", + ) + + # set the byte order and the pixel data type from the input array + pixel_data_type = data.dtype + # check for TimeZero supplied via kwargs if kwargs.get("TimeZero", None): TimeZero = kwargs["TimeZero"] else: - logger.warn( + logger.warning( "Metadata TimeZero is missing -- set to ScanStart or empty to use the scanning time as " "injection time" ) @@ -133,7 +172,7 @@ def ecat2nii( sub_headers[0]["Z_DIMENSION"], main_header["NUM_FRAMES"], ), - dtype=numpy.dtype(">f4"), + dtype=">f4", ) # collect timing information @@ -147,13 +186,10 @@ def ecat2nii( range(img_shape[3]) ): # Don't throw stones working from existing matlab code print(f"Loading frame {index + 1}") - # save out our slice of data before flip to a text file to compare w/ matlab data img_temp[:, :, :, index] = numpy.flip( numpy.flip( numpy.flip( - data[:, :, :, index].astype(numpy.dtype(">f4")) - * sub_headers[index]["SCALE_FACTOR"], - 1, + data[:, :, :, index] * sub_headers[index]["SCALE_FACTOR"], 1 ), 2, ), @@ -179,13 +215,59 @@ def ecat2nii( prompts.append(0) randoms.append(0) + # debug step #7 view data after flipping into nifti space/orientation + if ecat_save_steps == "1": + helper_functions.first_middle_last_frames_to_text( + four_d_array_like_object=img_temp, + output_folder=steps_dir, + step_name="7_flip_ecat2nii_python", + ) + + # so the only real difference between the matlab code and the python code is that that we aren't manually + # scaling the date to 16 bit integers. 
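+    # (The rescaling below mirrors the 16-bit rescaling in ecat2nii.m: img_temp is
+    # normalised so its maximum maps to 32767, and sca keeps the factor needed to
+    # undo that normalisation later. For example, if img_temp.max() were 5.0, the
+    # peak voxel is stored as 32767 and sca = 5.0/32767 ~= 1.526e-4, so
+    # numpy.round(img) * sca -- times ECAT_CALIBRATION_FACTOR when
+    # CALIBRATION_UNITS == 1 -- restores the original magnitude.)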
+ rg = img_temp.max() - img_temp.min() + if rg != 32767: + max_img = img_temp.max() + img_temp = img_temp / max_img * 32767 + sca = max_img / 32767 + min_img = img_temp.min() + if min_img < -32768: + img_temp = img_temp / (min_img * -32768) + sca = sca * (min_img * -32768) + if ecat_save_steps == "1": + with open(os.path.join(steps_dir, "8.5_sca.txt"), "w") as sca_file: + sca_file.write(f"Scaling factor: {sca}\n") + sca_file.write( + f"Scaling factor * ECAT Cal Factor: {sca * main_header['ECAT_CALIBRATION_FACTOR']}\n" + ) + + # scale image to 16 bit + final_image = img_temp.astype(numpy.single) + + # debug step 8 check after "rescaling" to 16 bit + if ecat_save_steps == "1": + helper_functions.first_middle_last_frames_to_text( + four_d_array_like_object=final_image, + output_folder=steps_dir, + step_name="8_rescale_to_16_ecat2nii_python", + ) + ecat_cal_units = main_header[ "CALIBRATION_UNITS" ] # Header field designating whether data has already been calibrated if ecat_cal_units == 1: # Calibrate if it hasn't been already - final_image = img_temp * main_header["ECAT_CALIBRATION_FACTOR"] + final_image = ( + numpy.round(final_image) * main_header["ECAT_CALIBRATION_FACTOR"] * sca + ) + # this debug step may not execute if we're not calibrating the scan, but that's okay + if ecat_save_steps == "1": + helper_functions.first_middle_last_frames_to_text( + four_d_array_like_object=final_image, + output_folder=steps_dir, + step_name="9_scal_cal_units_ecat2nii_python", + ) else: # And don't calibrate if CALIBRATION_UNITS is anything else but 1 - final_image = img_temp + final_image = numpy.round(final_image) * sca qoffset_x = -1 * ( ( @@ -210,90 +292,49 @@ def ecat2nii( # build affine if it's not included in function call if not affine: - t = numpy.identity(4) - t[0, 0] = sub_headers[0]["X_PIXEL_SIZE"] * 10 - t[1, 1] = sub_headers[0]["Y_PIXEL_SIZE"] * 10 - t[2, 2] = sub_headers[0]["Z_PIXEL_SIZE"] * 10 - - t[3, 0] = qoffset_x - t[3, 1] = qoffset_y - t[3, 2] = qoffset_z - - # note this affine is the transform of of a nibabel ecat object's affine - affine = t + mat = ( + numpy.diag( + [ + sub_headers[0]["X_PIXEL_SIZE"], + sub_headers[0]["Y_PIXEL_SIZE"], + sub_headers[0]["Z_PIXEL_SIZE"], + ] + ) + * 10 + ) + affine = nibabel.affines.from_matvec(mat, [qoffset_x, qoffset_y, qoffset_z]) img_nii = nibabel.Nifti1Image(final_image, affine=affine) - # populating nifti header - if img_nii.header["sizeof_hdr"] != 348: - img_nii.header["sizeof_hdr"] = 348 - # img_nii.header['dim_info'] is populated on object creation - # img_nii.header['dim'] is populated on object creation - img_nii.header["intent_p1"] = 0 - img_nii.header["intent_p2"] = 0 - img_nii.header["intent_p3"] = 0 - # img_nii.header['datatype'] # created on invocation seems to be 16 or int16 - # img_nii.header['bitpix'] # also automatically created and inferred 32 as of testing w/ cimbi dataset - # img_nii.header['slice_type'] # defaults to 0 - # img_nii.header['pixdim'] # appears as 1d array of length 8 we rescale this - img_nii.header["pixdim"] = numpy.array( - [ - 1, - sub_headers[0]["X_PIXEL_SIZE"] * 10, - sub_headers[0]["Y_PIXEL_SIZE"] * 10, - sub_headers[0]["Z_PIXEL_SIZE"] * 10, - 0, - 0, - 0, - 0, - ] - ) - img_nii.header["vox_offset"] = 352 - - # TODO img_nii.header['scl_slope'] # this is a NaN array by default but apparently it should be the dose calibration - # factor img_nii.header['scl_inter'] # defaults to NaN array - img_nii.header["scl_slope"] = 0 - img_nii.header["scl_inter"] = 0 - img_nii.header["slice_end"] = 0 - 
img_nii.header["slice_code"] = 0 - img_nii.header["xyzt_units"] = 10 - img_nii.header["cal_max"] = final_image.min() - img_nii.header["cal_min"] = final_image.max() - img_nii.header["slice_duration"] = 0 - img_nii.header["toffset"] = 0 + # debug step 10, check to see what's happened after we've converted our numpy array in to a nibabel object + if ecat_save_steps == "1": + helper_functions.first_middle_last_frames_to_text( + four_d_array_like_object=img_nii.dataobj, + output_folder=steps_dir, + step_name="10_save_nii_ecat2nii_python", + ) + + img_nii.header.set_slope_inter(slope=1, inter=0) + img_nii.header.set_xyzt_units("mm", "sec") + img_nii.header.set_qform(affine, code=1) + img_nii.header.set_sform(affine, code=1) + # No setter methods for these + img_nii.header["cal_max"] = final_image.max() + img_nii.header["cal_min"] = final_image.min() img_nii.header["descrip"] = "OpenNeuroPET ecat2nii.py conversion" - # img_nii.header['aux_file'] # ignoring as this is set to '' in matlab - img_nii.header["qform_code"] = 0 - img_nii.header["sform_code"] = 1 # 0: Arbitrary coordinates; - # 1: Scanner-based anatomical coordinates; - # 2: Coordinates aligned to another file's, or to anatomical "truth" (co-registration); - # 3: Coordinates aligned to Talairach-Tournoux Atlas; 4: MNI 152 normalized coordinates - - img_nii.header["quatern_b"] = 0 - img_nii.header["quatern_c"] = 0 - img_nii.header["quatern_d"] = 0 - # Please explain this - img_nii.header["qoffset_x"] = qoffset_x - img_nii.header["qoffset_y"] = qoffset_y - img_nii.header["qoffset_z"] = qoffset_z - img_nii.header["srow_x"] = numpy.array( - [sub_headers[0]["X_PIXEL_SIZE"] * 10, 0, 0, img_nii.header["qoffset_x"]] - ) - img_nii.header["srow_y"] = numpy.array( - [0, sub_headers[0]["Y_PIXEL_SIZE"] * 10, 0, img_nii.header["qoffset_y"]] - ) - img_nii.header["srow_z"] = numpy.array( - [0, 0, sub_headers[0]["Z_PIXEL_SIZE"] * 10, img_nii.header["qoffset_z"]] - ) - img_nii.header["intent_name"] = "" - img_nii.header["magic"] = "n + 1 " + nibabel.save(img_nii, nifti_file) - # nifti header items to include - img_nii.header.set_xyzt_units("mm", "unknown") + # run step 11 in debug + if ecat_save_steps == "1": + # load nifti file with nibabel + written_img_nii = nibabel.load(nifti_file) - # save nifti - nibabel.save(img_nii, nifti_file) + helper_functions.first_middle_last_frames_to_text( + four_d_array_like_object=written_img_nii.dataobj, + output_folder=steps_dir, + step_name="11_read_saved_nii_python", + ) # used for testing veracity of nibabel read and write. 
if save_binary: diff --git a/pypet2bids/pypet2bids/ecat_cli.py b/pypet2bids/pypet2bids/ecat_cli.py index 5eccfde1..5c6b831f 100644 --- a/pypet2bids/pypet2bids/ecat_cli.py +++ b/pypet2bids/pypet2bids/ecat_cli.py @@ -185,7 +185,9 @@ def cli(): "path/to/metadata.xlsx", ) parser.add_argument( - "--version", '-v', action='version', + "--version", + "-v", + action="version", version=f"{helper_functions.get_version()}", ) diff --git a/pypet2bids/pypet2bids/helper_functions.py b/pypet2bids/pypet2bids/helper_functions.py index e64fb7b4..8806a26a 100644 --- a/pypet2bids/pypet2bids/helper_functions.py +++ b/pypet2bids/pypet2bids/helper_functions.py @@ -998,7 +998,9 @@ def replace_nones(dictionary): return json.loads(json_fixed) -def check_pet2bids_config(variable: str = "DCM2NIIX_PATH", config_path=Path.home() / ".pet2bidsconfig"): +def check_pet2bids_config( + variable: str = "DCM2NIIX_PATH", config_path=Path.home() / ".pet2bidsconfig" +): """ Checks the config file at users home /.pet2bidsconfig for the variable passed in, defaults to checking for DCM2NIIX_PATH. However, we can use it for anything we like, @@ -1027,6 +1029,14 @@ def check_pet2bids_config(variable: str = "DCM2NIIX_PATH", config_path=Path.home f"Found {variable} in environment variables as {variable_value}, but no .pet2bidsconfig file found at {pypet2bids_config}" ) if variable == "DCM2NIIX_PATH": + # check to see if dcm2niix is on the path at all + check_on_path = subprocess.run( + "dcm2niix -h", + shell=True, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) + if variable_value: # check dcm2niix path exists dcm2niix_path = Path(variable_value) @@ -1038,6 +1048,16 @@ def check_pet2bids_config(variable: str = "DCM2NIIX_PATH", config_path=Path.home ) if check.returncode == 0: return dcm2niix_path + elif ( + not variable_value + and pypet2bids_config.exists() + and check_on_path.returncode == 0 + ): + # do nothing + # log.info( + # f"DCM2NIIX_PATH not found in {pypet2bids_config}, but dcm2niix is on $PATH." 
+ # ) + return None else: log.error( f"Unable to locate dcm2niix executable at {dcm2niix_path.__str__()}" @@ -1100,3 +1120,25 @@ def hash_fields(**fields): hash_hex = hashlib.md5(hash_string.encode("utf-8")).hexdigest() return f"{hash_return_string}{hash_hex}" + + +def first_middle_last_frames_to_text( + four_d_array_like_object, output_folder, step_name="_step_name_" +): + frames = [ + 0, + four_d_array_like_object.shape[-1] // 2, + four_d_array_like_object.shape[-1] - 1, + ] + frames_to_record = [] + for f in frames: + frames_to_record.append(four_d_array_like_object[:, :, :, f]) + + # now collect a single 2d slice from the "middle" of the 3d frames in frames_to_record + for index, frame in enumerate(frames_to_record): + numpy.savetxt( + output_folder / f"{step_name}_frame_{frames[index]}.tsv", + frames_to_record[index][:, :, frames_to_record[index].shape[2] // 2], + delimiter="\t", + fmt="%s", + ) diff --git a/pypet2bids/pypet2bids/is_pet.py b/pypet2bids/pypet2bids/is_pet.py index a1d5f799..08dbfb46 100644 --- a/pypet2bids/pypet2bids/is_pet.py +++ b/pypet2bids/pypet2bids/is_pet.py @@ -204,7 +204,9 @@ def pet_folder(folder_path: Path, skim=False, njobs=2) -> Union[str, list, bool] all_files = smaller_list # check if any files are pet files - files = read_files_in_parallel(all_files, pet_file, n_jobs=njobs, return_only_path=True) + files = read_files_in_parallel( + all_files, pet_file, n_jobs=njobs, return_only_path=True + ) files = [Path(f) for f in files if f is not None] # check through list of pet files and statuses for True values in parallel folders = set([f.parent for f in files]) @@ -268,7 +270,9 @@ def main(): sys.exit(1) elif args.filepath.is_dir(): - pet_folders = pet_folder(args.filepath.resolve(), skim=args.skim, njobs=args.njobs) + pet_folders = pet_folder( + args.filepath.resolve(), skim=args.skim, njobs=args.njobs + ) if len(pet_folders) > 0: for f in pet_folders: print(f"{f}") diff --git a/pypet2bids/pypet2bids/read_ecat.py b/pypet2bids/pypet2bids/read_ecat.py index 5ee6a736..ecae98f3 100644 --- a/pypet2bids/pypet2bids/read_ecat.py +++ b/pypet2bids/pypet2bids/read_ecat.py @@ -22,12 +22,22 @@ import pathlib import re import numpy -from pypet2bids.helper_functions import decompress +from pypet2bids.helper_functions import decompress, first_middle_last_frames_to_text parent_dir = pathlib.Path(__file__).parent.resolve() code_dir = parent_dir.parent data_dir = code_dir.parent +# debug variable +# save steps for debugging for more info see ecat_testing/README.md +ecat_save_steps = os.environ.get("ECAT_SAVE_STEPS", 0) +if ecat_save_steps == "1": + # check to see if the code directory is available, if it's not create it and + # the steps dir to save outputs created if ecat_save_steps is set to 1 + steps_dir = code_dir.parent / "ecat_testing" / "steps" + if not steps_dir.is_dir(): + os.makedirs(code_dir.parent / "ecat_testing" / "steps", exist_ok=True) + # collect ecat header maps, this program will not work without these as ECAT data varies in the byte location of its # data depending on the version of ECAT it was formatted with. 
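Because `ecat_save_steps` above is read once at module import time, the environment variable has to be set before `pypet2bids.read_ecat` is imported. A minimal sketch of exercising the new debug pathway from Python, assuming `read_ecat` returns `(main_header, subheaders, pixel_data)` as in this module, with a placeholder input file name:

    import os
    os.environ["ECAT_SAVE_STEPS"] = "1"  # must be set before the import below

    from pypet2bids.read_ecat import read_ecat
    mh, subheaders, pixel_data = read_ecat("scan.v", collect_pixel_data=True)
    # numbered outputs such as 1_read_mh_ecat_python.json and
    # 4_read_img_ecat_python_frame_0.tsv land in ecat_testing/steps/ at the repo root
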
@@ -95,7 +105,6 @@ def collect_specific_bytes( :param bytes_object: an opened bytes object :param start_position: the position to start to read at :param width: how far to read from the start position - :param relative_to: position relative to 0 -> start of file/object, 1 -> current position of seek head, 2 -> end of file/object :return: the bytes starting at position """ @@ -270,6 +279,9 @@ def read_ecat( for entry, dictionary in ecat_header_maps["ecat_headers"].items(): possible_ecat_headers[entry] = dictionary["mainheader"] + # set byte order + byte_order = ">" + confirmed_version = None for version, dictionary in possible_ecat_headers.items(): try: @@ -288,6 +300,10 @@ def read_ecat( ecat_main_header = ecat_header_maps["ecat_headers"][confirmed_version]["mainheader"] main_header, read_to = get_header_data(ecat_main_header, ecat_file) + + if ecat_save_steps == "1": + with open(steps_dir / "1_read_mh_ecat_python.json", "w") as outfile: + json.dump(main_header, outfile) """ Some notes about the file directory/sorted directory: @@ -405,7 +421,8 @@ def read_ecat( formatting = ">f4" pixel_data_type = numpy.dtype(formatting) elif dt_val == 6: - pixel_data_type = ">H" + # >H is unsigned short e.g. >u2, reverting to int16 e.g. i2 to align with commit 9beee53 + pixel_data_type = ">i2" else: raise ValueError( f"Unable to determine pixel data type from value: {dt_val} extracted from {subheader}" @@ -416,6 +433,17 @@ def read_ecat( dtype=pixel_data_type, count=image_size[0] * image_size[1] * image_size[2], ).reshape(*image_size, order="F") + # pixel_data_matrix_3d.newbyteorder(byte_order) + + if ecat_save_steps == "1": + with open( + steps_dir / f"4_determine_data_type_python.json", "w" + ) as outfile: + json.dump( + {"DATA_TYPE": dt_val, "formatting": pixel_data_type}, + outfile, + ) + else: raise Exception( f"Unable to determine frame image size, unsupported image type {subheader_type_number}" @@ -437,6 +465,10 @@ def read_ecat( subheaders.append(subheader) + if ecat_save_steps == "1": + with open(steps_dir / f"2_read_sh_ecat_python.json", "w") as outfile: + json.dump({"subheaders": subheaders}, outfile) + if collect_pixel_data: # return 4d array instead of list of 3d arrays pixel_data_matrix_4d = numpy.zeros( @@ -445,6 +477,32 @@ def read_ecat( for index, frame in enumerate(data): pixel_data_matrix_4d[:, :, :, index] = frame + if ecat_save_steps == "1": + + # write out the endianess and datatype of the pixel data matrix + step_3_dict = { + "datatype": pixel_data_matrix_4d.dtype.name, + "endianness": pixel_data_matrix_4d.dtype.byteorder, + } + + with open(steps_dir / f"3_determine_data_type_python.json", "w") as outfile: + json.dump(step_3_dict, outfile) + + # record out step 4 + first_middle_last_frames_to_text( + four_d_array_like_object=pixel_data_matrix_4d, + output_folder=steps_dir, + step_name="4_read_img_ecat_python", + ) + + # record out step 5 + if calibrated: + first_middle_last_frames_to_text( + four_d_array_like_object=calibrated_pixel_data_matrix_3d, + output_folder=steps_dir, + step_name="5_scale_img_ecat_python", + ) + else: pixel_data_matrix_4d = None diff --git a/pypet2bids/pyproject.toml b/pypet2bids/pyproject.toml index 30955951..bc8e236f 100644 --- a/pypet2bids/pyproject.toml +++ b/pypet2bids/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "pypet2bids" -version = "1.3.10" +version = "1.3.12" description = "A python library for converting PET imaging and blood data to BIDS." 
authors = ["anthony galassi <28850131+bendhouseart@users.noreply.github.com>"] license = "MIT" @@ -14,7 +14,7 @@ repository = "https://github.com/openneuropet/pet2bids/" readme = "README.md" [tool.poetry.dependencies] -python = ">=3.8,<=3.11" +python = ">=3.8,<=3.12" nibabel = ">=3.2.1" numpy = "^1.21.3" pyparsing = "^3.0.4" diff --git a/scripts/python_conversions.sh b/scripts/python_conversions.sh index f8429e24..5cad4373 100644 --- a/scripts/python_conversions.sh +++ b/scripts/python_conversions.sh @@ -9,7 +9,6 @@ # --kwargs accepts arguments passed to in in the form of JS or Python types: int, float, string, list/array. Where lists/arrays should be # wrapped in double quotes. - # set paths where the repo is parent_path=$( cd "$(dirname "${BASH_SOURCE[0]}")" ; pwd -P ) repo_path=$( cd "$(dirname "${parent_path}")" ; pwd -P )
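
Once both the MATLAB and Python paths have been run with `ECAT_SAVE_STEPS=1`, the per-step TSV slices they write can be compared directly. A minimal sketch, assuming both runs used the same input file and following the `{step_name}_frame_{n}.tsv` naming produced by the two `first_middle_last_frames_to_text` implementations (the 1% relative tolerance is an arbitrary choice, not a project standard):

    import pathlib
    import numpy

    steps_dir = pathlib.Path("ecat_testing/steps")
    for frame in (0,):  # extend with the middle and last frame indices as needed
        m = numpy.loadtxt(steps_dir / f"7_flip_ecat2nii_matlab_frame_{frame}.tsv", delimiter="\t")
        p = numpy.loadtxt(steps_dir / f"7_flip_ecat2nii_python_frame_{frame}.tsv", delimiter="\t")
        rms = numpy.sqrt(numpy.mean((m - p) ** 2))
        print(f"frame {frame}: allclose={numpy.allclose(m, p, rtol=0.01)}, rms={rms:.6g}")

Note that the two implementations may not pick the identical middle slice (MATLAB uses `int16` rounding with 1-based indexing, Python uses floor division with 0-based indexing), so small differences here are not necessarily conversion errors.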