# emacs: -*- mode: python-mode; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
#
#   See COPYING file distributed along with the NiBabel package for the
#   copyright and license terms.
#
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
""" Read / write access to NIfTI1 image format

NIfTI1 format defined at http://nifti.nimh.nih.gov/nifti-1/
"""
import warnings
from io import BytesIO

import numpy as np
import numpy.linalg as npl
from numpy.compat.py3k import asstr

from .filebasedimages import SerializableImage
from .volumeutils import Recoder, make_dt_codes, endian_codes
from .spatialimages import HeaderDataError, ImageFileError
from .batteryrunners import Report
from .quaternions import fillpositive, quat2mat, mat2quat
from . import analyze  # module import
from .spm99analyze import SpmAnalyzeHeader
from .casting import have_binary128
from .pydicom_compat import have_dicom, pydicom as pdcm

# nifti1 flat header definition for Analyze-like first 348 bytes
# first number in comments indicates offset in file header in bytes
header_dtd = [
    ('sizeof_hdr', 'i4'),      # 0; must be 348
    ('data_type', 'S10'),      # 4; unused
    ('db_name', 'S18'),        # 14; unused
    ('extents', 'i4'),         # 32; unused
    ('session_error', 'i2'),   # 36; unused
    ('regular', 'S1'),         # 38; unused
    ('dim_info', 'u1'),        # 39; MRI slice ordering code
    ('dim', 'i2', (8,)),       # 40; data array dimensions
    ('intent_p1', 'f4'),       # 56; first intent parameter
    ('intent_p2', 'f4'),       # 60; second intent parameter
    ('intent_p3', 'f4'),       # 64; third intent parameter
    ('intent_code', 'i2'),     # 68; NIFTI intent code
    ('datatype', 'i2'),        # 70; it's the datatype
    ('bitpix', 'i2'),          # 72; number of bits per voxel
    ('slice_start', 'i2'),     # 74; first slice index
    ('pixdim', 'f4', (8,)),    # 76; grid spacings (units below)
    ('vox_offset', 'f4'),      # 108; offset to data in image file
    ('scl_slope', 'f4'),       # 112; data scaling slope
    ('scl_inter', 'f4'),       # 116; data scaling intercept
    ('slice_end', 'i2'),       # 120; last slice index
    ('slice_code', 'u1'),      # 122; slice timing order
    ('xyzt_units', 'u1'),      # 123; units of pixdim[1..4]
    ('cal_max', 'f4'),         # 124; max display intensity
    ('cal_min', 'f4'),         # 128; min display intensity
    ('slice_duration', 'f4'),  # 132; time for 1 slice
    ('toffset', 'f4'),         # 136; time axis shift
    ('glmax', 'i4'),           # 140; unused
    ('glmin', 'i4'),           # 144; unused
    ('descrip', 'S80'),        # 148; any text
    ('aux_file', 'S24'),       # 228; auxiliary filename
    ('qform_code', 'i2'),      # 252; xform code
    ('sform_code', 'i2'),      # 254; xform code
    ('quatern_b', 'f4'),       # 256; quaternion b param
    ('quatern_c', 'f4'),       # 260; quaternion c param
    ('quatern_d', 'f4'),       # 264; quaternion d param
    ('qoffset_x', 'f4'),       # 268; quaternion x shift
    ('qoffset_y', 'f4'),       # 272; quaternion y shift
    ('qoffset_z', 'f4'),       # 276; quaternion z shift
    ('srow_x', 'f4', (4,)),    # 280; 1st row affine transform
    ('srow_y', 'f4', (4,)),    # 296; 2nd row affine transform
    ('srow_z', 'f4', (4,)),    # 312; 3rd row affine transform
    ('intent_name', 'S16'),    # 328; name or meaning of data
    ('magic', 'S4')            # 344; must be 'ni1\0' or 'n+1\0'
]

# Full header numpy dtype
header_dtype = np.dtype(header_dtd)
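
# Quick consistency check, useful when editing the table above (a sketch, not
# executed here): the flat definition must cover exactly the 348 bytes of the
# fixed NIfTI-1 header, so
#
#     header_dtype.itemsize == 348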

# datatypes not in analyze format, with codes
if have_binary128():
    # Only enable 128 bit floats if we really have IEEE binary 128 longdoubles
    _float128t = np.longdouble
    _complex256t = np.longcomplex
else:
    _float128t = np.void
    _complex256t = np.void

_dtdefs = (  # code, label, dtype definition, niistring
    (0, 'none', np.void, ""),
    (1, 'binary', np.void, ""),
    (2, 'uint8', np.uint8, "NIFTI_TYPE_UINT8"),
    (4, 'int16', np.int16, "NIFTI_TYPE_INT16"),
    (8, 'int32', np.int32, "NIFTI_TYPE_INT32"),
    (16, 'float32', np.float32, "NIFTI_TYPE_FLOAT32"),
    (32, 'complex64', np.complex64, "NIFTI_TYPE_COMPLEX64"),
    (64, 'float64', np.float64, "NIFTI_TYPE_FLOAT64"),
    (128, 'RGB', np.dtype([('R', 'u1'),
                           ('G', 'u1'),
                           ('B', 'u1')]), "NIFTI_TYPE_RGB24"),
    (255, 'all', np.void, ''),
    (256, 'int8', np.int8, "NIFTI_TYPE_INT8"),
    (512, 'uint16', np.uint16, "NIFTI_TYPE_UINT16"),
    (768, 'uint32', np.uint32, "NIFTI_TYPE_UINT32"),
    (1024, 'int64', np.int64, "NIFTI_TYPE_INT64"),
    (1280, 'uint64', np.uint64, "NIFTI_TYPE_UINT64"),
    (1536, 'float128', _float128t, "NIFTI_TYPE_FLOAT128"),
    (1792, 'complex128', np.complex128, "NIFTI_TYPE_COMPLEX128"),
    (2048, 'complex256', _complex256t, "NIFTI_TYPE_COMPLEX256"),
    (2304, 'RGBA', np.dtype([('R', 'u1'),
                             ('G', 'u1'),
                             ('B', 'u1'),
                             ('A', 'u1')]), "NIFTI_TYPE_RGBA32"),
)

# Make full code alias bank, including dtype column
data_type_codes = make_dt_codes(_dtdefs)
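
# Example of the resulting lookups (a sketch; attribute names follow the
# ``Recoder`` conventions used by ``make_dt_codes``, values are from the
# table above):
#
#     data_type_codes.code['float32']   # -> 16
#     data_type_codes.label[16]         # -> 'float32'
#     data_type_codes.dtype['float32']  # -> dtype('float32')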

# Transform (qform, sform) codes
xform_codes = Recoder((  # code, label, niistring
    (0, 'unknown', "NIFTI_XFORM_UNKNOWN"),
    (1, 'scanner', "NIFTI_XFORM_SCANNER_ANAT"),
    (2, 'aligned', "NIFTI_XFORM_ALIGNED_ANAT"),
    (3, 'talairach', "NIFTI_XFORM_TALAIRACH"),
    (4, 'mni', "NIFTI_XFORM_MNI_152"),
    (5, 'template', "NIFTI_XFORM_TEMPLATE_OTHER"),
), fields=('code', 'label', 'niistring'))

# unit codes
unit_codes = Recoder((  # code, label
    (0, 'unknown'),
    (1, 'meter'),
    (2, 'mm'),
    (3, 'micron'),
    (8, 'sec'),
    (16, 'msec'),
    (24, 'usec'),
    (32, 'hz'),
    (40, 'ppm'),
    (48, 'rads')), fields=('code', 'label'))

slice_order_codes = Recoder((  # code, label
    (0, 'unknown'),
    (1, 'sequential increasing', 'seq inc'),
    (2, 'sequential decreasing', 'seq dec'),
    (3, 'alternating increasing', 'alt inc'),
    (4, 'alternating decreasing', 'alt dec'),
    (5, 'alternating increasing 2', 'alt inc 2'),
    (6, 'alternating decreasing 2', 'alt dec 2')), fields=('code', 'label'))

intent_codes = Recoder((
    # code, label, parameters description tuple
    (0, 'none', (), "NIFTI_INTENT_NONE"),
    (2, 'correlation', ('p1 = DOF',), "NIFTI_INTENT_CORREL"),
    (3, 't test', ('p1 = DOF',), "NIFTI_INTENT_TTEST"),
    (4, 'f test', ('p1 = numerator DOF', 'p2 = denominator DOF'),
     "NIFTI_INTENT_FTEST"),
    (5, 'z score', (), "NIFTI_INTENT_ZSCORE"),
    (6, 'chi2', ('p1 = DOF',), "NIFTI_INTENT_CHISQ"),
    # two parameter beta distribution
    (7, 'beta',
     ('p1=a', 'p2=b'),
     "NIFTI_INTENT_BETA"),
    # Prob(x) = (p1 choose x) * p2^x * (1-p2)^(p1-x), for x=0,1,...,p1
    (8, 'binomial',
     ('p1 = number of trials', 'p2 = probability per trial'),
     "NIFTI_INTENT_BINOM"),
    # 2 parameter gamma
    # Density(x) proportional to # x^(p1-1) * exp(-p2*x)
    (9, 'gamma',
     ('p1 = shape, p2 = scale', 2),
     "NIFTI_INTENT_GAMMA"),
    (10, 'poisson',
     ('p1 = mean',),
     "NIFTI_INTENT_POISSON"),
    (11, 'normal',
     ('p1 = mean', 'p2 = standard deviation',),
     "NIFTI_INTENT_NORMAL"),
    (12, 'non central f test',
     ('p1 = numerator DOF',
      'p2 = denominator DOF',
      'p3 = numerator noncentrality parameter',),
     "NIFTI_INTENT_FTEST_NONC"),
    (13, 'non central chi2',
     ('p1 = DOF', 'p2 = noncentrality parameter',),
     "NIFTI_INTENT_CHISQ_NONC"),
    (14, 'logistic',
     ('p1 = location', 'p2 = scale',),
     "NIFTI_INTENT_LOGISTIC"),
    (15, 'laplace',
     ('p1 = location', 'p2 = scale'),
     "NIFTI_INTENT_LAPLACE"),
    (16, 'uniform',
     ('p1 = lower end', 'p2 = upper end'),
     "NIFTI_INTENT_UNIFORM"),
    (17, 'non central t test',
     ('p1 = DOF', 'p2 = noncentrality parameter'),
     "NIFTI_INTENT_TTEST_NONC"),
    (18, 'weibull',
     ('p1 = location', 'p2 = scale, p3 = power'),
     "NIFTI_INTENT_WEIBULL"),
    # p1 = 1 = 'half normal' distribution
    # p1 = 2 = Rayleigh distribution
    # p1 = 3 = Maxwell-Boltzmann distribution.
    (19, 'chi', ('p1 = DOF',), "NIFTI_INTENT_CHI"),
    (20, 'inverse gaussian',
     ('pi = mu', 'p2 = lambda'),
     "NIFTI_INTENT_INVGAUSS"),
    (21, 'extreme value 1',
     ('p1 = location', 'p2 = scale'),
     "NIFTI_INTENT_EXTVAL"),
    (22, 'p value', (), "NIFTI_INTENT_PVAL"),
    (23, 'log p value', (), "NIFTI_INTENT_LOGPVAL"),
    (24, 'log10 p value', (), "NIFTI_INTENT_LOG10PVAL"),
    (1001, 'estimate', (), "NIFTI_INTENT_ESTIMATE"),
    (1002, 'label', (), "NIFTI_INTENT_LABEL"),
    (1003, 'neuroname', (), "NIFTI_INTENT_NEURONAME"),
    (1004, 'general matrix',
     ('p1 = M', 'p2 = N'),
     "NIFTI_INTENT_GENMATRIX"),
    (1005, 'symmetric matrix', ('p1 = M',), "NIFTI_INTENT_SYMMATRIX"),
    (1006, 'displacement vector', (), "NIFTI_INTENT_DISPVECT"),
    (1007, 'vector', (), "NIFTI_INTENT_VECTOR"),
    (1008, 'pointset', (), "NIFTI_INTENT_POINTSET"),
    (1009, 'triangle', (), "NIFTI_INTENT_TRIANGLE"),
    (1010, 'quaternion', (), "NIFTI_INTENT_QUATERNION"),
    (1011, 'dimensionless', (), "NIFTI_INTENT_DIMLESS"),
    (2001, 'time series',
     (),
     "NIFTI_INTENT_TIME_SERIES",
     "NIFTI_INTENT_TIMESERIES"),  # this mis-spell occurs in the wild
    (2002, 'node index', (), "NIFTI_INTENT_NODE_INDEX"),
    (2003, 'rgb vector', (), "NIFTI_INTENT_RGB_VECTOR"),
    (2004, 'rgba vector', (), "NIFTI_INTENT_RGBA_VECTOR"),
    (2005, 'shape', (), "NIFTI_INTENT_SHAPE"),
    # FSL-specific intent codes - codes used by FNIRT
    # ($FSLDIR/warpfns/fnirt_file_reader.h:104)
    (2006, 'fnirt disp field', (), 'FSL_FNIRT_DISPLACEMENT_FIELD'),
    (2007, 'fnirt cubic spline coef', (), 'FSL_CUBIC_SPLINE_COEFFICIENTS'),
    (2008, 'fnirt dct coef', (), 'FSL_DCT_COEFFICIENTS'),
    (2009, 'fnirt quad spline coef', (), 'FSL_QUADRATIC_SPLINE_COEFFICIENTS'),
    # FSL-specific intent codes - codes used by TOPUP
    # ($FSLDIR/topup/topup_file_io.h:104)
    (2016, 'topup cubic spline coef ', (),
     'FSL_TOPUP_CUBIC_SPLINE_COEFFICIENTS'),
    (2017, 'topup quad spline coef', (),
     'FSL_TOPUP_QUADRATIC_SPLINE_COEFFICIENTS'),
    (2018, 'topup field', (), 'FSL_TOPUP_FIELD'),
), fields=('code', 'label', 'parameters', 'niistring'))


class Nifti1Extension(object):
    """Baseclass for NIfTI1 header extensions.

    This class is sufficient to handle very simple text-based extensions, such
    as `comment`. More sophisticated extensions should/will be supported by
    dedicated subclasses.
    """

    def __init__(self, code, content):
        """
        Parameters
        ----------
        code : int or str
          Canonical extension code as defined in the NIfTI standard, given
          either as integer or corresponding label
          (see :data:`~nibabel.nifti1.extension_codes`)
        content : str
          Extension content as read from the NIfTI file header. This content is
          converted into a runtime representation.
        """
        try:
            self._code = extension_codes.code[code]
        except KeyError:
            # XXX or fail or at least complain?
            self._code = code
        self._content = self._unmangle(content)

    def _unmangle(self, value):
        """Convert the extension content into its runtime representation.

        The default implementation does nothing at all.

        Parameters
        ----------
        value : str
          Extension content as read from file.

        Returns
        -------
        The same object that was passed as `value`.

        Notes
        -----
        Subclasses should reimplement this method to provide the desired
        unmangling procedure and may return any type of object.
        """
        return value

    def _mangle(self, value):
        """Convert the extension content into NIfTI file header representation.

        The default implementation does nothing at all.

        Parameters
        ----------
        value : str
          Extension content in runtime form.

        Returns
        -------
        str

        Notes
        -----
        Subclasses should reimplement this method to provide the desired
        mangling procedure.
        """
        return value

    def get_code(self):
        """Return the canonical extension type code."""
        return self._code

    def get_content(self):
        """Return the extension content in its runtime representation."""
        return self._content

    def get_sizeondisk(self):
        """Return the size of the extension in the NIfTI file.
        """
        # need raw value size plus 8 bytes for esize and ecode
        size = len(self._mangle(self._content))
        size += 8
        # extensions size has to be a multiple of 16 bytes
        if size % 16 != 0:
            size += 16 - (size % 16)
        return size

    def __repr__(self):
        try:
            code = extension_codes.label[self._code]
        except KeyError:
            # deal with unknown codes
            code = self._code

        s = f"Nifti1Extension('{code}', '{self._content}')"
        return s

    def __eq__(self, other):
        return (self._code, self._content) == (other._code, other._content)

    def __ne__(self, other):
        return not self == other

    def write_to(self, fileobj, byteswap):
        """ Write header extensions to fileobj

        Write starts at fileobj current file position.

        Parameters
        ----------
        fileobj : file-like object
          Should implement ``write`` method
        byteswap : boolean
          Flag if byteswapping the data is required.

        Returns
        -------
        None
        """
        extstart = fileobj.tell()
        rawsize = self.get_sizeondisk()
        # write esize and ecode first
        extinfo = np.array((rawsize, self._code), dtype=np.int32)
        if byteswap:
            extinfo = extinfo.byteswap()
        fileobj.write(extinfo.tobytes())
        # followed by the actual extension content
        # XXX if mangling upon load is implemented, it should be reverted here
        fileobj.write(self._mangle(self._content))
        # be nice and zero out remaining part of the extension till the
        # next 16 byte border
        fileobj.write(b'\x00' * (extstart + rawsize - fileobj.tell()))
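
# Illustration of the size arithmetic above (a sketch, not executed): a
# 'comment' extension holding the 6-byte content b'hello!' needs 6 + 8 = 14
# bytes, which rounds up to the next 16-byte boundary:
#
#     ext = Nifti1Extension('comment', b'hello!')
#     ext.get_code()        # -> 6
#     ext.get_sizeondisk()  # -> 16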


class Nifti1DicomExtension(Nifti1Extension):
    """NIfTI1 DICOM header extension

    This class is a thin wrapper around pydicom to read a binary DICOM
    byte string. If pydicom is available, content is exposed as a Dicom Dataset.
    Otherwise, this silently falls back to the standard NiftiExtension class
    and content is the raw bytestring loaded directly from the nifti file
    header.
    """
    def __init__(self, code, content, parent_hdr=None):
        """
        Parameters
        ----------
        code : int or str
          Canonical extension code as defined in the NIfTI standard, given
          either as integer or corresponding label
          (see :data:`~nibabel.nifti1.extension_codes`)
        content : bytes or pydicom Dataset or None
          Extension content - either a bytestring as read from the NIfTI file
          header or an existing pydicom Dataset. If a bytestring, the content
          is converted into a Dataset on initialization. If None, a new empty
          Dataset is created.
        parent_hdr : :class:`~nibabel.nifti1.Nifti1Header`, optional
          If a dicom extension belongs to an existing
          :class:`~nibabel.nifti1.Nifti1Header`, it may be provided here to
          ensure that the DICOM dataset is written with correctly corresponding
          endianness; otherwise it is assumed the dataset is little endian.

        Notes
        -----

        code should always be 2 for DICOM.
        """

        self._code = code
        if parent_hdr:
            self._is_little_endian = parent_hdr.endianness == '<'
        else:
            self._is_little_endian = True
        if isinstance(content, pdcm.dataset.Dataset):
            self._is_implicit_VR = False
            self._raw_content = self._mangle(content)
            self._content = content
        elif isinstance(content, bytes):  # Got a byte string - unmangle it
            self._raw_content = content
            self._is_implicit_VR = self._guess_implicit_VR()
            ds = self._unmangle(content, self._is_implicit_VR,
                                self._is_little_endian)
            self._content = ds
        elif content is None:  # initialize a new dicom dataset
            self._is_implicit_VR = False
            self._content = pdcm.dataset.Dataset()
        else:
            raise TypeError(f"content must be either a bytestring or a pydicom Dataset. "
                            f"Got {content.__class__}")

    def _guess_implicit_VR(self):
        """Try to guess DICOM syntax by checking for valid VRs.

        Without a DICOM Transfer Syntax, it's difficult to tell if Value
        Representations (VRs) are included in the DICOM encoding or not.
        This reads where the first VR would be and checks it against a list of
        valid VRs
        """
        potential_vr = self._raw_content[4:6].decode()
        if potential_vr in pdcm.values.converters.keys():
            implicit_VR = False
        else:
            implicit_VR = True
        return implicit_VR

    def _unmangle(self, value, is_implicit_VR=False, is_little_endian=True):
        bio = BytesIO(value)
        ds = pdcm.filereader.read_dataset(bio,
                                          is_implicit_VR,
                                          is_little_endian)
        return ds

    def _mangle(self, dataset):
        bio = BytesIO()
        dio = pdcm.filebase.DicomFileLike(bio)
        dio.is_implicit_VR = self._is_implicit_VR
        dio.is_little_endian = self._is_little_endian
        ds_len = pdcm.filewriter.write_dataset(dio, dataset)
        dio.seek(0)
        return dio.read(ds_len)
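
# Example usage (a sketch; requires pydicom and builds a minimal dataset):
#
#     ds = pdcm.dataset.Dataset()
#     ds.add_new((0x10, 0x20), 'LO', 'NiPy')   # set PatientID
#     dcmext = Nifti1DicomExtension(2, ds)     # code 2 is the 'dicom' ecode
#     dcmext.get_content().PatientID           # -> 'NiPy'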


# NIfTI header extension type codes (ECODE)
# see nifti1_io.h for a complete list of all known extensions and
# references to their description or contacts of the respective
# initiators
extension_codes = Recoder((
    (0, "ignore", Nifti1Extension),
    (2, "dicom", Nifti1DicomExtension if have_dicom else Nifti1Extension),
    (4, "afni", Nifti1Extension),
    (6, "comment", Nifti1Extension),
    (8, "xcede", Nifti1Extension),
    (10, "jimdiminfo", Nifti1Extension),
    (12, "workflow_fwds", Nifti1Extension),
    (14, "freesurfer", Nifti1Extension),
    (16, "pypickle", Nifti1Extension),
), fields=('code', 'label', 'handler'))


class Nifti1Extensions(list):
    """Simple extension collection, implemented as a list-subclass.
    """

    def count(self, ecode):
        """Returns the number of extensions matching a given *ecode*.

        Parameters
        ----------
        ecode : int | str
          The ecode can be specified either literal or as numerical value.
        """
        count = 0
        code = extension_codes.code[ecode]
        for e in self:
            if e.get_code() == code:
                count += 1
        return count

    def get_codes(self):
        """Return a list of the extension code of all available extensions"""
        return [e.get_code() for e in self]

    def get_sizeondisk(self):
        """Return the size of the complete header extensions in the NIfTI file.
        """
        return np.sum([e.get_sizeondisk() for e in self])

    def __repr__(self):
        return "Nifti1Extensions(%s)" % ', '.join(str(e) for e in self)

    def __cmp__(self, other):
        return cmp(list(self), list(other))

    def write_to(self, fileobj, byteswap):
        """ Write header extensions to fileobj

        Write starts at fileobj current file position.

        Parameters
        ----------
        fileobj : file-like object
          Should implement ``write`` method
        byteswap : boolean
          Flag if byteswapping the data is required.

        Returns
        -------
        None
        """
        for e in self:
            e.write_to(fileobj, byteswap)

    @classmethod
    def from_fileobj(klass, fileobj, size, byteswap):
        """Read header extensions from a fileobj

        Parameters
        ----------
        fileobj : file-like object
          We begin reading the extensions at the current file position
        size : int
          Number of bytes to read. If negative, fileobj will be read till its
          end.
        byteswap : boolean
          Flag if byteswapping the read data is required.

        Returns
        -------
        An extension list. This list might be empty in case no extensions
        were present in fileobj.
        """
        # make empty extension list
        extensions = klass()
        # assume the file pointer is at the beginning of any extensions.
        # read until the whole header is parsed (each extension is a multiple
        # of 16 bytes) or in case of a separate header file till the end
        # (break inside the body)
        while size >= 16 or size < 0:
            # the next 8 bytes should have esize and ecode
            ext_def = fileobj.read(8)
            # nothing was read and instructed to read till the end
            # -> assume all extensions were parsed and break
            if not len(ext_def) and size < 0:
                break
            # otherwise there should be a full extension header
            if not len(ext_def) == 8:
                raise HeaderDataError('failed to read extension header')
            ext_def = np.frombuffer(ext_def, dtype=np.int32)
            if byteswap:
                ext_def = ext_def.byteswap()
            # be extra verbose
            ecode = ext_def[1]
            esize = ext_def[0]
            if esize % 16:
                warnings.warn(
                    'Extension size is not a multiple of 16 bytes; '
                    'Assuming size is correct and hoping for the best',
                    UserWarning)
            # read extension itself; esize includes the 8 bytes already read
            evalue = fileobj.read(int(esize - 8))
            if not len(evalue) == esize - 8:
                raise HeaderDataError('failed to read extension content')
            # note that we read a full extension
            size -= esize
            # store raw extension content, but strip trailing NULL chars
            evalue = evalue.rstrip(b'\x00')
            # 'extension_codes' also knows the best implementation to handle
            # a particular extension type
            try:
                ext = extension_codes.handler[ecode](ecode, evalue)
            except KeyError:
                # unknown extension type
                # XXX complain or fail or go with a generic extension
                ext = Nifti1Extension(ecode, evalue)
            extensions.append(ext)
        return extensions
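
# The on-disk extension layout parsed above is, per the NIfTI-1 spec:
#
#     int32 esize    # total bytes of this extension, a multiple of 16,
#                    # including these two int32 fields
#     int32 ecode    # extension type code (see ``extension_codes``)
#     char  edata[]  # esize - 8 bytes of content, zero-padded to the boundary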


class Nifti1Header(SpmAnalyzeHeader):
    """ Class for NIfTI1 header

    The NIfTI1 header has many more coded fields than the simpler Analyze
    variants. NIfTI1 headers also have extensions.

    Nifti allows the header to be a separate file, as part of a nifti image /
    header pair, or to precede the data in a single file. The object needs to
    know which type it is, in order to manage the voxel offset pointing to the
    data, extension reading, and writing the correct magic string.

    This class handles the header-preceding-data case.
    """
    # Copies of module level definitions
    template_dtype = header_dtype
    _data_type_codes = data_type_codes

    # fields with recoders for their values
    _field_recoders = {'datatype': data_type_codes,
                       'qform_code': xform_codes,
                       'sform_code': xform_codes,
                       'intent_code': intent_codes,
                       'slice_code': slice_order_codes}

    # data scaling capabilities
    has_data_slope = True
    has_data_intercept = True

    # Extension class; should implement __call__ for construction, and
    # ``from_fileobj`` for reading from file
    exts_klass = Nifti1Extensions

    # Signal whether this is single (header + data) file
    is_single = True

    # Default voxel data offsets for single and pair
    pair_vox_offset = 0
    single_vox_offset = 352

    # Magics for single and pair
    pair_magic = b'ni1'
    single_magic = b'n+1'

    # Quaternion threshold near 0, based on float32 precision
    quaternion_threshold = -np.finfo(np.float32).eps * 3

    def __init__(self,
                 binaryblock=None,
                 endianness=None,
                 check=True,
                 extensions=()):
        """ Initialize header from binary data block and extensions
        """
        super(Nifti1Header, self).__init__(binaryblock,
                                           endianness,
                                           check)
        self.extensions = self.exts_klass(extensions)

    def copy(self):
        """ Return copy of header

        Take reference to extensions as well as copy of header contents
        """
        return self.__class__(
            self.binaryblock,
            self.endianness,
            False,
            self.extensions)

    @classmethod
    def from_fileobj(klass, fileobj, endianness=None, check=True):
        raw_str = fileobj.read(klass.template_dtype.itemsize)
        hdr = klass(raw_str, endianness, check)
        # Read next 4 bytes to see if we have extensions. The nifti standard
        # has this as a 4 byte string; if the first value is not zero, then we
        # have extensions.
        extension_status = fileobj.read(4)
        # Need to test *slice* of extension_status to preserve byte string type
        # on Python 3
        if len(extension_status) < 4 or extension_status[0:1] == b'\x00':
            return hdr
        # If this is a detached header file read to end
        if not klass.is_single:
            extsize = -1
        else:  # otherwise read until the beginning of the data
            extsize = hdr._structarr['vox_offset'] - fileobj.tell()
        byteswap = endian_codes['native'] != hdr.endianness
        hdr.extensions = klass.exts_klass.from_fileobj(fileobj, extsize,
                                                       byteswap)
        return hdr

    def write_to(self, fileobj):
        # First check that vox offset is large enough; set if necessary
        if self.is_single:
            vox_offset = self._structarr['vox_offset']
            min_vox_offset = (self.single_vox_offset +
                              self.extensions.get_sizeondisk())
            if vox_offset == 0:  # vox offset unset; set as necessary
                self._structarr['vox_offset'] = min_vox_offset
            elif vox_offset < min_vox_offset:
                raise HeaderDataError(
                    f'vox offset set to {vox_offset}, but need at least {min_vox_offset}')
        super(Nifti1Header, self).write_to(fileobj)
        # Write extensions
        if len(self.extensions) == 0:
            # If single file, write required 0 stream to signal no extensions
            if self.is_single:
                fileobj.write(b'\x00' * 4)
            return
        # Signal there are extensions that follow
        fileobj.write(b'\x01\x00\x00\x00')
        byteswap = endian_codes['native'] != self.endianness
        self.extensions.write_to(fileobj, byteswap)
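
    # For a single-file (.nii) image the byte layout produced above is the
    # 348-byte header, a 4-byte extension flag, any extensions, then the data
    # starting at ``vox_offset`` (>= ``single_vox_offset`` == 352 when there
    # are no extensions).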

    def get_best_affine(self):
        """ Select best of available transforms """
        hdr = self._structarr
        if hdr['sform_code'] != 0:
            return self.get_sform()
        if hdr['qform_code'] != 0:
            return self.get_qform()
        return self.get_base_affine()

    @classmethod
    def default_structarr(klass, endianness=None):
        """ Create empty header binary block with given endianness """
        hdr_data = super(Nifti1Header, klass).default_structarr(endianness)
        if klass.is_single:
            hdr_data['magic'] = klass.single_magic
        else:
            hdr_data['magic'] = klass.pair_magic
        return hdr_data

    @classmethod
    def from_header(klass, header=None, check=True):
        """ Class method to create header from another header

        Extend Analyze header copy by copying extensions from other Nifti
        types.

        Parameters
        ----------
        header : ``Header`` instance or mapping
           a header of this class, or another class of header for
           conversion to this type
        check : {True, False}
           whether to check header for integrity

        Returns
        -------
        hdr : header instance
           fresh header instance of our own class
        """
        new_hdr = super(Nifti1Header, klass).from_header(header, check)
        if isinstance(header, Nifti1Header):
            new_hdr.extensions[:] = header.extensions[:]
        return new_hdr

    def get_data_shape(self):
        """ Get shape of data

        Examples
        --------
        >>> hdr = Nifti1Header()
        >>> hdr.get_data_shape()
        (0,)
        >>> hdr.set_data_shape((1,2,3))
        >>> hdr.get_data_shape()
        (1, 2, 3)

        Expanding number of dimensions gets default zooms

        >>> hdr.get_zooms()
        (1.0, 1.0, 1.0)

        Notes
        -----
        Applies freesurfer hack for large vectors described in `issue 100`_ and
        `save_nifti.m <save77_>`_.

        Allows for freesurfer hack for 7th order icosahedron surface described
        in `issue 309`_, load_nifti.m_, and `save_nifti.m <save50_>`_.
        """
        shape = super(Nifti1Header, self).get_data_shape()
        # Apply freesurfer hack for large vectors
        if shape[:3] == (-1, 1, 1):
            vec_len = int(self._structarr['glmin'])
            if vec_len == 0:
                raise HeaderDataError('-1 in dim[1] but 0 in glmin; '
                                      'inconsistent freesurfer type header?')
            return (vec_len, 1, 1) + shape[3:]
        # Apply freesurfer hack for ico7 surface
        elif shape[:3] == (27307, 1, 6):
            return (163842, 1, 1) + shape[3:]
        else:  # Normal case
            return shape

    def set_data_shape(self, shape):
        """ Set shape of data  # noqa

        If ``ndims == len(shape)`` then we set zooms for dimensions higher than
        ``ndims`` to 1.0

        Nifti1 images can have up to seven dimensions. For FreeSurfer-variant
        Nifti surface files, the first dimension is assumed to correspond to
        vertices/nodes on a surface, and dimensions two and three are
        constrained to have depth of 1. Dimensions 4-7 are constrained only by
        type bounds.

        Parameters
        ----------
        shape : sequence
           sequence of integers specifying data array shape

        Notes
        -----
        Applies freesurfer hack for large vectors described in `issue 100`_ and
        `save_nifti.m <save77_>`_.

        Allows for freesurfer hack for 7th order icosahedron surface described
        in `issue 309`_, load_nifti.m_, and `save_nifti.m <save50_>`_.

        The Nifti1 `standard header`_ allows for the following "point set"
        definition of a surface, not currently implemented in nibabel.

        ::

          To signify that the vector value at each voxel is really a
          spatial coordinate (e.g., the vertices or nodes of a surface mesh):
            - dataset must have a 5th dimension
            - intent_code must be NIFTI_INTENT_POINTSET
            - dim[0] = 5
            - dim[1] = number of points
            - dim[2] = dim[3] = dim[4] = 1
            - dim[5] must be the dimensionality of space (e.g., 3 => 3D space).
            - intent_name may describe the object these points come from
              (e.g., "pial", "gray/white" , "EEG", "MEG").

        .. _issue 100: https://github.com/nipy/nibabel/issues/100
        .. _issue 309: https://github.com/nipy/nibabel/issues/309
        .. _save77:
           https://github.com/fieldtrip/fieldtrip/blob/428798b/external/freesurfer/save_nifti.m#L77-L82
        .. _save50:
           https://github.com/fieldtrip/fieldtrip/blob/428798b/external/freesurfer/save_nifti.m#L50-L56
        .. _load_nifti.m:
           https://github.com/fieldtrip/fieldtrip/blob/428798b/external/freesurfer/load_nifti.m#L86-L89
        .. _standard header: http://nifti.nimh.nih.gov/pub/dist/src/niftilib/nifti1.h
        """
        hdr = self._structarr
        shape = tuple(shape)

        # Apply freesurfer hack for ico7 surface
        if shape[:3] == (163842, 1, 1):
            shape = (27307, 1, 6) + shape[3:]
        # Apply freesurfer hack for large vectors
        elif (len(shape) >= 3 and shape[1:3] == (1, 1) and
              shape[0] > np.iinfo(hdr['dim'].dtype.base).max):
            try:
                hdr['glmin'] = shape[0]
            except OverflowError:
                overflow = True
            else:
                overflow = hdr['glmin'] != shape[0]
            if overflow:
                raise HeaderDataError(f'shape[0] {shape[0]} does not fit in glmax datatype')
            warnings.warn('Using large vector Freesurfer hack; header will '
                          'not be compatible with SPM or FSL', stacklevel=2)
            shape = (-1, 1, 1) + shape[3:]
        super(Nifti1Header, self).set_data_shape(shape)
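
    # Worked example of the two FreeSurfer hacks above (a sketch): an ico7
    # surface has 163842 vertices, stored as dim[1:4] == (27307, 1, 6)
    # because 27307 * 6 == 163842 fits in the int16 ``dim`` fields; other
    # oversized first dimensions are stored as dim[1] == -1 with the true
    # length kept in the spare ``glmin`` field.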

    def get_qform_quaternion(self):
        """ Compute quaternion from b, c, d of quaternion

        Fills a value by assuming this is a unit quaternion
        """
        hdr = self._structarr
        bcd = [hdr['quatern_b'], hdr['quatern_c'], hdr['quatern_d']]
        # Adjust threshold to precision of stored values in header
        return fillpositive(bcd, self.quaternion_threshold)

    def get_qform(self, coded=False):
        """ Return 4x4 affine matrix from qform parameters in header

        Parameters
        ----------
        coded : bool, optional
            If True, return {affine or None}, and qform code. If False, just
            return affine. {affine or None} means, return None if qform code
            == 0, and affine otherwise.

        Returns
        -------
        affine : None or (4,4) ndarray
            If `coded` is False, always return affine reconstructed from qform
            quaternion. If `coded` is True, return None if qform code is 0,
            else return the affine.
        code : int
            Qform code. Only returned if `coded` is True.
        """
        hdr = self._structarr
        code = int(hdr['qform_code'])
        if code == 0 and coded:
            return None, 0
        quat = self.get_qform_quaternion()
        R = quat2mat(quat)
        vox = hdr['pixdim'][1:4].copy()
        if np.any(vox < 0):
            raise HeaderDataError('pixdims[1,2,3] should be positive')
        qfac = hdr['pixdim'][0]
        if qfac not in (-1, 1):
            raise HeaderDataError('qfac (pixdim[0]) should be 1 or -1')
        vox[-1] *= qfac
        S = np.diag(vox)
        M = np.dot(R, S)
        out = np.eye(4)
        out[0:3, 0:3] = M
        out[0:3, 3] = [hdr['qoffset_x'], hdr['qoffset_y'], hdr['qoffset_z']]
        if coded:
            return out, code
        return out

    def set_qform(self, affine, code=None, strip_shears=True):
        """ Set qform header values from 4x4 affine

        Parameters
        ----------
        affine : None or 4x4 array
            affine transform to write into qform. If None, only set code.
        code : None, string or integer, optional
            String or integer giving meaning of transform in *affine*.
            The default is None. If code is None, then:

            * If affine is None, `code`-> 0
            * If affine not None and existing qform code in header == 0,
              `code`-> 2 (aligned)
            * If affine not None and existing qform code in header != 0,
              `code`-> existing qform code in header

        strip_shears : bool, optional
            Whether to strip shears in `affine`. If True, shears will be
            silently stripped. If False, the presence of shears will raise a
            ``HeaderDataError``

        Notes
        -----
        The qform transform only encodes translations, rotations and
        zooms. If there are shear components to the `affine` transform, and
        `strip_shears` is True (the default), the written qform gives the
        closest approximation where the rotation matrix is orthogonal. This is
        to allow quaternion representation. The orthogonal representation
        enforces orthogonal axes.

        Examples
        --------
        >>> hdr = Nifti1Header()
        >>> int(hdr['qform_code'])  # gives 0 - unknown
        0
        >>> affine = np.diag([1,2,3,1])
        >>> np.all(hdr.get_qform() == affine)
        False
        >>> hdr.set_qform(affine)
        >>> np.all(hdr.get_qform() == affine)
        True
        >>> int(hdr['qform_code'])  # gives 2 - aligned
        2
        >>> hdr.set_qform(affine, code='talairach')
        >>> int(hdr['qform_code'])
        3
        >>> hdr.set_qform(affine, code=None)
        >>> int(hdr['qform_code'])
        3
        >>> hdr.set_qform(affine, code='scanner')
        >>> int(hdr['qform_code'])
        1
        >>> hdr.set_qform(None)
        >>> int(hdr['qform_code'])
        0
        """
        hdr = self._structarr
        old_code = hdr['qform_code']
        if code is None:
            if affine is None:
                code = 0
            elif old_code == 0:
                code = 2  # aligned
            else:
                code = old_code
        else:  # code set
            code = self._field_recoders['qform_code'][code]
        hdr['qform_code'] = code
        if affine is None:
            return
        affine = np.asarray(affine)
        if not affine.shape == (4, 4):
            raise TypeError('Need 4x4 affine as input')
        trans = affine[:3, 3]
        RZS = affine[:3, :3]
        zooms = np.sqrt(np.sum(RZS * RZS, axis=0))
        R = RZS / zooms
        # Set qfac to make R determinant positive
        if npl.det(R) > 0:
            qfac = 1
        else:
            qfac = -1
            R[:, -1] *= -1
        # Make R orthogonal (to allow quaternion representation)
        # The orthogonal representation enforces orthogonal axes
        # (a subtle requirement of the NIFTI format qform transform)
        # Transform below is polar decomposition, returning the closest
        # orthogonal matrix PR, to input R
        P, S, Qs = npl.svd(R)
        PR = np.dot(P, Qs)
        if not strip_shears and not np.allclose(PR, R):
            raise HeaderDataError("Shears in affine and `strip_shears` is "
                                  "False")
        # Convert to quaternion
        quat = mat2quat(PR)
        # Set into header
        hdr['qoffset_x'], hdr['qoffset_y'], hdr['qoffset_z'] = trans
        hdr['pixdim'][0] = qfac
        hdr['pixdim'][1:4] = zooms
        hdr['quatern_b'], hdr['quatern_c'], hdr['quatern_d'] = quat[1:]
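
    # The decomposition used above (a sketch): with R the rotation encoded by
    # the quaternion, zooms the voxel sizes and qfac = pixdim[0],
    #
    #     affine[:3, :3] = R @ np.diag(zooms * [1, 1, qfac])
    #     affine[:3, 3] = (qoffset_x, qoffset_y, qoffset_z)
    #
    # which is why shears cannot be represented by the qform and are stripped
    # (or rejected) in ``set_qform``.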

    def get_sform(self, coded=False):
        """ Return 4x4 affine matrix from sform parameters in header

        Parameters
        ----------
        coded : bool, optional
            If True, return {affine or None}, and sform code. If False, just
            return affine. {affine or None} means, return None if sform code
            == 0, and affine otherwise.

        Returns
        -------
        affine : None or (4,4) ndarray
            If `coded` is False, always return affine from sform fields. If
            `coded` is True, return None if sform code is 0, else return the
            affine.
        code : int
            Sform code. Only returned if `coded` is True.
        """
        hdr = self._structarr
        code = int(hdr['sform_code'])
        if code == 0 and coded:
            return None, 0
        out = np.eye(4)
        out[0, :] = hdr['srow_x'][:]
        out[1, :] = hdr['srow_y'][:]
        out[2, :] = hdr['srow_z'][:]
        if coded:
            return out, code
        return out

    def set_sform(self, affine, code=None):
        """ Set sform transform from 4x4 affine

        Parameters
        ----------
        affine : None or 4x4 array
            affine transform to write into sform. If None, only set `code`
        code : None, string or integer, optional
            String or integer giving meaning of transform in *affine*.
            The default is None. If code is None, then:

            * If affine is None, `code`-> 0
            * If affine not None and existing sform code in header == 0,
              `code`-> 2 (aligned)
            * If affine not None and existing sform code in header != 0,
              `code`-> existing sform code in header

        Examples
        --------
        >>> hdr = Nifti1Header()
        >>> int(hdr['sform_code'])  # gives 0 - unknown
        0
        >>> affine = np.diag([1,2,3,1])
        >>> np.all(hdr.get_sform() == affine)
        False
        >>> hdr.set_sform(affine)
        >>> np.all(hdr.get_sform() == affine)
        True
        >>> int(hdr['sform_code'])  # gives 2 - aligned
        2
        >>> hdr.set_sform(affine, code='talairach')
        >>> int(hdr['sform_code'])
        3
        >>> hdr.set_sform(affine, code=None)
        >>> int(hdr['sform_code'])
        3
        >>> hdr.set_sform(affine, code='scanner')
        >>> int(hdr['sform_code'])
        1
        >>> hdr.set_sform(None)
        >>> int(hdr['sform_code'])
        0
        """
        hdr = self._structarr
        old_code = hdr['sform_code']
        if code is None:
            if affine is None:
                code = 0
            elif old_code == 0:
                code = 2  # aligned
            else:
                code = old_code
        else:  # code set
            code = self._field_recoders['sform_code'][code]
        hdr['sform_code'] = code
        if affine is None:
            return
        affine = np.asarray(affine)
        hdr['srow_x'][:] = affine[0, :]
        hdr['srow_y'][:] = affine[1, :]
        hdr['srow_z'][:] = affine[2, :]

    def get_slope_inter(self):
        """ Get data scaling (slope) and DC offset (intercept) from header data

        Returns
        -------
        slope : None or float
            scaling (slope). None if there is no valid scaling from these
            fields
        inter : None or float
            offset (intercept). None if there is no valid scaling or if offset
            is not finite.

        Examples
        --------
        >>> hdr = Nifti1Header()
        >>> hdr.get_slope_inter()
        (1.0, 0.0)
        >>> hdr['scl_slope'] = 0
        >>> hdr.get_slope_inter()
        (None, None)
        >>> hdr['scl_slope'] = np.nan
        >>> hdr.get_slope_inter()
        (None, None)
        >>> hdr['scl_slope'] = 1
        >>> hdr['scl_inter'] = 1
        >>> hdr.get_slope_inter()
        (1.0, 1.0)
        >>> hdr['scl_inter'] = np.inf
        >>> hdr.get_slope_inter() #doctest: +IGNORE_EXCEPTION_DETAIL
        Traceback (most recent call last):
            ...
        HeaderDataError: Valid slope but invalid intercept inf
        """
        # Note that we are returning float (float64) scalefactors and
        # intercepts, although they are stored in nifti1 as float32.
        slope = float(self['scl_slope'])
        inter = float(self['scl_inter'])
        if slope == 0 or not np.isfinite(slope):
            return None, None
        if not np.isfinite(inter):
            raise HeaderDataError(f'Valid slope but invalid intercept {inter}')
        return slope, inter

    def set_slope_inter(self, slope, inter=None):
        """ Set slope and / or intercept into header

        Set slope and intercept for image data, such that, if the image
        data is ``arr``, then the scaled image data will be ``(arr *
        slope) + inter``

        (`slope`, `inter`) of (NaN, NaN) is a signal to a containing image to
        set `slope`, `inter` automatically on write.

        Parameters
        ----------
        slope : None or float
            If None, implies `slope` of NaN. If `slope` is None or NaN then
            `inter` should be None or NaN. Values of 0, Inf or -Inf raise
            HeaderDataError
        inter : None or float, optional
            Intercept. If None, implies `inter` of NaN. If `slope` is None or
            NaN then `inter` should be None or NaN. Values of Inf or -Inf raise
            HeaderDataError
        """
        if slope is None:
            slope = np.nan
        if inter is None:
            inter = np.nan
        if slope in (0, np.inf, -np.inf):
            raise HeaderDataError('Slope cannot be 0 or infinite')
        if inter in (np.inf, -np.inf):
            raise HeaderDataError('Intercept cannot be infinite')
        if np.isnan(slope) ^ np.isnan(inter):
            raise HeaderDataError('None or both of slope, inter should be nan')
        self._structarr['scl_slope'] = slope
        self._structarr['scl_inter'] = inter
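
    # Scaling convention used by the two methods above: if ``raw`` is the
    # on-disk array, the scaled data are ``raw * scl_slope + scl_inter``,
    # e.g. slope 2.0 and intercept 8.0 map a stored value of 3 to 14.0.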

    def get_dim_info(self):
        """ Gets NIfTI MRI slice etc dimension information

        Returns
        -------
        freq : {None,0,1,2}
           Which data array axis is frequency encode direction
        phase : {None,0,1,2}
           Which data array axis is phase encode direction
        slice : {None,0,1,2}
           Which data array axis is slice encode direction

        where ``data array`` is the array returned by ``get_data``

        Because NIfTI1 files are natively Fortran indexed:
          0 is fastest changing in file
          1 is medium changing in file
          2 is slowest changing in file

        ``None`` means the axis appears not to be specified.

        Examples
        --------
        See set_dim_info function

        """
        hdr = self._structarr
        info = int(hdr['dim_info'])
        freq = info & 3
        phase = (info >> 2) & 3
        slice = (info >> 4) & 3
        return (freq - 1 if freq else None,
                phase - 1 if phase else None,
                slice - 1 if slice else None)

    def set_dim_info(self, freq=None, phase=None, slice=None):
        """ Sets nifti MRI slice etc dimension information

        Parameters
        ----------
        freq : {None, 0, 1, 2}
            axis of data array referring to frequency encoding
        phase : {None, 0, 1, 2}
            axis of data array referring to phase encoding
        slice : {None, 0, 1, 2}
            axis of data array referring to slice encoding

        ``None`` means the axis is not specified.

        Examples
        --------
        >>> hdr = Nifti1Header()
        >>> hdr.set_dim_info(1, 2, 0)
        >>> hdr.get_dim_info()
        (1, 2, 0)
        >>> hdr.set_dim_info(freq=1, phase=2, slice=0)
        >>> hdr.get_dim_info()
        (1, 2, 0)
        >>> hdr.set_dim_info()
        >>> hdr.get_dim_info()
        (None, None, None)
        >>> hdr.set_dim_info(freq=1, phase=None, slice=0)
        >>> hdr.get_dim_info()
        (1, None, 0)

        Notes
        -----
        This is stored in one byte in the header
        """
        for inp in (freq, phase, slice):
            # Don't use == on None to avoid a FutureWarning in python3
            if inp is not None and inp not in (0, 1, 2):
                raise HeaderDataError('Inputs must be in [None, 0, 1, 2]')
        info = 0
        if freq is not None:
            info = info | ((freq + 1) & 3)
        if phase is not None:
            info = info | (((phase + 1) & 3) << 2)
        if slice is not None:
            info = info | (((slice + 1) & 3) << 4)
        self._structarr['dim_info'] = info
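
    # Bit packing used above (a sketch): the freq, phase and slice axes occupy
    # bit pairs 0-1, 2-3 and 4-5 of ``dim_info``, each stored as axis + 1 so
    # that 0 can mean "unset".  For example, set_dim_info(1, 2, 0) stores
    #
    #     (1 + 1) | ((2 + 1) << 2) | ((0 + 1) << 4) == 2 | 12 | 16 == 30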
1286 |
|
|
1287 | 33 |
def get_intent(self, code_repr='label'): |
1288 |
""" Get intent code, parameters and name
|
|
1289 |
|
|
1290 |
Parameters
|
|
1291 |
----------
|
|
1292 |
code_repr : string
|
|
1293 |
string giving output form of intent code representation.
|
|
1294 |
Default is 'label'; use 'code' for integer representation.
|
|
1295 |
|
|
1296 |
Returns
|
|
1297 |
-------
|
|
1298 |
code : string or integer
|
|
1299 |
intent code, or string describing code
|
|
1300 |
parameters : tuple
|
|
1301 |
parameters for the intent
|
|
1302 |
name : string
|
|
1303 |
intent name
|
|
1304 |
|
|
1305 |
Examples
|
|
1306 |
--------
|
|
1307 |
>>> hdr = Nifti1Header()
|
|
1308 |
>>> hdr.set_intent('t test', (10,), name='some score')
|
|
1309 |
>>> hdr.get_intent()
|
|
1310 |
('t test', (10.0,), 'some score')
|
|
1311 |
>>> hdr.get_intent('code')
|
|
1312 |
(3, (10.0,), 'some score')
|
|
1313 |
"""
|
|
1314 | 33 |
hdr = self._structarr |
1315 | 33 |
recoder = self._field_recoders['intent_code'] |
1316 | 33 |
code = int(hdr['intent_code']) |
1317 | 33 |
known_intent = code in recoder |
1318 |
if code_repr == 'code': |
|
1319 | 33 |
label = code |
1320 |
elif code_repr == 'label': |
|
1321 |
if known_intent: |
|
1322 | 33 |
label = recoder.label[code] |
1323 |
else: |
|
1324 | 33 |
label = 'unknown code ' + str(code) |
1325 |
else: |
|
1326 |
raise TypeError('repr can be "label" or "code"') |
|
1327 | 33 |
n_params = len(recoder.parameters[code]) if known_intent else 0 |
1328 |
params = (float(hdr['intent_p%d' % (i + 1)]) for i in range(n_params)) |
|
1329 | 33 |
name = asstr(hdr['intent_name'].item()) |
1330 | 33 |
return label, tuple(params), name |
1331 |
|
|
    def set_intent(self, code, params=(), name='', allow_unknown=False):
        """ Set the intent code, parameters and name

        If parameters are not specified, they are assumed to be all zero.
        Each intent code has a set number of associated parameters. If you
        specify any parameters, you must specify the correct number
        (e.g. the "f test" intent requires 2). However, parameters can
        also be set in the file data, so we also allow not setting any
        parameters (an empty parameter tuple).

        Parameters
        ----------
        code : integer or string
            code specifying nifti intent
        params : list, tuple of scalars
            parameters relating to intent (see intent_codes)
            defaults to (). Unspecified parameters are set to 0.0
        name : string
            intent name (description). Defaults to ''
        allow_unknown : {False, True}, optional
            Allow unknown integer intent codes. If False (the default),
            a KeyError is raised on attempts to set the intent
            to an unknown code.

        Returns
        -------
        None

        Examples
        --------
        >>> hdr = Nifti1Header()
        >>> hdr.set_intent(0)  # no intent
        >>> hdr.set_intent('z score')
        >>> hdr.get_intent()
        ('z score', (), '')
        >>> hdr.get_intent('code')
        (5, (), '')
        >>> hdr.set_intent('t test', (10,), name='some score')
        >>> hdr.get_intent()
        ('t test', (10.0,), 'some score')
        >>> hdr.set_intent('f test', (2, 10), name='another score')
        >>> hdr.get_intent()
        ('f test', (2.0, 10.0), 'another score')
        >>> hdr.set_intent('f test')
        >>> hdr.get_intent()
        ('f test', (0.0, 0.0), '')
        >>> hdr.set_intent(9999, allow_unknown=True)  # unknown code
        >>> hdr.get_intent()
        ('unknown code 9999', (), '')
        """
        hdr = self._structarr
        known_intent = code in intent_codes
        if not known_intent:
            # We can set intent via an unknown integer code, but can't via an
            # unknown string label
            if not allow_unknown or isinstance(code, str):
                raise KeyError('Unknown intent code: ' + str(code))
        if known_intent:
            icode = intent_codes.code[code]
            p_descr = intent_codes.parameters[code]
        else:
            icode = code
            p_descr = ('p1', 'p2', 'p3')
        if len(params) and len(params) != len(p_descr):
            raise HeaderDataError(f'Need params of form {p_descr}, or empty')
        hdr['intent_code'] = icode
        hdr['intent_name'] = name
        all_params = [0] * 3
        all_params[:len(params)] = params[:]
        for i, param in enumerate(all_params):
            hdr['intent_p%d' % (i + 1)] = param

    def get_slice_duration(self):
        """ Get slice duration

        Returns
        -------
        slice_duration : float
            time to acquire one slice

        Examples
        --------
        >>> hdr = Nifti1Header()
        >>> hdr.set_dim_info(slice=2)
        >>> hdr.set_slice_duration(0.3)
        >>> print("%0.1f" % hdr.get_slice_duration())
        0.3

        Notes
        -----
        The NIfTI1 spec appears to require the slice dimension to be
        defined for slice_duration to have meaning.
        """
        _, _, slice_dim = self.get_dim_info()
        if slice_dim is None:
            raise HeaderDataError('Slice dimension must be set '
                                  'for duration to be valid')
        return float(self._structarr['slice_duration'])

    def set_slice_duration(self, duration):
        """ Set slice duration

        Parameters
        ----------
        duration : scalar
            time to acquire one slice

        Examples
        --------
        See ``get_slice_duration``
        """
        _, _, slice_dim = self.get_dim_info()
        if slice_dim is None:
            raise HeaderDataError('Slice dimension must be set '
                                  'for duration to be valid')
        self._structarr['slice_duration'] = duration

    def get_n_slices(self):
        """ Return the number of slices
        """
        _, _, slice_dim = self.get_dim_info()
        if slice_dim is None:
            raise HeaderDataError('Slice dimension not set in header '
                                  'dim_info')
        shape = self.get_data_shape()
        try:
            slice_len = shape[slice_dim]
        except IndexError:
            raise HeaderDataError(f'Slice dimension index ({slice_dim}) '
                                  f'outside shape tuple ({shape})')
        return slice_len

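    # Illustrative sketch (not part of the original source): ``get_n_slices``
    # simply reads the slice axis length from the data shape:
    #
    # >>> hdr = Nifti1Header()
    # >>> hdr.set_dim_info(slice=2)
    # >>> hdr.set_data_shape((4, 5, 6))
    # >>> hdr.get_n_slices()
    # 6
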
    def get_slice_times(self):
        """ Get slice times from slice timing information

        Returns
        -------
        slice_times : tuple
            Times of acquisition of slices, where 0 is the beginning of
            the acquisition, ordered by position in file. NIfTI allows
            slices at the top and bottom of the volume to be excluded from
            the standard slice timing specification, and calls these
            "padding slices". We give padding slices ``None`` as a time
            of acquisition.

        Examples
        --------
        >>> hdr = Nifti1Header()
        >>> hdr.set_dim_info(slice=2)
        >>> hdr.set_data_shape((1, 1, 7))
        >>> hdr.set_slice_duration(0.1)
        >>> hdr['slice_code'] = slice_order_codes['sequential increasing']
        >>> slice_times = hdr.get_slice_times()
        >>> np.allclose(slice_times, [0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6])
        True
        """
        hdr = self._structarr
        slice_len = self.get_n_slices()
        duration = self.get_slice_duration()
        slabel = self.get_value_label('slice_code')
        if slabel == 'unknown':
            raise HeaderDataError('Cannot get slice times when '
                                  'slice code is "unknown"')
        slice_start, slice_end = (int(hdr['slice_start']),
                                  int(hdr['slice_end']))
        if slice_start < 0:
            raise HeaderDataError('slice_start should be >= 0')
        if slice_end == 0:
            slice_end = slice_len - 1
        n_timed = slice_end - slice_start + 1
        if n_timed < 1:
            raise HeaderDataError('slice_end should be > slice_start')
        st_order = self._slice_time_order(slabel, n_timed)
        times = st_order * duration
        return ((None,) * slice_start +
                tuple(times) +
                (None,) * (slice_len - slice_end - 1))

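    # Illustrative sketch (not part of the original source): "padding slices"
    # outside [slice_start, slice_end] come back as None:
    #
    # >>> hdr = Nifti1Header()
    # >>> hdr.set_dim_info(slice=2)
    # >>> hdr.set_data_shape((1, 1, 7))
    # >>> hdr.set_slice_duration(0.1)
    # >>> hdr['slice_code'] = slice_order_codes['sequential increasing']
    # >>> hdr['slice_start'], hdr['slice_end'] = 1, 5
    # >>> times = hdr.get_slice_times()
    # >>> times[0] is None and times[-1] is None
    # True
    # >>> np.allclose(times[1:-1], [0, 0.1, 0.2, 0.3, 0.4])
    # True
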
    def set_slice_times(self, slice_times):
        """ Set slice times into *hdr*

        Parameters
        ----------
        slice_times : tuple
            tuple of slice times, one value per slice
            tuple can include None to indicate no slice time for that slice

        Examples
        --------
        >>> hdr = Nifti1Header()
        >>> hdr.set_dim_info(slice=2)
        >>> hdr.set_data_shape([1, 1, 7])
        >>> hdr.set_slice_duration(0.1)
        >>> times = [None, 0.2, 0.4, 0.1, 0.3, 0.0, None]
        >>> hdr.set_slice_times(times)
        >>> hdr.get_value_label('slice_code')
        'alternating decreasing'
        >>> int(hdr['slice_start'])
        1
        >>> int(hdr['slice_end'])
        5
        """
        # Check if number of slices matches header
        hdr = self._structarr
        slice_len = self.get_n_slices()
        if slice_len != len(slice_times):
            raise HeaderDataError('Number of slice times does not '
                                  'match number of slices')
        # Extract Nones at beginning and end.  Check for others
        for ind, time in enumerate(slice_times):
            if time is not None:
                slice_start = ind
                break
        else:
            raise HeaderDataError('Not all slice times can be None')
        for ind, time in enumerate(slice_times[::-1]):
            if time is not None:
                slice_end = slice_len - ind - 1
                break
        timed = slice_times[slice_start:slice_end + 1]
        for time in timed:
            if time is None:
                raise HeaderDataError('Cannot have None in middle '
                                      'of slice time vector')
        # Find slice duration, check times are compatible with single
        # duration
        tdiffs = np.diff(np.sort(timed))
        if not np.allclose(np.diff(tdiffs), 0):
            raise HeaderDataError('Slice times not compatible with '
                                  'single slice duration')
        duration = np.mean(tdiffs)
        # To slice time order
        st_order = np.round(np.array(timed) / duration)
        # Check if slice times fit known schemes
        n_timed = len(timed)
        so_recoder = self._field_recoders['slice_code']
        labels = so_recoder.value_set('label')
        labels.remove('unknown')

        matching_labels = []
        for label in labels:
            if np.all(st_order == self._slice_time_order(
                    label,
                    n_timed)):
                matching_labels.append(label)

        if not matching_labels:
            raise HeaderDataError(
                f'slice ordering of {st_order} fits with no known scheme')
        if len(matching_labels) > 1:
            warnings.warn(
                f"Multiple slice orders satisfy: {', '.join(matching_labels)}. "
                "Choosing the first one")
        label = matching_labels[0]
        # Set values into header
        hdr['slice_start'] = slice_start
        hdr['slice_end'] = slice_end
        hdr['slice_duration'] = duration
        hdr['slice_code'] = slice_order_codes.code[label]

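    # Worked example (not part of the original source) of the duration
    # inference above: for times [None, 0.2, 0.4, 0.1, 0.3, 0.0, None] the
    # timed block is [0.2, 0.4, 0.1, 0.3, 0.0]; sorting gives
    # [0.0, 0.1, 0.2, 0.3, 0.4], so every successive difference is 0.1 and
    # the slice duration is 0.1.  Dividing the timed block by that duration
    # and rounding gives the slice-time order [2, 4, 1, 3, 0], which matches
    # the 'alternating decreasing' scheme for 5 slices.
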
    def _slice_time_order(self, slabel, n_slices):
        """ Supporting function to give time order of slices from label """
        if slabel == 'sequential increasing':
            sp_ind_time_order = list(range(n_slices))
        elif slabel == 'sequential decreasing':
            sp_ind_time_order = list(range(n_slices)[::-1])
        elif slabel == 'alternating increasing':
            sp_ind_time_order = (list(range(0, n_slices, 2)) +
                                 list(range(1, n_slices, 2)))
        elif slabel == 'alternating decreasing':
            sp_ind_time_order = (list(range(n_slices - 1, -1, -2)) +
                                 list(range(n_slices - 2, -1, -2)))
        elif slabel == 'alternating increasing 2':
            sp_ind_time_order = (list(range(1, n_slices, 2)) +
                                 list(range(0, n_slices, 2)))
        elif slabel == 'alternating decreasing 2':
            sp_ind_time_order = (list(range(n_slices - 2, -1, -2)) +
                                 list(range(n_slices - 1, -1, -2)))
        else:
            raise HeaderDataError(f'We do not handle slice ordering "{slabel}"')
        return np.argsort(sp_ind_time_order)

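    # Worked example (not part of the original source): ``sp_ind_time_order``
    # lists spatial slice indices in temporal order; ``np.argsort`` converts
    # that into the temporal position of each spatial slice.  For
    # 'alternating increasing' with 5 slices:
    #
    # >>> np.argsort([0, 2, 4, 1, 3])
    # array([0, 3, 1, 4, 2])
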
    def get_xyzt_units(self):
        xyz_code = self.structarr['xyzt_units'] % 8
        t_code = self.structarr['xyzt_units'] - xyz_code
        return (unit_codes.label[xyz_code],
                unit_codes.label[t_code])

    def set_xyzt_units(self, xyz=None, t=None):
        if xyz is None:
            xyz = 0
        if t is None:
            t = 0
        xyz_code = self.structarr['xyzt_units'] % 8
        t_code = self.structarr['xyzt_units'] - xyz_code
        xyz_code = unit_codes[xyz]
        t_code = unit_codes[t]
        self.structarr['xyzt_units'] = xyz_code + t_code

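    # Illustrative sketch (not part of the original source): the spatial and
    # temporal units share the single ``xyzt_units`` byte (spatial code in
    # the low bits, temporal code above it), so e.g. 'mm' (2) plus 'sec' (8)
    # is stored as 10:
    #
    # >>> hdr = Nifti1Header()
    # >>> hdr.set_xyzt_units('mm', 'sec')
    # >>> hdr.get_xyzt_units()
    # ('mm', 'sec')
    # >>> int(hdr['xyzt_units'])
    # 10
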
    def _clean_after_mapping(self):
        """ Set format-specific stuff after converting header from mapping

        Clean up header after it has been initialized from an
        ``as_analyze_map`` method of another header type

        See :meth:`nibabel.analyze.AnalyzeHeader._clean_after_mapping` for a
        more detailed description.
        """
        self._structarr['magic'] = (self.single_magic if self.is_single
                                    else self.pair_magic)

    """ Checks only below here """

    @classmethod
    def _get_checks(klass):
        # We need to return our own versions of the checks - e.g.
        # _chk_datatype - to pick up the NIfTI datatypes from our class
        return (klass._chk_sizeof_hdr,
                klass._chk_datatype,
                klass._chk_bitpix,
                klass._chk_pixdims,
                klass._chk_qfac,
                klass._chk_magic,
                klass._chk_offset,
                klass._chk_qform_code,
                klass._chk_sform_code)

    @staticmethod
    def _chk_qfac(hdr, fix=False):
        rep = Report(HeaderDataError)
        if hdr['pixdim'][0] in (-1, 1):
            return hdr, rep
        rep.problem_level = 20
        rep.problem_msg = 'pixdim[0] (qfac) should be 1 (default) or -1'
        if fix:
            hdr['pixdim'][0] = 1
            rep.fix_msg = 'setting qfac to 1'
        return hdr, rep

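    # Illustrative sketch (not part of the original source): these ``_chk_*``
    # methods are normally run for you when a header is checked, but can be
    # called directly:
    #
    # >>> hdr = Nifti1Header()
    # >>> hdr['pixdim'][0] = 0            # invalid qfac
    # >>> hdr, rep = Nifti1Header._chk_qfac(hdr, fix=True)
    # >>> (int(hdr['pixdim'][0]), rep.fix_msg)
    # (1, 'setting qfac to 1')
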
    @staticmethod
    def _chk_magic(hdr, fix=False):
        rep = Report(HeaderDataError)
        magic = hdr['magic'].item()
        if magic in (hdr.pair_magic, hdr.single_magic):
            return hdr, rep
        rep.problem_msg = f'magic string "{asstr(magic)}" is not valid'
        rep.problem_level = 45
        if fix:
            rep.fix_msg = 'leaving as is, but future errors are likely'
        return hdr, rep

    @staticmethod
    def _chk_offset(hdr, fix=False):
        rep = Report(HeaderDataError)
        # for ease of later string formatting, use scalar of byte string
        magic = hdr['magic'].item()
        offset = hdr['vox_offset'].item()
        if offset == 0:
            return hdr, rep
        if magic == hdr.single_magic and offset < hdr.single_vox_offset:
            rep.problem_level = 40
            rep.problem_msg = ('vox offset %d too low for '
                               'single file nifti1' % offset)
            if fix:
                hdr['vox_offset'] = hdr.single_vox_offset
                rep.fix_msg = f'setting to minimum value of {hdr.single_vox_offset}'
            return hdr, rep
        if not offset % 16:
            return hdr, rep
        # SPM uses memory mapping to read the data, and
        # apparently this has to start on 16 byte boundaries
        rep.problem_msg = (f'vox offset (={offset:g}) not divisible by 16, '
                           'not SPM compatible')
        rep.problem_level = 30
        if fix:
            rep.fix_msg = 'leaving at current value'
        return hdr, rep

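    # Illustrative sketch (not part of the original source): a single-file
    # header with a vox_offset below the minimum gets bumped up when fixing:
    #
    # >>> hdr = Nifti1Header()            # single-file ('n+1') by default
    # >>> hdr['vox_offset'] = 100
    # >>> hdr, rep = Nifti1Header._chk_offset(hdr, fix=True)
    # >>> int(hdr['vox_offset']) == Nifti1Header.single_vox_offset
    # True
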
    @classmethod
    def _chk_qform_code(klass, hdr, fix=False):
        return klass._chk_xform_code('qform_code', hdr, fix)

    @classmethod
    def _chk_sform_code(klass, hdr, fix=False):
        return klass._chk_xform_code('sform_code', hdr, fix)

    @classmethod
    def _chk_xform_code(klass, code_type, hdr, fix):
        # utility method for sform and qform codes
        rep = Report(HeaderDataError)
        code = int(hdr[code_type])
        recoder = klass._field_recoders[code_type]
        if code in recoder.value_set():
            return hdr, rep
        rep.problem_level = 30
        rep.problem_msg = '%s %d not valid' % (code_type, code)
        if fix:
            hdr[code_type] = 0
            rep.fix_msg = 'setting to 0'
        return hdr, rep

    @classmethod
    def may_contain_header(klass, binaryblock):
        if len(binaryblock) < klass.sizeof_hdr:
            return False

        hdr_struct = np.ndarray(shape=(), dtype=header_dtype,
                                buffer=binaryblock[:klass.sizeof_hdr])
        return hdr_struct['magic'] in (b'ni1', b'n+1')
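
    # Illustrative sketch (not part of the original source): sniffing the raw
    # bytes of a freshly minted header recognises the NIfTI1 magic:
    #
    # >>> Nifti1Header.may_contain_header(Nifti1Header().binaryblock)
    # True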


class Nifti1PairHeader(Nifti1Header):
    """ Class for NIfTI1 pair header """
    # Signal whether this is single (header + data) file
    is_single = False


class Nifti1Pair(analyze.AnalyzeImage):
    """ Class for NIfTI1 format image, header pair
    """
    header_class = Nifti1PairHeader
    _meta_sniff_len = header_class.sizeof_hdr
    rw = True

    def __init__(self, dataobj, affine, header=None,
                 extra=None, file_map=None):
        super(Nifti1Pair, self).__init__(dataobj,
                                         affine,
                                         header,
                                         extra,
                                         file_map)
        # Force set of s/q form when header is None unless affine is also None
        if header is None and affine is not None:
            self._affine2header()
    # Copy docstring
    __init__.__doc__ = analyze.AnalyzeImage.__init__.__doc__ + """
    Notes
    -----

    If both a `header` and an `affine` are specified, and the `affine` does
    not match the affine that is in the `header`, the `affine` will be used,
    but the ``sform_code`` and ``qform_code`` fields in the header will be
    re-initialised to their default values. This is performed on the basis
    that, if you are changing the affine, you are likely to be changing the
    space to which the affine is pointing. The :meth:`set_sform` and
    :meth:`set_qform` methods can be used to update the codes after an image
    has been created - see those methods, and the :ref:`manual
    <default-sform-qform-codes>` for more details. """

    def update_header(self):
        """ Harmonize header with image data and affine

        See AnalyzeImage.update_header for more examples

        Examples
        --------
        >>> data = np.zeros((2,3,4))
        >>> affine = np.diag([1.0,2.0,3.0,1.0])
        >>> img = Nifti1Image(data, affine)
        >>> hdr = img.header
        >>> np.all(hdr.get_qform() == affine)
        True
        >>> np.all(hdr.get_sform() == affine)
        True
        """
        super(Nifti1Pair, self).update_header()
        hdr = self._header
        hdr['magic'] = hdr.pair_magic

    def _affine2header(self):
        """ Unconditionally set affine into the header """
        hdr = self._header
        # Set affine into sform with default code
        hdr.set_sform(self._affine, code='aligned')
        # Make qform 'unknown'
        hdr.set_qform(self._affine, code='unknown')

    def get_qform(self, coded=False):
        """ Return 4x4 affine matrix from qform parameters in header

        Parameters
        ----------
        coded : bool, optional
            If True, return {affine or None}, and qform code. If False, just
            return affine. {affine or None} means, return None if qform code
            == 0, and affine otherwise.

        Returns
        -------
        affine : None or (4,4) ndarray
            If `coded` is False, always return affine reconstructed from qform
            quaternion. If `coded` is True, return None if qform code is 0,
            else return the affine.
        code : int
            Qform code. Only returned if `coded` is True.

        See also
        --------
        set_qform
        get_sform
        """
        return self._header.get_qform(coded)

    def set_qform(self, affine, code=None, strip_shears=True, **kwargs):
        """ Set qform header values from 4x4 affine

        Parameters
        ----------
        affine : None or 4x4 array
            affine transform to write into qform. If None, only set `code`.
        code : None, string or integer
            String or integer giving meaning of transform in *affine*.
            The default is None. If code is None, then:

            * If affine is None, `code`-> 0
            * If affine not None and existing qform code in header == 0,
              `code`-> 2 (aligned)
            * If affine not None and existing qform code in header != 0,
              `code`-> existing qform code in header

        strip_shears : bool, optional
            Whether to strip shears in `affine`. If True, shears will be
            silently stripped. If False, the presence of shears will raise a
            ``HeaderDataError``
        update_affine : bool, optional
            Whether to update the image affine from the header best affine
            after setting the qform. Must be a keyword argument (its position
            differs between ``set_qform`` and ``set_sform``). Default is True

        See also
        --------
        get_qform
        set_sform

        Examples
        --------
        >>> data = np.arange(24).reshape((2,3,4))
        >>> aff = np.diag([2, 3, 4, 1])
        >>> img = Nifti1Pair(data, aff)
        >>> img.get_qform()
        array([[2., 0., 0., 0.],
               [0., 3., 0., 0.],
               [0., 0., 4., 0.],
               [0., 0., 0., 1.]])
        >>> img.get_qform(coded=True)
        (None, 0)
        >>> aff2 = np.diag([3, 4, 5, 1])
        >>> img.set_qform(aff2, 'talairach')
        >>> qaff, code = img.get_qform(coded=True)
        >>> np.all(qaff == aff2)
        True
        >>> int(code)
        3
        """
        update_affine = kwargs.pop('update_affine', True)
        if kwargs:
            raise TypeError(f'Unexpected keyword argument(s) {kwargs}')
        self._header.set_qform(affine, code, strip_shears)
        if update_affine:
            if self._affine is None:
                self._affine = self._header.get_best_affine()
            else:
                self._affine[:] = self._header.get_best_affine()

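    # Illustrative sketch (not part of the original source): with
    # ``update_affine=False`` the qform fields change but ``img.affine``
    # keeps its current values:
    #
    # >>> data = np.arange(24).reshape((2, 3, 4))
    # >>> img = Nifti1Pair(data, np.diag([2, 3, 4, 1]))
    # >>> img.set_qform(np.diag([3, 4, 5, 1]), 'scanner', update_affine=False)
    # >>> np.all(img.affine == np.diag([2, 3, 4, 1]))
    # True
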
    def get_sform(self, coded=False):
        """ Return 4x4 affine matrix from sform parameters in header

        Parameters
        ----------
        coded : bool, optional
            If True, return {affine or None}, and sform code. If False, just
            return affine. {affine or None} means, return None if sform code
            == 0, and affine otherwise.

        Returns
        -------
        affine : None or (4,4) ndarray
            If `coded` is False, always return affine from sform fields. If
            `coded` is True, return None if sform code is 0, else return the
            affine.
        code : int
            Sform code. Only returned if `coded` is True.

        See also
        --------
        set_sform
        get_qform
        """
        return self._header.get_sform(coded)

    def set_sform(self, affine, code=None, **kwargs):
        """ Set sform transform from 4x4 affine

        Parameters
        ----------
        affine : None or 4x4 array
            affine transform to write into sform. If None, only set `code`
        code : None, string or integer
            String or integer giving meaning of transform in *affine*.
            The default is None. If code is None, then:

            * If affine is None, `code`-> 0
            * If affine not None and existing sform code in header == 0,
              `code`-> 2 (aligned)
            * If affine not None and existing sform code in header != 0,
              `code`-> existing sform code in header

        update_affine : bool, optional
            Whether to update the image affine from the header best affine
            after setting the sform. Must be a keyword argument (its position
            differs between ``set_qform`` and ``set_sform``). Default is True

        See also
        --------
        get_sform
        set_qform

        Examples
        --------
        >>> data = np.arange(24).reshape((2,3,4))
        >>> aff = np.diag([2, 3, 4, 1])
        >>> img = Nifti1Pair(data, aff)
        >>> img.get_sform()
        array([[2., 0., 0., 0.],
               [0., 3., 0., 0.],
               [0., 0., 4., 0.],
               [0., 0., 0., 1.]])
        >>> saff, code = img.get_sform(coded=True)
        >>> saff
        array([[2., 0., 0., 0.],
               [0., 3., 0., 0.],
               [0., 0., 4., 0.],
               [0., 0., 0., 1.]])
        >>> int(code)
        2
        >>> aff2 = np.diag([3, 4, 5, 1])
        >>> img.set_sform(aff2, 'talairach')
        >>> saff, code = img.get_sform(coded=True)
        >>> np.all(saff == aff2)
        True
        >>> int(code)
        3
        """
        update_affine = kwargs.pop('update_affine', True)
        if kwargs:
            raise TypeError(f'Unexpected keyword argument(s) {kwargs}')
        self._header.set_sform(affine, code)
        if update_affine:
            if self._affine is None:
                self._affine = self._header.get_best_affine()
            else:
                self._affine[:] = self._header.get_best_affine()

    def as_reoriented(self, ornt):
        """Apply an orientation change and return a new image

        If `ornt` is the identity transform, return the original image,
        unchanged.

        Parameters
        ----------
        ornt : (n,2) orientation array
            orientation transform. ``ornt[N,1]`` is the flip of axis N of
            the array implied by `shape`, where 1 means no flip and -1 means
            flip. For example, if ``N==0`` and ``ornt[0,1] == -1``, and
            there's an array ``arr`` of shape `shape`, the flip would
            correspond to the effect of ``np.flipud(arr)``. ``ornt[:,0]`` is
            the transpose that needs to be done to the implied array, as in
            ``arr.transpose(ornt[:,0])``
        """
        img = super(Nifti1Pair, self).as_reoriented(ornt)

        if img is self:
            return img

        # Also apply the transform to the dim_info fields
        new_dim = [
            None if orig_dim is None else int(ornt[orig_dim, 0])
            for orig_dim in img.header.get_dim_info()]

        img.header.set_dim_info(*new_dim)

        return img
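
    # Illustrative sketch (not part of the original source): flip the first
    # data axis; the data, affine and any dim_info fields are remapped
    # together:
    #
    # >>> img = Nifti1Pair(np.arange(24).reshape((2, 3, 4)), np.eye(4))
    # >>> flipped = img.as_reoriented(np.array([[0, -1], [1, 1], [2, 1]]))
    # >>> flipped.shape
    # (2, 3, 4)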


class Nifti1Image(Nifti1Pair, SerializableImage):
    """ Class for single file NIfTI1 format image
    """
    header_class = Nifti1Header