index
int64 0
731k
| package
stringlengths 2
98
⌀ | name
stringlengths 1
76
| docstring
stringlengths 0
281k
⌀ | code
stringlengths 4
1.07M
⌀ | signature
stringlengths 2
42.8k
⌀ |
---|---|---|---|---|---|
56,713 |
nibabel.xmlutils
|
to_xml
|
Generate an XML bytestring with a given encoding.
Parameters
----------
enc : :class:`string`
Encoding to use for the generated bytestring. Default: 'utf-8'
\*\*kwargs : :class:`dict`
Additional keyword arguments to :func:`xml.etree.ElementTree.tostring`.
|
def to_xml(self, enc='utf-8', **kwargs) -> bytes:
    r"""Serialize this object to an XML bytestring.

    Parameters
    ----------
    enc : :class:`string`
        Encoding to use for the generated bytestring. Default: 'utf-8'
    \*\*kwargs : :class:`dict`
        Additional keyword arguments to :func:`xml.etree.ElementTree.tostring`.

    Returns
    -------
    bytes
        The serialized XML, or ``b''`` when ``_to_xml_element`` yields nothing.
    """
    element = self._to_xml_element()
    # No element means there is nothing to serialize
    if element is None:
        return b''
    return tostring(element, enc, **kwargs)
|
(self, enc='utf-8', **kwargs) -> bytes
|
56,714 |
nibabel.filebasedimages
|
write_to
| null |
def write_to(self, fileobj: io.IOBase) -> None:
    """Write the serialized image to a writable file-like object.

    Abstract hook: concrete image classes are expected to override this;
    the base implementation always raises.

    Raises
    ------
    NotImplementedError
        Always, in this base implementation.
    """
    raise NotImplementedError  # pragma: no cover
|
(self, fileobj: io.IOBase) -> NoneType
|
56,715 |
nibabel.cifti2.cifti2
|
Cifti2Image
|
Class for single file CIFTI-2 format image
|
class Cifti2Image(DataobjImage, SerializableImage):
    """Class for single file CIFTI-2 format image"""

    # XML-backed CIFTI-2 header type used for ``self.header``
    header_class = Cifti2Header
    header: Cifti2Header
    # CIFTI-2 data live in a NIfTI-2 container, so reuse its extensions/types
    valid_exts = Nifti2Image.valid_exts
    files_types = Nifti2Image.files_types
    makeable = False
    rw = True

    def __init__(
        self,
        dataobj=None,
        header=None,
        nifti_header=None,
        extra=None,
        file_map=None,
        dtype=None,
    ):
        """Initialize image

        The image is a combination of (dataobj, header), with optional metadata
        in `nifti_header` (a NIfTI2 header). There may be more metadata in the
        mapping `extra`. Filename / file-like objects can also go in the
        `file_map` mapping.

        Parameters
        ----------
        dataobj : object
            Object containing image data. It should be some object that
            returns an array from ``np.asanyarray``. It should have a
            ``shape`` attribute or property.
        header : Cifti2Header instance or sequence of :class:`cifti2_axes.Axis`
            Header with data for / from XML part of CIFTI-2 format.
            Alternatively a sequence of cifti2_axes.Axis objects can be provided
            describing each dimension of the array.
        nifti_header : None or mapping or NIfTI2 header instance, optional
            Metadata for NIfTI2 component of this format.
        extra : None or mapping
            Extra metadata not captured by `header` or `nifti_header`.
        file_map : mapping, optional
            Mapping giving file information for this image format.
        dtype : dtype-like, optional
            Data type to record in the NIfTI header; takes precedence over a
            dtype inferred from `dataobj`.
        """
        # A truthy non-Cifti2Header value is taken as a sequence of axes
        if not isinstance(header, Cifti2Header) and header:
            header = Cifti2Header.from_axes(header)
        super().__init__(dataobj, header=header, extra=extra, file_map=file_map)
        self._nifti_header = LimitedNifti2Header.from_header(nifti_header)
        # if NIfTI header not specified, get data type from input array
        if dtype is not None:
            self.set_data_dtype(dtype)
        elif nifti_header is None and hasattr(dataobj, 'dtype'):
            self.set_data_dtype(dataobj.dtype)
        self.update_headers()
        # Warn (rather than fail) on shape mismatch here; to_file_map raises
        if self._dataobj.shape != self.header.matrix.get_data_shape():
            warn(
                f'Dataobj shape {self._dataobj.shape} does not match shape '
                f'expected from CIFTI-2 header {self.header.matrix.get_data_shape()}'
            )

    @property
    def nifti_header(self):
        """NIfTI-2 header carrying container-level metadata (read-only)."""
        return self._nifti_header

    @classmethod
    def from_file_map(klass, file_map, *, mmap=True, keep_file_open=None):
        """Load a CIFTI-2 image from a file_map

        Parameters
        ----------
        file_map : file_map

        Returns
        -------
        img : Cifti2Image
            Returns a Cifti2Image

        Raises
        ------
        ValueError
            If the NIfTI-2 header carries no CIFTI-2 extension.
        """
        from .parse_cifti2 import Cifti2Extension, _Cifti2AsNiftiImage

        nifti_img = _Cifti2AsNiftiImage.from_file_map(
            file_map, mmap=mmap, keep_file_open=keep_file_open
        )
        # Get cifti2 header
        for item in nifti_img.header.extensions:
            if isinstance(item, Cifti2Extension):
                cifti_header = item.get_content()
                break
        else:
            raise ValueError('NIfTI2 header does not contain a CIFTI-2 extension')
        # Construct cifti image.
        # Use array proxy object where possible
        dataobj = nifti_img.dataobj
        # Drop the four dummy NIfTI space/time dimensions (dim[1]..dim[4])
        return Cifti2Image(
            reshape_dataobj(dataobj, dataobj.shape[4:]),
            header=cifti_header,
            nifti_header=nifti_img.header,
            file_map=file_map,
        )

    @classmethod
    def from_image(klass, img):
        """Class method to create new instance of own class from `img`

        Parameters
        ----------
        img : instance
            In fact, an object with the API of :class:`DataobjImage`.

        Returns
        -------
        cimg : instance
            Image, of our own class

        Raises
        ------
        NotImplementedError
            If `img` is not already an instance of this class; no general
            conversion is implemented.
        """
        if isinstance(img, klass):
            return img
        raise NotImplementedError

    def to_file_map(self, file_map=None, dtype=None):
        """Write image to `file_map` or contained ``self.file_map``

        Parameters
        ----------
        file_map : None or mapping, optional
            files mapping. If None (default) use object's ``file_map``
            attribute instead.
        dtype : dtype-like, optional
            Data type passed through to the written ``Nifti2Image``.

        Returns
        -------
        None

        Raises
        ------
        ValueError
            If the data shape does not match the shape expected from the
            CIFTI-2 header.
        """
        from .parse_cifti2 import Cifti2Extension

        self.update_headers()
        header = self._nifti_header
        extension = Cifti2Extension(content=self.header.to_xml())
        # Replace any previous CIFTI-2 extension with the fresh serialization
        header.extensions = Nifti1Extensions(
            ext for ext in header.extensions if not isinstance(ext, Cifti2Extension)
        )
        header.extensions.append(extension)
        if self._dataobj.shape != self.header.matrix.get_data_shape():
            raise ValueError(
                f'Dataobj shape {self._dataobj.shape} does not match shape '
                f'expected from CIFTI-2 header {self.header.matrix.get_data_shape()}'
            )
        # if intent code is not set, default to unknown CIFTI
        if header.get_intent()[0] == 'none':
            header.set_intent('NIFTI_INTENT_CONNECTIVITY_UNKNOWN')
        # CIFTI dimensions go in dim[5:]; dim[1]..dim[4] are dummy dims
        data = reshape_dataobj(self.dataobj, (1, 1, 1, 1) + self.dataobj.shape)
        # If qform not set, reset pixdim values so Nifti2 does not complain
        if header['qform_code'] == 0:
            header['pixdim'][:4] = 1
        img = Nifti2Image(data, None, header, dtype=dtype)
        img.to_file_map(file_map or self.file_map)

    def update_headers(self):
        """Harmonize NIfTI headers with image data

        Ensures that the NIfTI-2 header records the data shape in the last three
        ``dim`` fields. Per the spec:

            Because the first four dimensions in NIfTI are reserved for space and time, the CIFTI
            dimensions are stored in the NIfTI header in dim[5] and up, where dim[5] is the length
            of the first CIFTI dimension (number of values in a row), dim[6] is the length of the
            second CIFTI dimension, and dim[7] is the length of the third CIFTI dimension, if
            applicable. The fields dim[1] through dim[4] will be 1; dim[0] will be 6 or 7,
            depending on whether a third matrix dimension exists.

        >>> import numpy as np
        >>> data = np.zeros((2,3,4))
        >>> img = Cifti2Image(data)  # doctest: +IGNORE_WARNINGS
        >>> img.shape == (2, 3, 4)
        True
        >>> img.update_headers()
        >>> img.nifti_header.get_data_shape() == (1, 1, 1, 1, 2, 3, 4)
        True
        >>> img.shape == (2, 3, 4)
        True
        """
        # Prepend the four dummy space/time dimensions required by NIfTI
        self._nifti_header.set_data_shape((1, 1, 1, 1) + self._dataobj.shape)

    def get_data_dtype(self):
        """Return the data type recorded in the NIfTI-2 header."""
        return self._nifti_header.get_data_dtype()

    def set_data_dtype(self, dtype):
        """Set the on-disk data type in the NIfTI-2 header."""
        self._nifti_header.set_data_dtype(dtype)
|
(dataobj=None, header=None, nifti_header=None, extra=None, file_map=None, dtype=None)
|
56,716 |
nibabel.filebasedimages
|
__getitem__
|
No slicing or dictionary interface for images
|
def __getitem__(self, key) -> None:
    """No slicing or dictionary interface for images

    Raises
    ------
    TypeError
        Always; images deliberately do not support ``img[...]`` access.
    """
    raise TypeError('Cannot slice image objects.')
|
(self, key) -> NoneType
|
56,717 |
nibabel.cifti2.cifti2
|
__init__
|
Initialize image
The image is a combination of (dataobj, header), with optional metadata
in `nifti_header` (a NIfTI2 header). There may be more metadata in the
mapping `extra`. Filename / file-like objects can also go in the
`file_map` mapping.
Parameters
----------
dataobj : object
Object containing image data. It should be some object that
returns an array from ``np.asanyarray``. It should have a
``shape`` attribute or property.
header : Cifti2Header instance or sequence of :class:`cifti2_axes.Axis`
Header with data for / from XML part of CIFTI-2 format.
Alternatively a sequence of cifti2_axes.Axis objects can be provided
describing each dimension of the array.
nifti_header : None or mapping or NIfTI2 header instance, optional
Metadata for NIfTI2 component of this format.
extra : None or mapping
Extra metadata not captured by `header` or `nifti_header`.
file_map : mapping, optional
Mapping giving file information for this image format.
|
def __init__(
    self,
    dataobj=None,
    header=None,
    nifti_header=None,
    extra=None,
    file_map=None,
    dtype=None,
):
    """Initialize image

    The image is a combination of (dataobj, header), with optional metadata
    in `nifti_header` (a NIfTI2 header). There may be more metadata in the
    mapping `extra`. Filename / file-like objects can also go in the
    `file_map` mapping.

    Parameters
    ----------
    dataobj : object
        Object containing image data. It should be some object that
        returns an array from ``np.asanyarray``. It should have a
        ``shape`` attribute or property.
    header : Cifti2Header instance or sequence of :class:`cifti2_axes.Axis`
        Header with data for / from XML part of CIFTI-2 format.
        Alternatively a sequence of cifti2_axes.Axis objects can be provided
        describing each dimension of the array.
    nifti_header : None or mapping or NIfTI2 header instance, optional
        Metadata for NIfTI2 component of this format.
    extra : None or mapping
        Extra metadata not captured by `header` or `nifti_header`.
    file_map : mapping, optional
        Mapping giving file information for this image format.
    dtype : dtype-like, optional
        Data type to record in the NIfTI header; takes precedence over a
        dtype inferred from `dataobj`.
    """
    # A truthy non-Cifti2Header value is taken as a sequence of axes
    if not isinstance(header, Cifti2Header) and header:
        header = Cifti2Header.from_axes(header)
    super().__init__(dataobj, header=header, extra=extra, file_map=file_map)
    self._nifti_header = LimitedNifti2Header.from_header(nifti_header)
    # if NIfTI header not specified, get data type from input array
    if dtype is not None:
        self.set_data_dtype(dtype)
    elif nifti_header is None and hasattr(dataobj, 'dtype'):
        self.set_data_dtype(dataobj.dtype)
    self.update_headers()
    # Warn (rather than fail) here; saving raises on the same mismatch
    if self._dataobj.shape != self.header.matrix.get_data_shape():
        warn(
            f'Dataobj shape {self._dataobj.shape} does not match shape '
            f'expected from CIFTI-2 header {self.header.matrix.get_data_shape()}'
        )
|
(self, dataobj=None, header=None, nifti_header=None, extra=None, file_map=None, dtype=None)
|
56,719 |
nibabel.cifti2.cifti2
|
get_data_dtype
| null |
def get_data_dtype(self):
    """Return the data type recorded in the associated NIfTI-2 header."""
    return self._nifti_header.get_data_dtype()
|
(self)
|
56,722 |
nibabel.cifti2.cifti2
|
set_data_dtype
| null |
def set_data_dtype(self, dtype):
    """Set the on-disk data type in the associated NIfTI-2 header."""
    self._nifti_header.set_data_dtype(dtype)
|
(self, dtype)
|
56,724 |
nibabel.filebasedimages
|
to_bytes
|
Return a ``bytes`` object with the contents of the file that would
be written if the image were saved.
Parameters
----------
\*\*kwargs : keyword arguments
Keyword arguments that may be passed to ``img.to_file_map()``
Returns
-------
bytes
Serialized image
|
def to_bytes(self, **kwargs) -> bytes:
    r"""Serialize the image and return the result as a ``bytes`` object.

    Equivalent to saving the image and reading the file back.

    Parameters
    ----------
    \*\*kwargs : keyword arguments
        Keyword arguments that may be passed to ``img.to_file_map()``

    Returns
    -------
    bytes
        Serialized image
    """
    # Write into an in-memory stream instead of a real file
    buffer = io.BytesIO()
    self.to_stream(buffer, **kwargs)
    return buffer.getvalue()
|
(self, **kwargs) -> bytes
|
56,725 |
nibabel.cifti2.cifti2
|
to_file_map
|
Write image to `file_map` or contained ``self.file_map``
Parameters
----------
file_map : None or mapping, optional
files mapping. If None (default) use object's ``file_map``
attribute instead.
Returns
-------
None
|
def to_file_map(self, file_map=None, dtype=None):
    """Write image to `file_map` or contained ``self.file_map``

    The CIFTI-2 header is serialized to XML and stored as an extension of
    the NIfTI-2 header; data are reshaped into the 7D NIfTI layout and
    written through a ``Nifti2Image``.

    Parameters
    ----------
    file_map : None or mapping, optional
        files mapping. If None (default) use object's ``file_map``
        attribute instead.
    dtype : dtype-like, optional
        Data type passed through to the written ``Nifti2Image``.

    Returns
    -------
    None

    Raises
    ------
    ValueError
        If the data shape does not match the shape expected from the
        CIFTI-2 header.
    """
    from .parse_cifti2 import Cifti2Extension

    self.update_headers()
    header = self._nifti_header
    extension = Cifti2Extension(content=self.header.to_xml())
    # Replace any previous CIFTI-2 extension with the fresh serialization
    header.extensions = Nifti1Extensions(
        ext for ext in header.extensions if not isinstance(ext, Cifti2Extension)
    )
    header.extensions.append(extension)
    if self._dataobj.shape != self.header.matrix.get_data_shape():
        raise ValueError(
            f'Dataobj shape {self._dataobj.shape} does not match shape '
            f'expected from CIFTI-2 header {self.header.matrix.get_data_shape()}'
        )
    # if intent code is not set, default to unknown CIFTI
    if header.get_intent()[0] == 'none':
        header.set_intent('NIFTI_INTENT_CONNECTIVITY_UNKNOWN')
    # CIFTI dimensions go in dim[5:]; dim[1]..dim[4] are dummy dims
    data = reshape_dataobj(self.dataobj, (1, 1, 1, 1) + self.dataobj.shape)
    # If qform not set, reset pixdim values so Nifti2 does not complain
    if header['qform_code'] == 0:
        header['pixdim'][:4] = 1
    img = Nifti2Image(data, None, header, dtype=dtype)
    img.to_file_map(file_map or self.file_map)
|
(self, file_map=None, dtype=None)
|
56,727 |
nibabel.filebasedimages
|
to_stream
|
Save image to writable IO stream
Parameters
----------
io_obj : IOBase object
Writable stream
\*\*kwargs : keyword arguments
Keyword arguments that may be passed to ``img.to_file_map()``
|
def to_stream(self, io_obj: io.IOBase, **kwargs) -> None:
    r"""Save image to writable IO stream

    Parameters
    ----------
    io_obj : IOBase object
        Writable stream
    \*\*kwargs : keyword arguments
        Keyword arguments that may be passed to ``img.to_file_map()``
    """
    # Wrap the stream in a single-entry file map, then reuse the normal
    # file-map writing path.
    file_map = self._filemap_from_iobase(io_obj)
    self.to_file_map(file_map, **kwargs)
|
(self, io_obj: io.IOBase, **kwargs) -> NoneType
|
56,729 |
nibabel.cifti2.cifti2
|
update_headers
|
Harmonize NIfTI headers with image data
Ensures that the NIfTI-2 header records the data shape in the last three
``dim`` fields. Per the spec:
Because the first four dimensions in NIfTI are reserved for space and time, the CIFTI
dimensions are stored in the NIfTI header in dim[5] and up, where dim[5] is the length
of the first CIFTI dimension (number of values in a row), dim[6] is the length of the
second CIFTI dimension, and dim[7] is the length of the third CIFTI dimension, if
applicable. The fields dim[1] through dim[4] will be 1; dim[0] will be 6 or 7,
depending on whether a third matrix dimension exists.
>>> import numpy as np
>>> data = np.zeros((2,3,4))
>>> img = Cifti2Image(data) # doctest: +IGNORE_WARNINGS
>>> img.shape == (2, 3, 4)
True
>>> img.update_headers()
>>> img.nifti_header.get_data_shape() == (1, 1, 1, 1, 2, 3, 4)
True
>>> img.shape == (2, 3, 4)
True
|
def update_headers(self):
    """Harmonize NIfTI headers with image data

    Ensures that the NIfTI-2 header records the data shape in the last three
    ``dim`` fields. Per the spec:

        Because the first four dimensions in NIfTI are reserved for space and time, the CIFTI
        dimensions are stored in the NIfTI header in dim[5] and up, where dim[5] is the length
        of the first CIFTI dimension (number of values in a row), dim[6] is the length of the
        second CIFTI dimension, and dim[7] is the length of the third CIFTI dimension, if
        applicable. The fields dim[1] through dim[4] will be 1; dim[0] will be 6 or 7,
        depending on whether a third matrix dimension exists.

    >>> import numpy as np
    >>> data = np.zeros((2,3,4))
    >>> img = Cifti2Image(data)  # doctest: +IGNORE_WARNINGS
    >>> img.shape == (2, 3, 4)
    True
    >>> img.update_headers()
    >>> img.nifti_header.get_data_shape() == (1, 1, 1, 1, 2, 3, 4)
    True
    >>> img.shape == (2, 3, 4)
    True
    """
    # Prepend the four dummy space/time dimensions required by NIfTI
    self._nifti_header.set_data_shape((1, 1, 1, 1) + self._dataobj.shape)
|
(self)
|
56,730 |
nibabel.fileholders
|
FileHolder
|
class to contain filename, fileobj and file position
|
class FileHolder:
    """class to contain filename, fileobj and file position"""

    def __init__(
        self,
        filename: str | None = None,
        fileobj: io.IOBase | None = None,
        pos: int = 0,
    ):
        """Initialize FileHolder instance

        Parameters
        ----------
        filename : str, optional
            filename. Default is None
        fileobj : file-like object, optional
            Should implement at least 'seek' (for the purposes for this
            class). Default is None
        pos : int, optional
            position in filename or fileobject at which to start reading
            or writing data; defaults to 0
        """
        # Simple record type: attributes are read directly by callers
        self.filename = filename
        self.fileobj = fileobj
        self.pos = pos

    def get_prepare_fileobj(self, *args, **kwargs) -> ImageOpener:
        """Return fileobj if present, or return fileobj from filename

        Set position to that given in self.pos

        Parameters
        ----------
        *args : tuple
            positional arguments to file open. Ignored if there is a
            defined ``self.fileobj``. These might include the mode, such
            as 'rb'
        **kwargs : dict
            named arguments to file open. Ignored if there is a
            defined ``self.fileobj``

        Returns
        -------
        fileobj : file-like object
            object has position set (via ``fileobj.seek()``) to
            ``self.pos``

        Raises
        ------
        FileHolderError
            If neither ``self.fileobj`` nor ``self.filename`` is set.
        """
        if self.fileobj is not None:
            obj = ImageOpener(self.fileobj)  # for context manager
            obj.seek(self.pos)
        elif self.filename is not None:
            obj = ImageOpener(self.filename, *args, **kwargs)
            # A freshly opened file starts at 0; only seek when needed
            if self.pos != 0:
                obj.seek(self.pos)
        else:
            raise FileHolderError('No filename or fileobj present')
        return obj

    def same_file_as(self, other: FileHolder) -> bool:
        """Test if `self` refers to same files / fileobj as `other`

        Parameters
        ----------
        other : object
            object with `filename` and `fileobj` attributes

        Returns
        -------
        tf : bool
            True if `other` has the same filename (or both have None) and the
            same fileobj (or both have None)
        """
        return (self.filename == other.filename) and (self.fileobj == other.fileobj)

    @property
    def file_like(self) -> str | io.IOBase | None:
        """Return ``self.fileobj`` if not None, otherwise ``self.filename``"""
        return self.fileobj if self.fileobj is not None else self.filename
|
(filename: 'str | None' = None, fileobj: 'io.IOBase | None' = None, pos: 'int' = 0)
|
56,731 |
nibabel.fileholders
|
__init__
|
Initialize FileHolder instance
Parameters
----------
filename : str, optional
filename. Default is None
fileobj : file-like object, optional
Should implement at least 'seek' (for the purposes for this
class). Default is None
pos : int, optional
position in filename or fileobject at which to start reading
or writing data; defaults to 0
|
def __init__(
    self,
    filename: str | None = None,
    fileobj: io.IOBase | None = None,
    pos: int = 0,
):
    """Initialize FileHolder instance

    Parameters
    ----------
    filename : str, optional
        filename. Default is None
    fileobj : file-like object, optional
        Should implement at least 'seek' (for the purposes for this
        class). Default is None
    pos : int, optional
        position in filename or fileobject at which to start reading
        or writing data; defaults to 0
    """
    # Simple record type: attributes are read directly by callers
    self.filename = filename
    self.fileobj = fileobj
    self.pos = pos
|
(self, filename: Optional[str] = None, fileobj: Optional[io.IOBase] = None, pos: int = 0)
|
56,732 |
nibabel.fileholders
|
get_prepare_fileobj
|
Return fileobj if present, or return fileobj from filename
Set position to that given in self.pos
Parameters
----------
*args : tuple
positional arguments to file open. Ignored if there is a
defined ``self.fileobj``. These might include the mode, such
as 'rb'
**kwargs : dict
named arguments to file open. Ignored if there is a
defined ``self.fileobj``
Returns
-------
fileobj : file-like object
object has position set (via ``fileobj.seek()``) to
``self.pos``
|
def get_prepare_fileobj(self, *args, **kwargs) -> ImageOpener:
    """Return fileobj if present, or return fileobj from filename

    Set position to that given in self.pos

    Parameters
    ----------
    *args : tuple
        positional arguments to file open. Ignored if there is a
        defined ``self.fileobj``. These might include the mode, such
        as 'rb'
    **kwargs : dict
        named arguments to file open. Ignored if there is a
        defined ``self.fileobj``

    Returns
    -------
    fileobj : file-like object
        object has position set (via ``fileobj.seek()``) to
        ``self.pos``

    Raises
    ------
    FileHolderError
        If neither ``self.fileobj`` nor ``self.filename`` is set.
    """
    if self.fileobj is not None:
        obj = ImageOpener(self.fileobj)  # for context manager
        obj.seek(self.pos)
    elif self.filename is not None:
        obj = ImageOpener(self.filename, *args, **kwargs)
        # A freshly opened file starts at 0; only seek when needed
        if self.pos != 0:
            obj.seek(self.pos)
    else:
        raise FileHolderError('No filename or fileobj present')
    return obj
|
(self, *args, **kwargs) -> nibabel.openers.ImageOpener
|
56,733 |
nibabel.fileholders
|
same_file_as
|
Test if `self` refers to same files / fileobj as `other`
Parameters
----------
other : object
object with `filename` and `fileobj` attributes
Returns
-------
tf : bool
True if `other` has the same filename (or both have None) and the
same fileobj (or both have None)
|
def same_file_as(self, other: FileHolder) -> bool:
    """Check whether `self` and `other` refer to the same file data.

    Parameters
    ----------
    other : object
        object with `filename` and `fileobj` attributes

    Returns
    -------
    tf : bool
        True if `other` has the same filename (or both are None) and the
        same fileobj (or both are None)
    """
    names_match = self.filename == other.filename
    objects_match = self.fileobj == other.fileobj
    return names_match and objects_match
|
(self, other: nibabel.fileholders.FileHolder) -> bool
|
56,734 |
nibabel.fileholders
|
FileHolderError
| null |
class FileHolderError(Exception):
    """Error raised when a FileHolder has neither filename nor fileobj."""

    pass
| null |
56,735 |
nibabel.gifti.gifti
|
GiftiImage
|
GIFTI image object
The Gifti spec suggests using the following suffixes to your
filename when saving each specific type of data:
.gii
Generic GIFTI File
.coord.gii
Coordinates
.func.gii
Functional
.label.gii
Labels
.rgba.gii
RGB or RGBA
.shape.gii
Shape
.surf.gii
Surface
.tensor.gii
Tensors
.time.gii
Time Series
.topo.gii
Topology
The Gifti file is stored in endian convention of the current machine.
|
class GiftiImage(xml.XmlSerializable, SerializableImage):
    """GIFTI image object

    The Gifti spec suggests using the following suffixes to your
    filename when saving each specific type of data:

    .gii
        Generic GIFTI File
    .coord.gii
        Coordinates
    .func.gii
        Functional
    .label.gii
        Labels
    .rgba.gii
        RGB or RGBA
    .shape.gii
        Shape
    .surf.gii
        Surface
    .tensor.gii
        Tensors
    .time.gii
        Time Series
    .topo.gii
        Topology

    The Gifti file is stored in endian convention of the current machine.
    """

    valid_exts = ('.gii',)
    files_types = (('image', '.gii'),)
    _compressed_suffixes = ('.gz', '.bz2')

    # The parser will in due course be a GiftiImageParser, but we can't set
    # that now, because it would result in a circular import. We set it after
    # the class has been defined, at the end of the class definition.
    parser: Type[xml.XmlParser]

    def __init__(
        self,
        header=None,
        extra=None,
        file_map=None,
        meta=None,
        labeltable=None,
        darrays=None,
        version='1.0',
    ):
        """Initialize a GiftiImage.

        `header`, `extra` and `file_map` are passed through to the parent
        class; `meta`, `labeltable` and `darrays` default to fresh, empty
        containers so that instances never share mutable state.
        """
        super().__init__(header=header, extra=extra, file_map=file_map)
        if darrays is None:
            darrays = []
        if meta is None:
            meta = GiftiMetaData()
        if labeltable is None:
            labeltable = GiftiLabelTable()
        self._labeltable = labeltable
        self._meta = meta
        self.darrays = darrays
        self.version = version

    @property
    def numDA(self):
        """Number of data arrays in the image."""
        return len(self.darrays)

    @property
    def labeltable(self):
        """The image's :class:`GiftiLabelTable`."""
        return self._labeltable

    @labeltable.setter
    def labeltable(self, labeltable):
        """Set the labeltable for this GiftiImage

        Parameters
        ----------
        labeltable : :class:`GiftiLabelTable` instance
        """
        if not isinstance(labeltable, GiftiLabelTable):
            raise TypeError('Not a valid GiftiLabelTable instance')
        self._labeltable = labeltable

    @property
    def meta(self):
        """The image's :class:`GiftiMetaData`."""
        return self._meta

    @meta.setter
    def meta(self, meta):
        """Set the metadata for this GiftiImage

        Parameters
        ----------
        meta : :class:`GiftiMetaData` instance
        """
        if not isinstance(meta, GiftiMetaData):
            raise TypeError('Not a valid GiftiMetaData instance')
        self._meta = meta

    def add_gifti_data_array(self, dataarr):
        """Adds a data array to the GiftiImage

        Parameters
        ----------
        dataarr : :class:`GiftiDataArray` instance
        """
        if not isinstance(dataarr, GiftiDataArray):
            raise TypeError('Not a valid GiftiDataArray instance')
        self.darrays.append(dataarr)

    def remove_gifti_data_array(self, ith):
        """Removes the ith data array element from the GiftiImage"""
        self.darrays.pop(ith)

    def remove_gifti_data_array_by_intent(self, intent):
        """Removes all the data arrays with the given intent type"""
        intent2remove = intent_codes.code[intent]
        # Rebuild the list in place: calling ``list.remove`` while iterating
        # the same list skips the element following each removal, so some
        # matching arrays would survive. Slice assignment preserves the
        # identity of ``self.darrays`` for any external references.
        self.darrays[:] = [dele for dele in self.darrays if dele.intent != intent2remove]

    def get_arrays_from_intent(self, intent):
        """Return list of GiftiDataArray elements matching given intent"""
        it = intent_codes.code[intent]
        return [x for x in self.darrays if x.intent == it]

    def agg_data(self, intent_code=None):
        """
        Aggregate GIFTI data arrays into an ndarray or tuple of ndarray

        In the general case, the numpy data array is extracted from each ``GiftiDataArray``
        object and returned in a ``tuple``, in the order they are found in the GIFTI image.

        If all ``GiftiDataArray`` s have ``intent`` of 2001 (``NIFTI_INTENT_TIME_SERIES``),
        then the data arrays are concatenated as columns, producing a vertex-by-time array.

        If an ``intent_code`` is passed, data arrays are filtered by the selected intents,
        before being aggregated.
        This may be useful for images containing several intents, or ensuring an expected
        data type in an image of uncertain provenance.

        If ``intent_code`` is a ``tuple``, then a ``tuple`` will be returned with the result of
        ``agg_data`` for each element, in order.
        This may be useful for ensuring that expected data arrives in a consistent order.

        Parameters
        ----------
        intent_code : None, string, integer or tuple of strings or integers, optional
            code(s) specifying nifti intent

        Returns
        -------
        tuple of ndarrays or ndarray
            If the input is a tuple, the returned tuple will match the order.

        Examples
        --------

        Consider a surface GIFTI file:

        >>> import nibabel as nib
        >>> from nibabel.testing import get_test_data
        >>> surf_img = nib.load(get_test_data('gifti', 'ascii.gii'))

        The coordinate data, which is indicated by the ``NIFTI_INTENT_POINTSET``
        intent code, may be retrieved using any of the following equivalent
        calls:

        >>> coords = surf_img.agg_data('NIFTI_INTENT_POINTSET')
        >>> coords_2 = surf_img.agg_data('pointset')
        >>> coords_3 = surf_img.agg_data(1008)  # Numeric code for pointset
        >>> print(np.array2string(coords, precision=3))
        [[-16.072 -66.188 21.267]
        [-16.706 -66.054 21.233]
        [-17.614 -65.402 21.071]]
        >>> np.array_equal(coords, coords_2)
        True
        >>> np.array_equal(coords, coords_3)
        True

        Similarly, the triangle mesh can be retrieved using various intent
        specifiers:

        >>> triangles = surf_img.agg_data('NIFTI_INTENT_TRIANGLE')
        >>> triangles_2 = surf_img.agg_data('triangle')
        >>> triangles_3 = surf_img.agg_data(1009)  # Numeric code for pointset
        >>> print(np.array2string(triangles))
        [[0 1 2]]
        >>> np.array_equal(triangles, triangles_2)
        True
        >>> np.array_equal(triangles, triangles_3)
        True

        All arrays can be retrieved as a ``tuple`` by omitting the intent
        code:

        >>> coords_4, triangles_4 = surf_img.agg_data()
        >>> np.array_equal(coords, coords_4)
        True
        >>> np.array_equal(triangles, triangles_4)
        True

        Finally, a tuple of intent codes may be passed in order to select
        the arrays in a specific order:

        >>> triangles_5, coords_5 = surf_img.agg_data(('triangle', 'pointset'))
        >>> np.array_equal(triangles, triangles_5)
        True
        >>> np.array_equal(coords, coords_5)
        True

        The following image is a GIFTI file with ten (10) data arrays of the same
        size, and with intent code 2001 (``NIFTI_INTENT_TIME_SERIES``):

        >>> func_img = nib.load(get_test_data('gifti', 'task.func.gii'))

        When aggregating time series data, these arrays are concatenated into
        a single, vertex-by-timestep array:

        >>> series = func_img.agg_data()
        >>> series.shape
        (642, 10)

        In the case of a GIFTI file with unknown data arrays, it may be preferable
        to specify the intent code, so that a time series array is always returned:

        >>> series_2 = func_img.agg_data('NIFTI_INTENT_TIME_SERIES')
        >>> series_3 = func_img.agg_data('time series')
        >>> series_4 = func_img.agg_data(2001)
        >>> np.array_equal(series, series_2)
        True
        >>> np.array_equal(series, series_3)
        True
        >>> np.array_equal(series, series_4)
        True

        Requesting a data array from a GIFTI file with no matching intent codes
        will result in an empty tuple:

        >>> surf_img.agg_data('time series')
        ()
        >>> func_img.agg_data('triangle')
        ()
        """
        # Allow multiple intents to specify the order
        # e.g., agg_data(('pointset', 'triangle')) ensures consistent order
        if isinstance(intent_code, tuple):
            return tuple(self.agg_data(intent_code=code) for code in intent_code)
        darrays = self.darrays if intent_code is None else self.get_arrays_from_intent(intent_code)
        all_data = tuple(da.data for da in darrays)
        all_intent = {intent_codes.niistring[da.intent] for da in darrays}
        if all_intent == {'NIFTI_INTENT_TIME_SERIES'}:  # stack when the gifti is a timeseries
            return np.column_stack(all_data)
        if len(all_data) == 1:
            all_data = all_data[0]
        return all_data

    def print_summary(self):
        """Print a human-readable summary of the image to stdout."""
        print('----start----')
        print('Source filename: ', self.get_filename())
        print('Number of data arrays: ', self.numDA)
        print('Version: ', self.version)
        if self.meta is not None:
            print('----')
            print('Metadata:')
            print(self.meta.print_summary())
        if self.labeltable is not None:
            print('----')
            print('Labeltable:')
            print(self.labeltable.print_summary())
        for i, da in enumerate(self.darrays):
            print('----')
            print(f'DataArray {i}:')
            print(da.print_summary())
        print('----end----')

    def _to_xml_element(self):
        """Return the image serialized as an XML ``Element`` tree."""
        GIFTI = xml.Element(
            'GIFTI', attrib={'Version': self.version, 'NumberOfDataArrays': str(self.numDA)}
        )
        if self.meta is not None:
            GIFTI.append(self.meta._to_xml_element())
        if self.labeltable is not None:
            GIFTI.append(self.labeltable._to_xml_element())
        for dar in self.darrays:
            GIFTI.append(dar._to_xml_element())
        return GIFTI

    def to_xml(self, enc='utf-8', *, mode='strict', **kwargs) -> bytes:
        """Return XML corresponding to image content

        In ``'strict'`` mode, data arrays with non-conforming data types
        raise; in ``'compat'`` mode they are cast to float32/int32 first;
        ``'force'`` serializes as-is.
        """
        if mode == 'strict':
            if any(arr.datatype not in GIFTI_DTYPES for arr in self.darrays):
                raise ValueError(
                    'GiftiImage contains data arrays with invalid data types; '
                    'use mode="compat" to automatically cast to conforming types'
                )
        elif mode == 'compat':
            darrays = []
            for arr in self.darrays:
                if arr.datatype not in GIFTI_DTYPES:
                    arr = copy(arr)
                    # TODO: Better typing for recoders
                    dtype = cast(np.dtype, data_type_codes.dtype[arr.datatype])
                    if np.issubdtype(dtype, np.floating):
                        arr.datatype = data_type_codes['float32']
                    elif np.issubdtype(dtype, np.integer):
                        arr.datatype = data_type_codes['int32']
                    else:
                        raise ValueError(f'Cannot convert {dtype} to float32/int32')
                darrays.append(arr)
            # Serialize a shallow copy with the converted arrays
            gii = copy(self)
            gii.darrays = darrays
            return gii.to_xml(enc=enc, mode='strict')
        elif mode != 'force':
            raise TypeError(f'Unknown mode {mode}')
        header = b"""<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE GIFTI SYSTEM "http://www.nitrc.org/frs/download.php/115/gifti.dtd">
"""
        return header + super().to_xml(enc, **kwargs)

    # Avoid the indirection of going through to_file_map
    def to_bytes(self, enc='utf-8', *, mode='strict'):
        return self.to_xml(enc=enc, mode=mode)

    to_bytes.__doc__ = SerializableImage.to_bytes.__doc__

    def to_file_map(self, file_map=None, enc='utf-8', *, mode='strict'):
        """Save the current image to the specified file_map

        Parameters
        ----------
        file_map : dict
            Dictionary with single key ``image`` with associated value which is
            a :class:`FileHolder` instance pointing to the image file.

        Returns
        -------
        None
        """
        if file_map is None:
            file_map = self.file_map
        with file_map['image'].get_prepare_fileobj('wb') as f:
            f.write(self.to_xml(enc=enc, mode=mode))

    @classmethod
    def from_file_map(klass, file_map, buffer_size=35000000, mmap=True):
        """Load a Gifti image from a file_map

        Parameters
        ----------
        file_map : dict
            Dictionary with single key ``image`` with associated value which is
            a :class:`FileHolder` instance pointing to the image file.
        buffer_size: None or int, optional
            size of read buffer. None uses default buffer_size
            from xml.parsers.expat.
        mmap : {True, False, 'c', 'r', 'r+'}
            Controls the use of numpy memory mapping for reading data. Only
            has an effect when loading GIFTI images with data stored in
            external files (``DataArray`` elements with an ``Encoding`` equal
            to ``ExternalFileBinary``). If ``False``, do not try numpy
            ``memmap`` for data array. If one of ``{'c', 'r', 'r+'}``, try
            numpy ``memmap`` with ``mode=mmap``. A `mmap` value of ``True``
            gives the same behavior as ``mmap='c'``. If the file cannot be
            memory-mapped, ignore `mmap` value and read array from file.

        Returns
        -------
        img : GiftiImage
        """
        parser = klass.parser(buffer_size=buffer_size, mmap=mmap)
        with file_map['image'].get_prepare_fileobj('rb') as fptr:
            parser.parse(fptr=fptr)
        return parser.img

    @classmethod
    def from_filename(klass, filename, buffer_size=35000000, mmap=True):
        """Load a Gifti image from `filename`; see :meth:`from_file_map`."""
        file_map = klass.filespec_to_file_map(filename)
        img = klass.from_file_map(file_map, buffer_size=buffer_size, mmap=mmap)
        return img
|
(header=None, extra=None, file_map=None, meta=None, labeltable=None, darrays=None, version='1.0')
|
56,737 |
nibabel.gifti.gifti
|
__init__
| null |
def __init__(
    self,
    header=None,
    extra=None,
    file_map=None,
    meta=None,
    labeltable=None,
    darrays=None,
    version='1.0',
):
    """Build a GIFTI image from optional header, metadata, label table
    and data arrays; omitted pieces get fresh empty defaults."""
    super().__init__(header=header, extra=extra, file_map=file_map)
    # Fresh defaults per instance (never share mutable defaults across images)
    self._meta = GiftiMetaData() if meta is None else meta
    self._labeltable = GiftiLabelTable() if labeltable is None else labeltable
    self.darrays = [] if darrays is None else darrays
    self.version = version
|
(self, header=None, extra=None, file_map=None, meta=None, labeltable=None, darrays=None, version='1.0')
|
56,738 |
nibabel.gifti.gifti
|
_to_xml_element
| null |
def _to_xml_element(self):
    """Return the image serialized as an XML element tree rooted at GIFTI."""
    root = xml.Element(
        'GIFTI', attrib={'Version': self.version, 'NumberOfDataArrays': str(self.numDA)}
    )
    if self.meta is not None:
        root.append(self.meta._to_xml_element())
    if self.labeltable is not None:
        root.append(self.labeltable._to_xml_element())
    # Data arrays come last, in their stored order
    root.extend(darray._to_xml_element() for darray in self.darrays)
    return root
|
(self)
|
56,739 |
nibabel.gifti.gifti
|
add_gifti_data_array
|
Adds a data array to the GiftiImage
Parameters
----------
dataarr : :class:`GiftiDataArray` instance
|
def add_gifti_data_array(self, dataarr):
    """Adds a data array to the GiftiImage

    Parameters
    ----------
    dataarr : :class:`GiftiDataArray` instance

    Raises
    ------
    TypeError
        If `dataarr` is not a ``GiftiDataArray``.
    """
    if isinstance(dataarr, GiftiDataArray):
        self.darrays.append(dataarr)
    else:
        raise TypeError('Not a valid GiftiDataArray instance')
|
(self, dataarr)
|
56,740 |
nibabel.gifti.gifti
|
agg_data
|
Aggregate GIFTI data arrays into an ndarray or tuple of ndarray
In the general case, the numpy data array is extracted from each ``GiftiDataArray``
object and returned in a ``tuple``, in the order they are found in the GIFTI image.
If all ``GiftiDataArray`` s have ``intent`` of 2001 (``NIFTI_INTENT_TIME_SERIES``),
then the data arrays are concatenated as columns, producing a vertex-by-time array.
If an ``intent_code`` is passed, data arrays are filtered by the selected intents,
before being aggregated.
This may be useful for images containing several intents, or ensuring an expected
data type in an image of uncertain provenance.
If ``intent_code`` is a ``tuple``, then a ``tuple`` will be returned with the result of
``agg_data`` for each element, in order.
This may be useful for ensuring that expected data arrives in a consistent order.
Parameters
----------
intent_code : None, string, integer or tuple of strings or integers, optional
code(s) specifying nifti intent
Returns
-------
tuple of ndarrays or ndarray
If the input is a tuple, the returned tuple will match the order.
Examples
--------
Consider a surface GIFTI file:
>>> import nibabel as nib
>>> from nibabel.testing import get_test_data
>>> surf_img = nib.load(get_test_data('gifti', 'ascii.gii'))
The coordinate data, which is indicated by the ``NIFTI_INTENT_POINTSET``
intent code, may be retrieved using any of the following equivalent
calls:
>>> coords = surf_img.agg_data('NIFTI_INTENT_POINTSET')
>>> coords_2 = surf_img.agg_data('pointset')
>>> coords_3 = surf_img.agg_data(1008) # Numeric code for pointset
>>> print(np.array2string(coords, precision=3))
[[-16.072 -66.188 21.267]
[-16.706 -66.054 21.233]
[-17.614 -65.402 21.071]]
>>> np.array_equal(coords, coords_2)
True
>>> np.array_equal(coords, coords_3)
True
Similarly, the triangle mesh can be retrieved using various intent
specifiers:
>>> triangles = surf_img.agg_data('NIFTI_INTENT_TRIANGLE')
>>> triangles_2 = surf_img.agg_data('triangle')
>>> triangles_3 = surf_img.agg_data(1009) # Numeric code for pointset
>>> print(np.array2string(triangles))
[[0 1 2]]
>>> np.array_equal(triangles, triangles_2)
True
>>> np.array_equal(triangles, triangles_3)
True
All arrays can be retrieved as a ``tuple`` by omitting the intent
code:
>>> coords_4, triangles_4 = surf_img.agg_data()
>>> np.array_equal(coords, coords_4)
True
>>> np.array_equal(triangles, triangles_4)
True
Finally, a tuple of intent codes may be passed in order to select
the arrays in a specific order:
>>> triangles_5, coords_5 = surf_img.agg_data(('triangle', 'pointset'))
>>> np.array_equal(triangles, triangles_5)
True
>>> np.array_equal(coords, coords_5)
True
The following image is a GIFTI file with ten (10) data arrays of the same
size, and with intent code 2001 (``NIFTI_INTENT_TIME_SERIES``):
>>> func_img = nib.load(get_test_data('gifti', 'task.func.gii'))
When aggregating time series data, these arrays are concatenated into
a single, vertex-by-timestep array:
>>> series = func_img.agg_data()
>>> series.shape
(642, 10)
In the case of a GIFTI file with unknown data arrays, it may be preferable
to specify the intent code, so that a time series array is always returned:
>>> series_2 = func_img.agg_data('NIFTI_INTENT_TIME_SERIES')
>>> series_3 = func_img.agg_data('time series')
>>> series_4 = func_img.agg_data(2001)
>>> np.array_equal(series, series_2)
True
>>> np.array_equal(series, series_3)
True
>>> np.array_equal(series, series_4)
True
Requesting a data array from a GIFTI file with no matching intent codes
will result in an empty tuple:
>>> surf_img.agg_data('time series')
()
>>> func_img.agg_data('triangle')
()
|
def agg_data(self, intent_code=None):
    """Aggregate GIFTI data arrays into an ndarray or tuple of ndarrays.

    Arrays are returned in the order they appear in the image. If every
    selected array has intent ``NIFTI_INTENT_TIME_SERIES`` (2001), the
    arrays are concatenated as columns, producing a vertex-by-time array.
    A single matching array is returned bare rather than in a tuple; no
    matching arrays yields an empty tuple.

    Parameters
    ----------
    intent_code : None, string, integer or tuple of strings or integers, optional
        code(s) specifying nifti intent. ``None`` selects all data arrays.
        A tuple triggers one ``agg_data`` call per element, with the
        results returned as a tuple in the same order.

    Returns
    -------
    tuple of ndarrays or ndarray
        If the input is a tuple, the returned tuple will match the order.
    """
    # A tuple of intents means "aggregate per intent, preserving the order"
    # e.g. agg_data(('pointset', 'triangle')) gives a predictable layout
    if isinstance(intent_code, tuple):
        return tuple(self.agg_data(intent_code=code) for code in intent_code)

    if intent_code is None:
        selected = self.darrays
    else:
        selected = self.get_arrays_from_intent(intent_code)

    arrays = tuple(darray.data for darray in selected)
    intents = {intent_codes.niistring[darray.intent] for darray in selected}

    # A pure time-series image is stacked into one vertex-by-timestep array
    if intents == {'NIFTI_INTENT_TIME_SERIES'}:
        return np.column_stack(arrays)
    return arrays[0] if len(arrays) == 1 else arrays
|
(self, intent_code=None)
|
56,741 |
nibabel.gifti.gifti
|
get_arrays_from_intent
|
Return list of GiftiDataArray elements matching given intent
|
def get_arrays_from_intent(self, intent):
    """Return list of GiftiDataArray elements matching given intent"""
    wanted = intent_codes.code[intent]
    return list(filter(lambda darray: darray.intent == wanted, self.darrays))
|
(self, intent)
|
56,743 |
nibabel.gifti.gifti
|
print_summary
| null |
def print_summary(self):
    """Print a human-readable summary of the image to stdout."""
    print('----start----')
    print('Source filename: ', self.get_filename())
    print('Number of data arrays: ', self.numDA)
    print('Version: ', self.version)
    if self.meta is not None:
        print('----')
        print('Metadata:')
        # print_summary() of sub-objects returns the text to print here
        print(self.meta.print_summary())
    if self.labeltable is not None:
        print('----')
        print('Labeltable:')
        print(self.labeltable.print_summary())
    for i, da in enumerate(self.darrays):
        print('----')
        print(f'DataArray {i}:')
        print(da.print_summary())
    print('----end----')
|
(self)
|
56,744 |
nibabel.gifti.gifti
|
remove_gifti_data_array
|
Removes the ith data array element from the GiftiImage
|
def remove_gifti_data_array(self, ith):
    """Removes the ith data array element from the GiftiImage"""
    del self.darrays[ith]
|
(self, ith)
|
56,745 |
nibabel.gifti.gifti
|
remove_gifti_data_array_by_intent
|
Removes all the data arrays with the given intent type
|
def remove_gifti_data_array_by_intent(self, intent):
    """Removes all the data arrays with the given intent type"""
    intent2remove = intent_codes.code[intent]
    # Bug fix: the previous implementation removed items from self.darrays
    # while iterating over it, which skips the element following each
    # removal — consecutive matching arrays were left behind. Rebuild the
    # list instead; slice-assign keeps the list object identity so any
    # external references to self.darrays stay valid.
    self.darrays[:] = [darray for darray in self.darrays if darray.intent != intent2remove]
|
(self, intent)
|
56,747 |
nibabel.gifti.gifti
|
to_bytes
|
Return a ``bytes`` object with the contents of the file that would
be written if the image were saved.
Parameters
----------
\*\*kwargs : keyword arguments
Keyword arguments that may be passed to ``img.to_file_map()``
Returns
-------
bytes
Serialized image
|
def to_bytes(self, enc='utf-8', *, mode='strict'):
    """Serialize the image; equivalent to ``to_xml(enc=enc, mode=mode)``."""
    serialized = self.to_xml(enc=enc, mode=mode)
    return serialized
|
(self, enc='utf-8', *, mode='strict')
|
56,748 |
nibabel.gifti.gifti
|
to_file_map
|
Save the current image to the specified file_map
Parameters
----------
file_map : dict
Dictionary with single key ``image`` with associated value which is
a :class:`FileHolder` instance pointing to the image file.
Returns
-------
None
|
def to_file_map(self, file_map=None, enc='utf-8', *, mode='strict'):
    """Save the current image to the specified file_map

    Parameters
    ----------
    file_map : dict
        Dictionary with single key ``image`` with associated value which is
        a :class:`FileHolder` instance pointing to the image file.
    enc : str, optional
        Encoding passed through to ``to_xml`` (default 'utf-8').
    mode : {'strict', 'compat', 'force'}, optional
        Datatype-conformance mode passed through to ``to_xml``.

    Returns
    -------
    None
    """
    if file_map is None:
        file_map = self.file_map
    with file_map['image'].get_prepare_fileobj('wb') as f:
        f.write(self.to_xml(enc=enc, mode=mode))
|
(self, file_map=None, enc='utf-8', *, mode='strict')
|
56,751 |
nibabel.gifti.gifti
|
to_xml
|
Return XML corresponding to image content
|
def to_xml(self, enc='utf-8', *, mode='strict', **kwargs) -> bytes:
    """Return XML corresponding to image content

    Parameters
    ----------
    enc : str, optional
        Output encoding (default 'utf-8').
    mode : {'strict', 'compat', 'force'}, optional
        'strict' raises ValueError if any data array has a datatype outside
        ``GIFTI_DTYPES``; 'compat' recodes non-conforming datatypes to
        float32/int32 on copies and serializes those; 'force' skips the
        datatype check entirely.

    Raises
    ------
    ValueError
        In 'strict' mode for non-conforming datatypes, or in 'compat' mode
        for datatypes that are neither floating nor integer.
    TypeError
        For an unrecognized `mode`.
    """
    if mode == 'strict':
        if any(arr.datatype not in GIFTI_DTYPES for arr in self.darrays):
            raise ValueError(
                'GiftiImage contains data arrays with invalid data types; '
                'use mode="compat" to automatically cast to conforming types'
            )
    elif mode == 'compat':
        darrays = []
        for arr in self.darrays:
            if arr.datatype not in GIFTI_DTYPES:
                # Copy so the caller's image is left untouched by the recode
                arr = copy(arr)
                # TODO: Better typing for recoders
                dtype = cast(np.dtype, data_type_codes.dtype[arr.datatype])
                if np.issubdtype(dtype, np.floating):
                    arr.datatype = data_type_codes['float32']
                elif np.issubdtype(dtype, np.integer):
                    arr.datatype = data_type_codes['int32']
                else:
                    raise ValueError(f'Cannot convert {dtype} to float32/int32')
            darrays.append(arr)
        # Serialize a shallow copy carrying the recoded arrays, now 'strict'-safe
        gii = copy(self)
        gii.darrays = darrays
        return gii.to_xml(enc=enc, mode='strict')
    elif mode != 'force':
        raise TypeError(f'Unknown mode {mode}')
    # XML declaration + DOCTYPE are prepended verbatim, outside ElementTree
    header = b"""<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE GIFTI SYSTEM "http://www.nitrc.org/frs/download.php/115/gifti.dtd">
"""
    return header + super().to_xml(enc, **kwargs)
|
(self, enc='utf-8', *, mode='strict', **kwargs) -> bytes
|
56,752 |
nibabel.freesurfer.mghformat
|
MGHImage
|
Class for MGH format image
|
class MGHImage(SpatialImage, SerializableImage):
    """Class for MGH format image"""

    header_class = MGHHeader
    header: MGHHeader
    valid_exts = ('.mgh', '.mgz')
    # Register that .mgz extension signals gzip compression
    ImageOpener.compress_ext_map['.mgz'] = ImageOpener.gz_def
    files_types = (('image', '.mgh'),)
    _compressed_suffixes = ()
    makeable = True
    rw = True
    ImageArrayProxy = ArrayProxy

    def __init__(self, dataobj, affine, header=None, extra=None, file_map=None):
        """Initialize image, padding the data shape to at least 3 axes."""
        shape = dataobj.shape
        if len(shape) < 3:
            # MGH stores 3 spatial dimensions; pad with trailing length-1 axes
            dataobj = reshape_dataobj(dataobj, shape + (1,) * (3 - len(shape)))
        super().__init__(dataobj, affine, header=header, extra=extra, file_map=file_map)

    @classmethod
    def filespec_to_file_map(klass, filespec):
        """Check for compressed .mgz format, then .mgh format"""
        # Bug fix: the docstring above was previously a bare string placed
        # *after* the first statement, making it a discarded expression
        # rather than the method's docstring.
        filespec = _stringify_path(filespec)
        if splitext(filespec)[1].lower() == '.mgz':
            return dict(image=FileHolder(filename=filespec))
        return super().filespec_to_file_map(filespec)

    @classmethod
    def from_file_map(klass, file_map, *, mmap=True, keep_file_open=None):
        """Class method to create image from mapping in ``file_map``

        Parameters
        ----------
        file_map : dict
            Mapping with (key, value) pairs of (``file_type``, FileHolder
            instance giving file-likes for each file needed for this image
            type.
        mmap : {True, False, 'c', 'r'}, optional, keyword only
            `mmap` controls the use of numpy memory mapping for reading image
            array data. If False, do not try numpy ``memmap`` for data array.
            If one of {'c', 'r'}, try numpy memmap with ``mode=mmap``. A
            `mmap` value of True gives the same behavior as ``mmap='c'``. If
            image data file cannot be memory-mapped, ignore `mmap` value and
            read array from file.
        keep_file_open : { None, True, False }, optional, keyword only
            `keep_file_open` controls whether a new file handle is created
            every time the image is accessed, or a single file handle is
            created and used for the lifetime of this ``ArrayProxy``. If
            ``True``, a single file handle is created and used. If ``False``,
            a new file handle is created every time the image is accessed.
            If ``file_map`` refers to an open file handle, this setting has no
            effect. The default value (``None``) will result in the value of
            ``nibabel.arrayproxy.KEEP_FILE_OPEN_DEFAULT`` being used.

        Returns
        -------
        img : MGHImage instance
        """
        if mmap not in (True, False, 'c', 'r'):
            raise ValueError("mmap should be one of {True, False, 'c', 'r'}")
        img_fh = file_map['image']
        # File object stays open: the array proxy reads from it lazily
        mghf = img_fh.get_prepare_fileobj('rb')
        header = klass.header_class.from_fileobj(mghf)
        affine = header.get_affine()
        hdr_copy = header.copy()
        # Pass original image fileobj / filename to array proxy
        data = klass.ImageArrayProxy(
            img_fh.file_like, hdr_copy, mmap=mmap, keep_file_open=keep_file_open
        )
        img = klass(data, affine, header, file_map=file_map)
        return img

    def to_file_map(self, file_map=None):
        """Write image to `file_map` or contained ``self.file_map``

        Parameters
        ----------
        file_map : None or mapping, optional
            files mapping. If None (default) use object's ``file_map``
            attribute instead
        """
        if file_map is None:
            file_map = self.file_map
        data = np.asanyarray(self.dataobj)
        self.update_header()
        hdr = self.header
        # MGH layout: fixed header, then data, then footer fields
        with file_map['image'].get_prepare_fileobj('wb') as mghf:
            hdr.writehdr_to(mghf)
            self._write_data(mghf, data, hdr)
            hdr.writeftr_to(mghf)
        self._header = hdr
        self.file_map = file_map

    def _write_data(self, mghfile, data, header):
        """Utility routine to write image

        Parameters
        ----------
        mghfile : file-like
            file-like object implementing ``seek`` or ``tell``, and
            ``write``
        data : array-like
            array to write
        header : analyze-type header object
            header
        """
        shape = header.get_data_shape()
        if data.shape != shape:
            raise HeaderDataError('Data should be shape (%s)' % ', '.join(str(s) for s in shape))
        offset = header.get_data_offset()
        out_dtype = header.get_data_dtype()
        array_to_file(data, mghfile, out_dtype, offset)

    def _affine2header(self):
        """Unconditionally set affine into the header"""
        hdr = self._header
        shape = np.array(self._dataobj.shape[:3])
        # for more information, go through save_mgh.m in FreeSurfer dist
        voxelsize = voxel_sizes(self._affine)
        Mdc = self._affine[:3, :3] / voxelsize
        c_ras = self._affine.dot(np.hstack((shape / 2.0, [1])))[:3]
        # Assign after we've had a chance to raise exceptions
        hdr['delta'] = voxelsize
        hdr['Mdc'] = Mdc.T
        hdr['Pxyz_c'] = c_ras
|
(dataobj, affine, header=None, extra=None, file_map=None)
|
56,754 |
nibabel.freesurfer.mghformat
|
__init__
| null |
def __init__(self, dataobj, affine, header=None, extra=None, file_map=None):
    """Initialize MGH image, padding the data shape to at least 3 axes."""
    shape = dataobj.shape
    if len(shape) < 3:
        # Pad with trailing length-1 axes so the data always has 3 spatial dims
        dataobj = reshape_dataobj(dataobj, shape + (1,) * (3 - len(shape)))
    super().__init__(dataobj, affine, header=header, extra=extra, file_map=file_map)
|
(self, dataobj, affine, header=None, extra=None, file_map=None)
|
56,756 |
nibabel.freesurfer.mghformat
|
_affine2header
|
Unconditionally set affine into the header
|
def _affine2header(self):
    """Unconditionally set affine into the header"""
    hdr = self._header
    shape = np.array(self._dataobj.shape[:3])
    # for more information, go through save_mgh.m in FreeSurfer dist
    voxelsize = voxel_sizes(self._affine)
    # Rotation/flip part of the affine with the per-axis scaling divided out
    Mdc = self._affine[:3, :3] / voxelsize
    # World coordinate of the volume center (voxel shape/2, homogeneous)
    c_ras = self._affine.dot(np.hstack((shape / 2.0, [1])))[:3]
    # Assign after we've had a chance to raise exceptions
    hdr['delta'] = voxelsize
    hdr['Mdc'] = Mdc.T
    hdr['Pxyz_c'] = c_ras
|
(self)
|
56,757 |
nibabel.freesurfer.mghformat
|
_write_data
|
Utility routine to write image
Parameters
----------
mghfile : file-like
file-like object implementing ``seek`` or ``tell``, and
``write``
data : array-like
array to write
header : analyze-type header object
header
|
def _write_data(self, mghfile, data, header):
    """Utility routine to write image

    Parameters
    ----------
    mghfile : file-like
        file-like object implementing ``seek`` or ``tell``, and
        ``write``
    data : array-like
        array to write
    header : analyze-type header object
        header

    Raises
    ------
    HeaderDataError
        If `data` does not match the shape recorded in `header`.
    """
    expected_shape = header.get_data_shape()
    if data.shape != expected_shape:
        shape_str = ', '.join(str(dim) for dim in expected_shape)
        raise HeaderDataError(f'Data should be shape ({shape_str})')
    array_to_file(data, mghfile, header.get_data_dtype(), header.get_data_offset())
|
(self, mghfile, data, header)
|
56,760 |
nibabel.spatialimages
|
get_data_dtype
| null |
def get_data_dtype(self) -> np.dtype:
    """Return the numpy dtype of the image data (delegates to the header)."""
    return self._header.get_data_dtype()
|
(self) -> numpy.dtype
|
56,764 |
nibabel.spatialimages
|
set_data_dtype
| null |
def set_data_dtype(self, dtype: npt.DTypeLike) -> None:
    """Set the on-disk data dtype (delegates to the header)."""
    self._header.set_data_dtype(dtype)
|
(self, dtype: 'npt.DTypeLike') -> 'None'
|
56,767 |
nibabel.freesurfer.mghformat
|
to_file_map
|
Write image to `file_map` or contained ``self.file_map``
Parameters
----------
file_map : None or mapping, optional
files mapping. If None (default) use object's ``file_map``
attribute instead
|
def to_file_map(self, file_map=None):
    """Write image to `file_map` or contained ``self.file_map``

    Parameters
    ----------
    file_map : None or mapping, optional
        files mapping. If None (default) use object's ``file_map``
        attribute instead
    """
    if file_map is None:
        file_map = self.file_map
    data = np.asanyarray(self.dataobj)
    self.update_header()
    hdr = self.header
    # MGH layout: fixed header, then data, then footer fields
    with file_map['image'].get_prepare_fileobj('wb') as mghf:
        hdr.writehdr_to(mghf)
        self._write_data(mghf, data, hdr)
        hdr.writeftr_to(mghf)
    # Record what was written so image state and file stay in sync
    self._header = hdr
    self.file_map = file_map
|
(self, file_map=None)
|
56,772 |
nibabel.minc1
|
Minc1Image
|
Class for MINC1 format images
The MINC1 image class uses the default header type, rather than a specific
MINC header type - and reads the relevant information from the MINC file on
load.
|
class Minc1Image(SpatialImage):
    """Class for MINC1 format images

    The MINC1 image class uses the default header type, rather than a specific
    MINC header type - and reads the relevant information from the MINC file on
    load.
    """

    header_class: type[MincHeader] = Minc1Header
    header: MincHeader
    _meta_sniff_len: int = 4
    valid_exts: tuple[str, ...] = ('.mnc',)
    files_types: tuple[tuple[str, str], ...] = (('image', '.mnc'),)
    _compressed_suffixes: tuple[str, ...] = ('.gz', '.bz2', '.zst')
    makeable = True
    rw = False  # read-only format: no writer is defined here

    ImageArrayProxy = MincImageArrayProxy

    @classmethod
    def from_file_map(klass, file_map, *, mmap=True, keep_file_open=None):
        # Note that mmap and keep_file_open are included for proper
        # signature compatibility with other image types; they are not
        # referenced in this method body.
        with file_map['image'].get_prepare_fileobj() as fobj:
            minc_file = Minc1File(netcdf_file(fobj))
            affine = minc_file.get_affine()
            if affine.shape != (4, 4):
                raise MincError('Image does not have 3 spatial dimensions')
            data_dtype = minc_file.get_data_dtype()
            shape = minc_file.get_data_shape()
            zooms = minc_file.get_zooms()
            header = klass.header_class(data_dtype, shape, zooms)
            data = klass.ImageArrayProxy(minc_file)
        return klass(data, affine, header, extra=None, file_map=file_map)
|
(dataobj: 'ArrayLike', affine: 'np.ndarray | None', header: 'FileBasedHeader | ty.Mapping | None' = None, extra: 'ty.Mapping | None' = None, file_map: 'FileMap | None' = None)
|
56,774 |
nibabel.spatialimages
|
__init__
|
Initialize image
The image is a combination of (array-like, affine matrix, header), with
optional metadata in `extra`, and filename / file-like objects
contained in the `file_map` mapping.
Parameters
----------
dataobj : object
Object containing image data. It should be some object that returns an
array from ``np.asanyarray``. It should have a ``shape`` attribute
or property
affine : None or (4,4) array-like
homogeneous affine giving relationship between voxel coordinates and
world coordinates. Affine can also be None. In this case,
``obj.affine`` also returns None, and the affine as written to disk
will depend on the file format.
header : None or mapping or header instance, optional
metadata for this image format
extra : None or mapping, optional
metadata to associate with image that cannot be stored in the
metadata of this image type
file_map : mapping, optional
mapping giving file information for this image format
|
def __init__(
    self,
    dataobj: ArrayLike,
    affine: np.ndarray | None,
    header: FileBasedHeader | ty.Mapping | None = None,
    extra: ty.Mapping | None = None,
    file_map: FileMap | None = None,
):
    """Initialize image

    The image is a combination of (array-like, affine matrix, header), with
    optional metadata in `extra`, and filename / file-like objects
    contained in the `file_map` mapping.

    Parameters
    ----------
    dataobj : object
        Object containing image data. It should be some object that returns an
        array from ``np.asanyarray``. It should have a ``shape`` attribute
        or property
    affine : None or (4,4) array-like
        homogeneous affine giving relationship between voxel coordinates and
        world coordinates. Affine can also be None. In this case,
        ``obj.affine`` also returns None, and the affine as written to disk
        will depend on the file format.
    header : None or mapping or header instance, optional
        metadata for this image format
    extra : None or mapping, optional
        metadata to associate with image that cannot be stored in the
        metadata of this image type
    file_map : mapping, optional
        mapping giving file information for this image format
    """
    super().__init__(dataobj, header=header, extra=extra, file_map=file_map)
    if affine is not None:
        # Check that affine is array-like 4,4. Maybe this is too strict at
        # this abstract level, but so far I think all image formats we know
        # do need 4,4.
        # Copy affine to isolate from environment. Specify float type to
        # avoid surprising integer rounding when setting values into affine
        affine = np.array(affine, dtype=np.float64, copy=True)
        if not affine.shape == (4, 4):
            raise ValueError('Affine should be shape 4,4')
    self._affine = affine
    # if header not specified, get data type from input array
    if header is None:
        if hasattr(dataobj, 'dtype'):
            self._header.set_data_dtype(dataobj.dtype)
    # make header correspond with image and affine
    self.update_header()
    # NOTE(review): set but never read in this class's visible code —
    # presumably a legacy attribute kept for backward compatibility; confirm
    self._data_cache = None
|
(self, dataobj: nibabel.arrayproxy.ArrayLike, affine: numpy.ndarray | None, header: Union[nibabel.filebasedimages.FileBasedHeader, Mapping, NoneType] = None, extra: Optional[Mapping] = None, file_map: Optional[Mapping[str, nibabel.fileholders.FileHolder]] = None)
|
56,785 |
nibabel.filebasedimages
|
to_file_map
| null |
def to_file_map(self, file_map: FileMap | None = None, **kwargs) -> None:
    """Abstract serialization hook; concrete image classes must override."""
    raise NotImplementedError  # pragma: no cover
|
(self, file_map: Optional[Mapping[str, nibabel.fileholders.FileHolder]] = None, **kwargs) -> NoneType
|
56,789 |
nibabel.minc2
|
Minc2Image
|
Class for MINC2 images
The MINC2 image class uses the default header type, rather than a
specific MINC header type - and reads the relevant information from
the MINC file on load.
|
class Minc2Image(Minc1Image):
    """Class for MINC2 images

    The MINC2 image class uses the default header type, rather than a
    specific MINC header type - and reads the relevant information from
    the MINC file on load.
    """

    # MINC2 does not do compressed whole files
    _compressed_suffixes = ()
    header_class = Minc2Header
    header: Minc2Header

    @classmethod
    def from_file_map(klass, file_map, *, mmap=True, keep_file_open=None):
        # Import of h5py might take awhile for MPI-enabled builds
        # So we are importing it here "on demand"
        import h5py  # type: ignore

        holder = file_map['image']
        if holder.filename is None:
            # h5py is opened with a path below, so a file-like alone won't do
            raise MincError('MINC2 needs filename for load')
        minc_file = Minc2File(h5py.File(holder.filename, 'r'))
        affine = minc_file.get_affine()
        if affine.shape != (4, 4):
            raise MincError('Image does not have 3 spatial dimensions')
        data_dtype = minc_file.get_data_dtype()
        shape = minc_file.get_data_shape()
        zooms = minc_file.get_zooms()
        header = klass.header_class(data_dtype, shape, zooms)
        data = klass.ImageArrayProxy(minc_file)
        return klass(data, affine, header, extra=None, file_map=file_map)
|
(dataobj: 'ArrayLike', affine: 'np.ndarray | None', header: 'FileBasedHeader | ty.Mapping | None' = None, extra: 'ty.Mapping | None' = None, file_map: 'FileMap | None' = None)
|
56,806 |
nibabel.nifti1
|
Nifti1Header
|
Class for NIfTI1 header
The NIfTI1 header has many more coded fields than the simpler Analyze
variants. NIfTI1 headers also have extensions.
Nifti allows the header to be a separate file, as part of a nifti image /
header pair, or to precede the data in a single file. The object needs to
know which type it is, in order to manage the voxel offset pointing to the
data, extension reading, and writing the correct magic string.
This class handles the header-preceding-data case.
|
class Nifti1Header(SpmAnalyzeHeader):
"""Class for NIfTI1 header
The NIfTI1 header has many more coded fields than the simpler Analyze
variants. NIfTI1 headers also have extensions.
Nifti allows the header to be a separate file, as part of a nifti image /
header pair, or to precede the data in a single file. The object needs to
know which type it is, in order to manage the voxel offset pointing to the
data, extension reading, and writing the correct magic string.
This class handles the header-preceding-data case.
"""
# Copies of module level definitions
template_dtype = header_dtype
_data_type_codes = data_type_codes
# fields with recoders for their values
_field_recoders = {
'datatype': data_type_codes,
'qform_code': xform_codes,
'sform_code': xform_codes,
'intent_code': intent_codes,
'slice_code': slice_order_codes,
}
# data scaling capabilities
has_data_slope = True
has_data_intercept = True
# Extension class; should implement __call__ for construction, and
# ``from_fileobj`` for reading from file
exts_klass = Nifti1Extensions
# Signal whether this is single (header + data) file
is_single = True
# Default voxel data offsets for single and pair
pair_vox_offset = 0
single_vox_offset = 352
# Magics for single and pair
pair_magic = b'ni1'
single_magic = b'n+1'
# Quaternion threshold near 0, based on float32 precision
quaternion_threshold = np.finfo(np.float32).eps * 3
def __init__(self, binaryblock=None, endianness=None, check=True, extensions=()):
"""Initialize header from binary data block and extensions"""
super().__init__(binaryblock, endianness, check)
self.extensions = self.exts_klass(extensions)
def copy(self):
"""Return copy of header
Take reference to extensions as well as copy of header contents
"""
return self.__class__(self.binaryblock, self.endianness, False, self.extensions)
@classmethod
def from_fileobj(klass, fileobj, endianness=None, check=True):
raw_str = fileobj.read(klass.template_dtype.itemsize)
hdr = klass(raw_str, endianness, check)
# Read next 4 bytes to see if we have extensions. The nifti standard
# has this as a 4 byte string; if the first value is not zero, then we
# have extensions.
extension_status = fileobj.read(4)
# Need to test *slice* of extension_status to preserve byte string type
# on Python 3
if len(extension_status) < 4 or extension_status[0:1] == b'\x00':
return hdr
# If this is a detached header file read to end
if not klass.is_single:
extsize = -1
else: # otherwise read until the beginning of the data
extsize = hdr._structarr['vox_offset'] - fileobj.tell()
byteswap = endian_codes['native'] != hdr.endianness
hdr.extensions = klass.exts_klass.from_fileobj(fileobj, extsize, byteswap)
return hdr
def write_to(self, fileobj):
# First check that vox offset is large enough; set if necessary
if self.is_single:
vox_offset = self._structarr['vox_offset']
min_vox_offset = self.single_vox_offset + self.extensions.get_sizeondisk()
if vox_offset == 0: # vox offset unset; set as necessary
self._structarr['vox_offset'] = min_vox_offset
elif vox_offset < min_vox_offset:
raise HeaderDataError(
f'vox offset set to {vox_offset}, but need at least {min_vox_offset}'
)
super().write_to(fileobj)
# Write extensions
if len(self.extensions) == 0:
# If single file, write required 0 stream to signal no extensions
if self.is_single:
fileobj.write(b'\x00' * 4)
return
# Signal there are extensions that follow
fileobj.write(b'\x01\x00\x00\x00')
byteswap = endian_codes['native'] != self.endianness
self.extensions.write_to(fileobj, byteswap)
def get_best_affine(self):
"""Select best of available transforms"""
hdr = self._structarr
if hdr['sform_code'] != 0:
return self.get_sform()
if hdr['qform_code'] != 0:
return self.get_qform()
return self.get_base_affine()
@classmethod
def default_structarr(klass, endianness=None):
"""Create empty header binary block with given endianness"""
hdr_data = super().default_structarr(endianness)
if klass.is_single:
hdr_data['magic'] = klass.single_magic
else:
hdr_data['magic'] = klass.pair_magic
return hdr_data
@classmethod
def from_header(klass, header=None, check=True):
    """Class method to create header from another header
    Extend Analyze header copy by copying extensions from other Nifti
    types.
    Parameters
    ----------
    header : ``Header`` instance or mapping
       a header of this class, or another class of header for
       conversion to this type
    check : {True, False}
       whether to check header for integrity
    Returns
    -------
    hdr : header instance
       fresh header instance of our own class
    """
    new_hdr = super().from_header(header, check)
    # Only NIfTI1-family headers carry extensions; other header types
    # have nothing to copy over
    if isinstance(header, Nifti1Header):
        new_hdr.extensions[:] = header.extensions[:]
    return new_hdr
def get_data_shape(self):
    """Get shape of data
    Examples
    --------
    >>> hdr = Nifti1Header()
    >>> hdr.get_data_shape()
    (0,)
    >>> hdr.set_data_shape((1,2,3))
    >>> hdr.get_data_shape()
    (1, 2, 3)
    Expanding number of dimensions gets default zooms
    >>> hdr.get_zooms()
    (1.0, 1.0, 1.0)
    Notes
    -----
    Applies freesurfer hack for large vectors described in `issue 100`_ and
    `save_nifti.m <save77_>`_.
    Allows for freesurfer hack for 7th order icosahedron surface described
    in `issue 309`_, load_nifti.m_, and `save_nifti.m <save50_>`_.
    """
    shape = super().get_data_shape()
    # Apply freesurfer hack for large vectors
    # dim[1] == -1 flags that the true first-axis length is in glmin
    if shape[:3] == (-1, 1, 1):
        vec_len = int(self._structarr['glmin'])
        if vec_len == 0:
            raise HeaderDataError(
                '-1 in dim[1] but 0 in glmin; inconsistent freesurfer type header?'
            )
        return (vec_len, 1, 1) + shape[3:]
    # Apply freesurfer hack for ico7 surface
    # 27307 * 6 == 163842, the stored factorization of the vertex count
    elif shape[:3] == (27307, 1, 6):
        return (163842, 1, 1) + shape[3:]
    else:  # Normal case
        return shape
def set_data_shape(self, shape):
    """Set shape of data  # noqa
    If ``ndims == len(shape)`` then we set zooms for dimensions higher than
    ``ndims`` to 1.0
    Nifti1 images can have up to seven dimensions. For FreeSurfer-variant
    Nifti surface files, the first dimension is assumed to correspond to
    vertices/nodes on a surface, and dimensions two and three are
    constrained to have depth of 1. Dimensions 4-7 are constrained only by
    type bounds.
    Parameters
    ----------
    shape : sequence
       sequence of integers specifying data array shape
    Notes
    -----
    Applies freesurfer hack for large vectors described in `issue 100`_ and
    `save_nifti.m <save77_>`_.
    Allows for freesurfer hack for 7th order icosahedron surface described
    in `issue 309`_, load_nifti.m_, and `save_nifti.m <save50_>`_.
    The Nifti1 `standard header`_ allows for the following "point set"
    definition of a surface, not currently implemented in nibabel.
    ::
      To signify that the vector value at each voxel is really a
      spatial coordinate (e.g., the vertices or nodes of a surface mesh):
        - dataset must have a 5th dimension
        - intent_code must be NIFTI_INTENT_POINTSET
        - dim[0] = 5
        - dim[1] = number of points
        - dim[2] = dim[3] = dim[4] = 1
        - dim[5] must be the dimensionality of space (e.g., 3 => 3D space).
        - intent_name may describe the object these points come from
          (e.g., "pial", "gray/white" , "EEG", "MEG").
    .. _issue 100: https://github.com/nipy/nibabel/issues/100
    .. _issue 309: https://github.com/nipy/nibabel/issues/309
    .. _save77:
       https://github.com/fieldtrip/fieldtrip/blob/428798b/external/freesurfer/save_nifti.m#L77-L82
    .. _save50:
       https://github.com/fieldtrip/fieldtrip/blob/428798b/external/freesurfer/save_nifti.m#L50-L56
    .. _load_nifti.m:
       https://github.com/fieldtrip/fieldtrip/blob/428798b/external/freesurfer/load_nifti.m#L86-L89
    .. _standard header: http://nifti.nimh.nih.gov/pub/dist/src/niftilib/nifti1.h
    """
    hdr = self._structarr
    shape = tuple(shape)
    # Apply freesurfer hack for ico7 surface
    # 163842 vertices are stored via the factorization 27307 * 6
    if shape[:3] == (163842, 1, 1):
        shape = (27307, 1, 6) + shape[3:]
    # Apply freesurfer hack for large vectors
    # First axis too long for the dim field dtype: stash the true length
    # in glmin and flag it with dim[1] == -1
    elif (
        len(shape) >= 3
        and shape[1:3] == (1, 1)
        and shape[0] > np.iinfo(hdr['dim'].dtype.base).max
    ):
        try:
            hdr['glmin'] = shape[0]
        except OverflowError:
            overflow = True
        else:
            # Assigning to the field may silently truncate; verify round-trip
            overflow = hdr['glmin'] != shape[0]
        if overflow:
            raise HeaderDataError(f'shape[0] {shape[0]} does not fit in glmax datatype')
        warnings.warn(
            'Using large vector Freesurfer hack; header will '
            'not be compatible with SPM or FSL',
            stacklevel=2,
        )
        shape = (-1, 1, 1) + shape[3:]
    super().set_data_shape(shape)
def set_data_dtype(self, datatype):
    """Set numpy dtype for data from code or dtype or type
    Using :py:class:`int` or ``"int"`` is disallowed, as these types
    will be interpreted as ``np.int64``, which is almost never desired.
    ``np.int64`` is permitted for those intent on making poor choices.
    Examples
    --------
    >>> hdr = Nifti1Header()
    >>> hdr.set_data_dtype(np.uint8)
    >>> hdr.get_data_dtype()
    dtype('uint8')
    >>> hdr.set_data_dtype(np.dtype(np.uint8))
    >>> hdr.get_data_dtype()
    dtype('uint8')
    >>> hdr.set_data_dtype('implausible')
    Traceback (most recent call last):
       ...
    nibabel.spatialimages.HeaderDataError: data dtype "implausible" not recognized
    >>> hdr.set_data_dtype('none')
    Traceback (most recent call last):
       ...
    nibabel.spatialimages.HeaderDataError: data dtype "none" known but not supported
    >>> hdr.set_data_dtype(np.void)
    Traceback (most recent call last):
       ...
    nibabel.spatialimages.HeaderDataError: data dtype "<class 'numpy.void'>" known
    but not supported
    >>> hdr.set_data_dtype('int')
    Traceback (most recent call last):
       ...
    ValueError: Invalid data type 'int'. Specify a sized integer, e.g., 'uint8' or numpy.int16.
    >>> hdr.set_data_dtype(int)
    Traceback (most recent call last):
       ...
    ValueError: Invalid data type <class 'int'>. Specify a sized integer, e.g., 'uint8' or
    numpy.int16.
    >>> hdr.set_data_dtype('int64')
    >>> hdr.get_data_dtype() == np.dtype('int64')
    True
    """
    # Reject bare `int` / 'int' before delegating: callers must name an
    # explicitly sized integer type (np.int64 itself is still allowed)
    if not isinstance(datatype, np.dtype) and datatype in (int, 'int'):
        raise ValueError(
            f'Invalid data type {datatype!r}. Specify a sized integer, '
            "e.g., 'uint8' or numpy.int16."
        )
    super().set_data_dtype(datatype)
def get_qform_quaternion(self):
    """Reconstruct the full quaternion from the stored b, c, d fields.

    The missing component is filled in on the assumption that the
    quaternion has unit norm.
    """
    structarr = self._structarr
    bcd = [structarr[key] for key in ('quatern_b', 'quatern_c', 'quatern_d')]
    # Threshold is matched to the precision of the stored header values
    return fillpositive(bcd, self.quaternion_threshold)
def get_qform(self, coded=False):
    """Return 4x4 affine matrix from qform parameters in header.

    Parameters
    ----------
    coded : bool, optional
        If True, return ``(affine or None, code)``, where the affine is
        None when the qform code is 0. If False, always return the
        affine reconstructed from the qform quaternion.

    Returns
    -------
    affine : None or (4,4) ndarray
        Affine built from the stored quaternion, zooms and offsets.
    code : int
        Qform code; only returned if `coded` is True.

    Raises
    ------
    HeaderDataError
        If pixdim[1:4] contains negative values, or pixdim[0] (qfac)
        is not 1 or -1.
    """
    structarr = self._structarr
    code = int(structarr['qform_code'])
    if coded and code == 0:
        return None, 0
    rotation = quat2mat(self.get_qform_quaternion())
    zooms = structarr['pixdim'][1:4].copy()
    if np.any(zooms < 0):
        raise HeaderDataError('pixdims[1,2,3] should be positive')
    qfac = structarr['pixdim'][0]
    if qfac not in (-1, 1):
        raise HeaderDataError('qfac (pixdim[0]) should be 1 or -1')
    # qfac carries the sign of the third voxel axis
    zooms[-1] *= qfac
    affine = np.eye(4)
    affine[:3, :3] = np.dot(rotation, np.diag(zooms))
    affine[:3, 3] = [structarr['qoffset_x'], structarr['qoffset_y'], structarr['qoffset_z']]
    return (affine, code) if coded else affine
def set_qform(self, affine, code=None, strip_shears=True):
    """Set qform header values from 4x4 affine
    Parameters
    ----------
    affine : None or 4x4 array
        affine transform to write into sform. If None, only set code.
    code : None, string or integer, optional
        String or integer giving meaning of transform in *affine*.
        The default is None.  If code is None, then:
        * If affine is None, `code`-> 0
        * If affine not None and existing qform code in header == 0,
          `code`-> 2 (aligned)
        * If affine not None and existing qform code in header != 0,
          `code`-> existing qform code in header
    strip_shears : bool, optional
        Whether to strip shears in `affine`. If True, shears will be
        silently stripped. If False, the presence of shears will raise a
        ``HeaderDataError``
    Notes
    -----
    The qform transform only encodes translations, rotations and
    zooms. If there are shear components to the `affine` transform, and
    `strip_shears` is True (the default), the written qform gives the
    closest approximation where the rotation matrix is orthogonal. This is
    to allow quaternion representation. The orthogonal representation
    enforces orthogonal axes.
    Examples
    --------
    >>> hdr = Nifti1Header()
    >>> int(hdr['qform_code'])  # gives 0 - unknown
    0
    >>> affine = np.diag([1,2,3,1])
    >>> np.all(hdr.get_qform() == affine)
    False
    >>> hdr.set_qform(affine)
    >>> np.all(hdr.get_qform() == affine)
    True
    >>> int(hdr['qform_code'])  # gives 2 - aligned
    2
    >>> hdr.set_qform(affine, code='talairach')
    >>> int(hdr['qform_code'])
    3
    >>> hdr.set_qform(affine, code=None)
    >>> int(hdr['qform_code'])
    3
    >>> hdr.set_qform(affine, code='scanner')
    >>> int(hdr['qform_code'])
    1
    >>> hdr.set_qform(None)
    >>> int(hdr['qform_code'])
    0
    """
    hdr = self._structarr
    old_code = hdr['qform_code']
    # Resolve the code per the rules in the docstring: None derives a
    # default from the affine and the existing code
    if code is None:
        if affine is None:
            code = 0
        elif old_code == 0:
            code = 2  # aligned
        else:
            code = old_code
    else:  # code set
        code = self._field_recoders['qform_code'][code]
    hdr['qform_code'] = code
    if affine is None:
        return
    affine = np.asarray(affine)
    if not affine.shape == (4, 4):
        raise TypeError('Need 4x4 affine as input')
    trans = affine[:3, 3]
    RZS = affine[:3, :3]
    # Column norms of the rotation-zoom block give the voxel zooms
    zooms = np.sqrt(np.sum(RZS * RZS, axis=0))
    R = RZS / zooms
    # Set qfac to make R determinant positive
    if npl.det(R) > 0:
        qfac = 1
    else:
        qfac = -1
        R[:, -1] *= -1
    # Make R orthogonal (to allow quaternion representation)
    # The orthogonal representation enforces orthogonal axes
    # (a subtle requirement of the NIFTI format qform transform)
    # Transform below is polar decomposition, returning the closest
    # orthogonal matrix PR, to input R
    P, S, Qs = npl.svd(R)
    PR = np.dot(P, Qs)
    if not strip_shears and not np.allclose(PR, R):
        raise HeaderDataError('Shears in affine and `strip_shears` is False')
    # Convert to quaternion
    quat = mat2quat(PR)
    # Set into header
    hdr['qoffset_x'], hdr['qoffset_y'], hdr['qoffset_z'] = trans
    hdr['pixdim'][0] = qfac
    hdr['pixdim'][1:4] = zooms
    # Only b, c, d are stored; the remaining component is reconstructed
    # on read assuming a unit quaternion
    hdr['quatern_b'], hdr['quatern_c'], hdr['quatern_d'] = quat[1:]
def get_sform(self, coded=False):
    """Return 4x4 affine matrix from sform parameters in header.

    Parameters
    ----------
    coded : bool, optional
        If True, return ``(affine or None, code)``, where the affine is
        None when the sform code is 0. If False, always return the
        affine built from the srow fields.

    Returns
    -------
    affine : None or (4,4) ndarray
        Affine whose first three rows are the ``srow_x/y/z`` fields.
    code : int
        Sform code; only returned if `coded` is True.
    """
    structarr = self._structarr
    code = int(structarr['sform_code'])
    if coded and code == 0:
        return None, 0
    affine = np.eye(4)
    for row, field in enumerate(('srow_x', 'srow_y', 'srow_z')):
        affine[row, :] = structarr[field][:]
    return (affine, code) if coded else affine
def set_sform(self, affine, code=None):
    """Set sform transform from 4x4 affine
    Parameters
    ----------
    affine : None or 4x4 array
        affine transform to write into sform.  If None, only set `code`
    code : None, string or integer, optional
        String or integer giving meaning of transform in *affine*.
        The default is None.  If code is None, then:
        * If affine is None, `code`-> 0
        * If affine not None and existing sform code in header == 0,
          `code`-> 2 (aligned)
        * If affine not None and existing sform code in header != 0,
          `code`-> existing sform code in header
    Examples
    --------
    >>> hdr = Nifti1Header()
    >>> int(hdr['sform_code'])  # gives 0 - unknown
    0
    >>> affine = np.diag([1,2,3,1])
    >>> np.all(hdr.get_sform() == affine)
    False
    >>> hdr.set_sform(affine)
    >>> np.all(hdr.get_sform() == affine)
    True
    >>> int(hdr['sform_code'])  # gives 2 - aligned
    2
    >>> hdr.set_sform(affine, code='talairach')
    >>> int(hdr['sform_code'])
    3
    >>> hdr.set_sform(affine, code=None)
    >>> int(hdr['sform_code'])
    3
    >>> hdr.set_sform(affine, code='scanner')
    >>> int(hdr['sform_code'])
    1
    >>> hdr.set_sform(None)
    >>> int(hdr['sform_code'])
    0
    """
    hdr = self._structarr
    old_code = hdr['sform_code']
    # Resolve code per the docstring rules: None derives a default from
    # the affine and the existing code
    if code is None:
        if affine is None:
            code = 0
        elif old_code == 0:
            code = 2  # aligned
        else:
            code = old_code
    else:  # code set
        code = self._field_recoders['sform_code'][code]
    hdr['sform_code'] = code
    if affine is None:
        return
    affine = np.asarray(affine)
    # Affine rows are stored directly, one per srow field
    hdr['srow_x'][:] = affine[0, :]
    hdr['srow_y'][:] = affine[1, :]
    hdr['srow_z'][:] = affine[2, :]
def get_slope_inter(self):
    """Return data scaling (slope) and DC offset (intercept) from header.

    Returns
    -------
    slope : None or float
        Scaling. None when the stored slope is 0 or not finite (no
        valid scaling).
    inter : None or float
        Offset. None whenever `slope` is None.

    Raises
    ------
    HeaderDataError
        If the slope is valid but the intercept is not finite.
    """
    # Stored as float32 in the header, returned as Python floats
    slope = float(self['scl_slope'])
    inter = float(self['scl_inter'])
    # Zero or non-finite slope means "no scaling stored"
    if not np.isfinite(slope) or slope == 0:
        return None, None
    if not np.isfinite(inter):
        raise HeaderDataError(f'Valid slope but invalid intercept {inter}')
    return slope, inter
def set_slope_inter(self, slope, inter=None):
    """Set slope and / or intercept into header.

    Scaled data is ``(arr * slope) + inter``. A (NaN, NaN) pair signals
    a containing image to choose scaling automatically on write.

    Parameters
    ----------
    slope : None or float
        None implies NaN. If slope is None/NaN then `inter` must also be
        None/NaN. 0, Inf and -Inf raise HeaderDataError.
    inter : None or float, optional
        Intercept; None implies NaN. Inf and -Inf raise HeaderDataError.
    """
    slope = np.nan if slope is None else slope
    inter = np.nan if inter is None else inter
    if slope in (0, np.inf, -np.inf):
        raise HeaderDataError('Slope cannot be 0 or infinite')
    if inter in (np.inf, -np.inf):
        raise HeaderDataError('Intercept cannot be infinite')
    # NaN-ness must agree: either both unset or both set
    if np.isnan(slope) != np.isnan(inter):
        raise HeaderDataError('None or both of slope, inter should be nan')
    self._structarr['scl_slope'] = slope
    self._structarr['scl_inter'] = inter
def get_dim_info(self):
    """Get NIfTI MRI frequency/phase/slice dimension information.

    Returns
    -------
    freq : {None, 0, 1, 2}
        Data array axis that is the frequency encode direction.
    phase : {None, 0, 1, 2}
        Data array axis that is the phase encode direction.
    slice : {None, 0, 1, 2}
        Data array axis that is the slice encode direction.

    Each is decoded from a 2-bit field of the ``dim_info`` byte
    (freq in bits 0-1, phase in bits 2-3, slice in bits 4-5); a stored
    value of 0 means "unspecified" and is returned as None.
    """
    info = int(self._structarr['dim_info'])
    codes = [(info >> shift) & 3 for shift in (0, 2, 4)]
    # Stored codes are axis + 1, with 0 reserved for "not set"
    return tuple(code - 1 if code else None for code in codes)
def set_dim_info(self, freq=None, phase=None, slice=None):
    """Set NIfTI MRI frequency/phase/slice dimension information.

    Parameters
    ----------
    freq : {None, 0, 1, 2}
        Axis of data array referring to frequency encoding.
    phase : {None, 0, 1, 2}
        Axis of data array referring to phase encoding.
    slice : {None, 0, 1, 2}
        Axis of data array referring to slice encoding.

    ``None`` means the axis is not specified. All three are packed into
    the single ``dim_info`` header byte as 2-bit fields holding
    ``axis + 1`` (0 meaning "not set").

    Raises
    ------
    HeaderDataError
        If any given axis is not in (0, 1, 2).
    """
    info = 0
    for shift, axis in ((0, freq), (2, phase), (4, slice)):
        if axis is None:
            continue
        if axis not in (0, 1, 2):
            raise HeaderDataError('Inputs must be in [None, 0, 1, 2]')
        info |= ((axis + 1) & 3) << shift
    self._structarr['dim_info'] = info
def get_intent(self, code_repr='label'):
    """Get intent code, parameters and name
    Parameters
    ----------
    code_repr : string
       string giving output form of intent code representation.
       Default is 'label'; use 'code' for integer representation.
    Returns
    -------
    code : string or integer
        intent code, or string describing code
    parameters : tuple
        parameters for the intent
    name : string
        intent name
    Examples
    --------
    >>> hdr = Nifti1Header()
    >>> hdr.set_intent('t test', (10,), name='some score')
    >>> hdr.get_intent()
    ('t test', (10.0,), 'some score')
    >>> hdr.get_intent('code')
    (3, (10.0,), 'some score')
    """
    hdr = self._structarr
    recoder = self._field_recoders['intent_code']
    code = int(hdr['intent_code'])
    known_intent = code in recoder
    if code_repr == 'code':
        label = code
    elif code_repr == 'label':
        if known_intent:
            label = recoder.label[code]
        else:
            label = 'unknown code ' + str(code)
    else:
        raise TypeError('repr can be "label" or "code"')
    # Parameters live in fields intent_p1..intent_p3; unknown codes are
    # treated as having no declared parameters
    n_params = len(recoder.parameters[code]) if known_intent else 0
    params = (float(hdr['intent_p%d' % (i + 1)]) for i in range(n_params))
    name = hdr['intent_name'].item().decode('latin-1')
    return label, tuple(params), name
def set_intent(self, code, params=(), name='', allow_unknown=False):
    """Set the intent code, parameters and name
    If parameters are not specified, assumed to be all zero. Each
    intent code has a set number of parameters associated. If you
    specify any parameters, then it will need to be the correct number
    (e.g the "f test" intent requires 2).  However, parameters can
    also be set in the file data, so we also allow not setting any
    parameters (empty parameter tuple).
    Parameters
    ----------
    code : integer or string
        code specifying nifti intent
    params : list, tuple of scalars
        parameters relating to intent (see intent_codes)
        defaults to ().  Unspecified parameters are set to 0.0
    name : string
        intent name (description). Defaults to ''
    allow_unknown : {False, True}, optional
        Allow unknown integer intent codes. If False (the default),
        a KeyError is raised on attempts to set the intent
        to an unknown code.
    Returns
    -------
    None
    Examples
    --------
    >>> hdr = Nifti1Header()
    >>> hdr.set_intent(0)  # no intent
    >>> hdr.set_intent('z score')
    >>> hdr.get_intent()
    ('z score', (), '')
    >>> hdr.get_intent('code')
    (5, (), '')
    >>> hdr.set_intent('t test', (10,), name='some score')
    >>> hdr.get_intent()
    ('t test', (10.0,), 'some score')
    >>> hdr.set_intent('f test', (2, 10), name='another score')
    >>> hdr.get_intent()
    ('f test', (2.0, 10.0), 'another score')
    >>> hdr.set_intent('f test')
    >>> hdr.get_intent()
    ('f test', (0.0, 0.0), '')
    >>> hdr.set_intent(9999, allow_unknown=True) # unknown code
    >>> hdr.get_intent()
    ('unknown code 9999', (), '')
    """
    hdr = self._structarr
    known_intent = code in intent_codes
    if not known_intent:
        # We can set intent via an unknown integer code, but can't via an
        # unknown string label
        if not allow_unknown or isinstance(code, str):
            raise KeyError('Unknown intent code: ' + str(code))
    if known_intent:
        icode = intent_codes.code[code]
        p_descr = intent_codes.parameters[code]
    else:
        # Unknown integer code: accept up to three generic parameters
        icode = code
        p_descr = ('p1', 'p2', 'p3')
    # Either all declared parameters or none at all
    if len(params) and len(params) != len(p_descr):
        raise HeaderDataError(f'Need params of form {p_descr}, or empty')
    hdr['intent_code'] = icode
    hdr['intent_name'] = name
    # Unspecified parameters are zero-filled into intent_p1..intent_p3
    all_params = [0] * 3
    all_params[: len(params)] = params[:]
    for i, param in enumerate(all_params):
        hdr['intent_p%d' % (i + 1)] = param
def get_slice_duration(self):
    """Return the time to acquire one slice.

    Returns
    -------
    slice_duration : float
        Time to acquire one slice.

    Raises
    ------
    HeaderDataError
        If the slice dimension is not defined in ``dim_info``; the
        NIfTI1 spec appears to require it for slice_duration to have
        meaning.
    """
    if self.get_dim_info()[2] is None:
        raise HeaderDataError('Slice dimension must be set for duration to be valid')
    return float(self._structarr['slice_duration'])
def set_slice_duration(self, duration):
    """Store the time to acquire one slice.

    Parameters
    ----------
    duration : scalar
        Time to acquire one slice.

    Raises
    ------
    HeaderDataError
        If the slice dimension is not defined in ``dim_info``.
    """
    if self.get_dim_info()[2] is None:
        raise HeaderDataError('Slice dimension must be set for duration to be valid')
    self._structarr['slice_duration'] = duration
def get_n_slices(self):
    """Return the length of the slice-encoding dimension.

    Raises
    ------
    HeaderDataError
        If the slice dimension is not set in ``dim_info``, or indexes
        outside the current data shape.
    """
    slice_dim = self.get_dim_info()[2]
    if slice_dim is None:
        raise HeaderDataError('Slice dimension not set in header dim_info')
    shape = self.get_data_shape()
    try:
        return shape[slice_dim]
    except IndexError:
        raise HeaderDataError(
            f'Slice dimension index ({slice_dim}) outside shape tuple ({shape})'
        )
def get_slice_times(self):
    """Get slice times from slice timing information
    Returns
    -------
    slice_times : tuple
        Times of acquisition of slices, where 0 is the beginning of
        the acquisition, ordered by position in file.  nifti allows
        slices at the top and bottom of the volume to be excluded from
        the standard slice timing specification, and calls these
        "padding slices".  We give padding slices ``None`` as a time
        of acquisition
    Examples
    --------
    >>> hdr = Nifti1Header()
    >>> hdr.set_dim_info(slice=2)
    >>> hdr.set_data_shape((1, 1, 7))
    >>> hdr.set_slice_duration(0.1)
    >>> hdr['slice_code'] = slice_order_codes['sequential increasing']
    >>> slice_times = hdr.get_slice_times()
    >>> np.allclose(slice_times, [0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6])
    True
    """
    hdr = self._structarr
    slice_len = self.get_n_slices()
    duration = self.get_slice_duration()
    slabel = self.get_value_label('slice_code')
    if slabel == 'unknown':
        raise HeaderDataError('Cannot get slice times when slice code is "unknown"')
    slice_start, slice_end = (int(hdr['slice_start']), int(hdr['slice_end']))
    if slice_start < 0:
        raise HeaderDataError('slice_start should be >= 0')
    # A slice_end of 0 means "unset"; default to the last slice
    if slice_end == 0:
        slice_end = slice_len - 1
    n_timed = slice_end - slice_start + 1
    if n_timed < 1:
        raise HeaderDataError('slice_end should be > slice_start')
    # Temporal order of each timed slice, scaled by per-slice duration
    st_order = self._slice_time_order(slabel, n_timed)
    times = st_order * duration
    # Untimed (padding) slices at either end get None
    return (None,) * slice_start + tuple(times) + (None,) * (slice_len - slice_end - 1)
def set_slice_times(self, slice_times):
    """Set slice times into *hdr*
    Parameters
    ----------
    slice_times : tuple
        tuple of slice times, one value per slice
        tuple can include None to indicate no slice time for that slice
    Examples
    --------
    >>> hdr = Nifti1Header()
    >>> hdr.set_dim_info(slice=2)
    >>> hdr.set_data_shape([1, 1, 7])
    >>> hdr.set_slice_duration(0.1)
    >>> times = [None, 0.2, 0.4, 0.1, 0.3, 0.0, None]
    >>> hdr.set_slice_times(times)
    >>> hdr.get_value_label('slice_code')
    'alternating decreasing'
    >>> int(hdr['slice_start'])
    1
    >>> int(hdr['slice_end'])
    5
    """
    # Check if number of slices matches header
    hdr = self._structarr
    slice_len = self.get_n_slices()
    if slice_len != len(slice_times):
        raise HeaderDataError('Number of slice times does not match number of slices')
    # Extract Nones at beginning and end.  Check for others
    # Leading Nones are padding slices; the first timed slice sets
    # slice_start
    for ind, time in enumerate(slice_times):
        if time is not None:
            slice_start = ind
            break
    else:
        raise HeaderDataError('Not all slice times can be None')
    # Trailing Nones likewise; the last timed slice sets slice_end
    for ind, time in enumerate(slice_times[::-1]):
        if time is not None:
            slice_end = slice_len - ind - 1
            break
    timed = slice_times[slice_start : slice_end + 1]
    for time in timed:
        if time is None:
            raise HeaderDataError('Cannot have None in middle of slice time vector')
    # Find slice duration, check times are compatible with single
    # duration
    tdiffs = np.diff(np.sort(timed))
    if not np.allclose(np.diff(tdiffs), 0):
        raise HeaderDataError('Slice times not compatible with single slice duration')
    duration = np.mean(tdiffs)
    # To slice time order
    st_order = np.round(np.array(timed) / duration)
    # Check if slice times fit known schemes
    n_timed = len(timed)
    so_recoder = self._field_recoders['slice_code']
    labels = so_recoder.value_set('label')
    labels.remove('unknown')
    matching_labels = [
        label for label in labels if np.all(st_order == self._slice_time_order(label, n_timed))
    ]
    if not matching_labels:
        raise HeaderDataError(f'slice ordering of {st_order} fits with no known scheme')
    if len(matching_labels) > 1:
        warnings.warn(
            f"Multiple slice orders satisfy: {', '.join(matching_labels)}. "
            'Choosing the first one'
        )
    label = matching_labels[0]
    # Set values into header
    hdr['slice_start'] = slice_start
    hdr['slice_end'] = slice_end
    hdr['slice_duration'] = duration
    hdr['slice_code'] = slice_order_codes.code[label]
def _slice_time_order(self, slabel, n_slices):
"""Supporting function to give time order of slices from label"""
if slabel == 'sequential increasing':
sp_ind_time_order = list(range(n_slices))
elif slabel == 'sequential decreasing':
sp_ind_time_order = list(range(n_slices)[::-1])
elif slabel == 'alternating increasing':
sp_ind_time_order = list(range(0, n_slices, 2)) + list(range(1, n_slices, 2))
elif slabel == 'alternating decreasing':
sp_ind_time_order = list(range(n_slices - 1, -1, -2)) + list(
range(n_slices - 2, -1, -2)
)
elif slabel == 'alternating increasing 2':
sp_ind_time_order = list(range(1, n_slices, 2)) + list(range(0, n_slices, 2))
elif slabel == 'alternating decreasing 2':
sp_ind_time_order = list(range(n_slices - 2, -1, -2)) + list(
range(n_slices - 1, -1, -2)
)
else:
raise HeaderDataError(f'We do not handle slice ordering "{slabel}"')
return np.argsort(sp_ind_time_order)
def get_xyzt_units(self):
    """Return (spatial_units, time_units) labels from ``xyzt_units``.

    The low three bits of the stored byte hold the spatial unit code;
    the remainder holds the time unit code.
    """
    combined = self.structarr['xyzt_units']
    space_code = combined % 8
    time_code = combined - space_code
    return (unit_codes.label[space_code], unit_codes.label[time_code])
def set_xyzt_units(self, xyz=None, t=None):
    """Set spatial and temporal unit codes into the ``xyzt_units`` field.

    Parameters
    ----------
    xyz : None or ``unit_codes`` key, optional
        Spatial unit. None is treated as code 0 ("unknown").
    t : None or ``unit_codes`` key, optional
        Temporal unit. None is treated as code 0 ("unknown").
    """
    # Bug-fix tidy: the original computed xyz_code / t_code from the
    # current header value and then immediately overwrote them (dead
    # stores); removed with no behavior change.
    if xyz is None:
        xyz = 0
    if t is None:
        t = 0
    xyz_code = unit_codes[xyz]
    t_code = unit_codes[t]
    # Stored byte is the sum of the spatial and temporal unit codes
    self.structarr['xyzt_units'] = xyz_code + t_code
def _clean_after_mapping(self):
"""Set format-specific stuff after converting header from mapping
Clean up header after it has been initialized from an
``as_analyze_map`` method of another header type
See :meth:`nibabel.analyze.AnalyzeHeader._clean_after_mapping` for a
more detailed description.
"""
self._structarr['magic'] = self.single_magic if self.is_single else self.pair_magic
""" Checks only below here """
@classmethod
def _get_checks(klass):
    """Return the tuple of header field check functions for this class."""
    # We need to return our own versions of - e.g. chk_datatype, to
    # pick up the Nifti datatypes from our class
    return (
        klass._chk_sizeof_hdr,
        klass._chk_datatype,
        klass._chk_bitpix,
        klass._chk_pixdims,
        klass._chk_qfac,
        klass._chk_magic,
        klass._chk_offset,
        klass._chk_qform_code,
        klass._chk_sform_code,
    )
@staticmethod
def _chk_qfac(hdr, fix=False):
    """Check that pixdim[0] (qfac) is 1 or -1; `fix` resets it to 1."""
    report = Report(HeaderDataError)
    if hdr['pixdim'][0] in (-1, 1):
        return hdr, report
    report.problem_level = 20
    report.problem_msg = 'pixdim[0] (qfac) should be 1 (default) or -1'
    if fix:
        hdr['pixdim'][0] = 1
        report.fix_msg = 'setting qfac to 1'
    return hdr, report
@staticmethod
def _chk_magic(hdr, fix=False):
    """Check the magic string; an invalid magic cannot be repaired."""
    report = Report(HeaderDataError)
    magic = hdr['magic'].item()
    if magic not in (hdr.pair_magic, hdr.single_magic):
        report.problem_msg = f'magic string {magic.decode("latin1")!r} is not valid'
        report.problem_level = 45
        if fix:
            report.fix_msg = 'leaving as is, but future errors are likely'
    return hdr, report
@staticmethod
def _chk_offset(hdr, fix=False):
    """Check ``vox_offset``: must leave room for a single-file header;
    values not divisible by 16 are flagged as SPM-incompatible."""
    rep = Report(HeaderDataError)
    # for ease of later string formatting, use scalar of byte string
    magic = hdr['magic'].item()
    offset = hdr['vox_offset'].item()
    # An offset of 0 is treated as "unset" and passes the check
    if offset == 0:
        return hdr, rep
    if magic == hdr.single_magic and offset < hdr.single_vox_offset:
        rep.problem_level = 40
        rep.problem_msg = 'vox offset %d too low for single file nifti1' % offset
        if fix:
            hdr['vox_offset'] = hdr.single_vox_offset
            rep.fix_msg = f'setting to minimum value of {hdr.single_vox_offset}'
        return hdr, rep
    if not offset % 16:
        return hdr, rep
    # SPM uses memory mapping to read the data, and
    # apparently this has to start on 16 byte boundaries
    rep.problem_msg = f'vox offset (={offset:g}) not divisible by 16, not SPM compatible'
    rep.problem_level = 30
    if fix:
        rep.fix_msg = 'leaving at current value'
    return hdr, rep
@classmethod
def _chk_qform_code(klass, hdr, fix=False):
    """Check qform_code validity via the shared xform-code check."""
    return klass._chk_xform_code('qform_code', hdr, fix)
@classmethod
def _chk_sform_code(klass, hdr, fix=False):
    """Check sform_code validity via the shared xform-code check."""
    return klass._chk_xform_code('sform_code', hdr, fix)
@classmethod
def _chk_xform_code(klass, code_type, hdr, fix):
    """Shared check for qform_code / sform_code; `fix` resets bad codes to 0."""
    # utility method for sform and qform codes
    rep = Report(HeaderDataError)
    code = int(hdr[code_type])
    recoder = klass._field_recoders[code_type]
    if code in recoder.value_set():
        return hdr, rep
    rep.problem_level = 30
    rep.problem_msg = '%s %d not valid' % (code_type, code)
    if fix:
        hdr[code_type] = 0
        rep.fix_msg = 'setting to 0'
    return hdr, rep
@classmethod
def may_contain_header(klass, binaryblock):
    """Return True if `binaryblock` plausibly starts with a NIfTI-1 header.

    Checks only that the block is long enough and that the magic field
    reads b'ni1' (pair) or b'n+1' (single); it does not validate the
    remaining fields.
    """
    if len(binaryblock) < klass.sizeof_hdr:
        return False
    # View the leading bytes through the header dtype without copying
    hdr_struct = np.ndarray(
        shape=(), dtype=header_dtype, buffer=binaryblock[: klass.sizeof_hdr]
    )
    return hdr_struct['magic'] in (b'ni1', b'n+1')
|
(binaryblock=None, endianness=None, check=True, extensions=())
|
56,809 |
nibabel.nifti1
|
__init__
|
Initialize header from binary data block and extensions
|
def __init__(self, binaryblock=None, endianness=None, check=True, extensions=()):
    """Initialize header from binary data block and extensions

    `binaryblock`, `endianness` and `check` are passed to the parent
    constructor; `extensions` is an iterable of extensions wrapped in
    this class's ``exts_klass`` container.
    """
    super().__init__(binaryblock, endianness, check)
    self.extensions = self.exts_klass(extensions)
|
(self, binaryblock=None, endianness=None, check=True, extensions=())
|
56,815 |
nibabel.nifti1
|
_chk_magic
| null |
@staticmethod
def _chk_magic(hdr, fix=False):
rep = Report(HeaderDataError)
magic = hdr['magic'].item()
if magic in (hdr.pair_magic, hdr.single_magic):
return hdr, rep
rep.problem_msg = f'magic string {magic.decode("latin1")!r} is not valid'
rep.problem_level = 45
if fix:
rep.fix_msg = 'leaving as is, but future errors are likely'
return hdr, rep
|
(hdr, fix=False)
|
56,816 |
nibabel.nifti1
|
_chk_offset
| null |
@staticmethod
def _chk_offset(hdr, fix=False):
rep = Report(HeaderDataError)
# for ease of later string formatting, use scalar of byte string
magic = hdr['magic'].item()
offset = hdr['vox_offset'].item()
if offset == 0:
return hdr, rep
if magic == hdr.single_magic and offset < hdr.single_vox_offset:
rep.problem_level = 40
rep.problem_msg = 'vox offset %d too low for single file nifti1' % offset
if fix:
hdr['vox_offset'] = hdr.single_vox_offset
rep.fix_msg = f'setting to minimum value of {hdr.single_vox_offset}'
return hdr, rep
if not offset % 16:
return hdr, rep
# SPM uses memory mapping to read the data, and
# apparently this has to start on 16 byte boundaries
rep.problem_msg = f'vox offset (={offset:g}) not divisible by 16, not SPM compatible'
rep.problem_level = 30
if fix:
rep.fix_msg = 'leaving at current value'
return hdr, rep
|
(hdr, fix=False)
|
56,818 |
nibabel.nifti1
|
_chk_qfac
| null |
@staticmethod
def _chk_qfac(hdr, fix=False):
rep = Report(HeaderDataError)
if hdr['pixdim'][0] in (-1, 1):
return hdr, rep
rep.problem_level = 20
rep.problem_msg = 'pixdim[0] (qfac) should be 1 (default) or -1'
if fix:
hdr['pixdim'][0] = 1
rep.fix_msg = 'setting qfac to 1'
return hdr, rep
|
(hdr, fix=False)
|
56,819 |
nibabel.nifti1
|
_clean_after_mapping
|
Set format-specific stuff after converting header from mapping
Clean up header after it has been initialized from an
``as_analyze_map`` method of another header type
See :meth:`nibabel.analyze.AnalyzeHeader._clean_after_mapping` for a
more detailed description.
|
def _clean_after_mapping(self):
"""Set format-specific stuff after converting header from mapping
Clean up header after it has been initialized from an
``as_analyze_map`` method of another header type
See :meth:`nibabel.analyze.AnalyzeHeader._clean_after_mapping` for a
more detailed description.
"""
self._structarr['magic'] = self.single_magic if self.is_single else self.pair_magic
|
(self)
|
56,820 |
nibabel.nifti1
|
_slice_time_order
|
Supporting function to give time order of slices from label
|
def _slice_time_order(self, slabel, n_slices):
"""Supporting function to give time order of slices from label"""
if slabel == 'sequential increasing':
sp_ind_time_order = list(range(n_slices))
elif slabel == 'sequential decreasing':
sp_ind_time_order = list(range(n_slices)[::-1])
elif slabel == 'alternating increasing':
sp_ind_time_order = list(range(0, n_slices, 2)) + list(range(1, n_slices, 2))
elif slabel == 'alternating decreasing':
sp_ind_time_order = list(range(n_slices - 1, -1, -2)) + list(
range(n_slices - 2, -1, -2)
)
elif slabel == 'alternating increasing 2':
sp_ind_time_order = list(range(1, n_slices, 2)) + list(range(0, n_slices, 2))
elif slabel == 'alternating decreasing 2':
sp_ind_time_order = list(range(n_slices - 2, -1, -2)) + list(
range(n_slices - 1, -1, -2)
)
else:
raise HeaderDataError(f'We do not handle slice ordering "{slabel}"')
return np.argsort(sp_ind_time_order)
|
(self, slabel, n_slices)
|
56,824 |
nibabel.nifti1
|
copy
|
Return copy of header
Take reference to extensions as well as copy of header contents
|
def copy(self):
    """Return copy of header.

    The header contents are copied (via the binary block), but the
    extensions list is shared by reference with the original.
    """
    duplicate = self.__class__(self.binaryblock, self.endianness, False, self.extensions)
    return duplicate
|
(self)
|
56,829 |
nibabel.nifti1
|
get_best_affine
|
Select best of available transforms
|
def get_best_affine(self):
    """Select best of available transforms.

    Preference order: sform (if its code is nonzero), then qform (if its
    code is nonzero), then the fall-back affine built from zooms.
    """
    structarr = self._structarr
    if structarr['sform_code'] != 0:
        return self.get_sform()
    elif structarr['qform_code'] != 0:
        return self.get_qform()
    return self.get_base_affine()
|
(self)
|
56,832 |
nibabel.nifti1
|
get_data_shape
|
Get shape of data
Examples
--------
>>> hdr = Nifti1Header()
>>> hdr.get_data_shape()
(0,)
>>> hdr.set_data_shape((1,2,3))
>>> hdr.get_data_shape()
(1, 2, 3)
Expanding number of dimensions gets default zooms
>>> hdr.get_zooms()
(1.0, 1.0, 1.0)
Notes
-----
Applies freesurfer hack for large vectors described in `issue 100`_ and
`save_nifti.m <save77_>`_.
Allows for freesurfer hack for 7th order icosahedron surface described
in `issue 309`_, load_nifti.m_, and `save_nifti.m <save50_>`_.
|
def get_data_shape(self):
    """Get shape of data, undoing FreeSurfer dim encoding hacks.

    Examples
    --------
    >>> hdr = Nifti1Header()
    >>> hdr.get_data_shape()
    (0,)
    >>> hdr.set_data_shape((1,2,3))
    >>> hdr.get_data_shape()
    (1, 2, 3)

    Notes
    -----
    Undoes the FreeSurfer hack for large vectors (dim[1] == -1 with the
    true length in ``glmin``, see `issue 100`_) and the hack for 7th-order
    icosahedron surfaces stored as (27307, 1, 6) (see `issue 309`_).

    .. _issue 100: https://github.com/nipy/nibabel/issues/100
    .. _issue 309: https://github.com/nipy/nibabel/issues/309
    """
    shape = super().get_data_shape()
    # Large-vector hack: dim[1] == -1 flags that glmin holds the length
    if shape[:3] == (-1, 1, 1):
        vec_len = int(self._structarr['glmin'])
        if not vec_len:
            raise HeaderDataError(
                '-1 in dim[1] but 0 in glmin; inconsistent freesurfer type header?'
            )
        return (vec_len, 1, 1) + shape[3:]
    # ico7 surface hack: (27307, 1, 6) really means 163842 vertices
    if shape[:3] == (27307, 1, 6):
        return (163842, 1, 1) + shape[3:]
    return shape
|
(self)
|
56,833 |
nibabel.nifti1
|
get_dim_info
|
Gets NIfTI MRI slice etc dimension information
Returns
-------
freq : {None,0,1,2}
Which data array axis is frequency encode direction
phase : {None,0,1,2}
Which data array axis is phase encode direction
slice : {None,0,1,2}
Which data array axis is slice encode direction
where ``data array`` is the array returned by ``get_data``
Because NIfTI1 files are natively Fortran indexed:
0 is fastest changing in file
1 is medium changing in file
2 is slowest changing in file
``None`` means the axis appears not to be specified.
Examples
--------
See set_dim_info function
|
def get_dim_info(self):
    """Get NIfTI MRI slice etc dimension information.

    Returns
    -------
    freq : {None,0,1,2}
        Data array axis that is the frequency encode direction.
    phase : {None,0,1,2}
        Data array axis that is the phase encode direction.
    slice : {None,0,1,2}
        Data array axis that is the slice encode direction.

    Axes count in Fortran (file) order: 0 is fastest changing in the file.
    ``None`` means the axis appears not to be specified (stored as 0).
    """
    packed = int(self._structarr['dim_info'])
    # Each role occupies 2 bits; a stored value of 0 means "unset",
    # otherwise the axis is the stored value minus one.
    fields = [(packed >> shift) & 3 for shift in (0, 2, 4)]
    freq, phase, slice_ = (f - 1 if f else None for f in fields)
    return freq, phase, slice_
|
(self)
|
56,834 |
nibabel.nifti1
|
get_intent
|
Get intent code, parameters and name
Parameters
----------
code_repr : string
string giving output form of intent code representation.
Default is 'label'; use 'code' for integer representation.
Returns
-------
code : string or integer
intent code, or string describing code
parameters : tuple
parameters for the intent
name : string
intent name
Examples
--------
>>> hdr = Nifti1Header()
>>> hdr.set_intent('t test', (10,), name='some score')
>>> hdr.get_intent()
('t test', (10.0,), 'some score')
>>> hdr.get_intent('code')
(3, (10.0,), 'some score')
|
def get_intent(self, code_repr='label'):
    """Get intent code, parameters and name.

    Parameters
    ----------
    code_repr : string
        Output form of the intent code: 'label' (default) for the string
        description, 'code' for the raw integer.

    Returns
    -------
    code : string or integer
        intent code, or string describing code
    parameters : tuple
        parameters for the intent
    name : string
        intent name

    Examples
    --------
    >>> hdr = Nifti1Header()
    >>> hdr.set_intent('t test', (10,), name='some score')
    >>> hdr.get_intent()
    ('t test', (10.0,), 'some score')
    >>> hdr.get_intent('code')
    (3, (10.0,), 'some score')
    """
    hdr = self._structarr
    recoder = self._field_recoders['intent_code']
    code = int(hdr['intent_code'])
    known = code in recoder
    if code_repr == 'code':
        label = code
    elif code_repr == 'label':
        label = recoder.label[code] if known else 'unknown code ' + str(code)
    else:
        raise TypeError('repr can be "label" or "code"')
    # Unknown codes have no declared parameters, so read none back
    n_params = len(recoder.parameters[code]) if known else 0
    params = tuple(float(hdr[f'intent_p{i}']) for i in range(1, n_params + 1))
    name = hdr['intent_name'].item().decode('latin-1')
    return label, params, name
|
(self, code_repr='label')
|
56,835 |
nibabel.nifti1
|
get_n_slices
|
Return the number of slices
|
def get_n_slices(self):
    """Return the number of slices.

    Requires the slice dimension to be set in ``dim_info``; raises
    HeaderDataError when it is unset or falls outside the data shape.
    """
    slice_dim = self.get_dim_info()[2]
    if slice_dim is None:
        raise HeaderDataError('Slice dimension not set in header dim_info')
    shape = self.get_data_shape()
    try:
        return shape[slice_dim]
    except IndexError:
        raise HeaderDataError(
            f'Slice dimension index ({slice_dim}) outside shape tuple ({shape})'
        )
|
(self)
|
56,836 |
nibabel.nifti1
|
get_qform
|
Return 4x4 affine matrix from qform parameters in header
Parameters
----------
coded : bool, optional
If True, return {affine or None}, and qform code. If False, just
return affine. {affine or None} means, return None if qform code
== 0, and affine otherwise.
Returns
-------
affine : None or (4,4) ndarray
If `coded` is False, always return affine reconstructed from qform
quaternion. If `coded` is True, return None if qform code is 0,
else return the affine.
code : int
Qform code. Only returned if `coded` is True.
|
def get_qform(self, coded=False):
    """Return 4x4 affine matrix from qform parameters in header.

    Parameters
    ----------
    coded : bool, optional
        If True, return ``(affine or None, code)`` — affine is None when
        the qform code is 0. If False (default), always return the affine
        reconstructed from the stored quaternion and zooms.

    Returns
    -------
    affine : None or (4,4) ndarray
    code : int
        Qform code; only returned if `coded` is True.
    """
    hdr = self._structarr
    code = int(hdr['qform_code'])
    if coded and code == 0:
        return None, 0
    rotation = quat2mat(self.get_qform_quaternion())
    zooms = hdr['pixdim'][1:4].copy()
    if np.any(zooms < 0):
        raise HeaderDataError('pixdims[1,2,3] should be positive')
    qfac = hdr['pixdim'][0]
    if qfac not in (-1, 1):
        raise HeaderDataError('qfac (pixdim[0]) should be 1 or -1')
    # qfac flips the z zoom to encode handedness
    zooms[-1] *= qfac
    affine = np.eye(4)
    affine[:3, :3] = np.dot(rotation, np.diag(zooms))
    affine[:3, 3] = [hdr['qoffset_x'], hdr['qoffset_y'], hdr['qoffset_z']]
    return (affine, code) if coded else affine
|
(self, coded=False)
|
56,837 |
nibabel.nifti1
|
get_qform_quaternion
|
Compute quaternion from b, c, d of quaternion
Fills a value by assuming this is a unit quaternion
|
def get_qform_quaternion(self):
    """Compute full quaternion from stored b, c, d components.

    The w component is filled in assuming a unit quaternion, using a
    threshold matched to the precision of the stored header values.
    """
    hdr = self._structarr
    bcd = [hdr[key] for key in ('quatern_b', 'quatern_c', 'quatern_d')]
    return fillpositive(bcd, self.quaternion_threshold)
|
(self)
|
56,838 |
nibabel.nifti1
|
get_sform
|
Return 4x4 affine matrix from sform parameters in header
Parameters
----------
coded : bool, optional
If True, return {affine or None}, and sform code. If False, just
return affine. {affine or None} means, return None if sform code
== 0, and affine otherwise.
Returns
-------
affine : None or (4,4) ndarray
If `coded` is False, always return affine from sform fields. If
`coded` is True, return None if sform code is 0, else return the
affine.
code : int
Sform code. Only returned if `coded` is True.
|
def get_sform(self, coded=False):
    """Return 4x4 affine matrix from sform parameters in header.

    Parameters
    ----------
    coded : bool, optional
        If True, return ``(affine or None, code)`` — affine is None when
        the sform code is 0. If False (default), always return the affine
        built from the srow fields.

    Returns
    -------
    affine : None or (4,4) ndarray
    code : int
        Sform code; only returned if `coded` is True.
    """
    hdr = self._structarr
    code = int(hdr['sform_code'])
    if coded and code == 0:
        return None, 0
    affine = np.eye(4)
    for row, key in enumerate(('srow_x', 'srow_y', 'srow_z')):
        affine[row, :] = hdr[key][:]
    return (affine, code) if coded else affine
|
(self, coded=False)
|
56,839 |
nibabel.nifti1
|
get_slice_duration
|
Get slice duration
Returns
-------
slice_duration : float
time to acquire one slice
Examples
--------
>>> hdr = Nifti1Header()
>>> hdr.set_dim_info(slice=2)
>>> hdr.set_slice_duration(0.3)
>>> print("%0.1f" % hdr.get_slice_duration())
0.3
Notes
-----
The NIfTI1 spec appears to require the slice dimension to be
defined for slice_duration to have meaning.
|
def get_slice_duration(self):
    """Get slice duration (time to acquire one slice).

    Returns
    -------
    slice_duration : float

    Raises
    ------
    HeaderDataError
        If the slice dimension is not set in ``dim_info`` — the NIfTI1
        spec appears to require it for slice_duration to have meaning.
    """
    if self.get_dim_info()[2] is None:
        raise HeaderDataError('Slice dimension must be set for duration to be valid')
    return float(self._structarr['slice_duration'])
|
(self)
|
56,840 |
nibabel.nifti1
|
get_slice_times
|
Get slice times from slice timing information
Returns
-------
slice_times : tuple
Times of acquisition of slices, where 0 is the beginning of
the acquisition, ordered by position in file. nifti allows
slices at the top and bottom of the volume to be excluded from
the standard slice timing specification, and calls these
"padding slices". We give padding slices ``None`` as a time
of acquisition
Examples
--------
>>> hdr = Nifti1Header()
>>> hdr.set_dim_info(slice=2)
>>> hdr.set_data_shape((1, 1, 7))
>>> hdr.set_slice_duration(0.1)
>>> hdr['slice_code'] = slice_order_codes['sequential increasing']
>>> slice_times = hdr.get_slice_times()
>>> np.allclose(slice_times, [0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6])
True
|
def get_slice_times(self):
    """Get slice times from slice timing information.

    Returns
    -------
    slice_times : tuple
        Times of acquisition of slices, ordered by position in file,
        with 0 the start of acquisition. NIfTI allows slices at the top
        and bottom of the volume to be excluded from timing ("padding
        slices"); these get ``None`` as their acquisition time.

    Examples
    --------
    >>> hdr = Nifti1Header()
    >>> hdr.set_dim_info(slice=2)
    >>> hdr.set_data_shape((1, 1, 7))
    >>> hdr.set_slice_duration(0.1)
    >>> hdr['slice_code'] = slice_order_codes['sequential increasing']
    >>> np.allclose(hdr.get_slice_times(), [0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6])
    True
    """
    hdr = self._structarr
    slice_len = self.get_n_slices()
    duration = self.get_slice_duration()
    slabel = self.get_value_label('slice_code')
    if slabel == 'unknown':
        raise HeaderDataError('Cannot get slice times when slice code is "unknown"')
    slice_start = int(hdr['slice_start'])
    slice_end = int(hdr['slice_end'])
    if slice_start < 0:
        raise HeaderDataError('slice_start should be >= 0')
    if slice_end == 0:
        # Unset slice_end means timing extends to the last slice
        slice_end = slice_len - 1
    n_timed = slice_end - slice_start + 1
    if n_timed < 1:
        raise HeaderDataError('slice_end should be > slice_start')
    times = self._slice_time_order(slabel, n_timed) * duration
    pad_before = (None,) * slice_start
    pad_after = (None,) * (slice_len - slice_end - 1)
    return pad_before + tuple(times) + pad_after
|
(self)
|
56,841 |
nibabel.nifti1
|
get_slope_inter
|
Get data scaling (slope) and DC offset (intercept) from header data
Returns
-------
slope : None or float
scaling (slope). None if there is no valid scaling from these
fields
inter : None or float
offset (intercept). None if there is no valid scaling or if offset
is not finite.
Examples
--------
>>> hdr = Nifti1Header()
>>> hdr.get_slope_inter()
(1.0, 0.0)
>>> hdr['scl_slope'] = 0
>>> hdr.get_slope_inter()
(None, None)
>>> hdr['scl_slope'] = np.nan
>>> hdr.get_slope_inter()
(None, None)
>>> hdr['scl_slope'] = 1
>>> hdr['scl_inter'] = 1
>>> hdr.get_slope_inter()
(1.0, 1.0)
>>> hdr['scl_inter'] = np.inf
>>> hdr.get_slope_inter() #doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
HeaderDataError: Valid slope but invalid intercept inf
|
def get_slope_inter(self):
    """Get data scaling (slope) and DC offset (intercept) from header data.

    Returns
    -------
    slope : None or float
        Scaling. None if the stored slope is 0 or not finite (no valid
        scaling), in which case the intercept is also None.
    inter : None or float
        Offset. Raises HeaderDataError if the slope is valid but the
        intercept is not finite.

    Notes
    -----
    Values are returned as float64 although stored in the header as
    float32.
    """
    slope = float(self['scl_slope'])
    inter = float(self['scl_inter'])
    if not np.isfinite(slope) or slope == 0:
        return None, None
    if not np.isfinite(inter):
        raise HeaderDataError(f'Valid slope but invalid intercept {inter}')
    return slope, inter
|
(self)
|
56,843 |
nibabel.nifti1
|
get_xyzt_units
| null |
def get_xyzt_units(self):
    """Return (spatial, temporal) unit labels decoded from ``xyzt_units``.

    The low 3 bits of the field carry the spatial unit code; the remaining
    bits carry the time unit code.
    """
    combined = self.structarr['xyzt_units']
    xyz_code = combined % 8
    t_code = combined - xyz_code
    return (unit_codes.label[xyz_code], unit_codes.label[t_code])
|
(self)
|
56,848 |
nibabel.nifti1
|
set_data_dtype
|
Set numpy dtype for data from code or dtype or type
Using :py:class:`int` or ``"int"`` is disallowed, as these types
will be interpreted as ``np.int64``, which is almost never desired.
``np.int64`` is permitted for those intent on making poor choices.
Examples
--------
>>> hdr = Nifti1Header()
>>> hdr.set_data_dtype(np.uint8)
>>> hdr.get_data_dtype()
dtype('uint8')
>>> hdr.set_data_dtype(np.dtype(np.uint8))
>>> hdr.get_data_dtype()
dtype('uint8')
>>> hdr.set_data_dtype('implausible')
Traceback (most recent call last):
...
nibabel.spatialimages.HeaderDataError: data dtype "implausible" not recognized
>>> hdr.set_data_dtype('none')
Traceback (most recent call last):
...
nibabel.spatialimages.HeaderDataError: data dtype "none" known but not supported
>>> hdr.set_data_dtype(np.void)
Traceback (most recent call last):
...
nibabel.spatialimages.HeaderDataError: data dtype "<class 'numpy.void'>" known
but not supported
>>> hdr.set_data_dtype('int')
Traceback (most recent call last):
...
ValueError: Invalid data type 'int'. Specify a sized integer, e.g., 'uint8' or numpy.int16.
>>> hdr.set_data_dtype(int)
Traceback (most recent call last):
...
ValueError: Invalid data type <class 'int'>. Specify a sized integer, e.g., 'uint8' or
numpy.int16.
>>> hdr.set_data_dtype('int64')
>>> hdr.get_data_dtype() == np.dtype('int64')
True
|
def set_data_dtype(self, datatype):
    """Set numpy dtype for data from code or dtype or type
    Using :py:class:`int` or ``"int"`` is disallowed, as these types
    will be interpreted as ``np.int64``, which is almost never desired.
    ``np.int64`` is permitted for those intent on making poor choices.
    Examples
    --------
    >>> hdr = Nifti1Header()
    >>> hdr.set_data_dtype(np.uint8)
    >>> hdr.get_data_dtype()
    dtype('uint8')
    >>> hdr.set_data_dtype(np.dtype(np.uint8))
    >>> hdr.get_data_dtype()
    dtype('uint8')
    >>> hdr.set_data_dtype('implausible')
    Traceback (most recent call last):
       ...
    nibabel.spatialimages.HeaderDataError: data dtype "implausible" not recognized
    >>> hdr.set_data_dtype('none')
    Traceback (most recent call last):
       ...
    nibabel.spatialimages.HeaderDataError: data dtype "none" known but not supported
    >>> hdr.set_data_dtype(np.void)
    Traceback (most recent call last):
       ...
    nibabel.spatialimages.HeaderDataError: data dtype "<class 'numpy.void'>" known
    but not supported
    >>> hdr.set_data_dtype('int')
    Traceback (most recent call last):
       ...
    ValueError: Invalid data type 'int'. Specify a sized integer, e.g., 'uint8' or numpy.int16.
    >>> hdr.set_data_dtype(int)
    Traceback (most recent call last):
       ...
    ValueError: Invalid data type <class 'int'>. Specify a sized integer, e.g., 'uint8' or
    numpy.int16.
    >>> hdr.set_data_dtype('int64')
    >>> hdr.get_data_dtype() == np.dtype('int64')
    True
    """
    # Reject unsized `int` / 'int' before delegating: these would silently
    # be interpreted as np.int64.
    # NOTE(review): the isinstance guard presumably keeps np.dtype instances
    # out of the `in (int, 'int')` equality test — confirm before changing.
    if not isinstance(datatype, np.dtype) and datatype in (int, 'int'):
        raise ValueError(
            f'Invalid data type {datatype!r}. Specify a sized integer, '
            "e.g., 'uint8' or numpy.int16."
        )
    super().set_data_dtype(datatype)
|
(self, datatype)
|
56,850 |
nibabel.nifti1
|
set_data_shape
|
Set shape of data # noqa
If ``ndims == len(shape)`` then we set zooms for dimensions higher than
``ndims`` to 1.0
Nifti1 images can have up to seven dimensions. For FreeSurfer-variant
Nifti surface files, the first dimension is assumed to correspond to
vertices/nodes on a surface, and dimensions two and three are
constrained to have depth of 1. Dimensions 4-7 are constrained only by
type bounds.
Parameters
----------
shape : sequence
sequence of integers specifying data array shape
Notes
-----
Applies freesurfer hack for large vectors described in `issue 100`_ and
`save_nifti.m <save77_>`_.
Allows for freesurfer hack for 7th order icosahedron surface described
in `issue 309`_, load_nifti.m_, and `save_nifti.m <save50_>`_.
The Nifti1 `standard header`_ allows for the following "point set"
definition of a surface, not currently implemented in nibabel.
::
To signify that the vector value at each voxel is really a
spatial coordinate (e.g., the vertices or nodes of a surface mesh):
- dataset must have a 5th dimension
- intent_code must be NIFTI_INTENT_POINTSET
- dim[0] = 5
- dim[1] = number of points
- dim[2] = dim[3] = dim[4] = 1
- dim[5] must be the dimensionality of space (e.g., 3 => 3D space).
- intent_name may describe the object these points come from
(e.g., "pial", "gray/white" , "EEG", "MEG").
.. _issue 100: https://github.com/nipy/nibabel/issues/100
.. _issue 309: https://github.com/nipy/nibabel/issues/309
.. _save77:
https://github.com/fieldtrip/fieldtrip/blob/428798b/external/freesurfer/save_nifti.m#L77-L82
.. _save50:
https://github.com/fieldtrip/fieldtrip/blob/428798b/external/freesurfer/save_nifti.m#L50-L56
.. _load_nifti.m:
https://github.com/fieldtrip/fieldtrip/blob/428798b/external/freesurfer/load_nifti.m#L86-L89
.. _standard header: http://nifti.nimh.nih.gov/pub/dist/src/niftilib/nifti1.h
|
def set_data_shape(self, shape):
    """Set shape of data # noqa
    If ``ndims == len(shape)`` then we set zooms for dimensions higher than
    ``ndims`` to 1.0
    Nifti1 images can have up to seven dimensions. For FreeSurfer-variant
    Nifti surface files, the first dimension is assumed to correspond to
    vertices/nodes on a surface, and dimensions two and three are
    constrained to have depth of 1. Dimensions 4-7 are constrained only by
    type bounds.
    Parameters
    ----------
    shape : sequence
       sequence of integers specifying data array shape
    Notes
    -----
    Applies freesurfer hack for large vectors described in `issue 100`_ and
    `save_nifti.m <save77_>`_.
    Allows for freesurfer hack for 7th order icosahedron surface described
    in `issue 309`_, load_nifti.m_, and `save_nifti.m <save50_>`_.
    The Nifti1 `standard header`_ allows for the following "point set"
    definition of a surface, not currently implemented in nibabel.
    ::
      To signify that the vector value at each voxel is really a
      spatial coordinate (e.g., the vertices or nodes of a surface mesh):
        - dataset must have a 5th dimension
        - intent_code must be  NIFTI_INTENT_POINTSET
        - dim[0] = 5
        - dim[1] = number of points
        - dim[2] = dim[3] = dim[4] = 1
        - dim[5] must be the dimensionality of space (e.g., 3 => 3D space).
        - intent_name may describe the object these points come from
          (e.g., "pial", "gray/white" , "EEG", "MEG").
    .. _issue 100: https://github.com/nipy/nibabel/issues/100
    .. _issue 309: https://github.com/nipy/nibabel/issues/309
    .. _save77:
       https://github.com/fieldtrip/fieldtrip/blob/428798b/external/freesurfer/save_nifti.m#L77-L82
    .. _save50:
       https://github.com/fieldtrip/fieldtrip/blob/428798b/external/freesurfer/save_nifti.m#L50-L56
    .. _load_nifti.m:
       https://github.com/fieldtrip/fieldtrip/blob/428798b/external/freesurfer/load_nifti.m#L86-L89
    .. _standard header: http://nifti.nimh.nih.gov/pub/dist/src/niftilib/nifti1.h
    """
    hdr = self._structarr
    shape = tuple(shape)
    # Apply freesurfer hack for ico7 surface
    if shape[:3] == (163842, 1, 1):
        shape = (27307, 1, 6) + shape[3:]
    # Apply freesurfer hack for large vectors
    elif (
        len(shape) >= 3
        and shape[1:3] == (1, 1)
        and shape[0] > np.iinfo(hdr['dim'].dtype.base).max
    ):
        # Store the true length in glmin; dim[1] == -1 flags the hack.
        # Detect both an outright OverflowError and a silent wrap-around
        # where the assigned value does not read back equal.
        try:
            hdr['glmin'] = shape[0]
        except OverflowError:
            overflow = True
        else:
            overflow = hdr['glmin'] != shape[0]
        if overflow:
            raise HeaderDataError(f'shape[0] {shape[0]} does not fit in glmax datatype')
        warnings.warn(
            'Using large vector Freesurfer hack; header will '
            'not be compatible with SPM or FSL',
            stacklevel=2,
        )
        shape = (-1, 1, 1) + shape[3:]
    super().set_data_shape(shape)
|
(self, shape)
|
56,851 |
nibabel.nifti1
|
set_dim_info
|
Sets nifti MRI slice etc dimension information
Parameters
----------
freq : {None, 0, 1, 2}
axis of data array referring to frequency encoding
phase : {None, 0, 1, 2}
axis of data array referring to phase encoding
slice : {None, 0, 1, 2}
axis of data array referring to slice encoding
``None`` means the axis is not specified.
Examples
--------
>>> hdr = Nifti1Header()
>>> hdr.set_dim_info(1, 2, 0)
>>> hdr.get_dim_info()
(1, 2, 0)
>>> hdr.set_dim_info(freq=1, phase=2, slice=0)
>>> hdr.get_dim_info()
(1, 2, 0)
>>> hdr.set_dim_info()
>>> hdr.get_dim_info()
(None, None, None)
>>> hdr.set_dim_info(freq=1, phase=None, slice=0)
>>> hdr.get_dim_info()
(1, None, 0)
Notes
-----
This is stored in one byte in the header
|
def set_dim_info(self, freq=None, phase=None, slice=None):
    """Set NIfTI MRI slice etc dimension information.

    Parameters
    ----------
    freq : {None, 0, 1, 2}
        axis of data array referring to frequency encoding
    phase : {None, 0, 1, 2}
        axis of data array referring to phase encoding
    slice : {None, 0, 1, 2}
        axis of data array referring to slice encoding
        ``None`` means the axis is not specified.

    Examples
    --------
    >>> hdr = Nifti1Header()
    >>> hdr.set_dim_info(1, 2, 0)
    >>> hdr.get_dim_info()
    (1, 2, 0)
    >>> hdr.set_dim_info()
    >>> hdr.get_dim_info()
    (None, None, None)

    Notes
    -----
    All three roles are packed into the single ``dim_info`` byte, two bits
    each; an axis is stored as its index plus one, with 0 meaning unset.
    """
    for axis in (freq, phase, slice):
        # Avoid == comparison with None (FutureWarning under numpy)
        if axis is not None and axis not in (0, 1, 2):
            raise HeaderDataError('Inputs must be in [None, 0, 1, 2]')
    packed = 0
    for axis, shift in ((freq, 0), (phase, 2), (slice, 4)):
        if axis is not None:
            packed |= ((axis + 1) & 3) << shift
    self._structarr['dim_info'] = packed
|
(self, freq=None, phase=None, slice=None)
|
56,852 |
nibabel.nifti1
|
set_intent
|
Set the intent code, parameters and name
If parameters are not specified, assumed to be all zero. Each
intent code has a set number of parameters associated. If you
specify any parameters, then it will need to be the correct number
(e.g the "f test" intent requires 2). However, parameters can
also be set in the file data, so we also allow not setting any
parameters (empty parameter tuple).
Parameters
----------
code : integer or string
code specifying nifti intent
params : list, tuple of scalars
parameters relating to intent (see intent_codes)
defaults to (). Unspecified parameters are set to 0.0
name : string
intent name (description). Defaults to ''
allow_unknown : {False, True}, optional
Allow unknown integer intent codes. If False (the default),
a KeyError is raised on attempts to set the intent
to an unknown code.
Returns
-------
None
Examples
--------
>>> hdr = Nifti1Header()
>>> hdr.set_intent(0) # no intent
>>> hdr.set_intent('z score')
>>> hdr.get_intent()
('z score', (), '')
>>> hdr.get_intent('code')
(5, (), '')
>>> hdr.set_intent('t test', (10,), name='some score')
>>> hdr.get_intent()
('t test', (10.0,), 'some score')
>>> hdr.set_intent('f test', (2, 10), name='another score')
>>> hdr.get_intent()
('f test', (2.0, 10.0), 'another score')
>>> hdr.set_intent('f test')
>>> hdr.get_intent()
('f test', (0.0, 0.0), '')
>>> hdr.set_intent(9999, allow_unknown=True) # unknown code
>>> hdr.get_intent()
('unknown code 9999', (), '')
|
def set_intent(self, code, params=(), name='', allow_unknown=False):
    """Set the intent code, parameters and name.

    If parameters are not specified, they are set to zero. Each known
    intent code has a fixed number of associated parameters; when you
    supply any, the count must match (e.g. the "f test" intent requires
    2). Parameters can also be set in the file data, so an empty
    parameter tuple is always accepted.

    Parameters
    ----------
    code : integer or string
        code specifying nifti intent
    params : list, tuple of scalars
        parameters relating to intent (see intent_codes)
        defaults to (). Unspecified parameters are set to 0.0
    name : string
        intent name (description). Defaults to ''
    allow_unknown : {False, True}, optional
        Allow unknown *integer* intent codes. If False (the default),
        a KeyError is raised on attempts to set the intent to an
        unknown code; unknown string labels always raise.

    Returns
    -------
    None

    Examples
    --------
    >>> hdr = Nifti1Header()
    >>> hdr.set_intent('t test', (10,), name='some score')
    >>> hdr.get_intent()
    ('t test', (10.0,), 'some score')
    >>> hdr.set_intent(9999, allow_unknown=True)  # unknown code
    >>> hdr.get_intent()
    ('unknown code 9999', (), '')
    """
    hdr = self._structarr
    if code in intent_codes:
        icode = intent_codes.code[code]
        p_descr = intent_codes.parameters[code]
    elif allow_unknown and not isinstance(code, str):
        # Unknown integer codes pass through; assume the generic 3 params
        icode = code
        p_descr = ('p1', 'p2', 'p3')
    else:
        raise KeyError('Unknown intent code: ' + str(code))
    if params and len(params) != len(p_descr):
        raise HeaderDataError(f'Need params of form {p_descr}, or empty')
    hdr['intent_code'] = icode
    hdr['intent_name'] = name
    padded = list(params) + [0] * (3 - len(params))
    for i, value in enumerate(padded, start=1):
        hdr[f'intent_p{i}'] = value
|
(self, code, params=(), name='', allow_unknown=False)
|
56,853 |
nibabel.nifti1
|
set_qform
|
Set qform header values from 4x4 affine
Parameters
----------
affine : None or 4x4 array
affine transform to write into sform. If None, only set code.
code : None, string or integer, optional
String or integer giving meaning of transform in *affine*.
The default is None. If code is None, then:
* If affine is None, `code`-> 0
* If affine not None and existing qform code in header == 0,
`code`-> 2 (aligned)
* If affine not None and existing qform code in header != 0,
`code`-> existing qform code in header
strip_shears : bool, optional
Whether to strip shears in `affine`. If True, shears will be
silently stripped. If False, the presence of shears will raise a
``HeaderDataError``
Notes
-----
The qform transform only encodes translations, rotations and
zooms. If there are shear components to the `affine` transform, and
`strip_shears` is True (the default), the written qform gives the
closest approximation where the rotation matrix is orthogonal. This is
to allow quaternion representation. The orthogonal representation
enforces orthogonal axes.
Examples
--------
>>> hdr = Nifti1Header()
>>> int(hdr['qform_code']) # gives 0 - unknown
0
>>> affine = np.diag([1,2,3,1])
>>> np.all(hdr.get_qform() == affine)
False
>>> hdr.set_qform(affine)
>>> np.all(hdr.get_qform() == affine)
True
>>> int(hdr['qform_code']) # gives 2 - aligned
2
>>> hdr.set_qform(affine, code='talairach')
>>> int(hdr['qform_code'])
3
>>> hdr.set_qform(affine, code=None)
>>> int(hdr['qform_code'])
3
>>> hdr.set_qform(affine, code='scanner')
>>> int(hdr['qform_code'])
1
>>> hdr.set_qform(None)
>>> int(hdr['qform_code'])
0
|
def set_qform(self, affine, code=None, strip_shears=True):
    """Set qform header values from 4x4 affine

    Parameters
    ----------
    affine : None or 4x4 array
        affine transform to write into sform. If None, only set code.
    code : None, string or integer, optional
        String or integer giving meaning of transform in *affine*.
        The default is None. If code is None, then:

        * If affine is None, `code`-> 0
        * If affine not None and existing qform code in header == 0,
          `code`-> 2 (aligned)
        * If affine not None and existing qform code in header != 0,
          `code`-> existing qform code in header
    strip_shears : bool, optional
        Whether to strip shears in `affine`. If True, shears will be
        silently stripped. If False, the presence of shears will raise a
        ``HeaderDataError``

    Notes
    -----
    The qform transform only encodes translations, rotations and
    zooms. If there are shear components to the `affine` transform, and
    `strip_shears` is True (the default), the written qform gives the
    closest approximation where the rotation matrix is orthogonal. This is
    to allow quaternion representation. The orthogonal representation
    enforces orthogonal axes.

    Examples
    --------
    >>> hdr = Nifti1Header()
    >>> int(hdr['qform_code'])  # gives 0 - unknown
    0
    >>> affine = np.diag([1,2,3,1])
    >>> np.all(hdr.get_qform() == affine)
    False
    >>> hdr.set_qform(affine)
    >>> np.all(hdr.get_qform() == affine)
    True
    >>> int(hdr['qform_code'])  # gives 2 - aligned
    2
    >>> hdr.set_qform(affine, code='talairach')
    >>> int(hdr['qform_code'])
    3
    >>> hdr.set_qform(affine, code=None)
    >>> int(hdr['qform_code'])
    3
    >>> hdr.set_qform(affine, code='scanner')
    >>> int(hdr['qform_code'])
    1
    >>> hdr.set_qform(None)
    >>> int(hdr['qform_code'])
    0
    """
    hdr = self._structarr
    old_code = hdr['qform_code']
    # Resolve the code: derive it from the current header state when not
    # given; otherwise translate a label/integer via the field recoder.
    if code is None:
        if affine is None:
            code = 0
        elif old_code == 0:
            code = 2  # aligned
        else:
            code = old_code
    else:  # code set
        code = self._field_recoders['qform_code'][code]
    hdr['qform_code'] = code
    if affine is None:
        # Code-only update; quaternion/offset fields are left untouched.
        return
    affine = np.asarray(affine)
    if not affine.shape == (4, 4):
        raise TypeError('Need 4x4 affine as input')
    trans = affine[:3, 3]
    RZS = affine[:3, :3]
    # Column norms of the rotation+zoom block give the voxel zooms.
    zooms = np.sqrt(np.sum(RZS * RZS, axis=0))
    R = RZS / zooms
    # Set qfac to make R determinant positive
    if npl.det(R) > 0:
        qfac = 1
    else:
        qfac = -1
        R[:, -1] *= -1
    # Make R orthogonal (to allow quaternion representation)
    # The orthogonal representation enforces orthogonal axes
    # (a subtle requirement of the NIFTI format qform transform)
    # Transform below is polar decomposition, returning the closest
    # orthogonal matrix PR, to input R
    P, S, Qs = npl.svd(R)
    PR = np.dot(P, Qs)
    if not strip_shears and not np.allclose(PR, R):
        raise HeaderDataError('Shears in affine and `strip_shears` is False')
    # Convert to quaternion
    quat = mat2quat(PR)
    # Set into header
    hdr['qoffset_x'], hdr['qoffset_y'], hdr['qoffset_z'] = trans
    hdr['pixdim'][0] = qfac
    hdr['pixdim'][1:4] = zooms
    hdr['quatern_b'], hdr['quatern_c'], hdr['quatern_d'] = quat[1:]
|
(self, affine, code=None, strip_shears=True)
|
56,854 |
nibabel.nifti1
|
set_sform
|
Set sform transform from 4x4 affine
Parameters
----------
affine : None or 4x4 array
affine transform to write into sform. If None, only set `code`
code : None, string or integer, optional
String or integer giving meaning of transform in *affine*.
The default is None. If code is None, then:
* If affine is None, `code`-> 0
* If affine not None and existing sform code in header == 0,
`code`-> 2 (aligned)
* If affine not None and existing sform code in header != 0,
`code`-> existing sform code in header
Examples
--------
>>> hdr = Nifti1Header()
>>> int(hdr['sform_code']) # gives 0 - unknown
0
>>> affine = np.diag([1,2,3,1])
>>> np.all(hdr.get_sform() == affine)
False
>>> hdr.set_sform(affine)
>>> np.all(hdr.get_sform() == affine)
True
>>> int(hdr['sform_code']) # gives 2 - aligned
2
>>> hdr.set_sform(affine, code='talairach')
>>> int(hdr['sform_code'])
3
>>> hdr.set_sform(affine, code=None)
>>> int(hdr['sform_code'])
3
>>> hdr.set_sform(affine, code='scanner')
>>> int(hdr['sform_code'])
1
>>> hdr.set_sform(None)
>>> int(hdr['sform_code'])
0
|
def set_sform(self, affine, code=None):
"""Set sform transform from 4x4 affine
Parameters
----------
affine : None or 4x4 array
affine transform to write into sform. If None, only set `code`
code : None, string or integer, optional
String or integer giving meaning of transform in *affine*.
The default is None. If code is None, then:
* If affine is None, `code`-> 0
* If affine not None and existing sform code in header == 0,
`code`-> 2 (aligned)
* If affine not None and existing sform code in header != 0,
`code`-> existing sform code in header
Examples
--------
>>> hdr = Nifti1Header()
>>> int(hdr['sform_code']) # gives 0 - unknown
0
>>> affine = np.diag([1,2,3,1])
>>> np.all(hdr.get_sform() == affine)
False
>>> hdr.set_sform(affine)
>>> np.all(hdr.get_sform() == affine)
True
>>> int(hdr['sform_code']) # gives 2 - aligned
2
>>> hdr.set_sform(affine, code='talairach')
>>> int(hdr['sform_code'])
3
>>> hdr.set_sform(affine, code=None)
>>> int(hdr['sform_code'])
3
>>> hdr.set_sform(affine, code='scanner')
>>> int(hdr['sform_code'])
1
>>> hdr.set_sform(None)
>>> int(hdr['sform_code'])
0
"""
hdr = self._structarr
old_code = hdr['sform_code']
if code is None:
if affine is None:
code = 0
elif old_code == 0:
code = 2 # aligned
else:
code = old_code
else: # code set
code = self._field_recoders['sform_code'][code]
hdr['sform_code'] = code
if affine is None:
return
affine = np.asarray(affine)
hdr['srow_x'][:] = affine[0, :]
hdr['srow_y'][:] = affine[1, :]
hdr['srow_z'][:] = affine[2, :]
|
(self, affine, code=None)
|
56,855 |
nibabel.nifti1
|
set_slice_duration
|
Set slice duration
Parameters
----------
duration : scalar
time to acquire one slice
Examples
--------
See ``get_slice_duration``
|
def set_slice_duration(self, duration):
    """Record the time taken to acquire a single slice.

    Parameters
    ----------
    duration : scalar
        Acquisition time for one slice.

    Raises
    ------
    HeaderDataError
        If no slice dimension has been set (see ``get_dim_info``).
    """
    slice_dim = self.get_dim_info()[2]
    if slice_dim is None:
        raise HeaderDataError('Slice dimension must be set for duration to be valid')
    self._structarr['slice_duration'] = duration
|
(self, duration)
|
56,856 |
nibabel.nifti1
|
set_slice_times
|
Set slice times into *hdr*
Parameters
----------
slice_times : tuple
tuple of slice times, one value per slice
tuple can include None to indicate no slice time for that slice
Examples
--------
>>> hdr = Nifti1Header()
>>> hdr.set_dim_info(slice=2)
>>> hdr.set_data_shape([1, 1, 7])
>>> hdr.set_slice_duration(0.1)
>>> times = [None, 0.2, 0.4, 0.1, 0.3, 0.0, None]
>>> hdr.set_slice_times(times)
>>> hdr.get_value_label('slice_code')
'alternating decreasing'
>>> int(hdr['slice_start'])
1
>>> int(hdr['slice_end'])
5
|
def set_slice_times(self, slice_times):
    """Set slice times into *hdr*

    Parameters
    ----------
    slice_times : tuple
        tuple of slice times, one value per slice
        tuple can include None to indicate no slice time for that slice

    Examples
    --------
    >>> hdr = Nifti1Header()
    >>> hdr.set_dim_info(slice=2)
    >>> hdr.set_data_shape([1, 1, 7])
    >>> hdr.set_slice_duration(0.1)
    >>> times = [None, 0.2, 0.4, 0.1, 0.3, 0.0, None]
    >>> hdr.set_slice_times(times)
    >>> hdr.get_value_label('slice_code')
    'alternating decreasing'
    >>> int(hdr['slice_start'])
    1
    >>> int(hdr['slice_end'])
    5
    """
    # Check if number of slices matches header
    hdr = self._structarr
    slice_len = self.get_n_slices()
    if slice_len != len(slice_times):
        raise HeaderDataError('Number of slice times does not match number of slices')
    # Extract Nones at beginning and end. Check for others
    for ind, time in enumerate(slice_times):
        if time is not None:
            slice_start = ind
            break
    else:
        raise HeaderDataError('Not all slice times can be None')
    # The reverse scan always finds a non-None entry (the forward scan
    # above proved at least one exists), so no for/else guard is needed.
    for ind, time in enumerate(slice_times[::-1]):
        if time is not None:
            slice_end = slice_len - ind - 1
            break
    timed = slice_times[slice_start : slice_end + 1]
    for time in timed:
        if time is None:
            raise HeaderDataError('Cannot have None in middle of slice time vector')
    # Find slice duration, check times are compatible with single
    # duration
    tdiffs = np.diff(np.sort(timed))
    if not np.allclose(np.diff(tdiffs), 0):
        raise HeaderDataError('Slice times not compatible with single slice duration')
    duration = np.mean(tdiffs)
    # To slice time order
    st_order = np.round(np.array(timed) / duration)
    # Check if slice times fit known schemes
    n_timed = len(timed)
    so_recoder = self._field_recoders['slice_code']
    labels = so_recoder.value_set('label')
    labels.remove('unknown')
    # Keep every labelled ordering whose canonical slice order matches
    # the order implied by the supplied times.
    matching_labels = [
        label for label in labels if np.all(st_order == self._slice_time_order(label, n_timed))
    ]
    if not matching_labels:
        raise HeaderDataError(f'slice ordering of {st_order} fits with no known scheme')
    if len(matching_labels) > 1:
        warnings.warn(
            f"Multiple slice orders satisfy: {', '.join(matching_labels)}. "
            'Choosing the first one'
        )
    label = matching_labels[0]
    # Set values into header
    hdr['slice_start'] = slice_start
    hdr['slice_end'] = slice_end
    hdr['slice_duration'] = duration
    hdr['slice_code'] = slice_order_codes.code[label]
|
(self, slice_times)
|
56,857 |
nibabel.nifti1
|
set_slope_inter
|
Set slope and / or intercept into header
Set slope and intercept for image data, such that, if the image
data is ``arr``, then the scaled image data will be ``(arr *
slope) + inter``
(`slope`, `inter`) of (NaN, NaN) is a signal to a containing image to
set `slope`, `inter` automatically on write.
Parameters
----------
slope : None or float
If None, implies `slope` of NaN. If `slope` is None or NaN then
`inter` should be None or NaN. Values of 0, Inf or -Inf raise
HeaderDataError
inter : None or float, optional
Intercept. If None, implies `inter` of NaN. If `slope` is None or
NaN then `inter` should be None or NaN. Values of Inf or -Inf raise
HeaderDataError
|
def set_slope_inter(self, slope, inter=None):
    """Store scaling slope and intercept in the header.

    Scaled data is recovered as ``raw * slope + inter``. Storing
    (NaN, NaN) signals a containing image to choose the scaling
    automatically on write.

    Parameters
    ----------
    slope : None or float
        Scaling slope; None is stored as NaN. Values of 0, +Inf and
        -Inf raise ``HeaderDataError``.
    inter : None or float, optional
        Intercept; None is stored as NaN. Values of +/-Inf raise
        ``HeaderDataError``. `slope` and `inter` must either both be
        NaN or both be finite values.
    """
    slope = np.nan if slope is None else slope
    inter = np.nan if inter is None else inter
    if slope in (0, np.inf, -np.inf):
        raise HeaderDataError('Slope cannot be 0 or infinite')
    if inter in (np.inf, -np.inf):
        raise HeaderDataError('Intercept cannot be infinite')
    # Exactly one of the pair being NaN is an inconsistent state
    if np.isnan(slope) != np.isnan(inter):
        raise HeaderDataError('None or both of slope, inter should be nan')
    self._structarr['scl_slope'] = slope
    self._structarr['scl_inter'] = inter
|
(self, slope, inter=None)
|
56,858 |
nibabel.nifti1
|
set_xyzt_units
| null |
def set_xyzt_units(self, xyz=None, t=None):
    """Set units of space (xyz) and time (t) into the ``xyzt_units`` field.

    Parameters
    ----------
    xyz : None or key understood by ``unit_codes``, optional
        Spatial unit. None is treated as 0 ('unknown').
    t : None or key understood by ``unit_codes``, optional
        Temporal unit. None is treated as 0 ('unknown').

    Notes
    -----
    NOTE(review): passing None resets the corresponding unit to
    'unknown' rather than preserving the value already in the header;
    callers wanting to keep one unit must pass it explicitly. The
    previous implementation read the existing codes into locals and
    then unconditionally overwrote them (dead stores); that dead code
    is removed here with no change in behavior.
    """
    if xyz is None:
        xyz = 0
    if t is None:
        t = 0
    # Space and time codes occupy disjoint ranges of the field, so the
    # combined value is simply their sum.
    self.structarr['xyzt_units'] = unit_codes[xyz] + unit_codes[t]
|
(self, xyz=None, t=None)
|
56,861 |
nibabel.nifti1
|
write_to
| null |
def write_to(self, fileobj):
    """Write header, then extension data, to ``fileobj``.

    Parameters
    ----------
    fileobj : file-like
        Object with a ``write`` method, positioned at the start of the
        header.

    Raises
    ------
    HeaderDataError
        If a non-zero ``vox_offset`` in the header is too small to fit
        the header plus extensions (single-file images only).
    """
    # First check that vox offset is large enough; set if necessary
    if self.is_single:
        vox_offset = self._structarr['vox_offset']
        min_vox_offset = self.single_vox_offset + self.extensions.get_sizeondisk()
        if vox_offset == 0:  # vox offset unset; set as necessary
            self._structarr['vox_offset'] = min_vox_offset
        elif vox_offset < min_vox_offset:
            raise HeaderDataError(
                f'vox offset set to {vox_offset}, but need at least {min_vox_offset}'
            )
    super().write_to(fileobj)
    # Write extensions
    if len(self.extensions) == 0:
        # If single file, write required 0 stream to signal no extensions
        if self.is_single:
            fileobj.write(b'\x00' * 4)
        return
    # Signal there are extensions that follow
    fileobj.write(b'\x01\x00\x00\x00')
    # Tell the extensions writer whether the header's endianness
    # differs from this machine's native byte order.
    byteswap = endian_codes['native'] != self.endianness
    self.extensions.write_to(fileobj, byteswap)
|
(self, fileobj)
|
56,862 |
nibabel.nifti1
|
Nifti1Image
|
Class for single file NIfTI1 format image
|
class Nifti1Image(Nifti1Pair, SerializableImage):
    """Single-file ('.nii') NIfTI1 format image."""

    header_class = Nifti1Header
    valid_exts = ('.nii',)
    files_types = (('image', '.nii'),)

    @staticmethod
    def _get_fileholders(file_map):
        """Return the (header, image) fileholder pair.

        Single-file NIfTI keeps the header and the image in one file,
        so the same fileholder is returned for both.
        """
        holder = file_map['image']
        return holder, holder

    def update_header(self):
        """Harmonize header with image data and affine"""
        super().update_header()
        # Mark the header with the single-file magic string
        self._header['magic'] = self._header.single_magic
|
(dataobj, affine, header=None, extra=None, file_map=None, dtype=None)
|
56,864 |
nibabel.nifti1
|
__init__
|
Initialize image
The image is a combination of (array-like, affine matrix, header), with
optional metadata in `extra`, and filename / file-like objects
contained in the `file_map` mapping.
Parameters
----------
dataobj : object
Object containing image data. It should be some object that returns an
array from ``np.asanyarray``. It should have a ``shape`` attribute
or property
affine : None or (4,4) array-like
homogeneous affine giving relationship between voxel coordinates and
world coordinates. Affine can also be None. In this case,
``obj.affine`` also returns None, and the affine as written to disk
will depend on the file format.
header : None or mapping or header instance, optional
metadata for this image format
extra : None or mapping, optional
metadata to associate with image that cannot be stored in the
metadata of this image type
file_map : mapping, optional
mapping giving file information for this image format
Notes
-----
If both a `header` and an `affine` are specified, and the `affine` does
not match the affine that is in the `header`, the `affine` will be used,
but the ``sform_code`` and ``qform_code`` fields in the header will be
re-initialised to their default values. This is performed on the basis
that, if you are changing the affine, you are likely to be changing the
space to which the affine is pointing. The :meth:`set_sform` and
:meth:`set_qform` methods can be used to update the codes after an image
has been created - see those methods, and the :ref:`manual
<default-sform-qform-codes>` for more details.
|
def __init__(self, dataobj, affine, header=None, extra=None, file_map=None, dtype=None):
    """Initialize image from data, affine and optional metadata.

    Parameters
    ----------
    dataobj : object
        Object containing image data; must return an array from
        ``np.asanyarray`` and expose a ``shape`` attribute.
    affine : None or (4, 4) array-like
        Voxel-to-world affine; may be None.
    header : None or mapping or header instance, optional
        Metadata for this image format.
    extra : None or mapping, optional
        Metadata that cannot be stored in this image type's header.
    file_map : mapping, optional
        File information for this image format.
    dtype : dtype-like, optional
        On-disk data type.
    """
    # 64-bit integer data needs an explicit header or dtype opt-in, for
    # compatibility with other tools; see GitHub issues
    # * https://github.com/nipy/nibabel/issues/1046
    # * https://github.com/nipy/nibabel/issues/1089
    # NIfTI-specific: the parent Analyze formats never supported 64-bit
    # integers, so set_data_dtype(int64) would already fail there.
    if header is None and dtype is None:
        if get_obj_dtype(dataobj) in (np.dtype('int64'), np.dtype('uint64')):
            alert_future_error(
                f'Image data has type {dataobj.dtype}, which may cause '
                'incompatibilities with other tools.',
                '5.0',
                warning_rec='This warning can be silenced by passing the dtype argument'
                f' to {self.__class__.__name__}().',
                error_rec='To use this type, pass an explicit header or dtype argument'
                f' to {self.__class__.__name__}().',
                error_class=ValueError,
            )
    super().__init__(dataobj, affine, header, extra, file_map, dtype)
    # With no header given, push the affine into the fresh header's
    # s/q form fields (unless affine is also None).
    if header is None and affine is not None:
        self._affine2header()
|
(self, dataobj, affine, header=None, extra=None, file_map=None, dtype=None)
|
56,866 |
nibabel.nifti1
|
_affine2header
|
Unconditionally set affine into the header
|
def _affine2header(self):
"""Unconditionally set affine into the header"""
hdr = self._header
# Set affine into sform with default code
hdr.set_sform(self._affine, code='aligned')
# Make qform 'unknown'
hdr.set_qform(self._affine, code='unknown')
|
(self)
|
56,867 |
nibabel.nifti1
|
_get_fileholders
|
Return fileholder for header and image
For single-file niftis, the fileholder for the header and the image
will be the same
|
@staticmethod
def _get_fileholders(file_map):
"""Return fileholder for header and image
For single-file niftis, the fileholder for the header and the image
will be the same
"""
return file_map['image'], file_map['image']
|
(file_map)
|
56,868 |
nibabel.nifti1
|
as_reoriented
|
Apply an orientation change and return a new image
If ornt is identity transform, return the original image, unchanged
Parameters
----------
ornt : (n,2) orientation array
orientation transform. ``ornt[N,1]`` is flip of axis N of the
array implied by `shape`, where 1 means no flip and -1 means
flip. For example, if ``N==0`` and ``ornt[0,1] == -1``, and
there's an array ``arr`` of shape `shape`, the flip would
correspond to the effect of ``np.flipud(arr)``. ``ornt[:,0]`` is
the transpose that needs to be done to the implied array, as in
``arr.transpose(ornt[:,0])``
|
def as_reoriented(self, ornt):
    """Apply an orientation change and return a new image.

    Returns the original image, unchanged, when `ornt` is the identity
    transform.

    Parameters
    ----------
    ornt : (n, 2) orientation array
        Orientation transform. ``ornt[N, 1]`` is the flip of axis N of
        the implied array (1 means no flip, -1 means flip; e.g. with
        ``N == 0`` and ``ornt[0, 1] == -1`` the flip corresponds to
        ``np.flipud(arr)``). ``ornt[:, 0]`` is the transpose to apply,
        as in ``arr.transpose(ornt[:, 0])``.
    """
    reoriented = super().as_reoriented(ornt)
    if reoriented is self:
        return reoriented
    # The dim_info axes move with the data; remap each recorded axis
    # through the transpose component of the transform.
    remapped = []
    for axis in reoriented.header.get_dim_info():
        remapped.append(None if axis is None else int(ornt[axis, 0]))
    reoriented.header.set_dim_info(*remapped)
    return reoriented
|
(self, ornt)
|
56,870 |
nibabel.nifti1
|
get_data_dtype
|
Get numpy dtype for data
If ``set_data_dtype()`` has been called with an alias
and ``finalize`` is ``False``, return the alias.
If ``finalize`` is ``True``, determine the appropriate dtype
from the image data object and set the final dtype in the
header before returning it.
|
def get_data_dtype(self, finalize=False):
    """Return the numpy dtype that will be used for the data.

    If ``set_data_dtype()`` stored a dynamic alias ('compat' or
    'smallest') and `finalize` is False, the alias string is returned
    unchanged. With ``finalize=True`` the alias is resolved against the
    image data, written into the header, and the concrete dtype
    returned.

    Raises
    ------
    ValueError
        If the stored alias cannot be resolved for the data's type, or
        the alias itself is unrecognized.
    """
    alias = self._dtype_alias
    if alias is None:
        return super().get_data_dtype()
    if not finalize:
        return alias
    if alias == 'compat':
        resolved = _get_analyze_compat_dtype(self._dataobj)
        descrip = 'an Analyze-compatible dtype'
    elif alias == 'smallest':
        resolved = _get_smallest_dtype(self._dataobj)
        descrip = 'an integer type with fewer than 64 bits'
    else:
        raise ValueError(f'Unknown dtype alias {alias}.')
    if resolved is None:
        dt = get_obj_dtype(self._dataobj)
        raise ValueError(
            f'Cannot automatically cast array (of type {dt}) to {descrip}.'
            ' Please set_data_dtype() to an explicit data type.'
        )
    self.set_data_dtype(resolved)  # Clears the alias
    return super().get_data_dtype()
|
(self, finalize=False)
|
56,873 |
nibabel.nifti1
|
get_qform
|
Return 4x4 affine matrix from qform parameters in header
Parameters
----------
coded : bool, optional
If True, return {affine or None}, and qform code. If False, just
return affine. {affine or None} means, return None if qform code
== 0, and affine otherwise.
Returns
-------
affine : None or (4,4) ndarray
If `coded` is False, always return affine reconstructed from qform
quaternion. If `coded` is True, return None if qform code is 0,
else return the affine.
code : int
Qform code. Only returned if `coded` is True.
See also
--------
set_qform
get_sform
|
def get_qform(self, coded=False):
    """Return 4x4 affine built from the header's qform parameters.

    Parameters
    ----------
    coded : bool, optional
        When True, return ``(affine_or_None, code)``, with None in
        place of the affine if the qform code is 0. When False, always
        return the reconstructed affine.

    Returns
    -------
    affine : None or (4, 4) ndarray
        Affine reconstructed from the qform quaternion; None only when
        `coded` is True and the qform code is 0.
    code : int
        Qform code; only returned when `coded` is True.

    See also
    --------
    set_qform
    get_sform
    """
    header = self._header
    return header.get_qform(coded)
|
(self, coded=False)
|
56,874 |
nibabel.nifti1
|
get_sform
|
Return 4x4 affine matrix from sform parameters in header
Parameters
----------
coded : bool, optional
If True, return {affine or None}, and sform code. If False, just
return affine. {affine or None} means, return None if sform code
== 0, and affine otherwise.
Returns
-------
affine : None or (4,4) ndarray
If `coded` is False, always return affine from sform fields. If
`coded` is True, return None if sform code is 0, else return the
affine.
code : int
Sform code. Only returned if `coded` is True.
See also
--------
set_sform
get_qform
|
def get_sform(self, coded=False):
    """Return 4x4 affine built from the header's sform fields.

    Parameters
    ----------
    coded : bool, optional
        When True, return ``(affine_or_None, code)``, with None in
        place of the affine if the sform code is 0. When False, always
        return the affine from the sform fields.

    Returns
    -------
    affine : None or (4, 4) ndarray
        Affine from the sform fields; None only when `coded` is True
        and the sform code is 0.
    code : int
        Sform code; only returned when `coded` is True.

    See also
    --------
    set_sform
    get_qform
    """
    header = self._header
    return header.get_sform(coded)
|
(self, coded=False)
|
56,876 |
nibabel.nifti1
|
set_data_dtype
|
Set numpy dtype for data from code, dtype, type or alias
Using :py:class:`int` or ``"int"`` is disallowed, as these types
will be interpreted as ``np.int64``, which is almost never desired.
``np.int64`` is permitted for those intent on making poor choices.
The following aliases are defined to allow for flexible specification:
* ``'mask'`` - Alias for ``uint8``
* ``'compat'`` - The nearest Analyze-compatible datatype
(``uint8``, ``int16``, ``int32``, ``float32``)
* ``'smallest'`` - The smallest Analyze-compatible integer
(``uint8``, ``int16``, ``int32``)
Dynamic aliases are resolved when ``get_data_dtype()`` is called
with a ``finalize=True`` flag. Until then, these aliases are not
written to the header and will not persist to new images.
Examples
--------
>>> ints = np.arange(24, dtype='i4').reshape((2,3,4))
>>> img = Nifti1Image(ints, np.eye(4))
>>> img.set_data_dtype(np.uint8)
>>> img.get_data_dtype()
dtype('uint8')
>>> img.set_data_dtype('mask')
>>> img.get_data_dtype()
dtype('uint8')
>>> img.set_data_dtype('compat')
>>> img.get_data_dtype()
'compat'
>>> img.get_data_dtype(finalize=True)
dtype('<i4')
>>> img.get_data_dtype()
dtype('<i4')
>>> img.set_data_dtype('smallest')
>>> img.get_data_dtype()
'smallest'
>>> img.get_data_dtype(finalize=True)
dtype('uint8')
>>> img.get_data_dtype()
dtype('uint8')
Note that floating point values will not be coerced to ``int``
>>> floats = np.arange(24, dtype='f4').reshape((2,3,4))
>>> img = Nifti1Image(floats, np.eye(4))
>>> img.set_data_dtype('smallest')
>>> img.get_data_dtype(finalize=True)
Traceback (most recent call last):
...
ValueError: Cannot automatically cast array (of type float32) to an integer
type with fewer than 64 bits. Please set_data_dtype() to an explicit data type.
>>> arr = np.arange(1000, 1024, dtype='i4').reshape((2,3,4))
>>> img = Nifti1Image(arr, np.eye(4))
>>> img.set_data_dtype('smallest')
>>> img.set_data_dtype('implausible')
Traceback (most recent call last):
...
nibabel.spatialimages.HeaderDataError: data dtype "implausible" not recognized
>>> img.set_data_dtype('none')
Traceback (most recent call last):
...
nibabel.spatialimages.HeaderDataError: data dtype "none" known but not supported
>>> img.set_data_dtype(np.void)
Traceback (most recent call last):
...
nibabel.spatialimages.HeaderDataError: data dtype "<class 'numpy.void'>" known
but not supported
>>> img.set_data_dtype('int')
Traceback (most recent call last):
...
ValueError: Invalid data type 'int'. Specify a sized integer, e.g., 'uint8' or numpy.int16.
>>> img.set_data_dtype(int)
Traceback (most recent call last):
...
ValueError: Invalid data type <class 'int'>. Specify a sized integer, e.g., 'uint8' or
numpy.int16.
>>> img.set_data_dtype('int64')
>>> img.get_data_dtype() == np.dtype('int64')
True
|
def set_data_dtype(self, datatype):
    """Set the on-disk data type from a code, dtype, type or alias.

    Plain :py:class:`int` / ``"int"`` are rejected because they would
    silently mean ``np.int64``, which is almost never wanted;
    ``np.int64`` itself is still accepted for those who insist.

    String aliases:

    * ``'mask'`` - shorthand for ``uint8``
    * ``'compat'`` - nearest Analyze-compatible dtype
      (``uint8``, ``int16``, ``int32``, ``float32``)
    * ``'smallest'`` - smallest Analyze-compatible integer
      (``uint8``, ``int16``, ``int32``)

    The dynamic aliases ('compat', 'smallest') are stored unresolved;
    they are only written to the header when
    ``get_data_dtype(finalize=True)`` is called, and until then do not
    persist to new images.

    Raises
    ------
    HeaderDataError
        If `datatype` is unrecognized, or known but unsupported.
    ValueError
        If `datatype` is an unsized integer type such as ``int``.
    """
    # numpy will attempt e.g. dtype('mask') when comparing dtypes to
    # strings, so alias handling must be guarded by an isinstance check.
    # See https://github.com/numpy/numpy/issues/7242
    if isinstance(datatype, str):
        if datatype == 'mask':
            # Static alias: resolve immediately
            datatype = 'u1'
        elif datatype in ('compat', 'smallest'):
            # Dynamic alias: remember it and defer resolution
            self._dtype_alias = datatype
            return
    self._dtype_alias = None
    super().set_data_dtype(datatype)
|
(self, datatype)
|
56,878 |
nibabel.nifti1
|
set_qform
|
Set qform header values from 4x4 affine
Parameters
----------
affine : None or 4x4 array
affine transform to write into sform. If None, only set code.
code : None, string or integer
String or integer giving meaning of transform in *affine*.
The default is None. If code is None, then:
* If affine is None, `code`-> 0
* If affine not None and existing qform code in header == 0,
`code`-> 2 (aligned)
* If affine not None and existing qform code in header != 0,
`code`-> existing qform code in header
strip_shears : bool, optional
Whether to strip shears in `affine`. If True, shears will be
silently stripped. If False, the presence of shears will raise a
``HeaderDataError``
update_affine : bool, optional
Whether to update the image affine from the header best affine
after setting the qform. Must be keyword argument (because of
different position in `set_qform`). Default is True
See also
--------
get_qform
set_sform
Examples
--------
>>> data = np.arange(24, dtype='f4').reshape((2,3,4))
>>> aff = np.diag([2, 3, 4, 1])
>>> img = Nifti1Pair(data, aff)
>>> img.get_qform()
array([[2., 0., 0., 0.],
[0., 3., 0., 0.],
[0., 0., 4., 0.],
[0., 0., 0., 1.]])
>>> img.get_qform(coded=True)
(None, 0)
>>> aff2 = np.diag([3, 4, 5, 1])
>>> img.set_qform(aff2, 'talairach')
>>> qaff, code = img.get_qform(coded=True)
>>> np.all(qaff == aff2)
True
>>> int(code)
3
|
def set_qform(self, affine, code=None, strip_shears=True, **kwargs):
    """Set qform header values from a 4x4 affine.

    Parameters
    ----------
    affine : None or (4, 4) array
        Affine to write into the qform. When None, only the code is
        set.
    code : None, string or integer
        Meaning of the transform. When None it is derived from the
        current header state: 0 if `affine` is None, 2 ('aligned') if
        the existing qform code is 0, otherwise the existing code.
    strip_shears : bool, optional
        When True (default), shears in `affine` are silently stripped;
        when False, their presence raises ``HeaderDataError``.
    update_affine : bool, optional
        Keyword-only (its position differs from the header's
        ``set_qform``). Whether to refresh the image affine from the
        header's best affine after setting the qform. Default is True.

    See also
    --------
    get_qform
    set_sform
    """
    update_affine = kwargs.pop('update_affine', True)
    if kwargs:
        raise TypeError(f'Unexpected keyword argument(s) {kwargs}')
    self._header.set_qform(affine, code, strip_shears)
    if not update_affine:
        return
    best = self._header.get_best_affine()
    if self._affine is None:
        self._affine = best
    else:
        # In-place assignment so existing references to the affine
        # array observe the update.
        self._affine[:] = best
|
(self, affine, code=None, strip_shears=True, **kwargs)
|
56,879 |
nibabel.nifti1
|
set_sform
|
Set sform transform from 4x4 affine
Parameters
----------
affine : None or 4x4 array
affine transform to write into sform. If None, only set `code`
code : None, string or integer
String or integer giving meaning of transform in *affine*.
The default is None. If code is None, then:
* If affine is None, `code`-> 0
* If affine not None and existing sform code in header == 0,
`code`-> 2 (aligned)
* If affine not None and existing sform code in header != 0,
`code`-> existing sform code in header
update_affine : bool, optional
Whether to update the image affine from the header best affine
after setting the sform. Must be keyword argument (because of
different position in `set_qform`). Default is True
See also
--------
get_sform
set_qform
Examples
--------
>>> data = np.arange(24, dtype='f4').reshape((2,3,4))
>>> aff = np.diag([2, 3, 4, 1])
>>> img = Nifti1Pair(data, aff)
>>> img.get_sform()
array([[2., 0., 0., 0.],
[0., 3., 0., 0.],
[0., 0., 4., 0.],
[0., 0., 0., 1.]])
>>> saff, code = img.get_sform(coded=True)
>>> saff
array([[2., 0., 0., 0.],
[0., 3., 0., 0.],
[0., 0., 4., 0.],
[0., 0., 0., 1.]])
>>> int(code)
2
>>> aff2 = np.diag([3, 4, 5, 1])
>>> img.set_sform(aff2, 'talairach')
>>> saff, code = img.get_sform(coded=True)
>>> np.all(saff == aff2)
True
>>> int(code)
3
|
def set_sform(self, affine, code=None, **kwargs):
    """Write a 4x4 affine into the header sform fields.

    Parameters
    ----------
    affine : None or 4x4 array
        Affine to store in the sform. If None, only the code is set.
    code : None, string or integer
        Meaning of the transform in *affine*; when None the header
        resolves it (0 if affine is None, otherwise 2 / existing code).
    update_affine : bool, optional keyword
        When True (default), refresh the image affine from the header's
        best affine after writing the sform. Keyword-only.

    See also
    --------
    get_sform
    set_qform
    """
    update_affine = kwargs.pop('update_affine', True)
    if kwargs:
        raise TypeError(f'Unexpected keyword argument(s) {kwargs}')
    self._header.set_sform(affine, code)
    if not update_affine:
        return
    best = self._header.get_best_affine()
    if self._affine is None:
        self._affine = best
    else:
        # Update in place so existing references see the new values
        self._affine[:] = best
|
(self, affine, code=None, **kwargs)
|
56,881 |
nibabel.nifti1
|
to_file_map
|
Write image to `file_map` or contained ``self.file_map``
Parameters
----------
file_map : None or mapping, optional
files mapping. If None (default) use object's ``file_map``
attribute instead
dtype : dtype-like, optional
The on-disk data type to coerce the data array.
|
def to_file_map(self, file_map=None, dtype=None):
    """Serialize the image into `file_map` (or ``self.file_map``).

    Parameters
    ----------
    file_map : None or mapping, optional
        Files mapping. If None (default) use the object's own
        ``file_map`` attribute instead.
    dtype : dtype-like, optional
        The on-disk data type to coerce the data array.
    """
    # Remember the (possibly aliased) dtype, then resolve the alias to a
    # concrete dtype for writing.
    saved_dtype = self.get_data_dtype()
    self.get_data_dtype(finalize=True)
    try:
        super().to_file_map(file_map, dtype)
    finally:
        # Restore the alias even if writing raised.
        self.set_data_dtype(saved_dtype)
|
(self, file_map=None, dtype=None)
|
56,885 |
nibabel.nifti1
|
update_header
|
Harmonize header with image data and affine
|
def update_header(self):
    """Harmonize header with image data and affine.

    Also marks the header magic as a single-file NIfTI image.
    """
    super().update_header()
    self._header['magic'] = self._header.single_magic
|
(self)
|
56,886 |
nibabel.nifti1
|
Nifti1Pair
|
Class for NIfTI1 format image, header pair
|
class Nifti1Pair(analyze.AnalyzeImage):
    """Class for NIfTI1 format image, header pair"""
    header_class: type[Nifti1Header] = Nifti1PairHeader
    header: Nifti1Header
    # Number of header bytes read when sniffing a file for format detection
    _meta_sniff_len = header_class.sizeof_hdr
    rw = True
    # If a _dtype_alias has been set, it can only be resolved by inspecting
    # the data at serialization time
    _dtype_alias = None
    def __init__(self, dataobj, affine, header=None, extra=None, file_map=None, dtype=None):
        # Special carve-out for 64 bit integers
        # See GitHub issues
        # * https://github.com/nipy/nibabel/issues/1046
        # * https://github.com/nipy/nibabel/issues/1089
        # This only applies to NIfTI because the parent Analyze formats did
        # not support 64-bit integer data, so `set_data_dtype(int64)` would
        # already fail.
        danger_dts = (np.dtype('int64'), np.dtype('uint64'))
        if header is None and dtype is None and get_obj_dtype(dataobj) in danger_dts:
            alert_future_error(
                f'Image data has type {dataobj.dtype}, which may cause '
                'incompatibilities with other tools.',
                '5.0',
                warning_rec='This warning can be silenced by passing the dtype argument'
                f' to {self.__class__.__name__}().',
                error_rec='To use this type, pass an explicit header or dtype argument'
                f' to {self.__class__.__name__}().',
                error_class=ValueError,
            )
        super().__init__(dataobj, affine, header, extra, file_map, dtype)
        # Force set of s/q form when header is None unless affine is also None
        if header is None and affine is not None:
            self._affine2header()
    # Copy docstring
    __init__.__doc__ = f"""{analyze.AnalyzeImage.__init__.__doc__}
    Notes
    -----
    If both a `header` and an `affine` are specified, and the `affine` does
    not match the affine that is in the `header`, the `affine` will be used,
    but the ``sform_code`` and ``qform_code`` fields in the header will be
    re-initialised to their default values. This is performed on the basis
    that, if you are changing the affine, you are likely to be changing the
    space to which the affine is pointing. The :meth:`set_sform` and
    :meth:`set_qform` methods can be used to update the codes after an image
    has been created - see those methods, and the :ref:`manual
    <default-sform-qform-codes>` for more details. """
    def update_header(self):
        """Harmonize header with image data and affine
        See AnalyzeImage.update_header for more examples
        Examples
        --------
        >>> data = np.zeros((2,3,4))
        >>> affine = np.diag([1.0,2.0,3.0,1.0])
        >>> img = Nifti1Image(data, affine)
        >>> hdr = img.header
        >>> np.all(hdr.get_qform() == affine)
        True
        >>> np.all(hdr.get_sform() == affine)
        True
        """
        super().update_header()
        hdr = self._header
        # Pair (.hdr/.img) images carry the 'ni1'-style magic
        hdr['magic'] = hdr.pair_magic
    def _affine2header(self):
        """Unconditionally set affine into the header"""
        hdr = self._header
        # Set affine into sform with default code
        hdr.set_sform(self._affine, code='aligned')
        # Make qform 'unknown'
        hdr.set_qform(self._affine, code='unknown')
    def get_qform(self, coded=False):
        """Return 4x4 affine matrix from qform parameters in header
        Parameters
        ----------
        coded : bool, optional
            If True, return {affine or None}, and qform code. If False, just
            return affine. {affine or None} means, return None if qform code
            == 0, and affine otherwise.
        Returns
        -------
        affine : None or (4,4) ndarray
            If `coded` is False, always return affine reconstructed from qform
            quaternion. If `coded` is True, return None if qform code is 0,
            else return the affine.
        code : int
            Qform code. Only returned if `coded` is True.
        See also
        --------
        set_qform
        get_sform
        """
        return self._header.get_qform(coded)
    def set_qform(self, affine, code=None, strip_shears=True, **kwargs):
        """Set qform header values from 4x4 affine
        Parameters
        ----------
        affine : None or 4x4 array
            affine transform to write into sform. If None, only set code.
        code : None, string or integer
            String or integer giving meaning of transform in *affine*.
            The default is None. If code is None, then:
            * If affine is None, `code`-> 0
            * If affine not None and existing qform code in header == 0,
              `code`-> 2 (aligned)
            * If affine not None and existing qform code in header != 0,
              `code`-> existing qform code in header
        strip_shears : bool, optional
            Whether to strip shears in `affine`. If True, shears will be
            silently stripped. If False, the presence of shears will raise a
            ``HeaderDataError``
        update_affine : bool, optional
            Whether to update the image affine from the header best affine
            after setting the qform. Must be keyword argument (because of
            different position in `set_qform`). Default is True
        See also
        --------
        get_qform
        set_sform
        Examples
        --------
        >>> data = np.arange(24, dtype='f4').reshape((2,3,4))
        >>> aff = np.diag([2, 3, 4, 1])
        >>> img = Nifti1Pair(data, aff)
        >>> img.get_qform()
        array([[2., 0., 0., 0.],
               [0., 3., 0., 0.],
               [0., 0., 4., 0.],
               [0., 0., 0., 1.]])
        >>> img.get_qform(coded=True)
        (None, 0)
        >>> aff2 = np.diag([3, 4, 5, 1])
        >>> img.set_qform(aff2, 'talairach')
        >>> qaff, code = img.get_qform(coded=True)
        >>> np.all(qaff == aff2)
        True
        >>> int(code)
        3
        """
        update_affine = kwargs.pop('update_affine', True)
        if kwargs:
            raise TypeError(f'Unexpected keyword argument(s) {kwargs}')
        self._header.set_qform(affine, code, strip_shears)
        if update_affine:
            if self._affine is None:
                self._affine = self._header.get_best_affine()
            else:
                self._affine[:] = self._header.get_best_affine()
    def get_sform(self, coded=False):
        """Return 4x4 affine matrix from sform parameters in header
        Parameters
        ----------
        coded : bool, optional
            If True, return {affine or None}, and sform code. If False, just
            return affine. {affine or None} means, return None if sform code
            == 0, and affine otherwise.
        Returns
        -------
        affine : None or (4,4) ndarray
            If `coded` is False, always return affine from sform fields. If
            `coded` is True, return None if sform code is 0, else return the
            affine.
        code : int
            Sform code. Only returned if `coded` is True.
        See also
        --------
        set_sform
        get_qform
        """
        return self._header.get_sform(coded)
    def set_sform(self, affine, code=None, **kwargs):
        """Set sform transform from 4x4 affine
        Parameters
        ----------
        affine : None or 4x4 array
            affine transform to write into sform. If None, only set `code`
        code : None, string or integer
            String or integer giving meaning of transform in *affine*.
            The default is None. If code is None, then:
            * If affine is None, `code`-> 0
            * If affine not None and existing sform code in header == 0,
              `code`-> 2 (aligned)
            * If affine not None and existing sform code in header != 0,
              `code`-> existing sform code in header
        update_affine : bool, optional
            Whether to update the image affine from the header best affine
            after setting the qform. Must be keyword argument (because of
            different position in `set_qform`). Default is True
        See also
        --------
        get_sform
        set_qform
        Examples
        --------
        >>> data = np.arange(24, dtype='f4').reshape((2,3,4))
        >>> aff = np.diag([2, 3, 4, 1])
        >>> img = Nifti1Pair(data, aff)
        >>> img.get_sform()
        array([[2., 0., 0., 0.],
               [0., 3., 0., 0.],
               [0., 0., 4., 0.],
               [0., 0., 0., 1.]])
        >>> saff, code = img.get_sform(coded=True)
        >>> saff
        array([[2., 0., 0., 0.],
               [0., 3., 0., 0.],
               [0., 0., 4., 0.],
               [0., 0., 0., 1.]])
        >>> int(code)
        2
        >>> aff2 = np.diag([3, 4, 5, 1])
        >>> img.set_sform(aff2, 'talairach')
        >>> saff, code = img.get_sform(coded=True)
        >>> np.all(saff == aff2)
        True
        >>> int(code)
        3
        """
        update_affine = kwargs.pop('update_affine', True)
        if kwargs:
            raise TypeError(f'Unexpected keyword argument(s) {kwargs}')
        self._header.set_sform(affine, code)
        if update_affine:
            if self._affine is None:
                self._affine = self._header.get_best_affine()
            else:
                self._affine[:] = self._header.get_best_affine()
    def set_data_dtype(self, datatype):
        """Set numpy dtype for data from code, dtype, type or alias
        Using :py:class:`int` or ``"int"`` is disallowed, as these types
        will be interpreted as ``np.int64``, which is almost never desired.
        ``np.int64`` is permitted for those intent on making poor choices.
        The following aliases are defined to allow for flexible specification:
        * ``'mask'`` - Alias for ``uint8``
        * ``'compat'`` - The nearest Analyze-compatible datatype
          (``uint8``, ``int16``, ``int32``, ``float32``)
        * ``'smallest'`` - The smallest Analyze-compatible integer
          (``uint8``, ``int16``, ``int32``)
        Dynamic aliases are resolved when ``get_data_dtype()`` is called
        with a ``finalize=True`` flag. Until then, these aliases are not
        written to the header and will not persist to new images.
        Examples
        --------
        >>> ints = np.arange(24, dtype='i4').reshape((2,3,4))
        >>> img = Nifti1Image(ints, np.eye(4))
        >>> img.set_data_dtype(np.uint8)
        >>> img.get_data_dtype()
        dtype('uint8')
        >>> img.set_data_dtype('mask')
        >>> img.get_data_dtype()
        dtype('uint8')
        >>> img.set_data_dtype('compat')
        >>> img.get_data_dtype()
        'compat'
        >>> img.get_data_dtype(finalize=True)
        dtype('<i4')
        >>> img.get_data_dtype()
        dtype('<i4')
        >>> img.set_data_dtype('smallest')
        >>> img.get_data_dtype()
        'smallest'
        >>> img.get_data_dtype(finalize=True)
        dtype('uint8')
        >>> img.get_data_dtype()
        dtype('uint8')
        Note that floating point values will not be coerced to ``int``
        >>> floats = np.arange(24, dtype='f4').reshape((2,3,4))
        >>> img = Nifti1Image(floats, np.eye(4))
        >>> img.set_data_dtype('smallest')
        >>> img.get_data_dtype(finalize=True)
        Traceback (most recent call last):
           ...
        ValueError: Cannot automatically cast array (of type float32) to an integer
        type with fewer than 64 bits. Please set_data_dtype() to an explicit data type.
        >>> arr = np.arange(1000, 1024, dtype='i4').reshape((2,3,4))
        >>> img = Nifti1Image(arr, np.eye(4))
        >>> img.set_data_dtype('smallest')
        >>> img.set_data_dtype('implausible')
        Traceback (most recent call last):
           ...
        nibabel.spatialimages.HeaderDataError: data dtype "implausible" not recognized
        >>> img.set_data_dtype('none')
        Traceback (most recent call last):
           ...
        nibabel.spatialimages.HeaderDataError: data dtype "none" known but not supported
        >>> img.set_data_dtype(np.void)
        Traceback (most recent call last):
           ...
        nibabel.spatialimages.HeaderDataError: data dtype "<class 'numpy.void'>" known
        but not supported
        >>> img.set_data_dtype('int')
        Traceback (most recent call last):
           ...
        ValueError: Invalid data type 'int'. Specify a sized integer, e.g., 'uint8' or numpy.int16.
        >>> img.set_data_dtype(int)
        Traceback (most recent call last):
           ...
        ValueError: Invalid data type <class 'int'>. Specify a sized integer, e.g., 'uint8' or
        numpy.int16.
        >>> img.set_data_dtype('int64')
        >>> img.get_data_dtype() == np.dtype('int64')
        True
        """
        # Comparing dtypes to strings, numpy will attempt to call, e.g., dtype('mask'),
        # so only check for aliases if the type is a string
        # See https://github.com/numpy/numpy/issues/7242
        if isinstance(datatype, str):
            # Static aliases
            if datatype == 'mask':
                datatype = 'u1'
            # Dynamic aliases
            elif datatype in ('compat', 'smallest'):
                self._dtype_alias = datatype
                return
        # A concrete dtype clears any previously-set dynamic alias
        self._dtype_alias = None
        super().set_data_dtype(datatype)
    def get_data_dtype(self, finalize=False):
        """Get numpy dtype for data
        If ``set_data_dtype()`` has been called with an alias
        and ``finalize`` is ``False``, return the alias.
        If ``finalize`` is ``True``, determine the appropriate dtype
        from the image data object and set the final dtype in the
        header before returning it.
        """
        if self._dtype_alias is None:
            return super().get_data_dtype()
        if not finalize:
            return self._dtype_alias
        # Resolve the dynamic alias against the actual array data
        datatype = None
        if self._dtype_alias == 'compat':
            datatype = _get_analyze_compat_dtype(self._dataobj)
            descrip = 'an Analyze-compatible dtype'
        elif self._dtype_alias == 'smallest':
            datatype = _get_smallest_dtype(self._dataobj)
            descrip = 'an integer type with fewer than 64 bits'
        else:
            raise ValueError(f'Unknown dtype alias {self._dtype_alias}.')
        if datatype is None:
            dt = get_obj_dtype(self._dataobj)
            raise ValueError(
                f'Cannot automatically cast array (of type {dt}) to {descrip}.'
                ' Please set_data_dtype() to an explicit data type.'
            )
        self.set_data_dtype(datatype)  # Clears the alias
        return super().get_data_dtype()
    def to_file_map(self, file_map=None, dtype=None):
        """Write image to `file_map` or contained ``self.file_map``
        Parameters
        ----------
        file_map : None or mapping, optional
           files mapping.  If None (default) use object's ``file_map``
           attribute instead
        dtype : dtype-like, optional
           The on-disk data type to coerce the data array.
        """
        # Resolve any dtype alias for writing, restoring it afterwards
        img_dtype = self.get_data_dtype()
        self.get_data_dtype(finalize=True)
        try:
            super().to_file_map(file_map, dtype)
        finally:
            self.set_data_dtype(img_dtype)
    def as_reoriented(self, ornt):
        """Apply an orientation change and return a new image
        If ornt is identity transform, return the original image, unchanged
        Parameters
        ----------
        ornt : (n,2) orientation array
           orientation transform. ``ornt[N,1]` is flip of axis N of the
           array implied by `shape`, where 1 means no flip and -1 means
           flip.  For example, if ``N==0`` and ``ornt[0,1] == -1``, and
           there's an array ``arr`` of shape `shape`, the flip would
           correspond to the effect of ``np.flipud(arr)``.  ``ornt[:,0]`` is
           the transpose that needs to be done to the implied array, as in
           ``arr.transpose(ornt[:,0])``
        """
        img = super().as_reoriented(ornt)
        if img is self:
            return img
        # Also apply the transform to the dim_info fields
        new_dim = [
            None if orig_dim is None else int(ornt[orig_dim, 0])
            for orig_dim in img.header.get_dim_info()
        ]
        img.header.set_dim_info(*new_dim)
        return img
|
(dataobj, affine, header=None, extra=None, file_map=None, dtype=None)
|
56,907 |
nibabel.nifti1
|
update_header
|
Harmonize header with image data and affine
See AnalyzeImage.update_header for more examples
Examples
--------
>>> data = np.zeros((2,3,4))
>>> affine = np.diag([1.0,2.0,3.0,1.0])
>>> img = Nifti1Image(data, affine)
>>> hdr = img.header
>>> np.all(hdr.get_qform() == affine)
True
>>> np.all(hdr.get_sform() == affine)
True
|
def update_header(self):
    """Harmonize header with image data and affine.

    In addition to the parent-class harmonization, this marks the header
    magic as a two-file (.hdr/.img) NIfTI pair.
    """
    super().update_header()
    self._header['magic'] = self._header.pair_magic
|
(self)
|
56,908 |
nibabel.nifti2
|
Nifti2Header
|
Class for NIfTI2 header
NIfTI2 is a slightly simplified variant of NIfTI1 which replaces 32-bit
floats with 64-bit floats, and increases some integer widths to 32 or 64
bits.
|
class Nifti2Header(Nifti1Header):
    """Class for NIfTI2 header
    NIfTI2 is a slightly simplified variant of NIfTI1 which replaces 32-bit
    floats with 64-bit floats, and increases some integer widths to 32 or 64
    bits.
    """
    template_dtype = header_dtype
    # Data offsets for .hdr/.img pairs vs single-file images
    pair_vox_offset = 0
    single_vox_offset = 544
    # Magics for single and pair
    pair_magic = b'ni2'
    single_magic = b'n+2'
    # Size of header in sizeof_hdr field
    sizeof_hdr = 540
    # Quaternion threshold near 0, based on float64 preicision
    quaternion_threshold = -np.finfo(np.float64).eps * 3
    def get_data_shape(self):
        """Get shape of data
        Examples
        --------
        >>> hdr = Nifti2Header()
        >>> hdr.get_data_shape()
        (0,)
        >>> hdr.set_data_shape((1,2,3))
        >>> hdr.get_data_shape()
        (1, 2, 3)
        Expanding number of dimensions gets default zooms
        >>> hdr.get_zooms()
        (1.0, 1.0, 1.0)
        Notes
        -----
        Does not use Nifti1 freesurfer hack for large vectors described in
        :meth:`Nifti1Header.set_data_shape`
        """
        return AnalyzeHeader.get_data_shape(self)
    def set_data_shape(self, shape):
        """Set shape of data
        If ``ndims == len(shape)`` then we set zooms for dimensions higher than
        ``ndims`` to 1.0
        Parameters
        ----------
        shape : sequence
           sequence of integers specifying data array shape
        Notes
        -----
        Does not apply nifti1 Freesurfer hack for long vectors (see
        :meth:`Nifti1Header.set_data_shape`)
        """
        AnalyzeHeader.set_data_shape(self, shape)
    @classmethod
    def default_structarr(klass, endianness=None):
        """Create empty header binary block with given endianness"""
        hdr_data = super().default_structarr(endianness)
        # eol_check guard bytes: CR LF SUB LF, detecting line-ending mangling
        hdr_data['eol_check'] = (13, 10, 26, 10)
        return hdr_data
    """ Checks only below here """
    @classmethod
    def _get_checks(klass):
        # Add our own checks
        return super()._get_checks() + (klass._chk_eol_check,)
    @staticmethod
    def _chk_eol_check(hdr, fix=False):
        # Validate (and optionally repair) the eol_check guard bytes
        rep = Report(HeaderDataError)
        if np.all(hdr['eol_check'] == (13, 10, 26, 10)):
            return hdr, rep
        if np.all(hdr['eol_check'] == 0):
            rep.problem_level = 20
            rep.problem_msg = 'EOL check all 0'
            if fix:
                hdr['eol_check'] = (13, 10, 26, 10)
                rep.fix_msg = 'setting EOL check to 13, 10, 26, 10'
            return hdr, rep
        rep.problem_level = 40
        rep.problem_msg = (
            'EOL check not 0 or 13, 10, 26, 10; data may be corrupted by EOL conversion'
        )
        if fix:
            hdr['eol_check'] = (13, 10, 26, 10)
            rep.fix_msg = 'setting EOL check to 13, 10, 26, 10'
        return hdr, rep
    @classmethod
    def may_contain_header(klass, binaryblock):
        if len(binaryblock) < klass.sizeof_hdr:
            return False
        hdr_struct = np.ndarray(
            shape=(), dtype=header_dtype, buffer=binaryblock[: klass.sizeof_hdr]
        )
        # Accept a native- or swapped-endian sizeof_hdr of 540
        bs_hdr_struct = hdr_struct.byteswap()
        return 540 in (hdr_struct['sizeof_hdr'], bs_hdr_struct['sizeof_hdr'])
|
(binaryblock=None, endianness=None, check=True, extensions=())
|
56,917 |
nibabel.nifti2
|
_chk_eol_check
| null |
@staticmethod
def _chk_eol_check(hdr, fix=False):
    """Check (and optionally repair) the NIfTI-2 ``eol_check`` field."""
    expected = (13, 10, 26, 10)
    rep = Report(HeaderDataError)
    if np.all(hdr['eol_check'] == expected):
        # Field is as specified; nothing to report
        return hdr, rep
    if np.all(hdr['eol_check'] == 0):
        # Unset field: mild problem
        rep.problem_level = 20
        rep.problem_msg = 'EOL check all 0'
    else:
        # Anything else suggests the file was mangled by EOL conversion
        rep.problem_level = 40
        rep.problem_msg = (
            'EOL check not 0 or 13, 10, 26, 10; data may be corrupted by EOL conversion'
        )
    if fix:
        hdr['eol_check'] = expected
        rep.fix_msg = 'setting EOL check to 13, 10, 26, 10'
    return hdr, rep
|
(hdr, fix=False)
|
56,935 |
nibabel.nifti2
|
get_data_shape
|
Get shape of data
Examples
--------
>>> hdr = Nifti2Header()
>>> hdr.get_data_shape()
(0,)
>>> hdr.set_data_shape((1,2,3))
>>> hdr.get_data_shape()
(1, 2, 3)
Expanding number of dimensions gets default zooms
>>> hdr.get_zooms()
(1.0, 1.0, 1.0)
Notes
-----
Does not use Nifti1 freesurfer hack for large vectors described in
:meth:`Nifti1Header.set_data_shape`
|
def get_data_shape(self):
    """Get shape of data
    Examples
    --------
    >>> hdr = Nifti2Header()
    >>> hdr.get_data_shape()
    (0,)
    >>> hdr.set_data_shape((1,2,3))
    >>> hdr.get_data_shape()
    (1, 2, 3)
    Expanding number of dimensions gets default zooms
    >>> hdr.get_zooms()
    (1.0, 1.0, 1.0)
    Notes
    -----
    Does not use Nifti1 freesurfer hack for large vectors described in
    :meth:`Nifti1Header.set_data_shape`
    """
    # Delegate directly to AnalyzeHeader, bypassing the NIfTI-1 override
    return AnalyzeHeader.get_data_shape(self)
|
(self)
|
56,953 |
nibabel.nifti2
|
set_data_shape
|
Set shape of data
If ``ndims == len(shape)`` then we set zooms for dimensions higher than
``ndims`` to 1.0
Parameters
----------
shape : sequence
sequence of integers specifying data array shape
Notes
-----
Does not apply nifti1 Freesurfer hack for long vectors (see
:meth:`Nifti1Header.set_data_shape`)
|
def set_data_shape(self, shape):
    """Set shape of data
    If ``ndims == len(shape)`` then we set zooms for dimensions higher than
    ``ndims`` to 1.0
    Parameters
    ----------
    shape : sequence
       sequence of integers specifying data array shape
    Notes
    -----
    Does not apply nifti1 Freesurfer hack for long vectors (see
    :meth:`Nifti1Header.set_data_shape`)
    """
    # Delegate directly to AnalyzeHeader, bypassing the NIfTI-1 override
    AnalyzeHeader.set_data_shape(self, shape)
|
(self, shape)
|
56,965 |
nibabel.nifti2
|
Nifti2Image
|
Class for single file NIfTI2 format image
|
class Nifti2Image(Nifti1Image):
    """Class for single file NIfTI2 format image"""
    # Use the NIfTI-2 header; sniff length covers the fixed-size header
    header_class = Nifti2Header
    _meta_sniff_len = header_class.sizeof_hdr
|
(dataobj, affine, header=None, extra=None, file_map=None, dtype=None)
|
56,989 |
nibabel.nifti2
|
Nifti2Pair
|
Class for NIfTI2 format image, header pair
|
class Nifti2Pair(Nifti1Pair):
    """Class for NIfTI2 format image, header pair"""
    # Use the NIfTI-2 pair header; sniff length covers the fixed-size header
    header_class = Nifti2PairHeader
    _meta_sniff_len = header_class.sizeof_hdr
|
(dataobj, affine, header=None, extra=None, file_map=None, dtype=None)
|
57,011 |
nibabel.orientations
|
OrientationError
| null |
class OrientationError(Exception):
    """Exception raised for orientation-related problems."""
    pass
| null |
57,012 |
nibabel.spm2analyze
|
Spm2AnalyzeHeader
|
Class for SPM2 variant of basic Analyze header
SPM2 variant adds the following to basic Analyze format:
* voxel origin;
* slope scaling of data;
* reading - but not writing - intercept of data.
|
class Spm2AnalyzeHeader(spm99.Spm99AnalyzeHeader):
    """Class for SPM2 variant of basic Analyze header
    SPM2 variant adds the following to basic Analyze format:
    * voxel origin;
    * slope scaling of data;
    * reading - but not writing - intercept of data.
    """
    # Copies of module level definitions
    template_dtype = header_dtype
    def get_slope_inter(self):
        """Get data scaling (slope) and intercept from header data
        Uses the algorithm from SPM2 spm_vol_ana.m by John Ashburner
        Parameters
        ----------
        self : header
           Mapping with fields:
           * scl_slope - slope
           * scl_inter - possible intercept (SPM2 use - shared by nifti)
           * glmax - the (recorded) maximum value in the data (unscaled)
           * glmin - recorded minimum unscaled value
           * cal_max - the calibrated (scaled) maximum value in the dataset
           * cal_min - ditto minimum value
        Returns
        -------
        scl_slope : None or float
            slope.  None if there is no valid scaling from these fields
        scl_inter : None or float
            intercept.  Also None if there is no valid slope, intercept
        Examples
        --------
        >>> fields = {'scl_slope': 1, 'scl_inter': 0, 'glmax': 0, 'glmin': 0,
        ...           'cal_max': 0, 'cal_min': 0}
        >>> hdr = Spm2AnalyzeHeader()
        >>> for key, value in fields.items():
        ...     hdr[key] = value
        >>> hdr.get_slope_inter()
        (1.0, 0.0)
        >>> hdr['scl_inter'] = 0.5
        >>> hdr.get_slope_inter()
        (1.0, 0.5)
        >>> hdr['scl_inter'] = np.nan
        >>> hdr.get_slope_inter()
        (1.0, 0.0)
        If 'scl_slope' is 0, nan or inf, cannot use 'scl_slope'.
        Without valid information in the gl / cal fields, we cannot get
        scaling, and return None
        >>> hdr['scl_slope'] = 0
        >>> hdr.get_slope_inter()
        (None, None)
        >>> hdr['scl_slope'] = np.nan
        >>> hdr.get_slope_inter()
        (None, None)
        Valid information in the gl AND cal fields are needed
        >>> hdr['cal_max'] = 0.8
        >>> hdr['cal_min'] = 0.2
        >>> hdr.get_slope_inter()
        (None, None)
        >>> hdr['glmax'] = 110
        >>> hdr['glmin'] = 10
        >>> np.allclose(hdr.get_slope_inter(), [0.6/100, 0.2-0.6/100*10])
        True
        """
        # get scaling factor from 'scl_slope' (funused1)
        slope = float(self['scl_slope'])
        if np.isfinite(slope) and slope:
            # try to get offset from scl_inter
            inter = float(self['scl_inter'])
            if not np.isfinite(inter):
                inter = 0.0
            return slope, inter
        # no non-zero and finite scaling, try gl/cal fields
        unscaled_range = self['glmax'] - self['glmin']
        scaled_range = self['cal_max'] - self['cal_min']
        if unscaled_range and scaled_range:
            slope = float(scaled_range) / unscaled_range
            inter = self['cal_min'] - slope * self['glmin']
            return slope, inter
        return None, None
    @classmethod
    def may_contain_header(klass, binaryblock):
        if len(binaryblock) < klass.sizeof_hdr:
            return False
        hdr_struct = np.ndarray(
            shape=(), dtype=header_dtype, buffer=binaryblock[: klass.sizeof_hdr]
        )
        bs_hdr_struct = hdr_struct.byteswap()
        # Reject NIfTI magic bytes; accept Analyze-sized (348) header in
        # either byte order
        return binaryblock[344:348] not in (b'ni1\x00', b'n+1\x00') and 348 in (
            hdr_struct['sizeof_hdr'],
            bs_hdr_struct['sizeof_hdr'],
        )
|
(binaryblock=None, endianness=None, check=True)
|
57,021 |
nibabel.spm99analyze
|
_chk_origin
| null |
@staticmethod
def _chk_origin(hdr, fix=False):
    """Flag origin values that are implausibly large relative to the dims."""
    rep = Report(HeaderDataError)
    origin = hdr['origin'][0:3]
    dims = hdr['dim'][1:4]
    # An all-zero origin is "unset"; otherwise require it to sit within a
    # generous window around the volume
    plausible = np.all(origin > -dims) and np.all(origin < dims * 2)
    if not np.any(origin) or plausible:
        return hdr, rep
    rep.problem_level = 20
    rep.problem_msg = 'very large origin values relative to dims'
    if fix:
        rep.fix_msg = 'leaving as set, ignoring for affine'
    return hdr, rep
|
(hdr, fix=False)
|
57,032 |
nibabel.spm99analyze
|
get_origin_affine
|
Get affine from header, using SPM origin field if sensible
The default translations are got from the ``origin``
field, if set, or from the center of the image otherwise.
Examples
--------
>>> hdr = Spm99AnalyzeHeader()
>>> hdr.set_data_shape((3, 5, 7))
>>> hdr.set_zooms((3, 2, 1))
>>> hdr.default_x_flip
True
>>> hdr.get_origin_affine() # from center of image
array([[-3., 0., 0., 3.],
[ 0., 2., 0., -4.],
[ 0., 0., 1., -3.],
[ 0., 0., 0., 1.]])
>>> hdr['origin'][:3] = [3,4,5]
>>> hdr.get_origin_affine() # using origin
array([[-3., 0., 0., 6.],
[ 0., 2., 0., -6.],
[ 0., 0., 1., -4.],
[ 0., 0., 0., 1.]])
>>> hdr['origin'] = 0 # unset origin
>>> hdr.set_data_shape((3, 5, 7))
>>> hdr.get_origin_affine() # from center of image
array([[-3., 0., 0., 3.],
[ 0., 2., 0., -4.],
[ 0., 0., 1., -3.],
[ 0., 0., 0., 1.]])
|
def get_origin_affine(self):
    """Build an affine from the zooms and the SPM origin field.

    Translations come from the ``origin`` field when it holds a plausible
    (matlab-style, 1-based) voxel coordinate, otherwise from the center
    of the image volume.
    """
    structarr = self._structarr
    zooms = structarr['pixdim'][1:4].copy()
    # Negate the x zoom for the default radiological flip
    if self.default_x_flip:
        zooms[0] = -zooms[0]
    origin = structarr['origin'][:3]
    dims = structarr['dim'][1:4]
    # Origin is plausible if set and within a generous window of the volume
    plausible = np.any(origin) and np.all(origin > -dims) and np.all(origin < dims * 2)
    if plausible:
        # Convert matlab 1-based index to 0-based
        center = origin - 1
    else:
        center = (dims - 1) / 2.0
    affine = np.eye(4)
    affine[:3, :3] = np.diag(zooms)
    affine[:3, -1] = -center * zooms
    return affine
|
(self)
|
57,037 |
nibabel.spm2analyze
|
get_slope_inter
|
Get data scaling (slope) and intercept from header data
Uses the algorithm from SPM2 spm_vol_ana.m by John Ashburner
Parameters
----------
self : header
Mapping with fields:
* scl_slope - slope
* scl_inter - possible intercept (SPM2 use - shared by nifti)
* glmax - the (recorded) maximum value in the data (unscaled)
* glmin - recorded minimum unscaled value
* cal_max - the calibrated (scaled) maximum value in the dataset
* cal_min - ditto minimum value
Returns
-------
scl_slope : None or float
slope. None if there is no valid scaling from these fields
scl_inter : None or float
intercept. Also None if there is no valid slope, intercept
Examples
--------
>>> fields = {'scl_slope': 1, 'scl_inter': 0, 'glmax': 0, 'glmin': 0,
... 'cal_max': 0, 'cal_min': 0}
>>> hdr = Spm2AnalyzeHeader()
>>> for key, value in fields.items():
... hdr[key] = value
>>> hdr.get_slope_inter()
(1.0, 0.0)
>>> hdr['scl_inter'] = 0.5
>>> hdr.get_slope_inter()
(1.0, 0.5)
>>> hdr['scl_inter'] = np.nan
>>> hdr.get_slope_inter()
(1.0, 0.0)
If 'scl_slope' is 0, nan or inf, cannot use 'scl_slope'.
Without valid information in the gl / cal fields, we cannot get
scaling, and return None
>>> hdr['scl_slope'] = 0
>>> hdr.get_slope_inter()
(None, None)
>>> hdr['scl_slope'] = np.nan
>>> hdr.get_slope_inter()
(None, None)
Valid information in the gl AND cal fields are needed
>>> hdr['cal_max'] = 0.8
>>> hdr['cal_min'] = 0.2
>>> hdr.get_slope_inter()
(None, None)
>>> hdr['glmax'] = 110
>>> hdr['glmin'] = 10
>>> np.allclose(hdr.get_slope_inter(), [0.6/100, 0.2-0.6/100*10])
True
|
def get_slope_inter(self):
    """Return (slope, intercept) scaling from the header fields.

    Follows the SPM2 ``spm_vol_ana.m`` algorithm: prefer the
    ``scl_slope`` / ``scl_inter`` fields; when the slope is zero or
    non-finite, fall back to deriving a linear mapping from the
    ``glmax``/``glmin`` (unscaled) and ``cal_max``/``cal_min`` (scaled)
    ranges; return ``(None, None)`` when neither source is usable.

    Returns
    -------
    scl_slope : None or float
        Slope, or None if no valid scaling could be derived.
    scl_inter : None or float
        Intercept; None exactly when the slope is None.
    """
    slope = float(self['scl_slope'])
    if slope and np.isfinite(slope):
        inter = float(self['scl_inter'])
        # A nan/inf intercept is treated as absent
        return slope, (inter if np.isfinite(inter) else 0.0)
    # No usable scl_slope; try mapping the gl range onto the cal range
    gl_range = self['glmax'] - self['glmin']
    cal_range = self['cal_max'] - self['cal_min']
    if gl_range and cal_range:
        slope = float(cal_range) / gl_range
        return slope, self['cal_min'] - slope * self['glmin']
    return None, None
|
(self)
|
57,046 |
nibabel.spm99analyze
|
set_origin_from_affine
|
Set SPM origin to header from affine matrix.
The ``origin`` field was read but not written by SPM99 and 2. It was
used for storing a central voxel coordinate, that could be used in
aligning the image to some standard position - a proxy for a full
translation vector that was usually stored in a separate matlab .mat
file.
Nifti uses the space occupied by the SPM ``origin`` field for important
other information (the transform codes), so writing the origin will
make the header a confusing Nifti file. If you work with both Analyze
and Nifti, you should probably avoid doing this.
Parameters
----------
affine : array-like, shape (4,4)
Affine matrix to set
Returns
-------
None
Examples
--------
>>> hdr = Spm99AnalyzeHeader()
>>> hdr.set_data_shape((3, 5, 7))
>>> hdr.set_zooms((3,2,1))
>>> hdr.get_origin_affine()
array([[-3., 0., 0., 3.],
[ 0., 2., 0., -4.],
[ 0., 0., 1., -3.],
[ 0., 0., 0., 1.]])
>>> affine = np.diag([3,2,1,1])
>>> affine[:3,3] = [-6, -6, -4]
>>> hdr.set_origin_from_affine(affine)
>>> np.all(hdr['origin'][:3] == [3,4,5])
True
>>> hdr.get_origin_affine()
array([[-3., 0., 0., 6.],
[ 0., 2., 0., -6.],
[ 0., 0., 1., -4.],
[ 0., 0., 0., 1.]])
|
def set_origin_from_affine(self, affine):
"""Set SPM origin to header from affine matrix.
The ``origin`` field was read but not written by SPM99 and 2. It was
used for storing a central voxel coordinate, that could be used in
aligning the image to some standard position - a proxy for a full
translation vector that was usually stored in a separate matlab .mat
file.
Nifti uses the space occupied by the SPM ``origin`` field for important
other information (the transform codes), so writing the origin will
make the header a confusing Nifti file. If you work with both Analyze
and Nifti, you should probably avoid doing this.
Parameters
----------
affine : array-like, shape (4,4)
Affine matrix to set
Returns
-------
None
Examples
--------
>>> hdr = Spm99AnalyzeHeader()
>>> hdr.set_data_shape((3, 5, 7))
>>> hdr.set_zooms((3,2,1))
>>> hdr.get_origin_affine()
array([[-3., 0., 0., 3.],
[ 0., 2., 0., -4.],
[ 0., 0., 1., -3.],
[ 0., 0., 0., 1.]])
>>> affine = np.diag([3,2,1,1])
>>> affine[:3,3] = [-6, -6, -4]
>>> hdr.set_origin_from_affine(affine)
>>> np.all(hdr['origin'][:3] == [3,4,5])
True
>>> hdr.get_origin_affine()
array([[-3., 0., 0., 6.],
[ 0., 2., 0., -6.],
[ 0., 0., 1., -4.],
[ 0., 0., 0., 1.]])
"""
if affine.shape != (4, 4):
raise ValueError('Need 4x4 affine to set')
hdr = self._structarr
RZS = affine[:3, :3]
Z = np.sqrt(np.sum(RZS * RZS, axis=0))
T = affine[:3, 3]
# Remember that the origin is for matlab (1-based) indexing
hdr['origin'][:3] = -T / Z + 1
|
(self, affine)
|
57,047 |
nibabel.spm99analyze
|
set_slope_inter
|
Set slope and / or intercept into header
Set slope and intercept for image data, such that, if the image
data is ``arr``, then the scaled image data will be ``(arr *
slope) + inter``
The SPM Analyze header can't save an intercept value, and we raise an
error unless `inter` is None, NaN or 0
Parameters
----------
slope : None or float
If None, implies `slope` of NaN. NaN is a signal to the image
writing routines to rescale on save. 0, Inf, -Inf are invalid and
cause a HeaderDataError
inter : None or float, optional
intercept. Must be None, NaN or 0, because SPM99 cannot store
intercepts.
|
def set_slope_inter(self, slope, inter=None):
    """Store data scaling slope in header; check intercept is storable.

    With slope and intercept applied, scaled image data for raw array
    ``arr`` is ``(arr * slope) + inter``.  The SPM Analyze header has no
    field for an intercept, so any `inter` other than None, NaN or 0
    raises an error.

    Parameters
    ----------
    slope : None or float
        If None, implies `slope` of NaN.  NaN is a signal to the image
        writing routines to rescale on save.  0, Inf, -Inf are invalid and
        cause a HeaderDataError
    inter : None or float, optional
        intercept.  Must be None, NaN or 0, because SPM99 cannot store
        intercepts.
    """
    if slope is None:
        slope = np.nan
    if slope in (0, -np.inf, np.inf):
        raise HeaderDataError('Slope cannot be 0 or infinite')
    self._structarr['scl_slope'] = slope
    # Only an absent / zero / NaN intercept is representable in SPM headers.
    if not (inter in (None, 0) or np.isnan(inter)):
        raise HeaderTypeError('Cannot set non-zero intercept for SPM headers')
|
(self, slope, inter=None)
|
57,051 |
nibabel.spm2analyze
|
Spm2AnalyzeImage
|
Class for SPM2 variant of basic Analyze image
|
class Spm2AnalyzeImage(spm99.Spm99AnalyzeImage):
    """Class for SPM2 variant of basic Analyze image"""
    # Only the header class differs from the SPM99 parent; all reading and
    # writing behavior is inherited from Spm99AnalyzeImage.
    header_class = Spm2AnalyzeHeader
    # Type of the parsed header instance attribute.
    header: Spm2AnalyzeHeader
|
(dataobj, affine, header=None, extra=None, file_map=None, dtype=None)
|
57,065 |
nibabel.spm99analyze
|
to_file_map
|
Write image to `file_map` or contained ``self.file_map``
Extends Analyze ``to_file_map`` method by writing ``mat`` file
Parameters
----------
file_map : None or mapping, optional
files mapping. If None (default) use object's ``file_map``
attribute instead
|
def to_file_map(self, file_map=None, dtype=None):
    """Write image to `file_map` or contained ``self.file_map``

    Extends Analyze ``to_file_map`` method by writing ``mat`` file

    Parameters
    ----------
    file_map : None or mapping, optional
        files mapping.  If None (default) use object's ``file_map``
        attribute instead
    dtype : dtype-like, optional
        On-disk data type, passed through to the parent writer.
    """
    if file_map is None:
        file_map = self.file_map
    # Image data and header go out first via the Analyze machinery.
    super().to_file_map(file_map, dtype=dtype)
    affine = self._affine
    if affine is None:
        return
    import scipy.io as sio

    # SPM's 'M' matrix carries the radiological x-flip when the header
    # asks for it; 'mat' stays in the unflipped frame.
    if self._header.default_x_flip:
        M = np.dot(np.diag([-1, 1, 1, 1]), affine)
    else:
        M = affine
    # Shift both matrices to matlab's 1-based (1,1,1) voxel origin.
    to_matlab = np.eye(4)
    to_matlab[:3, 3] = -1
    M = np.dot(M, to_matlab)
    affine = np.dot(affine, to_matlab)
    # matlab 4 format allows a gzipped write without error
    with file_map['mat'].get_prepare_fileobj(mode='wb') as mfobj:
        sio.savemat(mfobj, {'M': M, 'mat': affine}, format='4')
|
(self, file_map=None, dtype=None)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.