Skip to content

Buffer

Buffer

zarr.testing.buffer

__all__ module-attribute

__all__ = [
    "NDBufferUsingTestNDArrayLike",
    "StoreExpectingTestBuffer",
    "TestBuffer",
]

NDBufferUsingTestNDArrayLike

Bases: NDBuffer

Example of a custom NDBuffer that handles TestNDArrayLike

Source code in zarr/testing/buffer.py
class NDBufferUsingTestNDArrayLike(cpu.NDBuffer):
    """Example of a custom NDBuffer that handles MyNDArrayLike"""

    @classmethod
    def create(
        cls,
        *,
        shape: Iterable[int],
        dtype: npt.DTypeLike,
        order: Literal["C", "F"] = "C",
        fill_value: Any | None = None,
    ) -> Self:
        """Overwrite `NDBuffer.create` to create a TestNDArrayLike instance"""
        ret = cls(TestNDArrayLike(shape=shape, dtype=dtype, order=order))
        if fill_value is not None:
            ret.fill(fill_value)
        return ret

    @classmethod
    def empty(
        cls,
        shape: tuple[int, ...],
        dtype: npt.DTypeLike,
        order: Literal["C", "F"] = "C",
    ) -> Self:
        return super(cpu.NDBuffer, cls).empty(shape=shape, dtype=dtype, order=order)

byteorder property

byteorder: Endian

dtype property

dtype: dtype[Any]

shape property

shape: tuple[int, ...]

__getitem__

__getitem__(key: Any) -> Self
Source code in zarr/core/buffer/cpu.py
def __getitem__(self, key: Any) -> Self:
    return self.__class__(np.asanyarray(self._data.__getitem__(key)))

__init__

__init__(array: NDArrayLike) -> None
Source code in zarr/core/buffer/cpu.py
def __init__(self, array: NDArrayLike) -> None:
    super().__init__(array)

__len__

__len__() -> int
Source code in zarr/core/buffer/core.py
def __len__(self) -> int:
    return self._data.__len__()

__repr__

__repr__() -> str
Source code in zarr/core/buffer/core.py
def __repr__(self) -> str:
    return f"<NDBuffer shape={self.shape} dtype={self.dtype} {self._data!r}>"

__setitem__

__setitem__(key: Any, value: Any) -> None
Source code in zarr/core/buffer/cpu.py
def __setitem__(self, key: Any, value: Any) -> None:
    if isinstance(value, NDBuffer):
        value = value._data
    self._data.__setitem__(key, value)

all_equal

all_equal(other: Any, equal_nan: bool = True) -> bool

Compare to other using np.array_equal.

Source code in zarr/core/buffer/core.py
def all_equal(self, other: Any, equal_nan: bool = True) -> bool:
    """Compare to `other` using np.array_equal."""
    if other is None:
        # Handle None fill_value for Zarr V2
        return False
    # Handle positive and negative zero by comparing bit patterns:
    if (
        np.asarray(other).dtype.kind == "f"
        and other == 0.0
        and self._data.dtype.kind not in ("U", "S", "T", "O", "V")
    ):
        _data, other = np.broadcast_arrays(self._data, np.asarray(other, self._data.dtype))
        void_dtype = "V" + str(_data.dtype.itemsize)
        return np.array_equal(_data.view(void_dtype), other.view(void_dtype))
    # use array_equal to obtain equal_nan=True functionality
    # Since fill-value is a scalar, isn't there a faster path than allocating a new array for fill value
    # every single time we have to write data?
    _data, other = np.broadcast_arrays(self._data, other)
    return np.array_equal(
        _data,
        other,
        equal_nan=equal_nan
        if self._data.dtype.kind not in ("U", "S", "T", "O", "V")
        else False,
    )

as_ndarray_like

as_ndarray_like() -> NDArrayLike

Returns the underlying array (host or device memory) of this buffer

This will never copy data.

Returns:

  • The underlying array such as a NumPy or CuPy array.
Source code in zarr/core/buffer/core.py
def as_ndarray_like(self) -> NDArrayLike:
    """Returns the underlying array (host or device memory) of this buffer

    This will never copy data.

    Returns
    -------
        The underlying array such as a NumPy or CuPy array.
    """
    return self._data

as_numpy_array

as_numpy_array() -> NDArray[Any]

Returns the buffer as a NumPy array (host memory).

Warnings

Might have to copy data, consider using .as_ndarray_like() instead.

Returns:

  • NumPy array of this buffer (might be a data copy)
Source code in zarr/core/buffer/cpu.py
def as_numpy_array(self) -> npt.NDArray[Any]:
    """Returns the buffer as a NumPy array (host memory).

    Warnings
    --------
    Might have to copy data, consider using `.as_ndarray_like()` instead.

    Returns
    -------
        NumPy array of this buffer (might be a data copy)
    """
    return np.asanyarray(self._data)

as_scalar

as_scalar() -> ScalarType

Returns the buffer as a scalar value

Source code in zarr/core/buffer/core.py
def as_scalar(self) -> ScalarType:
    """Returns the buffer as a scalar value"""
    if self._data.size != 1:
        raise ValueError("Buffer does not contain a single scalar value")
    return cast("ScalarType", self.as_numpy_array()[()])

astype

astype(
    dtype: DTypeLike,
    order: Literal["K", "A", "C", "F"] = "K",
) -> Self
Source code in zarr/core/buffer/core.py
def astype(self, dtype: npt.DTypeLike, order: Literal["K", "A", "C", "F"] = "K") -> Self:
    return self.__class__(self._data.astype(dtype=dtype, order=order))

copy

copy() -> Self
Source code in zarr/core/buffer/core.py
def copy(self) -> Self:
    return self.__class__(self._data.copy())

create classmethod

create(
    *,
    shape: Iterable[int],
    dtype: DTypeLike,
    order: Literal["C", "F"] = "C",
    fill_value: Any | None = None,
) -> Self

Overwrite NDBuffer.create to create a TestNDArrayLike instance

Source code in zarr/testing/buffer.py
@classmethod
def create(
    cls,
    *,
    shape: Iterable[int],
    dtype: npt.DTypeLike,
    order: Literal["C", "F"] = "C",
    fill_value: Any | None = None,
) -> Self:
    """Overwrite `NDBuffer.create` to create a TestNDArrayLike instance"""
    ret = cls(TestNDArrayLike(shape=shape, dtype=dtype, order=order))
    if fill_value is not None:
        ret.fill(fill_value)
    return ret

empty classmethod

empty(
    shape: tuple[int, ...],
    dtype: DTypeLike,
    order: Literal["C", "F"] = "C",
) -> Self

Create an empty buffer with the given shape, dtype, and order.

This method can be faster than NDBuffer.create because it doesn't have to initialize the memory used by the underlying ndarray-like object.

Parameters:

  • shape (tuple[int, ...]) –

    The shape of the buffer and its underlying ndarray-like object

  • dtype (DTypeLike) –

    The datatype of the buffer and its underlying ndarray-like object

  • order (Literal['C', 'F'], default: 'C' ) –

    Whether to store multi-dimensional data in row-major (C-style) or column-major (Fortran-style) order in memory.

Returns:

  • buffer

    New buffer representing a new ndarray_like object with empty data.

See Also

NDBuffer.create Create a new buffer with some initial fill value.

Source code in zarr/testing/buffer.py
@classmethod
def empty(
    cls,
    shape: tuple[int, ...],
    dtype: npt.DTypeLike,
    order: Literal["C", "F"] = "C",
) -> Self:
    return super(cpu.NDBuffer, cls).empty(shape=shape, dtype=dtype, order=order)

fill

fill(value: Any) -> None
Source code in zarr/core/buffer/core.py
def fill(self, value: Any) -> None:
    self._data.fill(value)

from_ndarray_like classmethod

from_ndarray_like(ndarray_like: NDArrayLike) -> Self

Create a new buffer of an ndarray-like object

Parameters:

Returns:

  • New buffer representing `ndarray_like`
Source code in zarr/core/buffer/core.py
@classmethod
def from_ndarray_like(cls, ndarray_like: NDArrayLike) -> Self:
    """Create a new buffer of an ndarray-like object

    Parameters
    ----------
    ndarray_like
        ndarray-like object

    Returns
    -------
        New buffer representing `ndarray_like`
    """
    return cls(ndarray_like)

from_numpy_array classmethod

from_numpy_array(array_like: ArrayLike) -> Self

Create a new buffer of Numpy array-like object

Parameters:

  • array_like (ArrayLike) –

    Object that can be coerced into a Numpy array

Returns:

  • New buffer representing `array_like`
Source code in zarr/core/buffer/cpu.py
@classmethod
def from_numpy_array(cls, array_like: npt.ArrayLike) -> Self:
    return cls.from_ndarray_like(np.asanyarray(array_like))

reshape

reshape(newshape: tuple[int, ...] | Literal[-1]) -> Self
Source code in zarr/core/buffer/core.py
def reshape(self, newshape: tuple[int, ...] | Literal[-1]) -> Self:
    return self.__class__(self._data.reshape(newshape))

squeeze

squeeze(axis: tuple[int, ...]) -> Self
Source code in zarr/core/buffer/core.py
def squeeze(self, axis: tuple[int, ...]) -> Self:
    newshape = tuple(a for i, a in enumerate(self.shape) if i not in axis)
    return self.__class__(self._data.reshape(newshape))

transpose

transpose(
    axes: SupportsIndex | Sequence[SupportsIndex] | None,
) -> Self
Source code in zarr/core/buffer/core.py
def transpose(self, axes: SupportsIndex | Sequence[SupportsIndex] | None) -> Self:
    return self.__class__(self._data.transpose(axes))

StoreExpectingTestBuffer

Bases: MemoryStore

Example of a custom Store that expects TestBuffer for all its non-metadata

We assume that keys containing "json" are metadata

Source code in zarr/testing/buffer.py
class StoreExpectingTestBuffer(MemoryStore):
    """Example of a custom Store that expect MyBuffer for all its non-metadata

    We assume that keys containing "json" is metadata
    """

    async def set(self, key: str, value: Buffer, byte_range: tuple[int, int] | None = None) -> None:
        if "json" not in key:
            assert isinstance(value, TestBuffer)
        await super().set(key, value, byte_range)

    async def get(
        self,
        key: str,
        prototype: BufferPrototype,
        byte_range: tuple[int, int | None] | None = None,
    ) -> Buffer | None:
        if "json" not in key:
            assert prototype.buffer is TestBuffer
        ret = await super().get(key=key, prototype=prototype, byte_range=byte_range)
        if ret is not None:
            assert isinstance(ret, prototype.buffer)
        return ret

read_only property

read_only: bool

Is the store read-only?

supports_consolidated_metadata property

supports_consolidated_metadata: bool

Does the store support consolidated metadata?

If it doesn't, an error will be raised on requests to consolidate the metadata. Returning False can be useful for stores which implement their own consolidation mechanism outside of the zarr-python implementation.

supports_deletes class-attribute instance-attribute

supports_deletes: bool = True

Does the store support deletes?

supports_listing class-attribute instance-attribute

supports_listing: bool = True

Does the store support listing?

supports_partial_writes property

supports_partial_writes: Literal[False]

Does the store support partial writes?

Partial writes are no longer used by Zarr, so this is always false.

supports_writes class-attribute instance-attribute

supports_writes: bool = True

Does the store support writes?

__enter__

__enter__() -> Self

Enter a context manager that will close the store upon exiting.

Source code in zarr/abc/store.py
def __enter__(self) -> Self:
    """Enter a context manager that will close the store upon exiting."""
    return self

__eq__

__eq__(other: object) -> bool

Equality comparison.

Source code in zarr/storage/_memory.py
def __eq__(self, other: object) -> bool:
    return (
        isinstance(other, type(self))
        and self._store_dict == other._store_dict
        and self.read_only == other.read_only
    )

__exit__

__exit__(
    exc_type: type[BaseException] | None,
    exc_value: BaseException | None,
    traceback: TracebackType | None,
) -> None

Close the store.

Source code in zarr/abc/store.py
def __exit__(
    self,
    exc_type: type[BaseException] | None,
    exc_value: BaseException | None,
    traceback: TracebackType | None,
) -> None:
    """Close the store."""
    self.close()

__init__

__init__(
    store_dict: MutableMapping[str, Buffer] | None = None,
    *,
    read_only: bool = False,
) -> None
Source code in zarr/storage/_memory.py
def __init__(
    self,
    store_dict: MutableMapping[str, Buffer] | None = None,
    *,
    read_only: bool = False,
) -> None:
    super().__init__(read_only=read_only)
    if store_dict is None:
        store_dict = {}
    self._store_dict = store_dict

__repr__

__repr__() -> str
Source code in zarr/storage/_memory.py
def __repr__(self) -> str:
    return f"MemoryStore('{self}')"

__str__

__str__() -> str
Source code in zarr/storage/_memory.py
def __str__(self) -> str:
    return f"memory://{id(self._store_dict)}"

clear async

clear() -> None

Clear the store.

Remove all keys and values from the store.

Source code in zarr/storage/_memory.py
async def clear(self) -> None:
    # docstring inherited
    self._store_dict.clear()

close

close() -> None

Close the store.

Source code in zarr/abc/store.py
def close(self) -> None:
    """Close the store."""
    self._is_open = False

delete async

delete(key: str) -> None

Remove a key from the store

Parameters:

  • key (str) –
Source code in zarr/storage/_memory.py
async def delete(self, key: str) -> None:
    # docstring inherited
    self._check_writable()
    try:
        del self._store_dict[key]
    except KeyError:
        logger.debug("Key %s does not exist.", key)

delete_dir async

delete_dir(prefix: str) -> None

Remove all keys and prefixes in the store that begin with a given prefix.

Source code in zarr/abc/store.py
async def delete_dir(self, prefix: str) -> None:
    """
    Remove all keys and prefixes in the store that begin with a given prefix.
    """
    if not self.supports_deletes:
        raise NotImplementedError
    if not self.supports_listing:
        raise NotImplementedError
    self._check_writable()
    if prefix != "" and not prefix.endswith("/"):
        prefix += "/"
    async for key in self.list_prefix(prefix):
        await self.delete(key)

exists async

exists(key: str) -> bool

Check if a key exists in the store.

Parameters:

  • key (str) –

Returns:

Source code in zarr/storage/_memory.py
async def exists(self, key: str) -> bool:
    # docstring inherited
    return key in self._store_dict

get async

get(
    key: str,
    prototype: BufferPrototype,
    byte_range: tuple[int, int | None] | None = None,
) -> Buffer | None

Retrieve the value associated with a given key.

Parameters:

  • key (str) –
  • prototype (BufferPrototype) –

    The prototype of the output buffer. Stores may support a default buffer prototype.

  • byte_range (ByteRequest, default: None ) –

    ByteRequest may be one of the following. If not provided, all data associated with the key is retrieved. - RangeByteRequest(int, int): Request a specific range of bytes in the form (start, end). The end is exclusive. If the given range is zero-length or starts after the end of the object, an error will be returned. Additionally, if the range ends after the end of the object, the entire remainder of the object will be returned. Otherwise, the exact requested range will be returned. - OffsetByteRequest(int): Request all bytes starting from a given byte offset. This is equivalent to bytes={int}- as an HTTP header. - SuffixByteRequest(int): Request the last int bytes. Note that here, int is the size of the request, not the byte offset. This is equivalent to bytes=-{int} as an HTTP header.

Returns:

Source code in zarr/testing/buffer.py
async def get(
    self,
    key: str,
    prototype: BufferPrototype,
    byte_range: tuple[int, int | None] | None = None,
) -> Buffer | None:
    if "json" not in key:
        assert prototype.buffer is TestBuffer
    ret = await super().get(key=key, prototype=prototype, byte_range=byte_range)
    if ret is not None:
        assert isinstance(ret, prototype.buffer)
    return ret

get_partial_values async

get_partial_values(
    prototype: BufferPrototype,
    key_ranges: Iterable[tuple[str, ByteRequest | None]],
) -> list[Buffer | None]

Retrieve possibly partial values from given key_ranges.

Parameters:

  • prototype (BufferPrototype) –

    The prototype of the output buffer. Stores may support a default buffer prototype.

  • key_ranges (Iterable[tuple[str, tuple[int | None, int | None]]]) –

    Ordered set of key, range pairs, a key may occur multiple times with different ranges

Returns:

  • list of values, in the order of the key_ranges, may contain null/none for missing keys
Source code in zarr/storage/_memory.py
async def get_partial_values(
    self,
    prototype: BufferPrototype,
    key_ranges: Iterable[tuple[str, ByteRequest | None]],
) -> list[Buffer | None]:
    # docstring inherited

    # All the key-ranges arguments goes with the same prototype
    async def _get(key: str, byte_range: ByteRequest | None) -> Buffer | None:
        return await self.get(key, prototype=prototype, byte_range=byte_range)

    return await concurrent_map(key_ranges, _get, limit=None)

getsize async

getsize(key: str) -> int

Return the size, in bytes, of a value in a Store.

Parameters:

  • key (str) –

Returns:

  • nbytes ( int ) –

    The size of the value (in bytes).

Raises:

Source code in zarr/abc/store.py
async def getsize(self, key: str) -> int:
    """
    Return the size, in bytes, of a value in a Store.

    Parameters
    ----------
    key : str

    Returns
    -------
    nbytes : int
        The size of the value (in bytes).

    Raises
    ------
    FileNotFoundError
        When the given key does not exist in the store.
    """
    # Note to implementers: this default implementation is very inefficient since
    # it requires reading the entire object. Many systems will have ways to get the
    # size of an object without reading it.
    # avoid circular import
    from zarr.core.buffer.core import default_buffer_prototype

    value = await self.get(key, prototype=default_buffer_prototype())
    if value is None:
        raise FileNotFoundError(key)
    return len(value)

getsize_prefix async

getsize_prefix(prefix: str) -> int

Return the size, in bytes, of all values under a prefix.

Parameters:

  • prefix (str) –

    The prefix of the directory to measure.

Returns:

  • nbytes ( int ) –

    The sum of the sizes of the values in the directory (in bytes).

See Also

zarr.Array.nbytes_stored Store.getsize

Notes

getsize_prefix is just provided as a potentially faster alternative to listing all the keys under a prefix and calling Store.getsize on each.

In general, prefix should be the path of an Array or Group in the Store. Implementations may differ on the behavior when some other prefix is provided.

Source code in zarr/abc/store.py
async def getsize_prefix(self, prefix: str) -> int:
    """
    Return the size, in bytes, of all values under a prefix.

    Parameters
    ----------
    prefix : str
        The prefix of the directory to measure.

    Returns
    -------
    nbytes : int
        The sum of the sizes of the values in the directory (in bytes).

    See Also
    --------
    zarr.Array.nbytes_stored
    Store.getsize

    Notes
    -----
    ``getsize_prefix`` is just provided as a potentially faster alternative to
    listing all the keys under a prefix calling [`Store.getsize`][zarr.abc.store.Store.getsize] on each.

    In general, ``prefix`` should be the path of an Array or Group in the Store.
    Implementations may differ on the behavior when some other ``prefix``
    is provided.
    """
    # TODO: Overlap listing keys with getsize calls.
    # Currently, we load the list of keys into memory and only then move
    # on to getting sizes. Ideally we would overlap those two, which should
    # improve tail latency and might reduce memory pressure (since not all keys
    # would be in memory at once).

    # avoid circular import
    from zarr.core.common import concurrent_map
    from zarr.core.config import config

    keys = [(x,) async for x in self.list_prefix(prefix)]
    limit = config.get("async.concurrency")
    sizes = await concurrent_map(keys, self.getsize, limit=limit)
    return sum(sizes)

is_empty async

is_empty(prefix: str) -> bool

Check if the directory is empty.

Parameters:

  • prefix (str) –

    Prefix of keys to check.

Returns:

  • bool

    True if the store is empty, False otherwise.

Source code in zarr/abc/store.py
async def is_empty(self, prefix: str) -> bool:
    """
    Check if the directory is empty.

    Parameters
    ----------
    prefix : str
        Prefix of keys to check.

    Returns
    -------
    bool
        True if the store is empty, False otherwise.
    """
    if not self.supports_listing:
        raise NotImplementedError
    if prefix != "" and not prefix.endswith("/"):
        prefix += "/"
    async for _ in self.list_prefix(prefix):
        return False
    return True

list async

list() -> AsyncIterator[str]

Retrieve all keys in the store.

Returns:

Source code in zarr/storage/_memory.py
async def list(self) -> AsyncIterator[str]:
    # docstring inherited
    for key in self._store_dict:
        yield key

list_dir async

list_dir(prefix: str) -> AsyncIterator[str]

Retrieve all keys and prefixes with a given prefix and which do not contain the character “/” after the given prefix.

Parameters:

  • prefix (str) –

Returns:

Source code in zarr/storage/_memory.py
async def list_dir(self, prefix: str) -> AsyncIterator[str]:
    # docstring inherited
    prefix = prefix.rstrip("/")

    if prefix == "":
        keys_unique = {k.split("/")[0] for k in self._store_dict}
    else:
        # Our dictionary doesn't contain directory markers, but we want to include
        # a pseudo directory when there's a nested item and we're listing an
        # intermediate level.
        keys_unique = {
            key.removeprefix(prefix + "/").split("/")[0]
            for key in self._store_dict
            if key.startswith(prefix + "/") and key != prefix
        }

    for key in keys_unique:
        yield key

list_prefix async

list_prefix(prefix: str) -> AsyncIterator[str]

Retrieve all keys in the store that begin with a given prefix. Keys are returned relative to the root of the store.

Parameters:

  • prefix (str) –

Returns:

Source code in zarr/storage/_memory.py
async def list_prefix(self, prefix: str) -> AsyncIterator[str]:
    # docstring inherited
    # note: we materialize all dict keys into a list here so we can mutate the dict in-place (e.g. in delete_prefix)
    for key in list(self._store_dict):
        if key.startswith(prefix):
            yield key

open async classmethod

open(*args: Any, **kwargs: Any) -> Self

Create and open the store.

Parameters:

  • *args (Any, default: () ) –

    Positional arguments to pass to the store constructor.

  • **kwargs (Any, default: {} ) –

    Keyword arguments to pass to the store constructor.

Returns:

  • Store

    The opened store instance.

Source code in zarr/abc/store.py
@classmethod
async def open(cls, *args: Any, **kwargs: Any) -> Self:
    """
    Create and open the store.

    Parameters
    ----------
    *args : Any
        Positional arguments to pass to the store constructor.
    **kwargs : Any
        Keyword arguments to pass to the store constructor.

    Returns
    -------
    Store
        The opened store instance.
    """
    store = cls(*args, **kwargs)
    await store._open()
    return store

set async

set(
    key: str,
    value: Buffer,
    byte_range: tuple[int, int] | None = None,
) -> None

Store a (key, value) pair.

Parameters:

Source code in zarr/testing/buffer.py
async def set(self, key: str, value: Buffer, byte_range: tuple[int, int] | None = None) -> None:
    if "json" not in key:
        assert isinstance(value, TestBuffer)
    await super().set(key, value, byte_range)

set_if_not_exists async

set_if_not_exists(key: str, value: Buffer) -> None

Store a key to value if the key is not already present.

Parameters:

Source code in zarr/storage/_memory.py
async def set_if_not_exists(self, key: str, value: Buffer) -> None:
    # docstring inherited
    self._check_writable()
    await self._ensure_open()
    self._store_dict.setdefault(key, value)

with_read_only

with_read_only(read_only: bool = False) -> MemoryStore

Return a new store with a new read_only setting.

The new store points to the same location with the specified new read_only state. The returned Store is not automatically opened, and this store is not automatically closed.

Parameters:

  • read_only (bool, default: False ) –

    If True, the store will be created in read-only mode. Defaults to False.

Returns:

  • A new store of the same type with the new read only attribute.
Source code in zarr/storage/_memory.py
def with_read_only(self, read_only: bool = False) -> MemoryStore:
    # docstring inherited
    return type(self)(
        store_dict=self._store_dict,
        read_only=read_only,
    )

TestBuffer

Bases: Buffer

Example of a custom Buffer that handles ArrayLike

Source code in zarr/testing/buffer.py
class TestBuffer(cpu.Buffer):
    """Example of a custom Buffer that handles ArrayLike"""

    __test__ = False

__test__ class-attribute instance-attribute

__test__ = False

__add__

__add__(other: Buffer) -> Self

Concatenate two buffers

Source code in zarr/core/buffer/core.py
def __add__(self, other: Buffer) -> Self:
    """Concatenate two buffers"""
    return self.combine([other])

__eq__

__eq__(other: object) -> bool
Source code in zarr/core/buffer/core.py
def __eq__(self, other: object) -> bool:
    # Another Buffer class can override this to choose a more efficient path
    return isinstance(other, Buffer) and np.array_equal(
        self.as_numpy_array(), other.as_numpy_array()
    )

__getitem__

__getitem__(key: slice) -> Self
Source code in zarr/core/buffer/core.py
def __getitem__(self, key: slice) -> Self:
    check_item_key_is_1d_contiguous(key)
    return self.__class__(self._data.__getitem__(key))

__init__

__init__(array_like: ArrayLike) -> None
Source code in zarr/core/buffer/cpu.py
def __init__(self, array_like: ArrayLike) -> None:
    super().__init__(array_like)

__len__

__len__() -> int
Source code in zarr/core/buffer/core.py
def __len__(self) -> int:
    return self._data.size

__setitem__

__setitem__(key: slice, value: Any) -> None
Source code in zarr/core/buffer/core.py
def __setitem__(self, key: slice, value: Any) -> None:
    check_item_key_is_1d_contiguous(key)
    self._data.__setitem__(key, value)

as_array_like

as_array_like() -> ArrayLike

Returns the underlying array (host or device memory) of this buffer

This will never copy data.

Returns:

  • The underlying 1d array such as a NumPy or CuPy array.
Source code in zarr/core/buffer/core.py
def as_array_like(self) -> ArrayLike:
    """Returns the underlying array (host or device memory) of this buffer

    This will never copy data.

    Returns
    -------
        The underlying 1d array such as a NumPy or CuPy array.
    """
    return self._data

as_buffer_like

as_buffer_like() -> BytesLike

Returns the buffer as an object that implements the Python buffer protocol.

Notes

Might have to copy data, since the implementation uses .as_numpy_array().

Returns:

  • An object that implements the Python buffer protocol
Source code in zarr/core/buffer/core.py
def as_buffer_like(self) -> BytesLike:
    """Returns the buffer as an object that implements the Python buffer protocol.

    Notes
    -----
    Might have to copy data, since the implementation uses `.as_numpy_array()`.

    Returns
    -------
        An object that implements the Python buffer protocol
    """
    return memoryview(self.as_numpy_array())  # type: ignore[arg-type]

as_numpy_array

as_numpy_array() -> NDArray[Any]

Returns the buffer as a NumPy array (host memory).

Notes

Might have to copy data, consider using .as_array_like() instead.

Returns:

  • NumPy array of this buffer (might be a data copy)
Source code in zarr/core/buffer/cpu.py
def as_numpy_array(self) -> npt.NDArray[Any]:
    """Returns the buffer as a NumPy array (host memory).

    Notes
    -----
    Might have to copy data, consider using `.as_array_like()` instead.

    Returns
    -------
        NumPy array of this buffer (might be a data copy)
    """
    return np.asanyarray(self._data)

combine

combine(others: Iterable[Buffer]) -> Self

Concatenate many buffers

Source code in zarr/core/buffer/cpu.py
def combine(self, others: Iterable[core.Buffer]) -> Self:
    data = [np.asanyarray(self._data)]
    for buf in others:
        other_array = buf.as_array_like()
        assert other_array.dtype == np.dtype("B")
        data.append(np.asanyarray(other_array))
    return self.__class__(np.concatenate(data))

create_zero_length classmethod

create_zero_length() -> Self

Create an empty buffer with length zero

Returns:

  • New empty 0-length buffer
Source code in zarr/core/buffer/cpu.py
@classmethod
def create_zero_length(cls) -> Self:
    return cls(np.array([], dtype="B"))

from_array_like classmethod

from_array_like(array_like: ArrayLike) -> Self

Create a new buffer of an array-like object

Parameters:

  • array_like (ArrayLike) –

    array-like object that must be 1-dim, contiguous, and byte dtype.

Returns:

  • New buffer representing `array_like`
Source code in zarr/core/buffer/core.py
@classmethod
def from_array_like(cls, array_like: ArrayLike) -> Self:
    """Create a new buffer of an array-like object

    Parameters
    ----------
    array_like
        array-like object that must be 1-dim, contiguous, and byte dtype.

    Returns
    -------
        New buffer representing `array_like`
    """
    return cls(array_like)

from_buffer classmethod

from_buffer(buffer: Buffer) -> Self

Create a new buffer of an existing Buffer

This is useful if you want to ensure that an existing buffer is of the correct subclass of Buffer. E.g., MemoryStore uses this to return a buffer instance of the subclass specified by its BufferPrototype argument.

Typically, this only copies data if the data has to be moved between memory types, such as from host to device memory.

Parameters:

  • buffer (Buffer) –

    buffer object.

Returns:

  • A new buffer representing the content of the input buffer
Notes

Subclasses of Buffer must override this method to implement more optimal conversions that avoid copies where possible

Source code in zarr/core/buffer/cpu.py
@classmethod
def from_buffer(cls, buffer: core.Buffer) -> Self:
    """Create a new buffer of an existing Buffer

    This is useful if you want to ensure that an existing buffer is
    of the correct subclass of Buffer. E.g., MemoryStore uses this
    to return a buffer instance of the subclass specified by its
    BufferPrototype argument.

    Typically, this only copies data if the data has to be moved between
    memory types, such as from host to device memory.

    Parameters
    ----------
    buffer
        buffer object.

    Returns
    -------
        A new buffer representing the content of the input buffer

    Notes
    -----
    Subclasses of `Buffer` must override this method to implement
    more optimal conversions that avoid copies where possible
    """
    return cls.from_array_like(buffer.as_numpy_array())

from_bytes classmethod

from_bytes(bytes_like: BytesLike) -> Self

Create a new buffer of a bytes-like object (host memory)

Parameters:

  • bytes_like (BytesLike) –

    bytes-like object

Returns:

  • New buffer representing `bytes_like`
Source code in zarr/core/buffer/cpu.py
@classmethod
def from_bytes(cls, bytes_like: BytesLike) -> Self:
    """Create a new buffer of a bytes-like object (host memory)

    Parameters
    ----------
    bytes_like
        bytes-like object

    Returns
    -------
        New buffer representing `bytes_like`
    """
    return cls.from_array_like(np.frombuffer(bytes_like, dtype="B"))

to_bytes

to_bytes() -> bytes

Returns the buffer as bytes (host memory).

Warnings

Will always copy data, only use this method for small buffers such as metadata buffers. If possible, use .as_numpy_array() or .as_array_like() instead.

Returns:

  • `bytes` of this buffer (data copy)
Source code in zarr/core/buffer/core.py
def to_bytes(self) -> bytes:
    """Returns the buffer as `bytes` (host memory).

    Warnings
    --------
    Will always copy data, only use this method for small buffers such as metadata
    buffers. If possible, use `.as_numpy_array()` or `.as_array_like()` instead.

    Returns
    -------
        `bytes` of this buffer (data copy)
    """
    return bytes(self.as_numpy_array())

TestNDArrayLike

Bases: ndarray

An example of an ndarray-like class

Source code in zarr/testing/buffer.py
class TestNDArrayLike(np.ndarray):
    """An example of an ndarray-like class"""

    __test__ = False

__test__ class-attribute instance-attribute

__test__ = False