How to use the xarray.core.indexing module in xarray

To help you get started, we’ve selected a few examples of xarray.core.indexing, drawn from popular ways it is used in public projects.

From pydata/xarray: xarray/coding/strings.py (view on GitHub)
def __init__(self, array):
        """
        Parameters
        ----------
        array : array-like
            Original array of values to wrap.
        """
        if array.dtype != "S1":
            raise ValueError(
                "can only use StackedBytesArray if argument has dtype='S1'"
            )
        self.array = indexing.as_indexable(array)
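
The snippet above only wraps and stores the array. As a rough, hedged sketch of what indexing.as_indexable does on its own (the variable names and sample array below are illustrative, not part of the snippet), wrapping a plain NumPy array returns one of xarray's explicit-indexing adapters:

import numpy as np
from xarray.core import indexing

raw = np.array([b"f", b"o", b"o"], dtype="S1")   # made-up sample data
wrapped = indexing.as_indexable(raw)             # NumpyIndexingAdapter for plain ndarrays
print(type(wrapped).__name__)
print(np.asarray(wrapped))                       # round-trips back to a NumPy array
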
From pydata/xarray: xarray/backends/cfgrib_.py (view on GitHub)
def open_store_variable(self, name, var):
        if isinstance(var.data, np.ndarray):
            data = var.data
        else:
            wrapped_array = CfGribArrayWrapper(self, var.data)
            data = indexing.LazilyOuterIndexedArray(wrapped_array)

        encoding = self.ds.encoding.copy()
        encoding["original_shape"] = var.data.shape

        return Variable(var.dimensions, data, var.attributes, encoding)
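
As a hedged sketch of what LazilyOuterIndexedArray gives the backend (newer xarray releases may expose the same wrapper as LazilyIndexedArray, so the name depends on your version), indexing stays lazy until the values are materialized; the backing array and names below are illustrative:

import numpy as np
from xarray.core import indexing

backing = np.arange(12).reshape(3, 4)            # stands in for a file-backed array
lazy = indexing.LazilyOuterIndexedArray(backing)

# Indexing with an explicit indexer returns another lazy wrapper; nothing is
# pulled into memory until np.asarray() materializes the result.
subset = lazy[indexing.BasicIndexer((slice(0, 2), slice(None)))]
print(subset.shape)        # (2, 4)
print(np.asarray(subset))
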
From pydata/xarray: xarray/backends/netCDF4_.py (view on GitHub)
def open_store_variable(self, name, var):
        dimensions = var.dimensions
        data = indexing.LazilyOuterIndexedArray(NetCDF4ArrayWrapper(name, self))
        attributes = {k: var.getncattr(k) for k in var.ncattrs()}
        _ensure_fill_value_valid(data, attributes)
        # netCDF4 specific encoding; save _FillValue for later
        encoding = {}
        filters = var.filters()
        if filters is not None:
            encoding.update(filters)
        chunking = var.chunking()
        if chunking is not None:
            if chunking == "contiguous":
                encoding["contiguous"] = True
                encoding["chunksizes"] = None
            else:
                encoding["contiguous"] = False
                encoding["chunksizes"] = tuple(chunking)
        # TODO: figure out how to round-trip "endian-ness" without raising
From pydata/xarray: xarray/backends/pydap_.py (view on GitHub)
def __getitem__(self, key):
        return indexing.explicit_indexing_adapter(
            key, self.shape, indexing.IndexingSupport.BASIC, self._getitem
        )
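
The pydap and cfgrib wrappers follow the same pattern: a BackendArray subclass whose __getitem__ hands the explicit key to indexing.explicit_indexing_adapter, which splits it into a part the store can execute and a remainder applied in memory. A minimal, hedged sketch of that pattern (the class and method names below are made up for illustration, not xarray API):

import numpy as np
from xarray.backends.common import BackendArray
from xarray.core import indexing

class InMemoryWrapper(BackendArray):
    """Illustrative stand-in for a file-backed array wrapper."""

    def __init__(self, array):
        self._array = np.asarray(array)
        self.shape = self._array.shape
        self.dtype = self._array.dtype

    def __getitem__(self, key):
        # Decompose `key` into what the store supports (basic ints/slices here)
        # plus any leftover fancy indexing applied to the loaded result.
        return indexing.explicit_indexing_adapter(
            key, self.shape, indexing.IndexingSupport.BASIC, self._raw_indexing
        )

    def _raw_indexing(self, key):
        # `key` arrives as a plain tuple of ints/slices.
        return self._array[key]
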
From corteva/rioxarray: rioxarray/_io.py (view on GitHub)
    da_name = attrs.pop("NETCDF_VARNAME", default_name)
    data = indexing.LazilyOuterIndexedArray(
        RasterioArrayWrapper(
            manager,
            lock,
            name=da_name,
            vrt_params=vrt_params,
            masked=masked,
            mask_and_scale=mask_and_scale,
            unsigned=unsigned,
        )
    )

    # this lets you write arrays loaded with rasterio
    data = indexing.CopyOnWriteArray(data)
    if cache and chunks is None:
        data = indexing.MemoryCachedArray(data)

    result = DataArray(
        data=data, dims=(coord_name, "y", "x"), coords=coords, attrs=attrs, name=da_name
    )
    result.encoding = encoding

    # update attributes from NetCDF attributes
    _load_netcdf_attrs(riods.tags(), result)
    result = _decode_datetime_cf(result)

    # make sure the _FillValue is correct dtype
    if "_FillValue" in attrs:
        attrs["_FillValue"] = result.dtype.type(attrs["_FillValue"])
From pydata/xarray: xarray/backends/cfgrib_.py (view on GitHub)
def __getitem__(self, key):
        return indexing.explicit_indexing_adapter(
            key, self.shape, indexing.IndexingSupport.OUTER, self._getitem
        )
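
Compared with the pydap wrapper earlier (IndexingSupport.BASIC), declaring IndexingSupport.OUTER tells the adapter that the store can also take per-dimension integer arrays. A hedged sketch of what an outer index looks like, using an in-memory adapter as a stand-in:

import numpy as np
from xarray.core import indexing

arr = indexing.as_indexable(np.arange(16).reshape(4, 4))
outer = indexing.OuterIndexer((np.array([0, 2]), slice(None)))
print(arr[outer].shape)    # (2, 4): rows 0 and 2, every column
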
From pydata/xarray: xarray/backends/pynio_.py (view on GitHub)
def open_store_variable(self, name, var):
        data = indexing.LazilyOuterIndexedArray(NioArrayWrapper(name, self))
        return Variable(var.dimensions, data, var.attributes)
From pydata/xarray: xarray/conventions.py (view on GitHub)
    dimensions, data, attributes, encoding = variables.unpack_for_decoding(var)
    # TODO(shoyer): convert everything below to use coders

    if decode_endianness and not data.dtype.isnative:
        # do this last, so it's only done if we didn't already unmask/scale
        data = NativeEndiannessArray(data)
        original_dtype = data.dtype

    encoding.setdefault("dtype", original_dtype)

    if "dtype" in attributes and attributes["dtype"] == "bool":
        del attributes["dtype"]
        data = BoolTypeArray(data)

    if not isinstance(data, dask_array_type):
        data = indexing.LazilyOuterIndexedArray(data)

    return Variable(dimensions, data, attributes, encoding=encoding)
From pydata/xarray: xarray/backends/common.py (view on GitHub)
def __array__(self, dtype=None):
        key = indexing.BasicIndexer((slice(None),) * self.ndim)
        return np.asarray(self[key], dtype=dtype)
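
BasicIndexer simply wraps an ordinary tuple of integers and slices so the explicitly indexed wrappers in these snippets can tell indexer types apart. A hedged sketch of the same full-slice trick used by __array__ above (the array contents are illustrative):

import numpy as np
from xarray.core import indexing

arr = indexing.as_indexable(np.arange(9).reshape(3, 3))
full = indexing.BasicIndexer((slice(None),) * 2)
print(np.asarray(arr[full]))
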
From pydata/xarray: xarray/core/variable.py (view on GitHub)
        >>> var.copy(data=[0.1, 0.2, 0.3])
        <xarray.Variable (x: 3)>
        array([ 0.1,  0.2,  0.3])
        >>> var
        <xarray.Variable (x: 3)>
        array([7, 2, 3])

        See Also
        --------
        pandas.DataFrame.copy
        """
        if data is None:
            data = self._data

            if isinstance(data, indexing.MemoryCachedArray):
                # don't share caching between copies
                data = indexing.MemoryCachedArray(data.array)

            if deep:
                if hasattr(data, "__array_function__") or isinstance(
                    data, dask_array_type
                ):
                    data = data.copy()
                elif not isinstance(data, PandasIndexAdapter):
                    # pandas.Index is immutable
                    data = np.array(data)
        else:
            data = as_compatible_data(data)
            if self.shape != data.shape:
                raise ValueError(
                    "Data shape {} must match shape of object {}".format(