index | package | name | docstring | code | signature
---|---|---|---|---|---|
22,571 | salem.gis | __eq__ | Two grids are considered equal when their defining coordinates
and projection are equal.
Note: equality also means floating point equality, with all the
problems that come with it.
(independent of the grid's cornered or centered representation.)
| def __eq__(self, other):
"""Two grids are considered equal when their defining coordinates
and projection are equal.
Note: equality also means floating point equality, with all the
problems that come with it.
(independent of the grid's cornered or centered representation.)
"""
# Attributes defining the instance
ckeys = ['x0', 'y0', 'nx', 'ny', 'dx', 'dy', 'origin']
a = dict((k, getattr(self.corner_grid, k)) for k in ckeys)
b = dict((k, getattr(other.corner_grid, k)) for k in ckeys)
p1 = self.corner_grid.proj
p2 = other.corner_grid.proj
return (a == b) and proj_is_same(p1, p2)
| (self, other) |
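A minimal usage sketch, assuming ``Grid`` and ``wgs84`` are importable from the top-level ``salem`` package. Because the comparison is exact floating point equality, even a tiny offset in ``x0y0`` breaks it:
>>> from salem import Grid, wgs84
>>> g1 = Grid(nxny=(3, 2), dxdy=(1, 1), x0y0=(0, 0), proj=wgs84)
>>> g2 = Grid(nxny=(3, 2), dxdy=(1, 1), x0y0=(0, 0), proj=wgs84)
>>> g1 == g2
True
>>> g1 == Grid(nxny=(3, 2), dxdy=(1, 1), x0y0=(1e-12, 0), proj=wgs84)
False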
22,572 | salem.gis | __init__ |
Parameters
----------
proj : pyproj.Proj instance
defines the grid's map projection. Defaults to 'PlateCarree'
(wgs84)
nxny : (int, int)
(nx, ny) number of grid points
dxdy : (float, float)
(dx, dy) grid spacing in proj coordinates. dx must be positive,
while dy can be positive or negative depending on the origin
grid point's location (upper-left or lower-left)
x0y0 : (float, float)
(x0, y0) cartesian coordinates (in proj) of the upper left
or lower left corner, depending on the sign of dy
pixel_ref : str
either 'center' or 'corner' (default: 'center'). Tells
the Grid object where the (x0, y0) is located in the grid point.
If ``pixel_ref`` is set to 'corner' and dy < 0, the ``x0y0``
kwarg specifies the **grid point's upper left** corner
coordinates. Equivalently, if dy > 0, x0y0 specifies the
**grid point's lower left** coordinate.
corner : (float, float)
DEPRECATED in favor of ``x0y0``
(x0, y0) cartesian coordinates (in proj) of the upper left
or lower left corner, depending on the sign of dy
ul_corner : (float, float)
DEPRECATED in favor of ``x0y0``
(x0, y0) cartesian coordinates (in proj) of the upper left corner
ll_corner : (float, float)
DEPRECATED in favor of ``x0y0``
(x0, y0) cartesian coordinates (in proj) of the lower left corner
Examples
--------
>>> g = Grid(nxny=(3, 2), dxdy=(1, 1), x0y0=(0, 0), proj=wgs84)
>>> lon, lat = g.ll_coordinates
>>> lon
array([[ 0., 1., 2.],
[ 0., 1., 2.]])
>>> lat
array([[ 0., 0., 0.],
[ 1., 1., 1.]])
>>> lon, lat = g.corner_grid.ll_coordinates
>>> lon
array([[-0.5, 0.5, 1.5],
[-0.5, 0.5, 1.5]])
>>> lat
array([[-0.5, -0.5, -0.5],
[ 0.5, 0.5, 0.5]])
>>> g.corner_grid == g.center_grid # the two reprs are equivalent
True
| def __init__(self, proj=wgs84, nxny=None, dxdy=None, x0y0=None,
pixel_ref='center',
corner=None, ul_corner=None, ll_corner=None):
"""
Parameters
----------
proj : pyproj.Proj instance
defines the grid's map projection. Defaults to 'PlateCarree'
(wgs84)
nxny : (int, int)
(nx, ny) number of grid points
dxdy : (float, float)
(dx, dy) grid spacing in proj coordinates. dx must be positive,
while dy can be positive or negative depending on the origin
grid point's location (upper-left or lower-left)
x0y0 : (float, float)
(x0, y0) cartesian coordinates (in proj) of the upper left
or lower left corner, depending on the sign of dy
pixel_ref : str
either 'center' or 'corner' (default: 'center'). Tells
the Grid object where the (x0, y0) is located in the grid point.
If ``pixel_ref`` is set to 'corner' and dy < 0, the ``x0y0``
kwarg specifies the **grid point's upper left** corner
coordinates. Equivalently, if dy > 0, x0y0 specifies the
**grid point's lower left** coordinate.
corner : (float, float)
DEPRECATED in favor of ``x0y0``
(x0, y0) cartesian coordinates (in proj) of the upper left
or lower left corner, depending on the sign of dy
ul_corner : (float, float)
DEPRECATED in favor of ``x0y0``
(x0, y0) cartesian coordinates (in proj) of the upper left corner
ll_corner : (float, float)
DEPRECATED in favor of ``x0y0``
(x0, y0) cartesian coordinates (in proj) of the lower left corner
Examples
--------
>>> g = Grid(nxny=(3, 2), dxdy=(1, 1), x0y0=(0, 0), proj=wgs84)
>>> lon, lat = g.ll_coordinates
>>> lon
array([[ 0., 1., 2.],
[ 0., 1., 2.]])
>>> lat
array([[ 0., 0., 0.],
[ 1., 1., 1.]])
>>> lon, lat = g.corner_grid.ll_coordinates
>>> lon
array([[-0.5, 0.5, 1.5],
[-0.5, 0.5, 1.5]])
>>> lat
array([[-0.5, -0.5, -0.5],
[ 0.5, 0.5, 0.5]])
>>> g.corner_grid == g.center_grid # the two reprs are equivalent
True
"""
# Check for coordinate system
proj = check_crs(proj)
if proj is None:
raise ValueError('proj must be of type pyproj.Proj')
self._proj = proj
# deprecations
if corner is not None:
warnings.warn('The `corner` kwarg is deprecated: '
'use `x0y0` instead.', DeprecationWarning)
x0y0 = corner
if ul_corner is not None:
warnings.warn('The `ul_corner` kwarg is deprecated: '
'use `x0y0` instead.', DeprecationWarning)
if dxdy[1] > 0.:
raise ValueError('dxdy and input params not compatible')
x0y0 = ul_corner
if ll_corner is not None:
warnings.warn('The `ll_corner` kwarg is deprecated: '
'use `x0y0` instead.', DeprecationWarning)
if dxdy[1] < 0.:
raise ValueError('dxdy and input params not compatible')
x0y0 = ll_corner
# Check for shortcut
if dxdy[1] < 0.:
ul_corner = x0y0
else:
ll_corner = x0y0
# Initialise the rest
self._check_input(nxny=nxny, dxdy=dxdy,
ul_corner=ul_corner,
ll_corner=ll_corner,
pixel_ref=pixel_ref)
| (self, proj=<Other Coordinate Operation Transformer: longlat; PROJ-based coordinate operation; area of use: undefined>, nxny=None, dxdy=None, x0y0=None, pixel_ref='center', corner=None, ul_corner=None, ll_corner=None) |
22,573 | salem.gis | __repr__ | null | def __repr__(self):
srs = '+'.join(sorted(self.proj.srs.split('+'))).strip()
summary = ['<salem.Grid>']
summary += [' proj: ' + srs]
summary += [' pixel_ref: ' + self.pixel_ref]
summary += [' origin: ' + str(self.origin)]
summary += [' (nx, ny): (' + str(self.nx) + ', ' + str(self.ny) + ')']
summary += [' (dx, dy): (' + str(self.dx) + ', ' + str(self.dy) + ')']
summary += [' (x0, y0): (' + str(self.x0) + ', ' + str(self.y0) + ')']
return '\n'.join(summary) + '\n'
| (self) |
22,574 | salem.gis | _check_input | See which parameter combination we have and set everything. | def _check_input(self, **kwargs):
"""See which parameter combination we have and set everything."""
combi_a = ['nxny', 'dxdy', 'ul_corner']
combi_b = ['nxny', 'dxdy', 'll_corner']
if all(kwargs[k] is not None for k in combi_a):
nx, ny = kwargs['nxny']
dx, dy = kwargs['dxdy']
x0, y0 = kwargs['ul_corner']
if (dx <= 0.) or (dy >= 0.):
raise ValueError('dxdy and input params not compatible')
origin = 'upper-left'
elif all(kwargs[k] is not None for k in combi_b):
nx, ny = kwargs['nxny']
dx, dy = kwargs['dxdy']
x0, y0 = kwargs['ll_corner']
if (dx <= 0.) or (dy <= 0.):
raise ValueError('dxdy and input params not compatible')
origin = 'lower-left'
else:
raise ValueError('Input params not compatible')
self._nx = int(nx)
self._ny = int(ny)
if (self._nx <= 0) or (self._ny <= 0):
raise ValueError('nxny not valid')
self._dx = float(dx)
self._dy = float(dy)
self._x0 = float(x0)
self._y0 = float(y0)
self._origin = origin
# Check for pixel ref
self._pixel_ref = kwargs['pixel_ref'].lower()
if self._pixel_ref not in ['corner', 'center']:
raise ValueError('pixel_ref not recognized')
| (self, **kwargs) |
22,575 | salem.gis | almost_equal | A less strict comparison between grids.
Two grids are considered equal when their defining coordinates
and projection are equal.
grid1 == grid2 uses floating point equality, which is very strict; here
we use numpy's ``isclose`` instead.
(independent of the grid's cornered or centered representation.)
| def almost_equal(self, other, rtol=1e-05, atol=1e-08):
"""A less strict comparison between grids.
Two grids are considered equal when their defining coordinates
and projection are equal.
grid1 == grid2 uses floating point equality, which is very strict; here
we use numpy's ``isclose`` instead.
(independent of the grid's cornered or centered representation.)
"""
# float attributes defining the instance
fkeys = ['x0', 'y0', 'dx', 'dy']
# unambiguous attributes
ckeys = ['nx', 'ny', 'origin']
ok = True
for k in fkeys:
ok = ok and np.isclose(getattr(self.corner_grid, k),
getattr(other.corner_grid, k),
rtol=rtol, atol=atol)
for k in ckeys:
_ok = getattr(self.corner_grid, k) == getattr(other.corner_grid, k)
ok = ok and _ok
p1 = self.corner_grid.proj
p2 = other.corner_grid.proj
return ok and proj_is_same(p1, p2)
| (self, other, rtol=1e-05, atol=1e-08) |
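A sketch contrasting this with strict equality, assuming the same top-level ``salem`` imports; an ``x0`` offset of 1e-12 fails ``==`` but passes ``almost_equal`` under the default ``atol=1e-08``:
>>> from salem import Grid, wgs84
>>> g1 = Grid(nxny=(3, 2), dxdy=(1, 1), x0y0=(0, 0), proj=wgs84)
>>> g3 = Grid(nxny=(3, 2), dxdy=(1, 1), x0y0=(1e-12, 0), proj=wgs84)
>>> g1 == g3
False
>>> g1.almost_equal(g3)
True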
22,576 | salem.gis | extent_as_polygon | Get the extent of the grid in a shapely.Polygon and desired crs.
Parameters
----------
crs : crs
the target coordinate reference system.
Returns
-------
a shapely.geometry.Polygon of the grid's extent in the target crs.
| def extent_as_polygon(self, crs=wgs84):
"""Get the extent of the grid in a shapely.Polygon and desired crs.
Parameters
----------
crs : crs
the target coordinate reference system.
Returns
-------
a shapely.geometry.Polygon of the grid's extent in the target crs.
"""
from shapely.geometry import Polygon
# this is not so trivial
# for optimisation we will transform the boundaries only
_i = np.hstack([np.arange(self.nx+1),
np.ones(self.ny+1)*self.nx,
np.arange(self.nx+1)[::-1],
np.zeros(self.ny+1)]).flatten()
_j = np.hstack([np.zeros(self.nx+1),
np.arange(self.ny+1),
np.ones(self.nx+1)*self.ny,
np.arange(self.ny+1)[::-1]]).flatten()
_i, _j = self.corner_grid.ij_to_crs(_i, _j, crs=crs)
return Polygon(zip(_i, _j))
| (self, crs=<Other Coordinate Operation Transformer: longlat; PROJ-based coordinate operation; area of use: undefined>) |
22,577 | salem.gis | extent_in_crs | Get the extent of the grid in a desired crs.
Parameters
----------
crs : crs
the target coordinate reference system.
Returns
-------
[left, right, bottom, top] boundaries of the grid.
| def extent_in_crs(self, crs=wgs84):
"""Get the extent of the grid in a desired crs.
Parameters
----------
crs : crs
the target coordinate reference system.
Returns
-------
[left, right, bottom, top] boundaries of the grid.
"""
# this is not so trivial
# for optimisation we will transform the boundaries only
poly = self.extent_as_polygon(crs=crs)
_i, _j = poly.exterior.xy
return [np.min(_i), np.max(_i), np.min(_j), np.max(_j)]
| (self, crs=<Other Coordinate Operation Transformer: longlat; PROJ-based coordinate operation; area of use: undefined>) |
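A sketch of the expected result for the small grid from the ``__init__`` docstring above; since that grid is center-referenced, the corner-grid boundaries extend half a pixel beyond the point coordinates:
>>> from salem import Grid, wgs84
>>> g = Grid(nxny=(3, 2), dxdy=(1, 1), x0y0=(0, 0), proj=wgs84)
>>> [float(v) for v in g.extent_in_crs(crs=wgs84)]  # [left, right, bottom, top]
[-0.5, 2.5, -0.5, 1.5]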
22,578 | salem.gis | grid_lookup | Performs forward transformation of any other grid into self.
The principle of forward transform is to obtain, for each grid point of
``self`` , all the indices of ``other`` that are located within the
given grid point. This transformation makes sense ONLY if ``other`` has
a higher resolution than the object grid. If ``other`` has a similar
or coarser resolution than ``self`` , choose the more general
(and much faster) :py:meth:`Grid.map_gridded_data` method.
Parameters
----------
other : salem.Grid
the grid that needs to be transformed into self
Returns
-------
a dict: each key (j, i) contains an array of shape (n, 2) where n is
the number of ``other`` 's grid points found within the grid point
(j, i)
| def grid_lookup(self, other):
"""Performs forward transformation of any other grid into self.
The principle of forward transform is to obtain, for each grid point of
``self`` , all the indices of ``other`` that are located within the
given grid point. This transformation makes sense ONLY if ``other`` has
a higher resolution than the object grid. If ``other`` has a similar
or coarser resolution than ``self`` , choose the more general
(and much faster) :py:meth:`Grid.map_gridded_data` method.
Parameters
----------
other : salem.Grid
the grid that needs to be transformed into self
Returns
-------
a dict: each key (j, i) contains an array of shape (n, 2) where n is
the number of ``other`` 's grid points found within the grid point
(j, i)
"""
# Input checks
other = check_crs(other)
if not isinstance(other, Grid):
raise ValueError('`other` should be a Grid instance')
# Transform the other grid into the local grid (forward transform)
# Work in center grid cause that's what we need
i, j = other.center_grid.ij_coordinates
i, j = i.flatten(), j.flatten()
oi, oj = self.center_grid.transform(i, j, crs=other.center_grid,
nearest=True, maskout=True)
# keep only valid values
oi, oj, i, j = oi[~oi.mask], oj[~oi.mask], i[~oi.mask], j[~oi.mask]
out_inds = oi.flatten() + self.nx * oj.flatten()
# find the links
ris = np.digitize(out_inds, bins=np.arange(self.nx*self.ny+1))
# some optim based on the fact that ris has many duplicates
sort_idx = np.argsort(ris)
unq_items, unq_count = np.unique(ris[sort_idx], return_counts=True)
unq_idx = np.split(sort_idx, np.cumsum(unq_count))
# lets go
out = dict()
for idx, ri in zip(unq_idx, unq_items):
ij = divmod(ri-1, self.nx)
out[ij] = np.stack((j[idx], i[idx]), axis=1)
return out
| (self, other) |
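A minimal forward-transform sketch, assuming top-level ``salem`` imports: a one-cell grid and a regridded version at twice the resolution, so all four fine points fall in the single coarse cell:
>>> from salem import Grid, wgs84
>>> coarse = Grid(nxny=(1, 1), dxdy=(2, 2), x0y0=(0, 0), proj=wgs84)
>>> fine = coarse.regrid(factor=2)
>>> lut = coarse.grid_lookup(fine)
>>> lut[(0, 0)].shape  # four (j, i) index pairs into ``fine``
(4, 2)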
22,579 | salem.gis | ij_to_crs | Converts local i, j to cartesian coordinates in a specified crs
Parameters
----------
i : array of floats
the grid coordinates of the point(s) you want to convert
j : array of floats
the grid coordinates of the point(s) you want to convert
crs: crs
the target crs (default: self.proj)
nearest: bool
(for Grid crs only) convert to the nearest grid point
Returns
-------
(x, y) coordinates of the points in the specified crs.
| def ij_to_crs(self, i, j, crs=None, nearest=False):
"""Converts local i, j to cartesian coordinates in a specified crs
Parameters
----------
i : array of floats
the grid coordinates of the point(s) you want to convert
j : array of floats
the grid coordinates of the point(s) you want to convert
crs: crs
the target crs (default: self.proj)
nearest: bool
(for Grid crs only) convert to the nearest grid point
Returns
-------
(x, y) coordinates of the points in the specified crs.
"""
# Default
if crs is None:
crs = self.proj
# Convert i, j to x, y
try:
x = i * self.dx + self.x0
y = j * self.dy + self.y0
except TypeError:
x = np.asarray(i) * self.dx + self.x0
y = np.asarray(j) * self.dy + self.y0
# Convert x, y to crs
_crs = check_crs(crs, raise_on_error=True)
if isinstance(_crs, pyproj.Proj):
ret = transform_proj(self.proj, _crs, x, y)
elif isinstance(_crs, Grid):
ret = _crs.transform(x, y, crs=self.proj, nearest=nearest)
return ret
| (self, i, j, crs=None, nearest=False) |
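A minimal sketch: with the default ``crs=None`` the conversion stays in the grid's own projection, so it is just the affine map ``x = i * dx + x0``, ``y = j * dy + y0``:
>>> from salem import Grid, wgs84
>>> g = Grid(nxny=(3, 2), dxdy=(1, 1), x0y0=(0, 0), proj=wgs84)
>>> x, y = g.ij_to_crs(1, 1)
>>> float(x), float(y)
(1.0, 1.0)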
22,580 | salem.gis | lookup_transform | Performs the forward transformation of gridded data into self.
This method is suitable when the data grid is of higher resolution
than ``self``. ``lookup_transform`` performs aggregation of data
according to a user given rule (e.g. ``np.mean``, ``len``, ``np.std``),
applied to all grid points found below a grid point in ``self``.
See also :py:meth:`Grid.grid_lookup` and examples in the docs
Parameters
----------
data : ndarray
an ndarray of dimensions 2, 3, or 4, the two last ones being y, x.
grid : Grid
a Grid instance matching the data
method : function, default: np.mean
the aggregation method. Possibilities: np.std, np.median, np.sum,
and more. Use ``len`` to count the number of grid points!
lut : ndarray, optional
computing the lookup table can be expensive. If you have several
operations to do with the same grid, set ``lut`` to an existing
table obtained from a previous call to :py:meth:`Grid.grid_lookup`
return_lut : bool, optional
set to True if you want to return the lookup table for later use.
in this case, returns a tuple
Returns
-------
An aggregated ndarray of the data, in ``self`` coordinates.
If ``return_lut==True``, also return the lookup table
| def lookup_transform(self, data, grid=None, method=np.mean, lut=None,
return_lut=False):
"""Performs the forward transformation of gridded data into self.
This method is suitable when the data grid is of higher resolution
than ``self``. ``lookup_transform`` performs aggregation of data
according to a user given rule (e.g. ``np.mean``, ``len``, ``np.std``),
applied to all grid points found below a grid point in ``self``.
See also :py:meth:`Grid.grid_lookup` and examples in the docs
Parameters
----------
data : ndarray
an ndarray of dimensions 2, 3, or 4, the two last ones being y, x.
grid : Grid
a Grid instance matching the data
method : function, default: np.mean
the aggregation method. Possibilities: np.std, np.median, np.sum,
and more. Use ``len`` to count the number of grid points!
lut : ndarray, optional
computing the lookup table can be expensive. If you have several
operations to do with the same grid, set ``lut`` to an existing
table obtained from a previous call to :py:meth:`Grid.grid_lookup`
return_lut : bool, optional
set to True if you want to return the lookup table for later use.
in this case, returns a tuple
Returns
-------
An aggregated ndarray of the data, in ``self`` coordinates.
If ``return_lut==True``, also return the lookup table
"""
# Input checks
if grid is None:
grid = check_crs(data) # xarray
if not isinstance(grid, Grid):
raise ValueError('grid should be a Grid instance')
if hasattr(data, 'values'):
data = data.values # xarray
# dimensional check
in_shape = data.shape
ndims = len(in_shape)
if (ndims < 2) or (ndims > 4):
raise ValueError('data dimension not accepted')
if (in_shape[-1] != grid.nx) or (in_shape[-2] != grid.ny):
raise ValueError('data dimension not compatible')
if lut is None:
lut = self.grid_lookup(grid)
# Prepare the output
out_shape = list(in_shape)
out_shape[-2:] = [self.ny, self.nx]
if data.dtype.kind == 'i':
out_data = np.zeros(out_shape, dtype=float) * np.nan
else:
out_data = np.zeros(out_shape, dtype=data.dtype) * np.nan
def _2d_trafo(ind, outd):
for ji, l in lut.items():
outd[ji] = method(ind[l[:, 0], l[:, 1]])
return outd
if ndims == 2:
_2d_trafo(data, out_data)
if ndims == 3:
for dimi, cdata in enumerate(data):
out_data[dimi, ...] = _2d_trafo(cdata, out_data[dimi, ...])
if ndims == 4:
for dimj, cdata in enumerate(data):
for dimi, ccdata in enumerate(cdata):
tmp = _2d_trafo(ccdata, out_data[dimj, dimi, ...])
out_data[dimj, dimi, ...] = tmp
# prepare output
if method is len:
out_data[~np.isfinite(out_data)] = 0
out_data = out_data.astype(int)
else:
out_data = np.ma.masked_invalid(out_data)
if return_lut:
return out_data, lut
else:
return out_data
| (self, data, grid=None, method=<function mean at 0x7f9b3c8b9c70>, lut=None, return_lut=False) |
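A sketch of aggregation, reusing the hypothetical coarse/fine pair from the ``grid_lookup`` example above; ``np.mean`` averages the four fine values per coarse cell, while ``len`` counts them:
>>> import numpy as np
>>> from salem import Grid, wgs84
>>> coarse = Grid(nxny=(1, 1), dxdy=(2, 2), x0y0=(0, 0), proj=wgs84)
>>> fine = coarse.regrid(factor=2)
>>> data = np.arange(4.).reshape((2, 2))
>>> float(coarse.lookup_transform(data, grid=fine, method=np.mean)[0, 0])
1.5
>>> int(coarse.lookup_transform(data, grid=fine, method=len)[0, 0])
4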
22,581 | salem.gis | map_gridded_data | Reprojects any structured data onto the local grid.
The z and time dimensions of the data (if provided) are conserved, but
the projected data will have the x, y dimensions of the local grid.
Currently, nearest neighbor, linear, and spline interpolation are
available. The dtype of the input data is guaranteed to be conserved,
except for int, which will be converted to floats if non-nearest-neighbor
interpolation is requested.
Parameters
----------
data : ndarray
an ndarray of dimensions 2, 3, or 4, the two last ones being y, x.
grid : Grid
a Grid instance matching the data
interp : str
'nearest' (default), 'linear', or 'spline'
ks : int
Degree of the bivariate spline. Default is 3.
missing : int
integer value to assign to invalid data (integer data only;
invalid floats are forced to NaN)
out : ndarray
output array to fill instead of creating a new one (useful for
overwriting an existing array in place)
Returns
-------
A projected ndarray of the data, in ``self`` coordinates.
| def map_gridded_data(self, data, grid=None, interp='nearest',
ks=3, out=None):
"""Reprojects any structured data onto the local grid.
The z and time dimensions of the data (if provided) are conserved, but
the projected data will have the x, y dimensions of the local grid.
Currently, nearest neighbor, linear, and spline interpolation are
available. The dtype of the input data is guaranteed to be conserved,
except for int, which will be converted to floats if non-nearest-neighbor
interpolation is requested.
Parameters
----------
data : ndarray
an ndarray of dimensions 2, 3, or 4, the two last ones being y, x.
grid : Grid
a Grid instance matching the data
interp : str
'nearest' (default), 'linear', or 'spline'
ks : int
Degree of the bivariate spline. Default is 3.
missing : int
integer value to assign to invalid data (integer data only;
invalid floats are forced to NaN)
out : ndarray
output array to fill instead of creating a new one (useful for
overwriting an existing array in place)
Returns
-------
A projected ndarray of the data, in ``self`` coordinates.
"""
if grid is None:
try:
grid = data.salem.grid # try xarray
except AttributeError:
pass
# Input checks
if not isinstance(grid, Grid):
raise ValueError('grid should be a Grid instance')
try: # in case someone gave an xarray dataarray
data = data.values
except AttributeError:
pass
try: # in case someone gave a masked array (won't work with scipy)
data = data.filled(np.nan)
except AttributeError:
pass
if data.dtype == np.float32:
# New in scipy - issue with float32
data = data.astype(np.float64)
in_shape = data.shape
ndims = len(in_shape)
if (ndims < 2) or (ndims > 4):
raise ValueError('data dimension not accepted')
if (in_shape[-1] != grid.nx) or (in_shape[-2] != grid.ny):
raise ValueError('data dimension not compatible')
interp = interp.lower()
use_nn = False
if interp == 'nearest':
use_nn = True
# Transform the local grid into the input grid (backwards transform)
# Work in center grid cause that's what we need
# TODO: this stage could be optimized when many variables need transfo
i, j = self.center_grid.ij_coordinates
oi, oj = grid.center_grid.transform(i, j, crs=self.center_grid,
nearest=use_nn, maskout=False)
pv = np.nonzero((oi >= 0) & (oi < grid.nx) &
(oj >= 0) & (oj < grid.ny))
# Prepare the output
if out is not None:
out_data = np.ma.asarray(out)
else:
out_shape = list(in_shape)
out_shape[-2:] = [self.ny, self.nx]
if (data.dtype.kind == 'i') and (interp == 'nearest'):
# We dont do integer arithmetics other than nearest
out_data = np.ma.masked_all(out_shape, dtype=data.dtype)
elif data.dtype.kind == 'i':
out_data = np.ma.masked_all(out_shape, dtype=float)
else:
out_data = np.ma.masked_all(out_shape, dtype=data.dtype)
# Spare us the trouble
if len(pv[0]) == 0:
return out_data
i, j, oi, oj = i[pv], j[pv], oi[pv], oj[pv]
# Interpolate
if interp == 'nearest':
if out is not None:
if ndims > 2:
raise ValueError('Need 2D for now.')
vok = np.isfinite(data[oj, oi])
out_data[j[vok], i[vok]] = data[oj[vok], oi[vok]]
else:
out_data[..., j, i] = data[..., oj, oi]
elif interp == 'linear':
points = (np.arange(grid.ny), np.arange(grid.nx))
if ndims == 2:
f = RegularGridInterpolator(points, data, bounds_error=False)
if out is not None:
tmp = f((oj, oi))
vok = np.isfinite(tmp)
out_data[j[vok], i[vok]] = tmp[vok]
else:
out_data[j, i] = f((oj, oi))
if ndims == 3:
for dimi, cdata in enumerate(data):
f = RegularGridInterpolator(points, cdata,
bounds_error=False)
if out is not None:
tmp = f((oj, oi))
vok = np.isfinite(tmp)
out_data[dimi, j[vok], i[vok]] = tmp[vok]
else:
out_data[dimi, j, i] = f((oj, oi))
if ndims == 4:
for dimj, cdata in enumerate(data):
for dimi, ccdata in enumerate(cdata):
f = RegularGridInterpolator(points, ccdata,
bounds_error=False)
if out is not None:
tmp = f((oj, oi))
vok = np.isfinite(tmp)
out_data[dimj, dimi, j[vok], i[vok]] = tmp[vok]
else:
out_data[dimj, dimi, j, i] = f((oj, oi))
elif interp == 'spline':
px, py = np.arange(grid.ny), np.arange(grid.nx)
if ndims == 2:
f = RectBivariateSpline(px, py, data, kx=ks, ky=ks)
if out is not None:
tmp = f(oj, oi, grid=False)
vok = np.isfinite(tmp)
out_data[j[vok], i[vok]] = tmp[vok]
else:
out_data[j, i] = f(oj, oi, grid=False)
if ndims == 3:
for dimi, cdata in enumerate(data):
f = RectBivariateSpline(px, py, cdata, kx=ks, ky=ks)
if out is not None:
tmp = f(oj, oi, grid=False)
vok = np.isfinite(tmp)
out_data[dimi, j[vok], i[vok]] = tmp[vok]
else:
out_data[dimi, j, i] = f(oj, oi, grid=False)
if ndims == 4:
for dimj, cdata in enumerate(data):
for dimi, ccdata in enumerate(cdata):
f = RectBivariateSpline(px, py, ccdata, kx=ks, ky=ks)
if out is not None:
tmp = f(oj, oi, grid=False)
vok = np.isfinite(tmp)
out_data[dimj, dimi, j[vok], i[vok]] = tmp[vok]
else:
out_data[dimj, dimi, j, i] = f(oj, oi, grid=False)
else:
msg = 'interpolation not understood: {}'.format(interp)
raise ValueError(msg)
# we have to catch a warning for an unexplained reason
with warnings.catch_warnings():
mess = "invalid value encountered in isfinite"
warnings.filterwarnings("ignore", message=mess)
out_data = np.ma.masked_invalid(out_data)
return out_data
| (self, data, grid=None, interp='nearest', ks=3, out=None) |
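A minimal round-trip sketch, assuming top-level ``salem`` imports: mapping data from a grid onto itself with nearest-neighbor interpolation returns the values unchanged:
>>> import numpy as np
>>> from salem import Grid, wgs84
>>> g = Grid(nxny=(3, 2), dxdy=(1, 1), x0y0=(0, 0), proj=wgs84)
>>> data = np.arange(6.).reshape((2, 3))
>>> out = g.map_gridded_data(data, grid=g, interp='nearest')
>>> bool(np.all(out == data))
True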
22,582 | salem.gis | region_of_interest | Computes a region of interest (ROI).
A ROI is simply a mask of the same size as the grid.
Parameters
----------
shape : str
path to a shapefile
geometry : geometry
a shapely geometry (don't forget the crs keyword)
grid : Grid
a Grid object whose extent will form the ROI
corners : tuple
a ((x0, y0), (x1, y1)) tuple of the corners of the square
to subset the dataset to (don't forget the crs keyword)
crs : crs, default wgs84
coordinate reference system of the geometry and corners
roi : ndarray
add the new region_of_interest to a previous one (useful for
multiple geometries for example)
all_touched : boolean
pass-through argument for rasterio.features.rasterize, indicating
that all grid cells which are clipped by the shapefile defining
the region of interest should be included (default=False)
| def region_of_interest(self, shape=None, geometry=None, grid=None,
corners=None, crs=wgs84, roi=None,
all_touched=False):
"""Computes a region of interest (ROI).
A ROI is simply a mask of the same size as the grid.
Parameters
----------
shape : str
path to a shapefile
geometry : geometry
a shapely geometry (don't forget the crs keyword)
grid : Grid
a Grid object whose extent will form the ROI
corners : tuple
a ((x0, y0), (x1, y1)) tuple of the corners of the square
to subset the dataset to (don't forget the crs keyword)
crs : crs, default wgs84
coordinate reference system of the geometry and corners
roi : ndarray
add the new region_of_interest to a previous one (useful for
multiple geometries for example)
all_touched : boolean
pass-through argument for rasterio.features.rasterize, indicating
that all grid cells which are clipped by the shapefile defining
the region of interest should be included (default=False)
"""
# Initial mask
if roi is not None:
mask = np.array(roi, dtype=np.int16)
else:
mask = np.zeros((self.ny, self.nx), dtype=np.int16)
# Collect keyword arguments, overriding anything the user
# inadvertently added
rasterize_kws = dict(out=mask, all_touched=all_touched)
# Several cases
if shape is not None:
import pandas as pd
inplace = False
if not isinstance(shape, pd.DataFrame):
from salem.sio import read_shapefile
shape = read_shapefile(shape)
inplace = True
# corner grid is needed for rasterio
shape = transform_geopandas(shape, to_crs=self.corner_grid,
inplace=inplace)
import rasterio
from rasterio.features import rasterize
with rasterio.Env():
mask = rasterize(shape.geometry, **rasterize_kws)
if geometry is not None:
import rasterio
from rasterio.features import rasterize
# corner grid is needed for rasterio
geom = transform_geometry(geometry, crs=crs,
to_crs=self.corner_grid)
with rasterio.Env():
mask = rasterize(np.atleast_1d(geom), **rasterize_kws)
if grid is not None:
_tmp = np.ones((grid.ny, grid.nx), dtype=np.int16)
mask = self.map_gridded_data(_tmp, grid, out=mask).filled(0)
if corners is not None:
cgrid = self.center_grid
xy0, xy1 = corners
x0, y0 = cgrid.transform(*xy0, crs=crs, nearest=True)
x1, y1 = cgrid.transform(*xy1, crs=crs, nearest=True)
mask[np.min([y0, y1]):np.max([y0, y1]) + 1,
np.min([x0, x1]):np.max([x0, x1]) + 1] = 1
return mask
| (self, shape=None, geometry=None, grid=None, corners=None, crs=<Other Coordinate Operation Transformer: longlat; PROJ-based coordinate operation; area of use: undefined>, roi=None, all_touched=False) |
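A sketch of the ``corners`` case on the small grid used throughout these examples; the square from (0, 0) to (1, 1) in wgs84 covers the four leftmost grid points:
>>> from salem import Grid, wgs84
>>> g = Grid(nxny=(3, 2), dxdy=(1, 1), x0y0=(0, 0), proj=wgs84)
>>> g.region_of_interest(corners=((0, 0), (1, 1)), crs=wgs84)
array([[1, 1, 0],
       [1, 1, 0]], dtype=int16)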
22,583 | salem.gis | regrid | Make a copy of the grid with an updated spatial resolution.
The keyword parameters are mutually exclusive, because the x/y ratio
of the grid has to be preserved.
Parameters
----------
nx : int
the new number of x pixels
ny : int
the new number of y pixels
factor : int
multiplication factor (factor=3 will generate a grid with
a spatial resolution 3 times finer)
Returns
-------
a new Grid object.
| def regrid(self, nx=None, ny=None, factor=1):
"""Make a copy of the grid with an updated spatial resolution.
The keyword parameters are mutually exclusive, because the x/y ratio
of the grid has to be preserved.
Parameters
----------
nx : int
the new number of x pixels
ny : int
the new number of y pixels
factor : int
multiplication factor (factor=3 will generate a grid with
a spatial resolution 3 times finer)
Returns
-------
a new Grid object.
"""
if nx is not None:
factor = nx / self.nx
if ny is not None:
factor = ny / self.ny
nx = self.nx * factor
ny = self.ny * factor
dx = self.dx / factor
dy = self.dy / factor
x0 = self.corner_grid.x0
y0 = self.corner_grid.y0
args = dict(nxny=(nx, ny), dxdy=(dx, dy), x0y0=(x0, y0),
proj=self.proj, pixel_ref='corner')
g = Grid(**args)
if self.pixel_ref == 'center':
g = g.center_grid
return g
| (self, nx=None, ny=None, factor=1) |
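A minimal sketch: ``factor=2`` doubles the pixel counts and halves the spacing, preserving the x/y ratio:
>>> from salem import Grid, wgs84
>>> g = Grid(nxny=(3, 2), dxdy=(1, 1), x0y0=(0, 0), proj=wgs84)
>>> g2 = g.regrid(factor=2)
>>> (g2.nx, g2.ny), (g2.dx, g2.dy)
((6, 4), (0.5, 0.5))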
22,584 | salem.gis | to_dataset | Creates an empty dataset based on the Grid's geolocalisation.
Returns
-------
An xarray.Dataset object ready to be filled with data
| def to_dataset(self):
"""Creates an empty dataset based on the Grid's geolocalisation.
Returns
-------
An xarray.Dataset object ready to be filled with data
"""
import xarray as xr
ds = xr.Dataset(coords={'x': (['x', ], self.center_grid.x_coord),
'y': (['y', ], self.center_grid.y_coord)}
)
ds.attrs['pyproj_srs'] = self.proj.srs
return ds
| (self) |
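A minimal sketch (xarray must be installed): the dataset carries the center-grid x/y coordinates and the projection string as an attribute:
>>> from salem import Grid, wgs84
>>> g = Grid(nxny=(3, 2), dxdy=(1, 1), x0y0=(0, 0), proj=wgs84)
>>> ds = g.to_dataset()
>>> (ds.x.size, ds.y.size)
(3, 2)
>>> 'pyproj_srs' in ds.attrs
True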
22,585 | salem.gis | to_dict | Serialize this grid to a dictionary.
Returns
-------
a grid dictionary
See Also
--------
from_dict : create a Grid from a dict
| def to_dict(self):
"""Serialize this grid to a dictionary.
Returns
-------
a grid dictionary
See Also
--------
from_dict : create a Grid from a dict
"""
return dict(proj=self.proj.srs, x0y0=(self.x0, self.y0),
nxny=(self.nx, self.ny), dxdy=(self.dx, self.dy),
pixel_ref=self.pixel_ref)
| (self) |
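A sketch of the serialized keys; per the See Also entry, the companion ``from_dict`` can rebuild an equal grid from this dictionary:
>>> from salem import Grid, wgs84
>>> g = Grid(nxny=(3, 2), dxdy=(1, 1), x0y0=(0, 0), proj=wgs84)
>>> sorted(g.to_dict())
['dxdy', 'nxny', 'pixel_ref', 'proj', 'x0y0']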
22,586 | salem.gis | to_geometry | Makes a geometrical representation of the grid (e.g. for drawing).
This can also come in handy when doing shape-to-raster operations.
TODO: currently returns one polygon for each grid point, but this
could do more.
Returns
-------
a geopandas.GeoDataFrame
| def to_geometry(self, to_crs=None):
"""Makes a geometrical representation of the grid (e.g. for drawing).
This can also come in handy when doing shape-to-raster operations.
TODO: currently returns one polygon for each grid point, but this
could do more.
Returns
-------
a geopandas.GeoDataFrame
"""
from geopandas import GeoDataFrame
from shapely.geometry import Polygon
out = GeoDataFrame()
geoms = []
ii = []
jj = []
xx = self.corner_grid.x0 + np.arange(self.nx+1) * self.dx
yy = self.corner_grid.y0 + np.arange(self.ny+1) * self.dy
for j, (y0, y1) in enumerate(zip(yy[:-1], yy[1:])):
for i, (x0, x1) in enumerate(zip(xx[:-1], xx[1:])):
coords = [(x0, y0), (x1, y0), (x1, y1), (x0, y1), (x0, y0)]
geoms.append(Polygon(coords))
jj.append(j)
ii.append(i)
out['j'] = jj
out['i'] = ii
out.set_geometry(geoms, crs=self.proj.srs, inplace=True)
if check_crs(to_crs):
transform_geopandas(out, to_crs=to_crs, inplace=True)
return out
| (self, to_crs=None) |
22,587 | salem.gis | to_json | Serialize this grid to a json file.
Parameters
----------
fpath : str, required
the path to the file to create
See Also
--------
from_json : read a json file
| def to_json(self, fpath):
"""Serialize this grid to a json file.
Parameters
----------
fpath : str, required
the path to the file to create
See Also
--------
from_json : read a json file
"""
import json
with open(fpath, 'w') as fp:
json.dump(self.to_dict(), fp)
| (self, fpath) |
22,588 | salem.gis | transform | Converts any coordinates into the local grid.
Parameters
----------
x : ndarray
the x coordinates of the point(s) you want to convert
y : ndarray
the y coordinates of the point(s) you want to convert
z : None
ignored (but necessary since some shapes have a z dimension)
crs : crs
reference system of x, y. Could be a pyproj.Proj instance or a
Grid instance. In the latter case (x, y) are actually (i, j).
(Default: lonlat in wgs84).
nearest : bool
set to True if you wish to return the closest i, j coordinates
instead of subpixel coords.
maskout : bool
set to true if you want to mask out the transformed
coordinates that are not within the grid.
Returns
-------
(i, j) coordinates of the points in the local grid.
| def transform(self, x, y, z=None, crs=wgs84, nearest=False, maskout=False):
"""Converts any coordinates into the local grid.
Parameters
----------
x : ndarray
the x coordinates of the point(s) you want to convert
y : ndarray
the y coordinates of the point(s) you want to convert
z : None
ignored (but necessary since some shapes have a z dimension)
crs : crs
reference system of x, y. Could be a pyproj.Proj instance or a
Grid instance. In the latter case (x, y) are actually (i, j).
(Default: lonlat in wgs84).
nearest : bool
set to True if you wish to return the closest i, j coordinates
instead of subpixel coords.
maskout : bool
set to true if you want to mask out the transformed
coordinates that are not within the grid.
Returns
-------
(i, j) coordinates of the points in the local grid.
"""
x, y = np.ma.array(x), np.ma.array(y)
# First to local proj
_crs = check_crs(crs, raise_on_error=True)
if isinstance(_crs, pyproj.Proj):
x, y = transform_proj(_crs, self.proj, x, y)
elif isinstance(_crs, Grid):
x, y = _crs.ij_to_crs(x, y, crs=self.proj)
# Then to local grid
x = (x - self.x0) / self.dx
y = (y - self.y0) / self.dy
# See if we need to round
if nearest:
f = np.rint if self.pixel_ref == 'center' else np.floor
x = f(x).astype(int)
y = f(y).astype(int)
# Mask?
if maskout:
if self.pixel_ref == 'center':
mask = ~((x >= -0.5) & (x < self.nx-0.5) &
(y >= -0.5) & (y < self.ny-0.5))
else:
mask = ~((x >= 0) & (x < self.nx) &
(y >= 0) & (y < self.ny))
x = np.ma.array(x, mask=mask)
y = np.ma.array(y, mask=mask)
return x, y
| (self, x, y, z=None, crs=<Other Coordinate Operation Transformer: longlat; PROJ-based coordinate operation; area of use: undefined>, nearest=False, maskout=False) |
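A minimal sketch with the default wgs84 input: coordinates come back subpixel by default and are rounded to the closest grid point with ``nearest=True``:
>>> from salem import Grid, wgs84
>>> g = Grid(nxny=(3, 2), dxdy=(1, 1), x0y0=(0, 0), proj=wgs84)
>>> i, j = g.transform(1.2, 0.8)
>>> float(i), float(j)
(1.2, 0.8)
>>> i, j = g.transform(1.2, 0.8, nearest=True)
>>> int(i), int(j)
(1, 1)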
22,589 | salem | Map | null | def Map():
raise ImportError('requires matplotlib')
| () |
22,590 | scipy.interpolate._fitpack2 | RectBivariateSpline |
Bivariate spline approximation over a rectangular mesh.
Can be used for both smoothing and interpolating data.
Parameters
----------
x,y : array_like
1-D arrays of coordinates in strictly ascending order.
Evaluated points outside the data range will be extrapolated.
z : array_like
2-D array of data with shape (x.size,y.size).
bbox : array_like, optional
Sequence of length 4 specifying the boundary of the rectangular
approximation domain, which means the start and end spline knots of
each dimension are set by these values. By default,
``bbox=[min(x), max(x), min(y), max(y)]``.
kx, ky : ints, optional
Degrees of the bivariate spline. Default is 3.
s : float, optional
Positive smoothing factor defined for estimation condition:
``sum((z[i]-f(x[i], y[i]))**2, axis=0) <= s`` where f is a spline
function. Default is ``s=0``, which is for interpolation.
See Also
--------
BivariateSpline :
a base class for bivariate splines.
UnivariateSpline :
a smooth univariate spline to fit a given set of data points.
SmoothBivariateSpline :
a smoothing bivariate spline through the given points
LSQBivariateSpline :
a bivariate spline using weighted least-squares fitting
RectSphereBivariateSpline :
a bivariate spline over a rectangular mesh on a sphere
SmoothSphereBivariateSpline :
a smoothing bivariate spline in spherical coordinates
LSQSphereBivariateSpline :
a bivariate spline in spherical coordinates using weighted
least-squares fitting
bisplrep :
a function to find a bivariate B-spline representation of a surface
bisplev :
a function to evaluate a bivariate B-spline and its derivatives
Notes
-----
If the input data is such that input dimensions have incommensurate
units and differ by many orders of magnitude, the interpolant may have
numerical artifacts. Consider rescaling the data before interpolating.
| class RectBivariateSpline(BivariateSpline):
"""
Bivariate spline approximation over a rectangular mesh.
Can be used for both smoothing and interpolating data.
Parameters
----------
x,y : array_like
1-D arrays of coordinates in strictly ascending order.
Evaluated points outside the data range will be extrapolated.
z : array_like
2-D array of data with shape (x.size,y.size).
bbox : array_like, optional
Sequence of length 4 specifying the boundary of the rectangular
approximation domain, which means the start and end spline knots of
each dimension are set by these values. By default,
``bbox=[min(x), max(x), min(y), max(y)]``.
kx, ky : ints, optional
Degrees of the bivariate spline. Default is 3.
s : float, optional
Positive smoothing factor defined for estimation condition:
``sum((z[i]-f(x[i], y[i]))**2, axis=0) <= s`` where f is a spline
function. Default is ``s=0``, which is for interpolation.
See Also
--------
BivariateSpline :
a base class for bivariate splines.
UnivariateSpline :
a smooth univariate spline to fit a given set of data points.
SmoothBivariateSpline :
a smoothing bivariate spline through the given points
LSQBivariateSpline :
a bivariate spline using weighted least-squares fitting
RectSphereBivariateSpline :
a bivariate spline over a rectangular mesh on a sphere
SmoothSphereBivariateSpline :
a smoothing bivariate spline in spherical coordinates
LSQSphereBivariateSpline :
a bivariate spline in spherical coordinates using weighted
least-squares fitting
bisplrep :
a function to find a bivariate B-spline representation of a surface
bisplev :
a function to evaluate a bivariate B-spline and its derivatives
Notes
-----
If the input data is such that input dimensions have incommensurate
units and differ by many orders of magnitude, the interpolant may have
numerical artifacts. Consider rescaling the data before interpolating.
"""
def __init__(self, x, y, z, bbox=[None] * 4, kx=3, ky=3, s=0):
x, y, bbox = ravel(x), ravel(y), ravel(bbox)
z = np.asarray(z)
if not np.all(diff(x) > 0.0):
raise ValueError('x must be strictly increasing')
if not np.all(diff(y) > 0.0):
raise ValueError('y must be strictly increasing')
if not x.size == z.shape[0]:
raise ValueError('x dimension of z must have same number of '
'elements as x')
if not y.size == z.shape[1]:
raise ValueError('y dimension of z must have same number of '
'elements as y')
if not bbox.shape == (4,):
raise ValueError('bbox shape should be (4,)')
if s is not None and not s >= 0.0:
raise ValueError("s should be s >= 0.0")
z = ravel(z)
xb, xe, yb, ye = bbox
nx, tx, ny, ty, c, fp, ier = dfitpack.regrid_smth(x, y, z, xb, xe, yb,
ye, kx, ky, s)
if ier not in [0, -1, -2]:
msg = _surfit_messages.get(ier, 'ier=%s' % (ier))
raise ValueError(msg)
self.fp = fp
self.tck = tx[:nx], ty[:ny], c[:(nx - kx - 1) * (ny - ky - 1)]
self.degrees = kx, ky
| (x, y, z, bbox=[None, None, None, None], kx=3, ky=3, s=0) |
22,591 | scipy.interpolate._fitpack2 | __call__ |
Evaluate the spline or its derivatives at given positions.
Parameters
----------
x, y : array_like
Input coordinates.
If `grid` is False, evaluate the spline at points ``(x[i],
y[i]), i=0, ..., len(x)-1``. Standard Numpy broadcasting
is obeyed.
If `grid` is True: evaluate spline at the grid points
defined by the coordinate arrays x, y. The arrays must be
sorted to increasing order.
The ordering of axes is consistent with
``np.meshgrid(..., indexing="ij")`` and inconsistent with the
default ordering ``np.meshgrid(..., indexing="xy")``.
dx : int
Order of x-derivative
.. versionadded:: 0.14.0
dy : int
Order of y-derivative
.. versionadded:: 0.14.0
grid : bool
Whether to evaluate the results on a grid spanned by the
input arrays, or at points specified by the input arrays.
.. versionadded:: 0.14.0
Examples
--------
Suppose that we want to bilinearly interpolate an exponentially decaying
function in 2 dimensions.
>>> import numpy as np
>>> from scipy.interpolate import RectBivariateSpline
We sample the function on a coarse grid. Note that the default indexing="xy"
of meshgrid would result in an unexpected (transposed) result after
interpolation.
>>> xarr = np.linspace(-3, 3, 100)
>>> yarr = np.linspace(-3, 3, 100)
>>> xgrid, ygrid = np.meshgrid(xarr, yarr, indexing="ij")
The function to interpolate decays faster along one axis than the other.
>>> zdata = np.exp(-np.sqrt((xgrid / 2) ** 2 + ygrid**2))
Next we sample on a finer grid using interpolation (kx=ky=1 for bilinear).
>>> rbs = RectBivariateSpline(xarr, yarr, zdata, kx=1, ky=1)
>>> xarr_fine = np.linspace(-3, 3, 200)
>>> yarr_fine = np.linspace(-3, 3, 200)
>>> xgrid_fine, ygrid_fine = np.meshgrid(xarr_fine, yarr_fine, indexing="ij")
>>> zdata_interp = rbs(xgrid_fine, ygrid_fine, grid=False)
And check that the result agrees with the input by plotting both.
>>> import matplotlib.pyplot as plt
>>> fig = plt.figure()
>>> ax1 = fig.add_subplot(1, 2, 1, aspect="equal")
>>> ax2 = fig.add_subplot(1, 2, 2, aspect="equal")
>>> ax1.imshow(zdata)
>>> ax2.imshow(zdata_interp)
>>> plt.show()
| def __call__(self, x, y, dx=0, dy=0, grid=True):
"""
Evaluate the spline or its derivatives at given positions.
Parameters
----------
x, y : array_like
Input coordinates.
If `grid` is False, evaluate the spline at points ``(x[i],
y[i]), i=0, ..., len(x)-1``. Standard Numpy broadcasting
is obeyed.
If `grid` is True: evaluate spline at the grid points
defined by the coordinate arrays x, y. The arrays must be
sorted to increasing order.
The ordering of axes is consistent with
``np.meshgrid(..., indexing="ij")`` and inconsistent with the
default ordering ``np.meshgrid(..., indexing="xy")``.
dx : int
Order of x-derivative
.. versionadded:: 0.14.0
dy : int
Order of y-derivative
.. versionadded:: 0.14.0
grid : bool
Whether to evaluate the results on a grid spanned by the
input arrays, or at points specified by the input arrays.
.. versionadded:: 0.14.0
Examples
--------
Suppose that we want to bilinearly interpolate an exponentially decaying
function in 2 dimensions.
>>> import numpy as np
>>> from scipy.interpolate import RectBivariateSpline
We sample the function on a coarse grid. Note that the default indexing="xy"
of meshgrid would result in an unexpected (transposed) result after
interpolation.
>>> xarr = np.linspace(-3, 3, 100)
>>> yarr = np.linspace(-3, 3, 100)
>>> xgrid, ygrid = np.meshgrid(xarr, yarr, indexing="ij")
The function to interpolate decays faster along one axis than the other.
>>> zdata = np.exp(-np.sqrt((xgrid / 2) ** 2 + ygrid**2))
Next we sample on a finer grid using interpolation (kx=ky=1 for bilinear).
>>> rbs = RectBivariateSpline(xarr, yarr, zdata, kx=1, ky=1)
>>> xarr_fine = np.linspace(-3, 3, 200)
>>> yarr_fine = np.linspace(-3, 3, 200)
>>> xgrid_fine, ygrid_fine = np.meshgrid(xarr_fine, yarr_fine, indexing="ij")
>>> zdata_interp = rbs(xgrid_fine, ygrid_fine, grid=False)
And check that the result agrees with the input by plotting both.
>>> import matplotlib.pyplot as plt
>>> fig = plt.figure()
>>> ax1 = fig.add_subplot(1, 2, 1, aspect="equal")
>>> ax2 = fig.add_subplot(1, 2, 2, aspect="equal")
>>> ax1.imshow(zdata)
>>> ax2.imshow(zdata_interp)
>>> plt.show()
"""
x = np.asarray(x)
y = np.asarray(y)
tx, ty, c = self.tck[:3]
kx, ky = self.degrees
if grid:
if x.size == 0 or y.size == 0:
return np.zeros((x.size, y.size), dtype=self.tck[2].dtype)
if (x.size >= 2) and (not np.all(np.diff(x) >= 0.0)):
raise ValueError("x must be strictly increasing when `grid` is True")
if (y.size >= 2) and (not np.all(np.diff(y) >= 0.0)):
raise ValueError("y must be strictly increasing when `grid` is True")
if dx or dy:
z, ier = dfitpack.parder(tx, ty, c, kx, ky, dx, dy, x, y)
if not ier == 0:
raise ValueError("Error code returned by parder: %s" % ier)
else:
z, ier = dfitpack.bispev(tx, ty, c, kx, ky, x, y)
if not ier == 0:
raise ValueError("Error code returned by bispev: %s" % ier)
else:
# standard Numpy broadcasting
if x.shape != y.shape:
x, y = np.broadcast_arrays(x, y)
shape = x.shape
x = x.ravel()
y = y.ravel()
if x.size == 0 or y.size == 0:
return np.zeros(shape, dtype=self.tck[2].dtype)
if dx or dy:
z, ier = dfitpack.pardeu(tx, ty, c, kx, ky, dx, dy, x, y)
if not ier == 0:
raise ValueError("Error code returned by pardeu: %s" % ier)
else:
z, ier = dfitpack.bispeu(tx, ty, c, kx, ky, x, y)
if not ier == 0:
raise ValueError("Error code returned by bispeu: %s" % ier)
z = z.reshape(shape)
return z
| (self, x, y, dx=0, dy=0, grid=True) |
22,592 | scipy.interpolate._fitpack2 | __init__ | null | def __init__(self, x, y, z, bbox=[None] * 4, kx=3, ky=3, s=0):
x, y, bbox = ravel(x), ravel(y), ravel(bbox)
z = np.asarray(z)
if not np.all(diff(x) > 0.0):
raise ValueError('x must be strictly increasing')
if not np.all(diff(y) > 0.0):
raise ValueError('y must be strictly increasing')
if not x.size == z.shape[0]:
raise ValueError('x dimension of z must have same number of '
'elements as x')
if not y.size == z.shape[1]:
raise ValueError('y dimension of z must have same number of '
'elements as y')
if not bbox.shape == (4,):
raise ValueError('bbox shape should be (4,)')
if s is not None and not s >= 0.0:
raise ValueError("s should be s >= 0.0")
z = ravel(z)
xb, xe, yb, ye = bbox
nx, tx, ny, ty, c, fp, ier = dfitpack.regrid_smth(x, y, z, xb, xe, yb,
ye, kx, ky, s)
if ier not in [0, -1, -2]:
msg = _surfit_messages.get(ier, 'ier=%s' % (ier))
raise ValueError(msg)
self.fp = fp
self.tck = tx[:nx], ty[:ny], c[:(nx - kx - 1) * (ny - ky - 1)]
self.degrees = kx, ky
| (self, x, y, z, bbox=[None, None, None, None], kx=3, ky=3, s=0) |
22,593 | scipy.interpolate._fitpack2 | _validate_input | null | @staticmethod
def _validate_input(x, y, z, w, kx, ky, eps):
x, y, z = np.asarray(x), np.asarray(y), np.asarray(z)
if not x.size == y.size == z.size:
raise ValueError('x, y, and z should have a same length')
if w is not None:
w = np.asarray(w)
if x.size != w.size:
raise ValueError('x, y, z, and w should have a same length')
elif not np.all(w >= 0.0):
raise ValueError('w should be positive')
if (eps is not None) and (not 0.0 < eps < 1.0):
raise ValueError('eps should be between (0, 1)')
if not x.size >= (kx + 1) * (ky + 1):
raise ValueError('The length of x, y and z should be at least'
' (kx+1) * (ky+1)')
return x, y, z, w
| (x, y, z, w, kx, ky, eps) |
22,594 | scipy.interpolate._fitpack2 | ev |
Evaluate the spline at points
Returns the interpolated value at ``(xi[i], yi[i]),
i=0,...,len(xi)-1``.
Parameters
----------
xi, yi : array_like
Input coordinates. Standard Numpy broadcasting is obeyed.
The ordering of axes is consistent with
``np.meshgrid(..., indexing="ij")`` and inconsistent with the
default ordering ``np.meshgrid(..., indexing="xy")``.
dx : int, optional
Order of x-derivative
.. versionadded:: 0.14.0
dy : int, optional
Order of y-derivative
.. versionadded:: 0.14.0
Examples
--------
Suppose that we want to bilinearly interpolate an exponentially decaying
function in 2 dimensions.
>>> import numpy as np
>>> from scipy.interpolate import RectBivariateSpline
>>> def f(x, y):
... return np.exp(-np.sqrt((x / 2) ** 2 + y**2))
We sample the function on a coarse grid and set up the interpolator. Note that
the default ``indexing="xy"`` of meshgrid would result in an unexpected
(transposed) result after interpolation.
>>> xarr = np.linspace(-3, 3, 21)
>>> yarr = np.linspace(-3, 3, 21)
>>> xgrid, ygrid = np.meshgrid(xarr, yarr, indexing="ij")
>>> zdata = f(xgrid, ygrid)
>>> rbs = RectBivariateSpline(xarr, yarr, zdata, kx=1, ky=1)
Next we sample the function along a diagonal slice through the coordinate space
on a finer grid using interpolation.
>>> xinterp = np.linspace(-3, 3, 201)
>>> yinterp = np.linspace(3, -3, 201)
>>> zinterp = rbs.ev(xinterp, yinterp)
And check that the interpolation passes through the function evaluations as a
function of the distance from the origin along the slice.
>>> import matplotlib.pyplot as plt
>>> fig = plt.figure()
>>> ax1 = fig.add_subplot(1, 1, 1)
>>> ax1.plot(np.sqrt(xarr**2 + yarr**2), np.diag(zdata), "or")
>>> ax1.plot(np.sqrt(xinterp**2 + yinterp**2), zinterp, "-b")
>>> plt.show()
| def ev(self, xi, yi, dx=0, dy=0):
"""
Evaluate the spline at points
Returns the interpolated value at ``(xi[i], yi[i]),
i=0,...,len(xi)-1``.
Parameters
----------
xi, yi : array_like
Input coordinates. Standard Numpy broadcasting is obeyed.
The ordering of axes is consistent with
``np.meshgrid(..., indexing="ij")`` and inconsistent with the
default ordering ``np.meshgrid(..., indexing="xy")``.
dx : int, optional
Order of x-derivative
.. versionadded:: 0.14.0
dy : int, optional
Order of y-derivative
.. versionadded:: 0.14.0
Examples
--------
Suppose that we want to bilinearly interpolate an exponentially decaying
function in 2 dimensions.
>>> import numpy as np
>>> from scipy.interpolate import RectBivariateSpline
>>> def f(x, y):
... return np.exp(-np.sqrt((x / 2) ** 2 + y**2))
We sample the function on a coarse grid and set up the interpolator. Note that
the default ``indexing="xy"`` of meshgrid would result in an unexpected
(transposed) result after interpolation.
>>> xarr = np.linspace(-3, 3, 21)
>>> yarr = np.linspace(-3, 3, 21)
>>> xgrid, ygrid = np.meshgrid(xarr, yarr, indexing="ij")
>>> zdata = f(xgrid, ygrid)
>>> rbs = RectBivariateSpline(xarr, yarr, zdata, kx=1, ky=1)
Next we sample the function along a diagonal slice through the coordinate space
on a finer grid using interpolation.
>>> xinterp = np.linspace(-3, 3, 201)
>>> yinterp = np.linspace(3, -3, 201)
>>> zinterp = rbs.ev(xinterp, yinterp)
And check that the interpolation passes through the function evaluations as a
function of the distance from the origin along the slice.
>>> import matplotlib.pyplot as plt
>>> fig = plt.figure()
>>> ax1 = fig.add_subplot(1, 1, 1)
>>> ax1.plot(np.sqrt(xarr**2 + yarr**2), np.diag(zdata), "or")
>>> ax1.plot(np.sqrt(xinterp**2 + yinterp**2), zinterp, "-b")
>>> plt.show()
"""
return self.__call__(xi, yi, dx=dx, dy=dy, grid=False)
| (self, xi, yi, dx=0, dy=0) |
22,595 | scipy.interpolate._fitpack2 | get_coeffs | Return spline coefficients. | def get_coeffs(self):
""" Return spline coefficients."""
return self.tck[2]
| (self) |
22,596 | scipy.interpolate._fitpack2 | get_knots | Return a tuple (tx,ty) where tx,ty contain knots positions
of the spline with respect to x-, y-variable, respectively.
The position of interior and additional knots are given as
t[k+1:-k-1] and t[:k+1]=b, t[-k-1:]=e, respectively.
| def get_knots(self):
""" Return a tuple (tx,ty) where tx,ty contain knots positions
of the spline with respect to x-, y-variable, respectively.
The position of interior and additional knots are given as
t[k+1:-k-1] and t[:k+1]=b, t[-k-1:]=e, respectively.
"""
return self.tck[:2]
| (self) |
22,597 | scipy.interpolate._fitpack2 | get_residual | Return weighted sum of squared residuals of the spline
approximation: sum ((w[i]*(z[i]-s(x[i],y[i])))**2,axis=0)
| def get_residual(self):
""" Return weighted sum of squared residuals of the spline
approximation: sum ((w[i]*(z[i]-s(x[i],y[i])))**2,axis=0)
"""
return self.fp
| (self) |
22,598 | scipy.interpolate._fitpack2 | integral |
Evaluate the integral of the spline over area [xa,xb] x [ya,yb].
Parameters
----------
xa, xb : float
The end-points of the x integration interval.
ya, yb : float
The end-points of the y integration interval.
Returns
-------
integ : float
The value of the resulting integral.
| def integral(self, xa, xb, ya, yb):
"""
Evaluate the integral of the spline over area [xa,xb] x [ya,yb].
Parameters
----------
xa, xb : float
The end-points of the x integration interval.
ya, yb : float
The end-points of the y integration interval.
Returns
-------
integ : float
The value of the resulting integral.
"""
tx, ty, c = self.tck[:3]
kx, ky = self.degrees
return dfitpack.dblint(tx, ty, c, kx, ky, xa, xb, ya, yb)
| (self, xa, xb, ya, yb) |
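A quick sanity-check sketch: a spline fitted to a constant surface integrates to the constant times the area of the domain:
>>> import numpy as np
>>> from scipy.interpolate import RectBivariateSpline
>>> x = y = np.linspace(0, 1, 11)
>>> rbs = RectBivariateSpline(x, y, np.ones((11, 11)))
>>> bool(np.isclose(rbs.integral(0, 1, 0, 1), 1.0))
True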
22,599 | scipy.interpolate._fitpack2 | partial_derivative | Construct a new spline representing a partial derivative of this
spline.
Parameters
----------
dx, dy : int
Orders of the derivative in x and y respectively. They must be
non-negative integers and less than the respective degree of the
original spline (self) in that direction (``kx``, ``ky``).
Returns
-------
spline :
A new spline of degrees (``kx - dx``, ``ky - dy``) representing the
derivative of this spline.
Notes
-----
.. versionadded:: 1.9.0
| def partial_derivative(self, dx, dy):
"""Construct a new spline representing a partial derivative of this
spline.
Parameters
----------
dx, dy : int
Orders of the derivative in x and y respectively. They must be
non-negative integers and less than the respective degree of the
original spline (self) in that direction (``kx``, ``ky``).
Returns
-------
spline :
A new spline of degrees (``kx - dx``, ``ky - dy``) representing the
derivative of this spline.
Notes
-----
.. versionadded:: 1.9.0
"""
if dx == 0 and dy == 0:
return self
else:
kx, ky = self.degrees
if not (dx >= 0 and dy >= 0):
raise ValueError("order of derivative must be positive or"
" zero")
if not (dx < kx and dy < ky):
raise ValueError("order of derivative must be less than"
" degree of spline")
tx, ty, c = self.tck[:3]
newc, ier = dfitpack.pardtc(tx, ty, c, kx, ky, dx, dy)
if ier != 0:
# This should not happen under normal conditions.
raise ValueError("Unexpected error code returned by"
" pardtc: %d" % ier)
nx = len(tx)
ny = len(ty)
newtx = tx[dx:nx - dx]
newty = ty[dy:ny - dy]
newkx, newky = kx - dx, ky - dy
newclen = (nx - dx - kx - 1) * (ny - dy - ky - 1)
return _DerivedBivariateSpline._from_tck((newtx, newty,
newc[:newclen],
newkx, newky))
| (self, dx, dy) |
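A sketch: data sampled from a quadratic surface is reproduced exactly by the default cubic spline, so the derivative spline recovers d/dx (x**2 + y) = 2x:
>>> import numpy as np
>>> from scipy.interpolate import RectBivariateSpline
>>> x = y = np.linspace(0, 1, 11)
>>> xg, yg = np.meshgrid(x, y, indexing='ij')
>>> rbs = RectBivariateSpline(x, y, xg**2 + yg)
>>> dzdx = rbs.partial_derivative(1, 0)
>>> bool(np.isclose(dzdx(0.5, 0.5, grid=False), 1.0))
True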
22,600 | scipy.interpolate._rgi | RegularGridInterpolator |
Interpolator on a regular or rectilinear grid in arbitrary dimensions.
The data must be defined on a rectilinear grid; that is, a rectangular
grid with even or uneven spacing. Linear, nearest-neighbor, spline
interpolations are supported. After setting up the interpolator object,
the interpolation method may be chosen at each evaluation.
Parameters
----------
points : tuple of ndarray of float, with shapes (m1, ), ..., (mn, )
The points defining the regular grid in n dimensions. The points in
each dimension (i.e. every elements of the points tuple) must be
strictly ascending or descending.
values : array_like, shape (m1, ..., mn, ...)
The data on the regular grid in n dimensions. Complex data is
accepted.
.. deprecated:: 1.13.0
Complex data is deprecated with ``method="pchip"`` and will raise an
error in SciPy 1.15.0. This is because ``PchipInterpolator`` only
works with real values. If you are trying to use the real components of
the passed array, use ``np.real`` on ``values``.
method : str, optional
The method of interpolation to perform. Supported are "linear",
"nearest", "slinear", "cubic", "quintic" and "pchip". This
parameter will become the default for the object's ``__call__``
method. Default is "linear".
bounds_error : bool, optional
If True, when interpolated values are requested outside of the
domain of the input data, a ValueError is raised.
If False, then `fill_value` is used.
Default is True.
fill_value : float or None, optional
The value to use for points outside of the interpolation domain.
If None, values outside the domain are extrapolated.
Default is ``np.nan``.
solver : callable, optional
Only used for methods "slinear", "cubic" and "quintic".
Sparse linear algebra solver for construction of the NdBSpline instance.
Default is the iterative solver `scipy.sparse.linalg.gcrotmk`.
.. versionadded:: 1.13
solver_args: dict, optional
Additional arguments to pass to `solver`, if any.
.. versionadded:: 1.13
Methods
-------
__call__
Attributes
----------
grid : tuple of ndarrays
The points defining the regular grid in n dimensions.
This tuple defines the full grid via
``np.meshgrid(*grid, indexing='ij')``
values : ndarray
Data values at the grid.
method : str
Interpolation method.
fill_value : float or ``None``
Use this value for out-of-bounds arguments to `__call__`.
bounds_error : bool
If ``True``, out-of-bounds argument raise a ``ValueError``.
Notes
-----
Contrary to `LinearNDInterpolator` and `NearestNDInterpolator`, this class
avoids expensive triangulation of the input data by taking advantage of the
regular grid structure.
In other words, this class assumes that the data is defined on a
*rectilinear* grid.
.. versionadded:: 0.14
The 'slinear'(k=1), 'cubic'(k=3), and 'quintic'(k=5) methods are
tensor-product spline interpolators, where `k` is the spline degree,
If any dimension has fewer points than `k` + 1, an error will be raised.
.. versionadded:: 1.9
If the input data is such that dimensions have incommensurate
units and differ by many orders of magnitude, the interpolant may have
numerical artifacts. Consider rescaling the data before interpolating.
**Choosing a solver for spline methods**
The spline methods "slinear", "cubic" and "quintic" involve solving a
large sparse linear system at instantiation time. Depending on data,
the default solver may or may not be adequate. When it is not, you may
need to experiment with an optional `solver` argument, where you may
choose between the direct solver (`scipy.sparse.linalg.spsolve`) or
iterative solvers from `scipy.sparse.linalg`. You may need to supply
additional parameters via the optional `solver_args` parameter (for instance,
you may supply the starting value or target tolerance). See the
`scipy.sparse.linalg` documentation for the full list of available options.
Alternatively, you may instead use the legacy methods, "slinear_legacy",
"cubic_legacy" and "quintic_legacy". These methods allow faster construction
but evaluations will be much slower.
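For instance, a minimal sketch (SciPy >= 1.13; data and sizes are illustrative) of passing the direct solver instead of the iterative default:
>>> import numpy as np
>>> from scipy.interpolate import RegularGridInterpolator
>>> from scipy.sparse.linalg import spsolve
>>> x = np.linspace(0, 4, 10)
>>> y = np.linspace(0, 4, 10)
>>> vals = np.add.outer(x**2, y**2)
>>> interp = RegularGridInterpolator((x, y), vals, method="cubic",
...                                  solver=spsolve)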
Examples
--------
**Evaluate a function on the points of a 3-D grid**
As a first example, we evaluate a simple example function on the points of
a 3-D grid:
>>> from scipy.interpolate import RegularGridInterpolator
>>> import numpy as np
>>> def f(x, y, z):
... return 2 * x**3 + 3 * y**2 - z
>>> x = np.linspace(1, 4, 11)
>>> y = np.linspace(4, 7, 22)
>>> z = np.linspace(7, 9, 33)
>>> xg, yg, zg = np.meshgrid(x, y, z, indexing='ij', sparse=True)
>>> data = f(xg, yg, zg)
``data`` is now a 3-D array with ``data[i, j, k] = f(x[i], y[j], z[k])``.
Next, define an interpolating function from this data:
>>> interp = RegularGridInterpolator((x, y, z), data)
Evaluate the interpolating function at the two points
``(x,y,z) = (2.1, 6.2, 8.3)`` and ``(3.3, 5.2, 7.1)``:
>>> pts = np.array([[2.1, 6.2, 8.3],
... [3.3, 5.2, 7.1]])
>>> interp(pts)
array([ 125.80469388, 146.30069388])
which is indeed a close approximation to
>>> f(2.1, 6.2, 8.3), f(3.3, 5.2, 7.1)
(125.54200000000002, 145.894)
**Interpolate and extrapolate a 2D dataset**
As a second example, we interpolate and extrapolate a 2D data set:
>>> x, y = np.array([-2, 0, 4]), np.array([-2, 0, 2, 5])
>>> def ff(x, y):
... return x**2 + y**2
>>> xg, yg = np.meshgrid(x, y, indexing='ij')
>>> data = ff(xg, yg)
>>> interp = RegularGridInterpolator((x, y), data,
... bounds_error=False, fill_value=None)
>>> import matplotlib.pyplot as plt
>>> fig = plt.figure()
>>> ax = fig.add_subplot(projection='3d')
>>> ax.scatter(xg.ravel(), yg.ravel(), data.ravel(),
... s=60, c='k', label='data')
Evaluate and plot the interpolator on a finer grid
>>> xx = np.linspace(-4, 9, 31)
>>> yy = np.linspace(-4, 9, 31)
>>> X, Y = np.meshgrid(xx, yy, indexing='ij')
>>> # interpolator
>>> ax.plot_wireframe(X, Y, interp((X, Y)), rstride=3, cstride=3,
... alpha=0.4, color='m', label='linear interp')
>>> # ground truth
>>> ax.plot_wireframe(X, Y, ff(X, Y), rstride=3, cstride=3,
... alpha=0.4, label='ground truth')
>>> plt.legend()
>>> plt.show()
Other examples are given
:ref:`in the tutorial <tutorial-interpolate_regular_grid_interpolator>`.
See Also
--------
NearestNDInterpolator : Nearest neighbor interpolator on *unstructured*
data in N dimensions
LinearNDInterpolator : Piecewise linear interpolator on *unstructured* data
in N dimensions
interpn : a convenience function which wraps `RegularGridInterpolator`
scipy.ndimage.map_coordinates : interpolation on grids with equal spacing
(suitable for e.g., N-D image resampling)
References
----------
.. [1] Python package *regulargrid* by Johannes Buchner, see
https://pypi.python.org/pypi/regulargrid/
.. [2] Wikipedia, "Trilinear interpolation",
https://en.wikipedia.org/wiki/Trilinear_interpolation
.. [3] Weiser, Alan, and Sergio E. Zarantonello. "A note on piecewise linear
and multilinear table interpolation in many dimensions." MATH.
COMPUT. 50.181 (1988): 189-196.
https://www.ams.org/journals/mcom/1988-50-181/S0025-5718-1988-0917826-0/S0025-5718-1988-0917826-0.pdf
:doi:`10.1090/S0025-5718-1988-0917826-0`
| class RegularGridInterpolator:
"""
Interpolator on a regular or rectilinear grid in arbitrary dimensions.
The data must be defined on a rectilinear grid; that is, a rectangular
grid with even or uneven spacing. Linear, nearest-neighbor, and spline
interpolations are supported. After setting up the interpolator object,
the interpolation method may be chosen at each evaluation.
Parameters
----------
points : tuple of ndarray of float, with shapes (m1, ), ..., (mn, )
The points defining the regular grid in n dimensions. The points in
each dimension (i.e. every element of the points tuple) must be
strictly ascending or descending.
values : array_like, shape (m1, ..., mn, ...)
The data on the regular grid in n dimensions. Complex data is
accepted.
.. deprecated:: 1.13.0
Complex data is deprecated with ``method="pchip"`` and will raise an
error in SciPy 1.15.0. This is because ``PchipInterpolator`` only
works with real values. If you are trying to use the real components of
the passed array, use ``np.real`` on ``values``.
method : str, optional
The method of interpolation to perform. Supported are "linear",
"nearest", "slinear", "cubic", "quintic" and "pchip". This
parameter will become the default for the object's ``__call__``
method. Default is "linear".
bounds_error : bool, optional
If True, when interpolated values are requested outside of the
domain of the input data, a ValueError is raised.
If False, then `fill_value` is used.
Default is True.
fill_value : float or None, optional
The value to use for points outside of the interpolation domain.
If None, values outside the domain are extrapolated.
Default is ``np.nan``.
solver : callable, optional
Only used for methods "slinear", "cubic" and "quintic".
Sparse linear algebra solver for construction of the NdBSpline instance.
Default is the iterative solver `scipy.sparse.linalg.gcrotmk`.
.. versionadded:: 1.13
solver_args: dict, optional
Additional arguments to pass to `solver`, if any.
.. versionadded:: 1.13
Methods
-------
__call__
Attributes
----------
grid : tuple of ndarrays
The points defining the regular grid in n dimensions.
This tuple defines the full grid via
``np.meshgrid(*grid, indexing='ij')``
values : ndarray
Data values at the grid.
method : str
Interpolation method.
fill_value : float or ``None``
Use this value for out-of-bounds arguments to `__call__`.
bounds_error : bool
If ``True``, out-of-bounds arguments raise a ``ValueError``.
Notes
-----
Contrary to `LinearNDInterpolator` and `NearestNDInterpolator`, this class
avoids expensive triangulation of the input data by taking advantage of the
regular grid structure.
In other words, this class assumes that the data is defined on a
*rectilinear* grid.
.. versionadded:: 0.14
The 'slinear'(k=1), 'cubic'(k=3), and 'quintic'(k=5) methods are
tensor-product spline interpolators, where `k` is the spline degree.
If any dimension has fewer points than `k` + 1, an error will be raised.
.. versionadded:: 1.9
If the input data is such that dimensions have incommensurate
units and differ by many orders of magnitude, the interpolant may have
numerical artifacts. Consider rescaling the data before interpolating.
**Choosing a solver for spline methods**
The spline methods "slinear", "cubic" and "quintic" involve solving a
large sparse linear system at instantiation time. Depending on data,
the default solver may or may not be adequate. When it is not, you may
need to experiment with an optional `solver` argument, where you may
choose between the direct solver (`scipy.sparse.linalg.spsolve`) or
iterative solvers from `scipy.sparse.linalg`. You may need to supply
additional parameters via the optional `solver_args` parameter (for instance,
you may supply the starting value or target tolerance). See the
`scipy.sparse.linalg` documentation for the full list of available options.
Alternatively, you may instead use the legacy methods, "slinear_legacy",
"cubic_legacy" and "quintic_legacy". These methods allow faster construction
but evaluations will be much slower.
Examples
--------
**Evaluate a function on the points of a 3-D grid**
As a first example, we evaluate a simple example function on the points of
a 3-D grid:
>>> from scipy.interpolate import RegularGridInterpolator
>>> import numpy as np
>>> def f(x, y, z):
... return 2 * x**3 + 3 * y**2 - z
>>> x = np.linspace(1, 4, 11)
>>> y = np.linspace(4, 7, 22)
>>> z = np.linspace(7, 9, 33)
>>> xg, yg, zg = np.meshgrid(x, y, z, indexing='ij', sparse=True)
>>> data = f(xg, yg, zg)
``data`` is now a 3-D array with ``data[i, j, k] = f(x[i], y[j], z[k])``.
Next, define an interpolating function from this data:
>>> interp = RegularGridInterpolator((x, y, z), data)
Evaluate the interpolating function at the two points
``(x,y,z) = (2.1, 6.2, 8.3)`` and ``(3.3, 5.2, 7.1)``:
>>> pts = np.array([[2.1, 6.2, 8.3],
... [3.3, 5.2, 7.1]])
>>> interp(pts)
array([ 125.80469388, 146.30069388])
which is indeed a close approximation to
>>> f(2.1, 6.2, 8.3), f(3.3, 5.2, 7.1)
(125.54200000000002, 145.894)
**Interpolate and extrapolate a 2D dataset**
As a second example, we interpolate and extrapolate a 2D data set:
>>> x, y = np.array([-2, 0, 4]), np.array([-2, 0, 2, 5])
>>> def ff(x, y):
... return x**2 + y**2
>>> xg, yg = np.meshgrid(x, y, indexing='ij')
>>> data = ff(xg, yg)
>>> interp = RegularGridInterpolator((x, y), data,
... bounds_error=False, fill_value=None)
>>> import matplotlib.pyplot as plt
>>> fig = plt.figure()
>>> ax = fig.add_subplot(projection='3d')
>>> ax.scatter(xg.ravel(), yg.ravel(), data.ravel(),
... s=60, c='k', label='data')
Evaluate and plot the interpolator on a finer grid
>>> xx = np.linspace(-4, 9, 31)
>>> yy = np.linspace(-4, 9, 31)
>>> X, Y = np.meshgrid(xx, yy, indexing='ij')
>>> # interpolator
>>> ax.plot_wireframe(X, Y, interp((X, Y)), rstride=3, cstride=3,
... alpha=0.4, color='m', label='linear interp')
>>> # ground truth
>>> ax.plot_wireframe(X, Y, ff(X, Y), rstride=3, cstride=3,
... alpha=0.4, label='ground truth')
>>> plt.legend()
>>> plt.show()
Other examples are given
:ref:`in the tutorial <tutorial-interpolate_regular_grid_interpolator>`.
See Also
--------
NearestNDInterpolator : Nearest neighbor interpolator on *unstructured*
data in N dimensions
LinearNDInterpolator : Piecewise linear interpolator on *unstructured* data
in N dimensions
interpn : a convenience function which wraps `RegularGridInterpolator`
scipy.ndimage.map_coordinates : interpolation on grids with equal spacing
(suitable for e.g., N-D image resampling)
References
----------
.. [1] Python package *regulargrid* by Johannes Buchner, see
https://pypi.python.org/pypi/regulargrid/
.. [2] Wikipedia, "Trilinear interpolation",
https://en.wikipedia.org/wiki/Trilinear_interpolation
.. [3] Weiser, Alan, and Sergio E. Zarantonello. "A note on piecewise linear
and multilinear table interpolation in many dimensions." MATH.
COMPUT. 50.181 (1988): 189-196.
https://www.ams.org/journals/mcom/1988-50-181/S0025-5718-1988-0917826-0/S0025-5718-1988-0917826-0.pdf
:doi:`10.1090/S0025-5718-1988-0917826-0`
"""
# this class is based on code originally programmed by Johannes Buchner,
# see https://github.com/JohannesBuchner/regulargrid
_SPLINE_DEGREE_MAP = {"slinear": 1, "cubic": 3, "quintic": 5, 'pchip': 3,
"slinear_legacy": 1, "cubic_legacy": 3, "quintic_legacy": 5,}
_SPLINE_METHODS_recursive = {"slinear_legacy", "cubic_legacy",
"quintic_legacy", "pchip"}
_SPLINE_METHODS_ndbspl = {"slinear", "cubic", "quintic"}
_SPLINE_METHODS = list(_SPLINE_DEGREE_MAP.keys())
_ALL_METHODS = ["linear", "nearest"] + _SPLINE_METHODS
def __init__(self, points, values, method="linear", bounds_error=True,
fill_value=np.nan, *, solver=None, solver_args=None):
if method not in self._ALL_METHODS:
raise ValueError("Method '%s' is not defined" % method)
elif method in self._SPLINE_METHODS:
self._validate_grid_dimensions(points, method)
self.method = method
self.bounds_error = bounds_error
self.grid, self._descending_dimensions = _check_points(points)
self.values = self._check_values(values)
self._check_dimensionality(self.grid, self.values)
self.fill_value = self._check_fill_value(self.values, fill_value)
if self._descending_dimensions:
self.values = np.flip(values, axis=self._descending_dimensions)
if self.method == "pchip" and np.iscomplexobj(self.values):
msg = ("`PchipInterpolator` only works with real values. Passing "
"complex-dtyped `values` with `method='pchip'` is deprecated "
"and will raise an error in SciPy 1.15.0. If you are trying to "
"use the real components of the passed array, use `np.real` on "
"the array before passing to `RegularGridInterpolator`.")
warnings.warn(msg, DeprecationWarning, stacklevel=2)
if method in self._SPLINE_METHODS_ndbspl:
if solver_args is None:
solver_args = {}
self._spline = self._construct_spline(method, solver, **solver_args)
else:
if solver is not None or solver_args:
raise ValueError(
f"{method =} does not accept the 'solver' argument. Got "
f" {solver = } and with arguments {solver_args}."
)
def _construct_spline(self, method, solver=None, **solver_args):
if solver is None:
solver = ssl.gcrotmk
spl = make_ndbspl(
self.grid, self.values, self._SPLINE_DEGREE_MAP[method],
solver=solver, **solver_args
)
return spl
def _check_dimensionality(self, grid, values):
_check_dimensionality(grid, values)
def _check_points(self, points):
return _check_points(points)
def _check_values(self, values):
if not hasattr(values, 'ndim'):
# allow reasonable duck-typed values
values = np.asarray(values)
if hasattr(values, 'dtype') and hasattr(values, 'astype'):
if not np.issubdtype(values.dtype, np.inexact):
values = values.astype(float)
return values
def _check_fill_value(self, values, fill_value):
if fill_value is not None:
fill_value_dtype = np.asarray(fill_value).dtype
if (hasattr(values, 'dtype') and not
np.can_cast(fill_value_dtype, values.dtype,
casting='same_kind')):
raise ValueError("fill_value must be either 'None' or "
"of a type compatible with values")
return fill_value
def __call__(self, xi, method=None, *, nu=None):
"""
Interpolation at coordinates.
Parameters
----------
xi : ndarray of shape (..., ndim)
The coordinates to evaluate the interpolator at.
method : str, optional
The method of interpolation to perform. Supported are "linear",
"nearest", "slinear", "cubic", "quintic" and "pchip". Default is
the method chosen when the interpolator was created.
nu : sequence of ints, length ndim, optional
If not None, the orders of the derivatives to evaluate.
Each entry must be non-negative.
Only allowed for methods "slinear", "cubic" and "quintic".
.. versionadded:: 1.13
Returns
-------
values_x : ndarray, shape xi.shape[:-1] + values.shape[ndim:]
Interpolated values at `xi`. See notes for behaviour when
``xi.ndim == 1``.
Notes
-----
In the case that ``xi.ndim == 1`` a new axis is inserted into
the 0 position of the returned array, values_x, so its shape is
instead ``(1,) + values.shape[ndim:]``.
Examples
--------
Here we define a nearest-neighbor interpolator of a simple function
>>> import numpy as np
>>> x, y = np.array([0, 1, 2]), np.array([1, 3, 7])
>>> def f(x, y):
... return x**2 + y**2
>>> data = f(*np.meshgrid(x, y, indexing='ij', sparse=True))
>>> from scipy.interpolate import RegularGridInterpolator
>>> interp = RegularGridInterpolator((x, y), data, method='nearest')
By construction, the interpolator uses the nearest-neighbor
interpolation
>>> interp([[1.5, 1.3], [0.3, 4.5]])
array([2., 9.])
We can however evaluate the linear interpolant by overriding the
`method` parameter
>>> interp([[1.5, 1.3], [0.3, 4.5]], method='linear')
array([ 4.7, 24.3])
"""
method = self.method if method is None else method
is_method_changed = self.method != method
if method not in self._ALL_METHODS:
raise ValueError("Method '%s' is not defined" % method)
if is_method_changed and method in self._SPLINE_METHODS_ndbspl:
self._spline = self._construct_spline(method)
if nu is not None and method not in self._SPLINE_METHODS_ndbspl:
raise ValueError(
f"Can only compute derivatives for methods "
f"{self._SPLINE_METHODS_ndbspl}, got {method =}."
)
xi, xi_shape, ndim, nans, out_of_bounds = self._prepare_xi(xi)
if method == "linear":
indices, norm_distances = self._find_indices(xi.T)
if (ndim == 2 and hasattr(self.values, 'dtype') and
self.values.ndim == 2 and self.values.flags.writeable and
self.values.dtype in (np.float64, np.complex128) and
self.values.dtype.byteorder == '='):
# until cython supports const fused types, the fast path
# cannot support non-writeable values
# a fast path
out = np.empty(indices.shape[1], dtype=self.values.dtype)
result = evaluate_linear_2d(self.values,
indices,
norm_distances,
self.grid,
out)
else:
result = self._evaluate_linear(indices, norm_distances)
elif method == "nearest":
indices, norm_distances = self._find_indices(xi.T)
result = self._evaluate_nearest(indices, norm_distances)
elif method in self._SPLINE_METHODS:
if is_method_changed:
self._validate_grid_dimensions(self.grid, method)
if method in self._SPLINE_METHODS_recursive:
result = self._evaluate_spline(xi, method)
else:
result = self._spline(xi, nu=nu)
if not self.bounds_error and self.fill_value is not None:
result[out_of_bounds] = self.fill_value
# f(nan) = nan, if any
if np.any(nans):
result[nans] = np.nan
return result.reshape(xi_shape[:-1] + self.values.shape[ndim:])
def _prepare_xi(self, xi):
ndim = len(self.grid)
xi = _ndim_coords_from_arrays(xi, ndim=ndim)
if xi.shape[-1] != len(self.grid):
raise ValueError("The requested sample points xi have dimension "
f"{xi.shape[-1]} but this "
f"RegularGridInterpolator has dimension {ndim}")
xi_shape = xi.shape
xi = xi.reshape(-1, xi_shape[-1])
xi = np.asarray(xi, dtype=float)
# find nans in input
nans = np.any(np.isnan(xi), axis=-1)
if self.bounds_error:
for i, p in enumerate(xi.T):
if not np.logical_and(np.all(self.grid[i][0] <= p),
np.all(p <= self.grid[i][-1])):
raise ValueError("One of the requested xi is out of bounds "
"in dimension %d" % i)
out_of_bounds = None
else:
out_of_bounds = self._find_out_of_bounds(xi.T)
return xi, xi_shape, ndim, nans, out_of_bounds
def _evaluate_linear(self, indices, norm_distances):
# slice for broadcasting over trailing dimensions in self.values
vslice = (slice(None),) + (None,)*(self.values.ndim - len(indices))
# Compute shifting up front before zipping everything together
shift_norm_distances = [1 - yi for yi in norm_distances]
shift_indices = [i + 1 for i in indices]
# The formula for linear interpolation in 2d takes the form:
# values = self.values[(i0, i1)] * (1 - y0) * (1 - y1) + \
# self.values[(i0, i1 + 1)] * (1 - y0) * y1 + \
# self.values[(i0 + 1, i1)] * y0 * (1 - y1) + \
# self.values[(i0 + 1, i1 + 1)] * y0 * y1
# We pair i with 1 - yi (zipped1) and i + 1 with yi (zipped2)
zipped1 = zip(indices, shift_norm_distances)
zipped2 = zip(shift_indices, norm_distances)
# Take all products of zipped1 and zipped2 and iterate over them
# to get the terms in the above formula. This corresponds to iterating
# over the vertices of a hypercube.
hypercube = itertools.product(*zip(zipped1, zipped2))
value = np.array([0.])
for h in hypercube:
edge_indices, weights = zip(*h)
weight = np.array([1.])
for w in weights:
weight = weight * w
term = np.asarray(self.values[edge_indices]) * weight[vslice]
value = value + term # cannot use += because broadcasting
return value
def _evaluate_nearest(self, indices, norm_distances):
idx_res = [np.where(yi <= .5, i, i + 1)
for i, yi in zip(indices, norm_distances)]
return self.values[tuple(idx_res)]
def _validate_grid_dimensions(self, points, method):
k = self._SPLINE_DEGREE_MAP[method]
for i, point in enumerate(points):
ndim = len(np.atleast_1d(point))
if ndim <= k:
raise ValueError(f"There are {ndim} points in dimension {i},"
f" but method {method} requires at least "
f" {k+1} points per dimension.")
def _evaluate_spline(self, xi, method):
# ensure xi is 2D list of points to evaluate (`m` is the number of
# points and `n` is the number of interpolation dimensions,
# ``n == len(self.grid)``.)
if xi.ndim == 1:
xi = xi.reshape((1, xi.size))
m, n = xi.shape
# Reorder the axes: n-dimensional process iterates over the
# interpolation axes from the last axis downwards: E.g. for a 4D grid
# the order of axes is 3, 2, 1, 0. Each 1D interpolation works along
# the 0th axis of its argument array (for 1D routine it's its ``y``
# array). Thus permute the interpolation axes of `values` *and keep
# trailing dimensions trailing*.
axes = tuple(range(self.values.ndim))
axx = axes[:n][::-1] + axes[n:]
values = self.values.transpose(axx)
if method == 'pchip':
_eval_func = self._do_pchip
else:
_eval_func = self._do_spline_fit
k = self._SPLINE_DEGREE_MAP[method]
# Non-stationary procedure: difficult to vectorize this part entirely
# into numpy-level operations. Unfortunately this requires explicit
# looping over each point in xi.
# can at least vectorize the first pass across all points in the
# last variable of xi.
last_dim = n - 1
first_values = _eval_func(self.grid[last_dim],
values,
xi[:, last_dim],
k)
# the rest of the dimensions have to be on a per point-in-xi basis
shape = (m, *self.values.shape[n:])
result = np.empty(shape, dtype=self.values.dtype)
for j in range(m):
# Main process: Apply 1D interpolate in each dimension
# sequentially, starting with the last dimension.
# These are then "folded" into the next dimension in-place.
folded_values = first_values[j, ...]
for i in range(last_dim-1, -1, -1):
# Interpolate for each 1D from the last dimensions.
# This collapses each 1D sequence into a scalar.
folded_values = _eval_func(self.grid[i],
folded_values,
xi[j, i],
k)
result[j, ...] = folded_values
return result
@staticmethod
def _do_spline_fit(x, y, pt, k):
local_interp = make_interp_spline(x, y, k=k, axis=0)
values = local_interp(pt)
return values
@staticmethod
def _do_pchip(x, y, pt, k):
local_interp = PchipInterpolator(x, y, axis=0)
values = local_interp(pt)
return values
def _find_indices(self, xi):
return find_indices(self.grid, xi)
def _find_out_of_bounds(self, xi):
# check for out of bounds xi
out_of_bounds = np.zeros((xi.shape[1]), dtype=bool)
# iterate through dimensions
for x, grid in zip(xi, self.grid):
out_of_bounds += x < grid[0]
out_of_bounds += x > grid[-1]
return out_of_bounds
| (points, values, method='linear', bounds_error=True, fill_value=nan, *, solver=None, solver_args=None) |
22,601 | scipy.interpolate._rgi | __call__ |
Interpolation at coordinates.
Parameters
----------
xi : ndarray of shape (..., ndim)
The coordinates to evaluate the interpolator at.
method : str, optional
The method of interpolation to perform. Supported are "linear",
"nearest", "slinear", "cubic", "quintic" and "pchip". Default is
the method chosen when the interpolator was created.
nu : sequence of ints, length ndim, optional
If not None, the orders of the derivatives to evaluate.
Each entry must be non-negative.
Only allowed for methods "slinear", "cubic" and "quintic".
.. versionadded:: 1.13
Returns
-------
values_x : ndarray, shape xi.shape[:-1] + values.shape[ndim:]
Interpolated values at `xi`. See notes for behaviour when
``xi.ndim == 1``.
Notes
-----
In the case that ``xi.ndim == 1`` a new axis is inserted into
the 0 position of the returned array, values_x, so its shape is
instead ``(1,) + values.shape[ndim:]``.
Examples
--------
Here we define a nearest-neighbor interpolator of a simple function
>>> import numpy as np
>>> x, y = np.array([0, 1, 2]), np.array([1, 3, 7])
>>> def f(x, y):
... return x**2 + y**2
>>> data = f(*np.meshgrid(x, y, indexing='ij', sparse=True))
>>> from scipy.interpolate import RegularGridInterpolator
>>> interp = RegularGridInterpolator((x, y), data, method='nearest')
By construction, the interpolator uses the nearest-neighbor
interpolation
>>> interp([[1.5, 1.3], [0.3, 4.5]])
array([2., 9.])
We can however evaluate the linear interpolant by overriding the
`method` parameter
>>> interp([[1.5, 1.3], [0.3, 4.5]], method='linear')
array([ 4.7, 24.3])
| def __call__(self, xi, method=None, *, nu=None):
"""
Interpolation at coordinates.
Parameters
----------
xi : ndarray of shape (..., ndim)
The coordinates to evaluate the interpolator at.
method : str, optional
The method of interpolation to perform. Supported are "linear",
"nearest", "slinear", "cubic", "quintic" and "pchip". Default is
the method chosen when the interpolator was created.
nu : sequence of ints, length ndim, optional
If not None, the orders of the derivatives to evaluate.
Each entry must be non-negative.
Only allowed for methods "slinear", "cubic" and "quintic".
.. versionadded:: 1.13
Returns
-------
values_x : ndarray, shape xi.shape[:-1] + values.shape[ndim:]
Interpolated values at `xi`. See notes for behaviour when
``xi.ndim == 1``.
Notes
-----
In the case that ``xi.ndim == 1`` a new axis is inserted into
the 0 position of the returned array, values_x, so its shape is
instead ``(1,) + values.shape[ndim:]``.
Examples
--------
Here we define a nearest-neighbor interpolator of a simple function
>>> import numpy as np
>>> x, y = np.array([0, 1, 2]), np.array([1, 3, 7])
>>> def f(x, y):
... return x**2 + y**2
>>> data = f(*np.meshgrid(x, y, indexing='ij', sparse=True))
>>> from scipy.interpolate import RegularGridInterpolator
>>> interp = RegularGridInterpolator((x, y), data, method='nearest')
By construction, the interpolator uses the nearest-neighbor
interpolation
>>> interp([[1.5, 1.3], [0.3, 4.5]])
array([2., 9.])
We can however evaluate the linear interpolant by overriding the
`method` parameter
>>> interp([[1.5, 1.3], [0.3, 4.5]], method='linear')
array([ 4.7, 24.3])
"""
method = self.method if method is None else method
is_method_changed = self.method != method
if method not in self._ALL_METHODS:
raise ValueError("Method '%s' is not defined" % method)
if is_method_changed and method in self._SPLINE_METHODS_ndbspl:
self._spline = self._construct_spline(method)
if nu is not None and method not in self._SPLINE_METHODS_ndbspl:
raise ValueError(
f"Can only compute derivatives for methods "
f"{self._SPLINE_METHODS_ndbspl}, got {method =}."
)
xi, xi_shape, ndim, nans, out_of_bounds = self._prepare_xi(xi)
if method == "linear":
indices, norm_distances = self._find_indices(xi.T)
if (ndim == 2 and hasattr(self.values, 'dtype') and
self.values.ndim == 2 and self.values.flags.writeable and
self.values.dtype in (np.float64, np.complex128) and
self.values.dtype.byteorder == '='):
# until cython supports const fused types, the fast path
# cannot support non-writeable values
# a fast path
out = np.empty(indices.shape[1], dtype=self.values.dtype)
result = evaluate_linear_2d(self.values,
indices,
norm_distances,
self.grid,
out)
else:
result = self._evaluate_linear(indices, norm_distances)
elif method == "nearest":
indices, norm_distances = self._find_indices(xi.T)
result = self._evaluate_nearest(indices, norm_distances)
elif method in self._SPLINE_METHODS:
if is_method_changed:
self._validate_grid_dimensions(self.grid, method)
if method in self._SPLINE_METHODS_recursive:
result = self._evaluate_spline(xi, method)
else:
result = self._spline(xi, nu=nu)
if not self.bounds_error and self.fill_value is not None:
result[out_of_bounds] = self.fill_value
# f(nan) = nan, if any
if np.any(nans):
result[nans] = np.nan
return result.reshape(xi_shape[:-1] + self.values.shape[ndim:])
| (self, xi, method=None, *, nu=None) |
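A small derivative sketch (requires SciPy >= 1.13; the data is bilinear, so the cubic fit reproduces it and the derivative is exact up to solver tolerance):
>>> import numpy as np
>>> from scipy.interpolate import RegularGridInterpolator
>>> x = np.linspace(0, 3, 7)
>>> y = np.linspace(0, 3, 7)
>>> data = np.multiply.outer(x, y)            # f(x, y) = x * y
>>> interp = RegularGridInterpolator((x, y), data, method="cubic")
>>> val = interp([[1.5, 2.0]], nu=(0, 1))[0]  # d f / d y = x
>>> bool(np.isclose(val, 1.5, atol=1e-4))
True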
22,602 | scipy.interpolate._rgi | __init__ | null | def __init__(self, points, values, method="linear", bounds_error=True,
fill_value=np.nan, *, solver=None, solver_args=None):
if method not in self._ALL_METHODS:
raise ValueError("Method '%s' is not defined" % method)
elif method in self._SPLINE_METHODS:
self._validate_grid_dimensions(points, method)
self.method = method
self.bounds_error = bounds_error
self.grid, self._descending_dimensions = _check_points(points)
self.values = self._check_values(values)
self._check_dimensionality(self.grid, self.values)
self.fill_value = self._check_fill_value(self.values, fill_value)
if self._descending_dimensions:
self.values = np.flip(values, axis=self._descending_dimensions)
if self.method == "pchip" and np.iscomplexobj(self.values):
msg = ("`PchipInterpolator` only works with real values. Passing "
"complex-dtyped `values` with `method='pchip'` is deprecated "
"and will raise an error in SciPy 1.15.0. If you are trying to "
"use the real components of the passed array, use `np.real` on "
"the array before passing to `RegularGridInterpolator`.")
warnings.warn(msg, DeprecationWarning, stacklevel=2)
if method in self._SPLINE_METHODS_ndbspl:
if solver_args is None:
solver_args = {}
self._spline = self._construct_spline(method, solver, **solver_args)
else:
if solver is not None or solver_args:
raise ValueError(
f"{method =} does not accept the 'solver' argument. Got "
f" {solver = } and with arguments {solver_args}."
)
| (self, points, values, method='linear', bounds_error=True, fill_value=nan, *, solver=None, solver_args=None) |
22,603 | scipy.interpolate._rgi | _check_dimensionality | null | def _check_dimensionality(self, grid, values):
_check_dimensionality(grid, values)
| (self, grid, values) |
22,604 | scipy.interpolate._rgi | _check_fill_value | null | def _check_fill_value(self, values, fill_value):
if fill_value is not None:
fill_value_dtype = np.asarray(fill_value).dtype
if (hasattr(values, 'dtype') and not
np.can_cast(fill_value_dtype, values.dtype,
casting='same_kind')):
raise ValueError("fill_value must be either 'None' or "
"of a type compatible with values")
return fill_value
| (self, values, fill_value) |
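The 'same_kind' casting rule means, for example, that a complex fill value is rejected for real-valued data; a small sketch:
>>> import numpy as np
>>> from scipy.interpolate import RegularGridInterpolator
>>> x = np.array([0., 1., 2.])
>>> try:
...     RegularGridInterpolator((x,), x**2, bounds_error=False,
...                             fill_value=1j)
... except ValueError:
...     print("incompatible fill_value")
incompatible fill_value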
22,605 | scipy.interpolate._rgi | _check_points | null | def _check_points(self, points):
return _check_points(points)
| (self, points) |
22,606 | scipy.interpolate._rgi | _check_values | null | def _check_values(self, values):
if not hasattr(values, 'ndim'):
# allow reasonable duck-typed values
values = np.asarray(values)
if hasattr(values, 'dtype') and hasattr(values, 'astype'):
if not np.issubdtype(values.dtype, np.inexact):
values = values.astype(float)
return values
| (self, values) |
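One visible consequence of this check: integer input data is promoted to float on construction, e.g.:
>>> import numpy as np
>>> from scipy.interpolate import RegularGridInterpolator
>>> x = np.array([0., 1., 2.])
>>> interp = RegularGridInterpolator((x,), np.array([0, 1, 4]))
>>> interp.values.dtype
dtype('float64')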
22,607 | scipy.interpolate._rgi | _construct_spline | null | def _construct_spline(self, method, solver=None, **solver_args):
if solver is None:
solver = ssl.gcrotmk
spl = make_ndbspl(
self.grid, self.values, self._SPLINE_DEGREE_MAP[method],
solver=solver, **solver_args
)
return spl
| (self, method, solver=None, **solver_args) |
22,608 | scipy.interpolate._rgi | _do_pchip | null | @staticmethod
def _do_pchip(x, y, pt, k):
local_interp = PchipInterpolator(x, y, axis=0)
values = local_interp(pt)
return values
| (x, y, pt, k) |
22,609 | scipy.interpolate._rgi | _do_spline_fit | null | @staticmethod
def _do_spline_fit(x, y, pt, k):
local_interp = make_interp_spline(x, y, k=k, axis=0)
values = local_interp(pt)
return values
| (x, y, pt, k) |
22,610 | scipy.interpolate._rgi | _evaluate_linear | null | def _evaluate_linear(self, indices, norm_distances):
# slice for broadcasting over trailing dimensions in self.values
vslice = (slice(None),) + (None,)*(self.values.ndim - len(indices))
# Compute shifting up front before zipping everything together
shift_norm_distances = [1 - yi for yi in norm_distances]
shift_indices = [i + 1 for i in indices]
# The formula for linear interpolation in 2d takes the form:
# values = self.values[(i0, i1)] * (1 - y0) * (1 - y1) + \
# self.values[(i0, i1 + 1)] * (1 - y0) * y1 + \
# self.values[(i0 + 1, i1)] * y0 * (1 - y1) + \
# self.values[(i0 + 1, i1 + 1)] * y0 * y1
# We pair i with 1 - yi (zipped1) and i + 1 with yi (zipped2)
zipped1 = zip(indices, shift_norm_distances)
zipped2 = zip(shift_indices, norm_distances)
# Take all products of zipped1 and zipped2 and iterate over them
# to get the terms in the above formula. This corresponds to iterating
# over the vertices of a hypercube.
hypercube = itertools.product(*zip(zipped1, zipped2))
value = np.array([0.])
for h in hypercube:
edge_indices, weights = zip(*h)
weight = np.array([1.])
for w in weights:
weight = weight * w
term = np.asarray(self.values[edge_indices]) * weight[vslice]
value = value + term # cannot use += because broadcasting
return value
| (self, indices, norm_distances) |
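A minimal numpy check of the 2-D formula quoted in the comments, with one cell's indices and normalized distances hardcoded:
>>> import numpy as np
>>> v = np.array([[0., 1.], [2., 3.]])
>>> i0, i1 = 0, 0          # indices of the cell's lower corner
>>> y0, y1 = 0.25, 0.5     # normalized distances inside the cell
>>> float(v[i0, i1] * (1 - y0) * (1 - y1)
...       + v[i0, i1 + 1] * (1 - y0) * y1
...       + v[i0 + 1, i1] * y0 * (1 - y1)
...       + v[i0 + 1, i1 + 1] * y0 * y1)
1.0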
22,611 | scipy.interpolate._rgi | _evaluate_nearest | null | def _evaluate_nearest(self, indices, norm_distances):
idx_res = [np.where(yi <= .5, i, i + 1)
for i, yi in zip(indices, norm_distances)]
return self.values[tuple(idx_res)]
| (self, indices, norm_distances) |
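The ``yi <= .5`` test encodes the tie-breaking rule: a point exactly halfway between two grid points resolves to the lower neighbor, e.g.:
>>> import numpy as np
>>> i = np.array([3])
>>> np.where(np.array([0.5]) <= .5, i, i + 1)
array([3])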
22,612 | scipy.interpolate._rgi | _evaluate_spline | null | def _evaluate_spline(self, xi, method):
# ensure xi is 2D list of points to evaluate (`m` is the number of
# points and `n` is the number of interpolation dimensions,
# ``n == len(self.grid)``.)
if xi.ndim == 1:
xi = xi.reshape((1, xi.size))
m, n = xi.shape
# Reorder the axes: n-dimensional process iterates over the
# interpolation axes from the last axis downwards: E.g. for a 4D grid
# the order of axes is 3, 2, 1, 0. Each 1D interpolation works along
# the 0th axis of its argument array (for 1D routine it's its ``y``
# array). Thus permute the interpolation axes of `values` *and keep
# trailing dimensions trailing*.
axes = tuple(range(self.values.ndim))
axx = axes[:n][::-1] + axes[n:]
values = self.values.transpose(axx)
if method == 'pchip':
_eval_func = self._do_pchip
else:
_eval_func = self._do_spline_fit
k = self._SPLINE_DEGREE_MAP[method]
# Non-stationary procedure: difficult to vectorize this part entirely
# into numpy-level operations. Unfortunately this requires explicit
# looping over each point in xi.
# can at least vectorize the first pass across all points in the
# last variable of xi.
last_dim = n - 1
first_values = _eval_func(self.grid[last_dim],
values,
xi[:, last_dim],
k)
# the rest of the dimensions have to be on a per point-in-xi basis
shape = (m, *self.values.shape[n:])
result = np.empty(shape, dtype=self.values.dtype)
for j in range(m):
# Main process: Apply 1D interpolate in each dimension
# sequentially, starting with the last dimension.
# These are then "folded" into the next dimension in-place.
folded_values = first_values[j, ...]
for i in range(last_dim-1, -1, -1):
# Interpolate for each 1D from the last dimensions.
# This collapses each 1D sequence into a scalar.
folded_values = _eval_func(self.grid[i],
folded_values,
xi[j, i],
k)
result[j, ...] = folded_values
return result
| (self, xi, method) |
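The axis reordering described in the comments can be checked in isolation; for a 4-D grid with one trailing data dimension:
>>> axes = tuple(range(5))      # 4 interpolation axes + 1 trailing axis
>>> n = 4
>>> axes[:n][::-1] + axes[n:]   # interpolation axes reversed, trailing kept
(3, 2, 1, 0, 4)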
22,613 | scipy.interpolate._rgi | _find_indices | null | def _find_indices(self, xi):
return find_indices(self.grid, xi)
| (self, xi) |
22,614 | scipy.interpolate._rgi | _find_out_of_bounds | null | def _find_out_of_bounds(self, xi):
# check for out of bounds xi
out_of_bounds = np.zeros((xi.shape[1]), dtype=bool)
# iterate through dimensions
for x, grid in zip(xi, self.grid):
out_of_bounds += x < grid[0]
out_of_bounds += x > grid[-1]
return out_of_bounds
| (self, xi) |
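In-place ``+=`` on a boolean array acts as a logical OR here, so a point is flagged if it falls outside the grid on either side; a small check:
>>> import numpy as np
>>> xcoord = np.array([1.0, -3.0, 2.0])
>>> oob = np.zeros(3, dtype=bool)
>>> oob += xcoord < 0.0     # below the lower grid edge
>>> oob += xcoord > 1.5     # above the upper grid edge
>>> oob
array([False,  True,  True])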
22,615 | scipy.interpolate._rgi | _prepare_xi | null | def _prepare_xi(self, xi):
ndim = len(self.grid)
xi = _ndim_coords_from_arrays(xi, ndim=ndim)
if xi.shape[-1] != len(self.grid):
raise ValueError("The requested sample points xi have dimension "
f"{xi.shape[-1]} but this "
f"RegularGridInterpolator has dimension {ndim}")
xi_shape = xi.shape
xi = xi.reshape(-1, xi_shape[-1])
xi = np.asarray(xi, dtype=float)
# find nans in input
nans = np.any(np.isnan(xi), axis=-1)
if self.bounds_error:
for i, p in enumerate(xi.T):
if not np.logical_and(np.all(self.grid[i][0] <= p),
np.all(p <= self.grid[i][-1])):
raise ValueError("One of the requested xi is out of bounds "
"in dimension %d" % i)
out_of_bounds = None
else:
out_of_bounds = self._find_out_of_bounds(xi.T)
return xi, xi_shape, ndim, nans, out_of_bounds
| (self, xi) |
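The two bounds-handling branches are easiest to see from the public interface; a minimal sketch:
>>> import numpy as np
>>> from scipy.interpolate import RegularGridInterpolator
>>> x = np.array([0., 1., 2.])
>>> interp = RegularGridInterpolator((x,), x**2)   # bounds_error=True
>>> try:
...     interp([3.5])
... except ValueError:
...     print("out of bounds")
out of bounds
>>> interp2 = RegularGridInterpolator((x,), x**2, bounds_error=False)
>>> interp2([3.5])   # fill_value defaults to nan
array([nan])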
22,616 | scipy.interpolate._rgi | _validate_grid_dimensions | null | def _validate_grid_dimensions(self, points, method):
k = self._SPLINE_DEGREE_MAP[method]
for i, point in enumerate(points):
ndim = len(np.atleast_1d(point))
if ndim <= k:
raise ValueError(f"There are {ndim} points in dimension {i},"
f" but method {method} requires at least "
f" {k+1} points per dimension.")
| (self, points, method) |
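For instance, the cubic method (k=3) needs at least four points per dimension, so three points fail at construction time:
>>> import numpy as np
>>> from scipy.interpolate import RegularGridInterpolator
>>> x = np.array([0., 1., 2.])
>>> try:
...     RegularGridInterpolator((x,), x**2, method="cubic")
... except ValueError:
...     print("need at least k + 1 points")
need at least k + 1 points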
22,617 | packaging.version | Version | This class abstracts handling of a project's versions.
A :class:`Version` instance is comparison aware and can be compared and
sorted using the standard Python interfaces.
>>> v1 = Version("1.0a5")
>>> v2 = Version("1.0")
>>> v1
<Version('1.0a5')>
>>> v2
<Version('1.0')>
>>> v1 < v2
True
>>> v1 == v2
False
>>> v1 > v2
False
>>> v1 >= v2
False
>>> v1 <= v2
True
| class Version(_BaseVersion):
"""This class abstracts handling of a project's versions.
A :class:`Version` instance is comparison aware and can be compared and
sorted using the standard Python interfaces.
>>> v1 = Version("1.0a5")
>>> v2 = Version("1.0")
>>> v1
<Version('1.0a5')>
>>> v2
<Version('1.0')>
>>> v1 < v2
True
>>> v1 == v2
False
>>> v1 > v2
False
>>> v1 >= v2
False
>>> v1 <= v2
True
"""
_regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
_key: CmpKey
def __init__(self, version: str) -> None:
"""Initialize a Version object.
:param version:
The string representation of a version which will be parsed and normalized
before use.
:raises InvalidVersion:
If the ``version`` does not conform to PEP 440 in any way then this
exception will be raised.
"""
# Validate the version and parse it into pieces
match = self._regex.search(version)
if not match:
raise InvalidVersion(f"Invalid version: '{version}'")
# Store the parsed out pieces of the version
self._version = _Version(
epoch=int(match.group("epoch")) if match.group("epoch") else 0,
release=tuple(int(i) for i in match.group("release").split(".")),
pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
post=_parse_letter_version(
match.group("post_l"), match.group("post_n1") or match.group("post_n2")
),
dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
local=_parse_local_version(match.group("local")),
)
# Generate a key which will be used for sorting
self._key = _cmpkey(
self._version.epoch,
self._version.release,
self._version.pre,
self._version.post,
self._version.dev,
self._version.local,
)
def __repr__(self) -> str:
"""A representation of the Version that shows all internal state.
>>> Version('1.0.0')
<Version('1.0.0')>
"""
return f"<Version('{self}')>"
def __str__(self) -> str:
"""A string representation of the version that can be rounded-tripped.
>>> str(Version("1.0a5"))
'1.0a5'
"""
parts = []
# Epoch
if self.epoch != 0:
parts.append(f"{self.epoch}!")
# Release segment
parts.append(".".join(str(x) for x in self.release))
# Pre-release
if self.pre is not None:
parts.append("".join(str(x) for x in self.pre))
# Post-release
if self.post is not None:
parts.append(f".post{self.post}")
# Development release
if self.dev is not None:
parts.append(f".dev{self.dev}")
# Local version segment
if self.local is not None:
parts.append(f"+{self.local}")
return "".join(parts)
@property
def epoch(self) -> int:
"""The epoch of the version.
>>> Version("2.0.0").epoch
0
>>> Version("1!2.0.0").epoch
1
"""
return self._version.epoch
@property
def release(self) -> Tuple[int, ...]:
"""The components of the "release" segment of the version.
>>> Version("1.2.3").release
(1, 2, 3)
>>> Version("2.0.0").release
(2, 0, 0)
>>> Version("1!2.0.0.post0").release
(2, 0, 0)
Includes trailing zeroes but not the epoch or any pre-release / development /
post-release suffixes.
"""
return self._version.release
@property
def pre(self) -> Optional[Tuple[str, int]]:
"""The pre-release segment of the version.
>>> print(Version("1.2.3").pre)
None
>>> Version("1.2.3a1").pre
('a', 1)
>>> Version("1.2.3b1").pre
('b', 1)
>>> Version("1.2.3rc1").pre
('rc', 1)
"""
return self._version.pre
@property
def post(self) -> Optional[int]:
"""The post-release number of the version.
>>> print(Version("1.2.3").post)
None
>>> Version("1.2.3.post1").post
1
"""
return self._version.post[1] if self._version.post else None
@property
def dev(self) -> Optional[int]:
"""The development number of the version.
>>> print(Version("1.2.3").dev)
None
>>> Version("1.2.3.dev1").dev
1
"""
return self._version.dev[1] if self._version.dev else None
@property
def local(self) -> Optional[str]:
"""The local version segment of the version.
>>> print(Version("1.2.3").local)
None
>>> Version("1.2.3+abc").local
'abc'
"""
if self._version.local:
return ".".join(str(x) for x in self._version.local)
else:
return None
@property
def public(self) -> str:
"""The public portion of the version.
>>> Version("1.2.3").public
'1.2.3'
>>> Version("1.2.3+abc").public
'1.2.3'
>>> Version("1.2.3+abc.dev1").public
'1.2.3'
"""
return str(self).split("+", 1)[0]
@property
def base_version(self) -> str:
"""The "base version" of the version.
>>> Version("1.2.3").base_version
'1.2.3'
>>> Version("1.2.3+abc").base_version
'1.2.3'
>>> Version("1!1.2.3+abc.dev1").base_version
'1!1.2.3'
The "base version" is the public version of the project without any pre or post
release markers.
"""
parts = []
# Epoch
if self.epoch != 0:
parts.append(f"{self.epoch}!")
# Release segment
parts.append(".".join(str(x) for x in self.release))
return "".join(parts)
@property
def is_prerelease(self) -> bool:
"""Whether this version is a pre-release.
>>> Version("1.2.3").is_prerelease
False
>>> Version("1.2.3a1").is_prerelease
True
>>> Version("1.2.3b1").is_prerelease
True
>>> Version("1.2.3rc1").is_prerelease
True
>>> Version("1.2.3dev1").is_prerelease
True
"""
return self.dev is not None or self.pre is not None
@property
def is_postrelease(self) -> bool:
"""Whether this version is a post-release.
>>> Version("1.2.3").is_postrelease
False
>>> Version("1.2.3.post1").is_postrelease
True
"""
return self.post is not None
@property
def is_devrelease(self) -> bool:
"""Whether this version is a development release.
>>> Version("1.2.3").is_devrelease
False
>>> Version("1.2.3.dev1").is_devrelease
True
"""
return self.dev is not None
@property
def major(self) -> int:
"""The first item of :attr:`release` or ``0`` if unavailable.
>>> Version("1.2.3").major
1
"""
return self.release[0] if len(self.release) >= 1 else 0
@property
def minor(self) -> int:
"""The second item of :attr:`release` or ``0`` if unavailable.
>>> Version("1.2.3").minor
2
>>> Version("1").minor
0
"""
return self.release[1] if len(self.release) >= 2 else 0
@property
def micro(self) -> int:
"""The third item of :attr:`release` or ``0`` if unavailable.
>>> Version("1.2.3").micro
3
>>> Version("1").micro
0
"""
return self.release[2] if len(self.release) >= 3 else 0
| (version: str) -> None |
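Because each instance carries a precomputed sort key, the standard sorting machinery works out of the box; for example:
>>> from packaging.version import Version
>>> sorted([Version("1.0.post1"), Version("1.0"), Version("1.0a5")])
[<Version('1.0a5')>, <Version('1.0')>, <Version('1.0.post1')>]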
22,618 | packaging.version | __eq__ | null | def __eq__(self, other: object) -> bool:
if not isinstance(other, _BaseVersion):
return NotImplemented
return self._key == other._key
| (self, other: object) -> bool |
22,619 | packaging.version | __ge__ | null | def __ge__(self, other: "_BaseVersion") -> bool:
if not isinstance(other, _BaseVersion):
return NotImplemented
return self._key >= other._key
| (self, other: packaging.version._BaseVersion) -> bool |
22,620 | packaging.version | __gt__ | null | def __gt__(self, other: "_BaseVersion") -> bool:
if not isinstance(other, _BaseVersion):
return NotImplemented
return self._key > other._key
| (self, other: packaging.version._BaseVersion) -> bool |
22,621 | packaging.version | __hash__ | null | def __hash__(self) -> int:
return hash(self._key)
| (self) -> int |
22,622 | packaging.version | __init__ | Initialize a Version object.
:param version:
The string representation of a version which will be parsed and normalized
before use.
:raises InvalidVersion:
If the ``version`` does not conform to PEP 440 in any way then this
exception will be raised.
| def __init__(self, version: str) -> None:
"""Initialize a Version object.
:param version:
The string representation of a version which will be parsed and normalized
before use.
:raises InvalidVersion:
If the ``version`` does not conform to PEP 440 in any way then this
exception will be raised.
"""
# Validate the version and parse it into pieces
match = self._regex.search(version)
if not match:
raise InvalidVersion(f"Invalid version: '{version}'")
# Store the parsed out pieces of the version
self._version = _Version(
epoch=int(match.group("epoch")) if match.group("epoch") else 0,
release=tuple(int(i) for i in match.group("release").split(".")),
pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
post=_parse_letter_version(
match.group("post_l"), match.group("post_n1") or match.group("post_n2")
),
dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
local=_parse_local_version(match.group("local")),
)
# Generate a key which will be used for sorting
self._key = _cmpkey(
self._version.epoch,
self._version.release,
self._version.pre,
self._version.post,
self._version.dev,
self._version.local,
)
| (self, version: str) -> NoneType |
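Strings that do not match the PEP 440 pattern raise ``InvalidVersion``; a small sketch:
>>> from packaging.version import Version, InvalidVersion
>>> try:
...     Version("french toast")
... except InvalidVersion as err:
...     print(err)
Invalid version: 'french toast'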
22,623 | packaging.version | __le__ | null | def __le__(self, other: "_BaseVersion") -> bool:
if not isinstance(other, _BaseVersion):
return NotImplemented
return self._key <= other._key
| (self, other: packaging.version._BaseVersion) -> bool |
22,624 | packaging.version | __lt__ | null | def __lt__(self, other: "_BaseVersion") -> bool:
if not isinstance(other, _BaseVersion):
return NotImplemented
return self._key < other._key
| (self, other: packaging.version._BaseVersion) -> bool |
22,625 | packaging.version | __ne__ | null | def __ne__(self, other: object) -> bool:
if not isinstance(other, _BaseVersion):
return NotImplemented
return self._key != other._key
| (self, other: object) -> bool |
22,626 | packaging.version | __repr__ | A representation of the Version that shows all internal state.
>>> Version('1.0.0')
<Version('1.0.0')>
| def __repr__(self) -> str:
"""A representation of the Version that shows all internal state.
>>> Version('1.0.0')
<Version('1.0.0')>
"""
return f"<Version('{self}')>"
| (self) -> str |
22,627 | packaging.version | __str__ | A string representation of the version that can be rounded-tripped.
>>> str(Version("1.0a5"))
'1.0a5'
| def __str__(self) -> str:
"""A string representation of the version that can be rounded-tripped.
>>> str(Version("1.0a5"))
'1.0a5'
"""
parts = []
# Epoch
if self.epoch != 0:
parts.append(f"{self.epoch}!")
# Release segment
parts.append(".".join(str(x) for x in self.release))
# Pre-release
if self.pre is not None:
parts.append("".join(str(x) for x in self.pre))
# Post-release
if self.post is not None:
parts.append(f".post{self.post}")
# Development release
if self.dev is not None:
parts.append(f".dev{self.dev}")
# Local version segment
if self.local is not None:
parts.append(f"+{self.local}")
return "".join(parts)
| (self) -> str |
22,628 | salem.datasets | WRF | WRF proof-of-concept template.
Adds unstaggered and diagnostic variables.
| class WRF(GeoNetcdf):
"""WRF proof-of-concept template.
Adds unstaggered and diagnostic variables.
"""
def __init__(self, file, grid=None, time=None):
GeoNetcdf.__init__(self, file, grid=grid, time=time)
# Change staggered variables to unstaggered ones
for vn, v in self.variables.items():
if wrftools.Unstaggerer.can_do(v):
self.variables[vn] = wrftools.Unstaggerer(v)
# Check if we can add diagnostic variables to the pot
for vn in wrftools.var_classes:
cl = getattr(wrftools, vn)
if cl.can_do(self._nc):
self.variables[vn] = cl(self._nc)
| (file, grid=None, time=None) |
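A minimal usage sketch; the file name is hypothetical and a real WRF output file is required, so this is not runnable as-is, and ``get_vardata`` is assumed to be inherited from ``GeoNetcdf``:
ds = WRF('wrfout_d01_2008-10-26.nc')  # hypothetical local WRF output file
u = ds.get_vardata('U')               # staggered variable, assumed served unstaggered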
22,631 | salem.datasets | __init__ | null | def __init__(self, file, grid=None, time=None):
GeoNetcdf.__init__(self, file, grid=grid, time=time)
# Change staggered variables to unstaggered ones
for vn, v in self.variables.items():
if wrftools.Unstaggerer.can_do(v):
self.variables[vn] = wrftools.Unstaggerer(v)
# Check if we can add diagnostic variables to the pot
for vn in wrftools.var_classes:
cl = getattr(wrftools, vn)
if cl.can_do(self._nc):
self.variables[vn] = cl(self._nc)
| (self, file, grid=None, time=None) |
22,637 | salem.gis | check_crs | Checks if the crs represents a valid grid, projection or EPSG string.
Examples
--------
>>> p = check_crs('epsg:26915 +units=m')
>>> p.srs
'epsg:26915 +units=m'
>>> p = check_crs('wrong')
>>> p is None
True
Returns
-------
A valid crs if possible, otherwise None
| def check_crs(crs, raise_on_error=False):
"""Checks if the crs represents a valid grid, projection or ESPG string.
Examples
--------
>>> p = check_crs('epsg:26915 +units=m')
>>> p.srs
'epsg:26915 +units=m'
>>> p = check_crs('wrong')
>>> p is None
True
Returns
-------
A valid crs if possible, otherwise None
"""
try:
crs = crs.salem.grid # try xarray
except Exception:
pass
err1, err2 = None, None
if isinstance(crs, pyproj.Proj) or isinstance(crs, Grid):
out = crs
elif isinstance(crs, crs_type):
out = pyproj.Proj(crs.to_wkt(), preserve_units=True)
elif isinstance(crs, dict) or isinstance(crs, str):
if isinstance(crs, str):
# quick fix for https://github.com/pyproj4/pyproj/issues/345
crs = crs.replace(' ', '').replace('+', ' +')
# A series of try-catch to handle the (too) many changes in pyproj
with warnings.catch_warnings():
warnings.filterwarnings('ignore', category=DeprecationWarning)
warnings.filterwarnings('ignore', category=FutureWarning)
try:
out = pyproj.Proj(crs, preserve_units=True)
except RuntimeError as e:
err1 = str(e)
try:
out = pyproj.Proj(init=crs, preserve_units=True)
except RuntimeError as e:
err2 = str(e)
out = None
else:
out = None
if raise_on_error and out is None:
msg = ('salem could not properly parse the provided coordinate '
'reference system (crs). This could be due to errors in your '
'data, in PyProj, or with salem itself. If this occurs '
'unexpectedly, report an issue to https://github.com/fmaussion/'
'salem/issues. Full log: \n'
'crs: {} ; \n'.format(crs))
if err1 is not None:
msg += 'Output of `pyproj.Proj(crs, preserve_units=True)`: {} ; \n'
msg = msg.format(err1)
if err2 is not None:
msg += 'Output of `pyproj.Proj(init=crs, preserve_units=True)`: {}'
msg = msg.format(err2)
raise ValueError(msg)
return out
| (crs, raise_on_error=False) |
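With ``raise_on_error=True`` the silent ``None`` becomes an exception carrying the pyproj error log; a small sketch:
>>> from salem.gis import check_crs
>>> try:
...     check_crs('wrong', raise_on_error=True)
... except ValueError:
...     print('unparseable crs')
unparseable crs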
22,639 | pyproj.crs.crs | CRS |
A pythonic Coordinate Reference System manager.
.. versionadded:: 2.0.0
See: :c:func:`proj_create`
The functionality is based on other fantastic projects:
* `rasterio <https://github.com/mapbox/rasterio/blob/c13f0943b95c0eaa36ff3f620bd91107aa67b381/rasterio/_crs.pyx>`_ # noqa: E501
* `opendatacube <https://github.com/opendatacube/datacube-core/blob/83bae20d2a2469a6417097168fd4ede37fd2abe5/datacube/utils/geometry/_base.py>`_ # noqa: E501
Attributes
----------
srs: str
The string form of the user input used to create the CRS.
| class CRS:
"""
A pythonic Coordinate Reference System manager.
.. versionadded:: 2.0.0
See: :c:func:`proj_create`
The functionality is based on other fantastic projects:
* `rasterio <https://github.com/mapbox/rasterio/blob/c13f0943b95c0eaa36ff3f620bd91107aa67b381/rasterio/_crs.pyx>`_ # noqa: E501
* `opendatacube <https://github.com/opendatacube/datacube-core/blob/83bae20d2a2469a6417097168fd4ede37fd2abe5/datacube/utils/geometry/_base.py>`_ # noqa: E501
Attributes
----------
srs: str
The string form of the user input used to create the CRS.
"""
def __init__(self, projparams: Optional[Any] = None, **kwargs) -> None:
"""
Initialize a CRS class instance with:
- PROJ string
- Dictionary of PROJ parameters
- PROJ keyword arguments for parameters
- JSON string with PROJ parameters
- CRS WKT string
- An authority string [i.e. 'epsg:4326']
- An EPSG integer code [i.e. 4326]
- A tuple of ("auth_name": "auth_code") [i.e ('epsg', '4326')]
- An object with a `to_wkt` method.
- A :class:`pyproj.crs.CRS` class
Example usage:
>>> from pyproj import CRS
>>> crs_utm = CRS.from_user_input(26915)
>>> crs_utm
<Projected CRS: EPSG:26915>
Name: NAD83 / UTM zone 15N
Axis Info [cartesian]:
- E[east]: Easting (metre)
- N[north]: Northing (metre)
Area of Use:
- name: North America - 96°W to 90°W and NAD83 by country
- bounds: (-96.0, 25.61, -90.0, 84.0)
Coordinate Operation:
- name: UTM zone 15N
- method: Transverse Mercator
Datum: North American Datum 1983
- Ellipsoid: GRS 1980
- Prime Meridian: Greenwich
<BLANKLINE>
>>> crs_utm.area_of_use.bounds
(-96.0, 25.61, -90.0, 84.0)
>>> crs_utm.ellipsoid
ELLIPSOID["GRS 1980",6378137,298.257222101,
LENGTHUNIT["metre",1],
ID["EPSG",7019]]
>>> crs_utm.ellipsoid.inverse_flattening
298.257222101
>>> crs_utm.ellipsoid.semi_major_metre
6378137.0
>>> crs_utm.ellipsoid.semi_minor_metre
6356752.314140356
>>> crs_utm.prime_meridian
PRIMEM["Greenwich",0,
ANGLEUNIT["degree",0.0174532925199433],
ID["EPSG",8901]]
>>> crs_utm.prime_meridian.unit_name
'degree'
>>> crs_utm.prime_meridian.unit_conversion_factor
0.017453292519943295
>>> crs_utm.prime_meridian.longitude
0.0
>>> crs_utm.datum
DATUM["North American Datum 1983",
ELLIPSOID["GRS 1980",6378137,298.257222101,
LENGTHUNIT["metre",1]],
ID["EPSG",6269]]
>>> crs_utm.coordinate_system
CS[Cartesian,2],
AXIS["(E)",east,
ORDER[1],
LENGTHUNIT["metre",1,
ID["EPSG",9001]]],
AXIS["(N)",north,
ORDER[2],
LENGTHUNIT["metre",1,
ID["EPSG",9001]]]
>>> print(crs_utm.coordinate_operation.to_wkt(pretty=True))
CONVERSION["UTM zone 15N",
METHOD["Transverse Mercator",
ID["EPSG",9807]],
PARAMETER["Latitude of natural origin",0,
ANGLEUNIT["degree",0.0174532925199433],
ID["EPSG",8801]],
PARAMETER["Longitude of natural origin",-93,
ANGLEUNIT["degree",0.0174532925199433],
ID["EPSG",8802]],
PARAMETER["Scale factor at natural origin",0.9996,
SCALEUNIT["unity",1],
ID["EPSG",8805]],
PARAMETER["False easting",500000,
LENGTHUNIT["metre",1],
ID["EPSG",8806]],
PARAMETER["False northing",0,
LENGTHUNIT["metre",1],
ID["EPSG",8807]],
ID["EPSG",16015]]
>>> crs = CRS(proj='utm', zone=10, ellps='WGS84')
>>> print(crs.to_wkt(pretty=True))
PROJCRS["unknown",
BASEGEOGCRS["unknown",
DATUM["Unknown based on WGS84 ellipsoid",
ELLIPSOID["WGS 84",6378137,298.257223563,
LENGTHUNIT["metre",1],
ID["EPSG",7030]]],
PRIMEM["Greenwich",0,
ANGLEUNIT["degree",0.0174532925199433],
ID["EPSG",8901]]],
CONVERSION["UTM zone 10N",
METHOD["Transverse Mercator",
ID["EPSG",9807]],
PARAMETER["Latitude of natural origin",0,
ANGLEUNIT["degree",0.0174532925199433],
ID["EPSG",8801]],
PARAMETER["Longitude of natural origin",-123,
ANGLEUNIT["degree",0.0174532925199433],
ID["EPSG",8802]],
PARAMETER["Scale factor at natural origin",0.9996,
SCALEUNIT["unity",1],
ID["EPSG",8805]],
PARAMETER["False easting",500000,
LENGTHUNIT["metre",1],
ID["EPSG",8806]],
PARAMETER["False northing",0,
LENGTHUNIT["metre",1],
ID["EPSG",8807]],
ID["EPSG",16010]],
CS[Cartesian,2],
AXIS["(E)",east,
ORDER[1],
LENGTHUNIT["metre",1,
ID["EPSG",9001]]],
AXIS["(N)",north,
ORDER[2],
LENGTHUNIT["metre",1,
ID["EPSG",9001]]]]
>>> geod = crs.get_geod()
>>> f"+a={geod.a:.0f} +f={geod.f:.8f}"
'+a=6378137 +f=0.00335281'
>>> crs.is_projected
True
>>> crs.is_geographic
False
"""
projstring = ""
if projparams:
if isinstance(projparams, _CRS):
projstring = projparams.srs
elif _is_epsg_code(projparams):
projstring = _prepare_from_epsg(projparams)
elif isinstance(projparams, str):
projstring = _prepare_from_string(projparams)
elif isinstance(projparams, dict):
projstring = _prepare_from_dict(projparams)
elif isinstance(projparams, (list, tuple)) and len(projparams) == 2:
projstring = _prepare_from_authority(*projparams)
elif hasattr(projparams, "to_wkt"):
projstring = projparams.to_wkt() # type: ignore
else:
raise CRSError(f"Invalid CRS input: {projparams!r}")
if kwargs:
projkwargs = _prepare_from_dict(kwargs, allow_json=False)
projstring = _prepare_from_string(" ".join((projstring, projkwargs)))
self.srs = projstring
self._local = CRSLocal()
if isinstance(projparams, _CRS):
self._local.crs = projparams
else:
self._local.crs = _CRS(self.srs)
@property
def _crs(self):
"""
Retrieve the Cython based _CRS object for this thread.
"""
if self._local.crs is None:
self._local.crs = _CRS(self.srs)
return self._local.crs
@classmethod
def from_authority(cls, auth_name: str, code: Union[str, int]) -> "CRS":
"""
.. versionadded:: 2.2.0
Make a CRS from an authority name and authority code
Parameters
----------
auth_name: str
The name of the authority.
code : int or str
The code used by the authority.
Returns
-------
CRS
"""
return cls.from_user_input(_prepare_from_authority(auth_name, code))
@classmethod
def from_epsg(cls, code: Union[str, int]) -> "CRS":
"""Make a CRS from an EPSG code
Parameters
----------
code : int or str
An EPSG code.
Returns
-------
CRS
"""
return cls.from_user_input(_prepare_from_epsg(code))
@classmethod
def from_proj4(cls, in_proj_string: str) -> "CRS":
"""
.. versionadded:: 2.2.0
Make a CRS from a PROJ string
Parameters
----------
in_proj_string : str
A PROJ string.
Returns
-------
CRS
"""
if not is_proj(in_proj_string):
raise CRSError(f"Invalid PROJ string: {in_proj_string}")
return cls.from_user_input(_prepare_from_proj_string(in_proj_string))
@classmethod
def from_wkt(cls, in_wkt_string: str) -> "CRS":
"""
.. versionadded:: 2.2.0
Make a CRS from a WKT string
Parameters
----------
in_wkt_string : str
A WKT string.
Returns
-------
CRS
"""
if not is_wkt(in_wkt_string):
raise CRSError(f"Invalid WKT string: {in_wkt_string}")
return cls.from_user_input(_prepare_from_string(in_wkt_string))
@classmethod
def from_string(cls, in_crs_string: str) -> "CRS":
"""
Make a CRS from:
- PROJ string
- JSON string with PROJ parameters
- CRS WKT string
- An authority string [i.e. 'epsg:4326']
Parameters
----------
in_crs_string : str
An EPSG, PROJ, or WKT string.
Returns
-------
CRS
"""
return cls.from_user_input(_prepare_from_string(in_crs_string))
def to_string(self) -> str:
"""
.. versionadded:: 2.2.0
Convert the CRS to a string.
It attempts to convert it to the authority string.
Otherwise, it falls back to the srs string that was
used to create the CRS.
Returns
-------
str
"""
auth_info = self.to_authority(min_confidence=100)
if auth_info:
return ":".join(auth_info)
return self.srs
@classmethod
def from_user_input(cls, value: Any, **kwargs) -> "CRS":
"""
Initialize a CRS class instance with:
- PROJ string
- Dictionary of PROJ parameters
- PROJ keyword arguments for parameters
- JSON string with PROJ parameters
- CRS WKT string
- An authority string [i.e. 'epsg:4326']
- An EPSG integer code [i.e. 4326]
- A tuple of ("auth_name", "auth_code") [i.e. ('epsg', '4326')]
- An object with a `to_wkt` method.
- A :class:`pyproj.crs.CRS` class
Parameters
----------
value : obj
A Python int, dict, or str.
Returns
-------
CRS
"""
if isinstance(value, cls):
return value
return cls(value, **kwargs)
def get_geod(self) -> Optional[Geod]:
"""
Returns
-------
pyproj.geod.Geod:
Geod object based on the ellipsoid.
"""
if self.ellipsoid is None:
return None
return Geod(
a=self.ellipsoid.semi_major_metre,
rf=self.ellipsoid.inverse_flattening,
b=self.ellipsoid.semi_minor_metre,
)
@classmethod
def from_dict(cls, proj_dict: dict) -> "CRS":
"""
.. versionadded:: 2.2.0
Make a CRS from a dictionary of PROJ parameters.
Parameters
----------
proj_dict : dict
PROJ params in dict format.
Returns
-------
CRS
"""
return cls.from_user_input(_prepare_from_dict(proj_dict))
@classmethod
def from_json(cls, crs_json: str) -> "CRS":
"""
.. versionadded:: 2.4.0
Create CRS from a CRS JSON string.
Parameters
----------
crs_json: str
CRS JSON string.
Returns
-------
CRS
"""
return cls.from_user_input(_load_proj_json(crs_json))
@classmethod
def from_json_dict(cls, crs_dict: dict) -> "CRS":
"""
.. versionadded:: 2.4.0
Create CRS from a JSON dictionary.
Parameters
----------
crs_dict: dict
CRS dictionary.
Returns
-------
CRS
"""
return cls.from_user_input(json.dumps(crs_dict))
def to_dict(self) -> dict:
"""
.. versionadded:: 2.2.0
Converts the CRS to dictionary of PROJ parameters.
.. warning:: You will likely lose important projection
information when converting to a PROJ string from
another format. See: https://proj.org/faq.html#what-is-the-best-format-for-describing-coordinate-reference-systems # noqa: E501
Returns
-------
dict:
PROJ params in dict format.
"""
proj_string = self.to_proj4()
if proj_string is None:
return {}
def _parse(val):
if val.lower() == "true":
return True
if val.lower() == "false":
return False
try:
return int(val)
except ValueError:
pass
try:
return float(val)
except ValueError:
pass
return _try_list_if_string(val)
proj_dict = {}
for param in _RE_PROJ_PARAM.finditer(proj_string):
key, value = param.groups()
if value is not None:
value = _parse(value)
if value is not False:
proj_dict[key] = value
return proj_dict
def to_cf(
self,
wkt_version: Union[WktVersion, str] = WktVersion.WKT2_2019,
errcheck: bool = False,
) -> dict:
"""
.. versionadded:: 2.2.0
This converts a :obj:`pyproj.crs.CRS` object
to a Climate and Forecast (CF) Grid Mapping Version 1.8 dict.
:ref:`build_crs_cf`
Parameters
----------
wkt_version: str or pyproj.enums.WktVersion
Version of WKT supported by CRS.to_wkt.
Default is :attr:`pyproj.enums.WktVersion.WKT2_2019`.
errcheck: bool, default=False
If True, will warn when parameters are ignored.
Returns
-------
dict:
CF-1.8 version of the projection.
"""
# pylint: disable=too-many-branches,too-many-return-statements
cf_dict: dict[str, Any] = {"crs_wkt": self.to_wkt(wkt_version)}
# handle bound CRS
if (
self.is_bound
and self.coordinate_operation
and self.coordinate_operation.towgs84
and self.source_crs
):
sub_cf: dict[str, Any] = self.source_crs.to_cf(
wkt_version=wkt_version,
errcheck=errcheck,
)
sub_cf.pop("crs_wkt")
cf_dict.update(sub_cf)
cf_dict["towgs84"] = self.coordinate_operation.towgs84
return cf_dict
# handle compound CRS
if self.is_compound:
for sub_crs in self.sub_crs_list:
sub_cf = sub_crs.to_cf(wkt_version=wkt_version, errcheck=errcheck)
sub_cf.pop("crs_wkt")
cf_dict.update(sub_cf)
return cf_dict
# handle vertical CRS
if self.is_vertical:
vert_json = self.to_json_dict()
if "geoid_model" in vert_json:
cf_dict["geoid_name"] = vert_json["geoid_model"]["name"]
if self.datum:
cf_dict["geopotential_datum_name"] = self.datum.name
return cf_dict
# write out datum parameters
if self.ellipsoid:
cf_dict.update(
semi_major_axis=self.ellipsoid.semi_major_metre,
semi_minor_axis=self.ellipsoid.semi_minor_metre,
inverse_flattening=self.ellipsoid.inverse_flattening,
)
cf_dict["reference_ellipsoid_name"] = self.ellipsoid.name
if self.prime_meridian:
cf_dict["longitude_of_prime_meridian"] = self.prime_meridian.longitude
cf_dict["prime_meridian_name"] = self.prime_meridian.name
# handle geographic CRS
if self.geodetic_crs:
cf_dict["geographic_crs_name"] = self.geodetic_crs.name
if self.geodetic_crs.datum:
cf_dict["horizontal_datum_name"] = self.geodetic_crs.datum.name
if self.is_geographic:
if self.coordinate_operation:
if (
self.coordinate_operation.method_name.lower()
not in _INVERSE_GEOGRAPHIC_GRID_MAPPING_NAME_MAP
):
if errcheck:
warnings.warn(
"Unsupported coordinate operation: "
f"{self.coordinate_operation.method_name}"
)
return {"crs_wkt": cf_dict["crs_wkt"]}
cf_dict.update(
_INVERSE_GEOGRAPHIC_GRID_MAPPING_NAME_MAP[
self.coordinate_operation.method_name.lower()
](self.coordinate_operation)
)
else:
cf_dict["grid_mapping_name"] = "latitude_longitude"
return cf_dict
# handle projected CRS
coordinate_operation = None
if not self.is_bound and self.is_projected:
coordinate_operation = self.coordinate_operation
cf_dict["projected_crs_name"] = self.name
coordinate_operation_name = (
None
if not coordinate_operation
else coordinate_operation.method_name.lower().replace(" ", "_")
)
if coordinate_operation_name not in _INVERSE_GRID_MAPPING_NAME_MAP:
if errcheck:
if coordinate_operation:
warnings.warn(
"Unsupported coordinate operation: "
f"{coordinate_operation.method_name}"
)
else:
warnings.warn("Coordinate operation not found.")
return {"crs_wkt": cf_dict["crs_wkt"]}
cf_dict.update(
_INVERSE_GRID_MAPPING_NAME_MAP[coordinate_operation_name](
coordinate_operation
)
)
return cf_dict
@staticmethod
def from_cf(
in_cf: dict,
ellipsoidal_cs: Optional[Any] = None,
cartesian_cs: Optional[Any] = None,
vertical_cs: Optional[Any] = None,
) -> "CRS":
"""
.. versionadded:: 2.2.0
.. versionadded:: 3.0.0 ellipsoidal_cs, cartesian_cs, vertical_cs
This converts a Climate and Forecast (CF) Grid Mapping Version 1.8
dict to a :obj:`pyproj.crs.CRS` object.
:ref:`build_crs_cf`
Parameters
----------
in_cf: dict
CF version of the projection.
ellipsoidal_cs: Any, optional
Input to create an Ellipsoidal Coordinate System.
Anything accepted by :meth:`pyproj.crs.CoordinateSystem.from_user_input`
or an Ellipsoidal Coordinate System created from :ref:`coordinate_system`.
cartesian_cs: Any, optional
Input to create a Cartesian Coordinate System.
Anything accepted by :meth:`pyproj.crs.CoordinateSystem.from_user_input`
or :class:`pyproj.crs.coordinate_system.Cartesian2DCS`.
vertical_cs: Any, optional
Input to create a Vertical Coordinate System accepted by
:meth:`pyproj.crs.CoordinateSystem.from_user_input`
or :class:`pyproj.crs.coordinate_system.VerticalCS`
Returns
-------
CRS
"""
# pylint: disable=too-many-branches
unknown_names = ("unknown", "undefined")
if "crs_wkt" in in_cf:
return CRS(in_cf["crs_wkt"])
if "spatial_ref" in in_cf: # for previous supported WKT key
return CRS(in_cf["spatial_ref"])
grid_mapping_name = in_cf.get("grid_mapping_name")
if grid_mapping_name is None:
raise CRSError("CF projection parameters missing 'grid_mapping_name'")
# build datum if possible
datum = _horizontal_datum_from_params(in_cf)
# build geographic CRS
try:
geographic_conversion_method: Optional[
Callable
] = _GEOGRAPHIC_GRID_MAPPING_NAME_MAP[grid_mapping_name]
except KeyError:
geographic_conversion_method = None
geographic_crs_name = in_cf.get("geographic_crs_name")
if datum:
geographic_crs: CRS = GeographicCRS(
name=geographic_crs_name or "undefined",
datum=datum,
ellipsoidal_cs=ellipsoidal_cs,
)
elif geographic_crs_name and geographic_crs_name not in unknown_names:
geographic_crs = CRS(geographic_crs_name)
if ellipsoidal_cs is not None:
geographic_crs_json = geographic_crs.to_json_dict()
geographic_crs_json[
"coordinate_system"
] = CoordinateSystem.from_user_input(ellipsoidal_cs).to_json_dict()
geographic_crs = CRS(geographic_crs_json)
else:
geographic_crs = GeographicCRS(ellipsoidal_cs=ellipsoidal_cs)
if grid_mapping_name == "latitude_longitude":
return geographic_crs
if geographic_conversion_method is not None:
return DerivedGeographicCRS(
base_crs=geographic_crs,
conversion=geographic_conversion_method(in_cf),
ellipsoidal_cs=ellipsoidal_cs,
)
# build projected CRS
try:
conversion_method = _GRID_MAPPING_NAME_MAP[grid_mapping_name]
except KeyError:
raise CRSError(
f"Unsupported grid mapping name: {grid_mapping_name}"
) from None
projected_crs = ProjectedCRS(
name=in_cf.get("projected_crs_name", "undefined"),
conversion=conversion_method(in_cf),
geodetic_crs=geographic_crs,
cartesian_cs=cartesian_cs,
)
# build bound CRS if exists
bound_crs = None
if "towgs84" in in_cf:
bound_crs = BoundCRS(
source_crs=projected_crs,
target_crs="WGS 84",
transformation=ToWGS84Transformation(
projected_crs.geodetic_crs, *_try_list_if_string(in_cf["towgs84"])
),
)
if "geopotential_datum_name" not in in_cf:
return bound_crs or projected_crs
# build Vertical CRS
vertical_crs = VerticalCRS(
name="undefined",
datum=in_cf["geopotential_datum_name"],
geoid_model=in_cf.get("geoid_name"),
vertical_cs=vertical_cs,
)
# build compound CRS
return CompoundCRS(
name="undefined", components=[bound_crs or projected_crs, vertical_crs]
)
def cs_to_cf(self) -> list[dict]:
"""
.. versionadded:: 3.0.0
This converts all coordinate systems (cs) in the CRS
to a list of Climate and Forecast (CF) Version 1.8 dicts.
:ref:`build_crs_cf`
Returns
-------
list[dict]:
CF-1.8 version of the coordinate systems.
"""
cf_axis_list = []
def rotated_pole(crs):
try:
return (
crs.coordinate_operation
and crs.coordinate_operation.method_name.lower()
in _INVERSE_GEOGRAPHIC_GRID_MAPPING_NAME_MAP
)
except KeyError:
return False
if self.type_name == "Temporal CRS" and self.datum:
datum_json = self.datum.to_json_dict()
origin = datum_json.get("time_origin", "1875-05-20").strip().rstrip("zZ")
if len(origin) == 4:
origin = f"{origin}-01-01"
axis = self.axis_info[0]
cf_temporal_axis = {
"standard_name": "time",
"long_name": "time",
"calendar": (
datum_json.get("calendar", "proleptic_gregorian")
.lower()
.replace(" ", "_")
),
"axis": "T",
}
unit_name = axis.unit_name.lower().replace("calendar", "").strip()
# no units for TemporalDateTime
if unit_name:
cf_temporal_axis["units"] = f"{unit_name} since {origin}"
cf_axis_list.append(cf_temporal_axis)
if self.coordinate_system:
cf_axis_list.extend(
self.coordinate_system.to_cf(rotated_pole=rotated_pole(self))
)
elif self.is_bound and self.source_crs and self.source_crs.coordinate_system:
cf_axis_list.extend(
self.source_crs.coordinate_system.to_cf(
rotated_pole=rotated_pole(self.source_crs)
)
)
else:
for sub_crs in self.sub_crs_list:
cf_axis_list.extend(sub_crs.cs_to_cf())
return cf_axis_list
def is_exact_same(self, other: Any) -> bool:
"""
Check if the CRS objects are the exact same.
Parameters
----------
other: Any
Check if the other CRS is exactly the same as this object.
If the other object is not a CRS, it will try to create one.
On Failure, it will return False.
Returns
-------
bool
"""
try:
other = CRS.from_user_input(other)
except CRSError:
return False
return self._crs.is_exact_same(other._crs)
def equals(self, other: Any, ignore_axis_order: bool = False) -> bool:
"""
.. versionadded:: 2.5.0
Check if the CRS objects are equivalent.
Parameters
----------
other: Any
Check if the other object is equivalent to this object.
If the other object is not a CRS, it will try to create one.
On Failure, it will return False.
ignore_axis_order: bool, default=False
If True, it will compare the CRSs while ignoring axis order.
Returns
-------
bool
"""
try:
other = CRS.from_user_input(other)
except CRSError:
return False
return self._crs.equals(other._crs, ignore_axis_order=ignore_axis_order)
@property
def geodetic_crs(self) -> Optional["CRS"]:
"""
.. versionadded:: 2.2.0
Returns
-------
CRS:
The geodeticCRS / geographicCRS from the CRS.
"""
return (
None
if self._crs.geodetic_crs is None
else self.__class__(self._crs.geodetic_crs)
)
@property
def source_crs(self) -> Optional["CRS"]:
"""
The base CRS of a BoundCRS or a DerivedCRS/ProjectedCRS,
or the source CRS of a CoordinateOperation.
Returns
-------
CRS
"""
return (
None
if self._crs.source_crs is None
else self.__class__(self._crs.source_crs)
)
@property
def target_crs(self) -> Optional["CRS"]:
"""
.. versionadded:: 2.2.0
Returns
-------
CRS:
The hub CRS of a BoundCRS or the target CRS of a CoordinateOperation.
"""
return (
None
if self._crs.target_crs is None
else self.__class__(self._crs.target_crs)
)
@property
def sub_crs_list(self) -> list["CRS"]:
"""
If the CRS is a compound CRS, it will return a list of sub CRS objects.
Returns
-------
list[CRS]
"""
return [self.__class__(sub_crs) for sub_crs in self._crs.sub_crs_list]
@property
def utm_zone(self) -> Optional[str]:
"""
.. versionadded:: 2.6.0
Finds the UTM zone in a Projected CRS, Bound CRS, or Compound CRS
Returns
-------
Optional[str]:
The UTM zone number and letter if applicable.
"""
if self.is_bound and self.source_crs:
return self.source_crs.utm_zone
if self.sub_crs_list:
for sub_crs in self.sub_crs_list:
if sub_crs.utm_zone:
return sub_crs.utm_zone
elif (
self.coordinate_operation
and "UTM ZONE" in self.coordinate_operation.name.upper()
):
return self.coordinate_operation.name.upper().split("UTM ZONE ")[-1]
return None
@property
def name(self) -> str:
"""
Returns
-------
str:
The name of the CRS (from :cpp:func:`proj_get_name`).
"""
return self._crs.name
@property
def type_name(self) -> str:
"""
Returns
-------
str:
The name of the type of the CRS object.
"""
return self._crs.type_name
@property
def axis_info(self) -> list[Axis]:
"""
Retrieves all relevant axis information in the CRS.
If it is a Bound CRS, it gets the axis list from the Source CRS.
If it is a Compound CRS, it gets the axis list from the Sub CRS list.
Returns
-------
list[Axis]:
The list of axis information.
"""
return self._crs.axis_info
@property
def area_of_use(self) -> Optional[AreaOfUse]:
"""
Returns
-------
AreaOfUse:
The area of use object with associated attributes.
"""
return self._crs.area_of_use
@property
def ellipsoid(self) -> Optional[Ellipsoid]:
"""
.. versionadded:: 2.2.0
Returns
-------
Ellipsoid:
The ellipsoid object with associated attributes.
"""
return self._crs.ellipsoid
@property
def prime_meridian(self) -> Optional[PrimeMeridian]:
"""
.. versionadded:: 2.2.0
Returns
-------
PrimeMeridian:
The prime meridian object with associated attributes.
"""
return self._crs.prime_meridian
@property
def datum(self) -> Optional[Datum]:
"""
.. versionadded:: 2.2.0
Returns
-------
Datum
"""
return self._crs.datum
@property
def coordinate_system(self) -> Optional[CoordinateSystem]:
"""
.. versionadded:: 2.2.0
Returns
-------
CoordinateSystem
"""
return self._crs.coordinate_system
@property
def coordinate_operation(self) -> Optional[CoordinateOperation]:
"""
.. versionadded:: 2.2.0
Returns
-------
CoordinateOperation
"""
return self._crs.coordinate_operation
@property
def remarks(self) -> str:
"""
.. versionadded:: 2.4.0
Returns
-------
str:
Remarks about object.
"""
return self._crs.remarks
@property
def scope(self) -> str:
"""
.. versionadded:: 2.4.0
Returns
-------
str:
Scope of object.
"""
return self._crs.scope
def to_wkt(
self,
version: Union[WktVersion, str] = WktVersion.WKT2_2019,
pretty: bool = False,
output_axis_rule: Optional[bool] = None,
) -> str:
"""
Convert the projection to a WKT string.
Version options:
- WKT2_2015
- WKT2_2015_SIMPLIFIED
- WKT2_2019
- WKT2_2019_SIMPLIFIED
- WKT1_GDAL
- WKT1_ESRI
.. versionadded:: 3.6.0 output_axis_rule
Parameters
----------
version: pyproj.enums.WktVersion, optional
The version of the WKT output.
Default is :attr:`pyproj.enums.WktVersion.WKT2_2019`.
pretty: bool, default=False
If True, it will set the output to be a multiline string.
output_axis_rule: bool, optional, default=None
If True, it will always set the axis rule. If False, it never will.
None for AUTO, which depends on the CRS and version.
Returns
-------
str
"""
wkt = self._crs.to_wkt(
version=version, pretty=pretty, output_axis_rule=output_axis_rule
)
if wkt is None:
raise CRSError(
f"CRS cannot be converted to a WKT string of a '{version}' version. "
"Select a different version of a WKT string or edit your CRS."
)
return wkt
def to_json(self, pretty: bool = False, indentation: int = 2) -> str:
"""
.. versionadded:: 2.4.0
Convert the object to a JSON string.
Parameters
----------
pretty: bool, default=False
If True, it will set the output to be a multiline string.
indentation: int, default=2
If pretty is True, it will set the width of the indentation.
Returns
-------
str
"""
proj_json = self._crs.to_json(pretty=pretty, indentation=indentation)
if proj_json is None:
raise CRSError("CRS cannot be converted to a PROJ JSON string.")
return proj_json
def to_json_dict(self) -> dict:
"""
.. versionadded:: 2.4.0
Convert the object to a JSON dictionary.
Returns
-------
dict
"""
return self._crs.to_json_dict()
def to_proj4(self, version: Union[ProjVersion, int] = ProjVersion.PROJ_5) -> str:
"""
Convert the projection to a PROJ string.
.. warning:: You will likely lose important projection
information when converting to a PROJ string from
another format. See:
https://proj.org/faq.html#what-is-the-best-format-for-describing-coordinate-reference-systems # noqa: E501
Parameters
----------
version: pyproj.enums.ProjVersion
The version of the PROJ string output.
Default is :attr:`pyproj.enums.ProjVersion.PROJ_5`.
Returns
-------
str
"""
proj = self._crs.to_proj4(version=version)
if proj is None:
raise CRSError("CRS cannot be converted to a PROJ string.")
return proj
def to_epsg(self, min_confidence: int = 70) -> Optional[int]:
"""
Return the EPSG code best matching the CRS
or None if a match is not found.
Example:
>>> from pyproj import CRS
>>> ccs = CRS("EPSG:4328")
>>> ccs.to_epsg()
4328
If the CRS is bound, you can attempt to get an epsg code from
the source CRS:
>>> from pyproj import CRS
>>> ccs = CRS("+proj=geocent +datum=WGS84 +towgs84=0,0,0")
>>> ccs.to_epsg()
>>> ccs.source_crs.to_epsg()
4978
>>> ccs == CRS.from_epsg(4978)
False
Parameters
----------
min_confidence: int, default=70
A value between 0-100 where 100 is the most confident.
:ref:`min_confidence`
Returns
-------
Optional[int]:
The best matching EPSG code matching the confidence level.
"""
return self._crs.to_epsg(min_confidence=min_confidence)
def to_authority(self, auth_name: Optional[str] = None, min_confidence: int = 70):
"""
.. versionadded:: 2.2.0
Return the authority name and code best matching the CRS
or None if a match is not found.
Example:
>>> from pyproj import CRS
>>> ccs = CRS("EPSG:4328")
>>> ccs.to_authority()
('EPSG', '4328')
If the CRS is bound, you can get an authority from
the source CRS:
>>> from pyproj import CRS
>>> ccs = CRS("+proj=geocent +datum=WGS84 +towgs84=0,0,0")
>>> ccs.to_authority()
>>> ccs.source_crs.to_authority()
('EPSG', '4978')
>>> ccs == CRS.from_authority('EPSG', '4978')
False
Parameters
----------
auth_name: str, optional
The name of the authority to filter by.
min_confidence: int, default=70
A value between 0-100 where 100 is the most confident.
:ref:`min_confidence`
Returns
-------
tuple(str, str) or None:
The best matching (<auth_name>, <code>) for the confidence level.
"""
return self._crs.to_authority(
auth_name=auth_name, min_confidence=min_confidence
)
def list_authority(
self, auth_name: Optional[str] = None, min_confidence: int = 70
) -> list[AuthorityMatchInfo]:
"""
.. versionadded:: 3.2.0
Return the authority names and codes best matching the CRS.
Example:
>>> from pyproj import CRS
>>> ccs = CRS("EPSG:4328")
>>> ccs.list_authority()
[AuthorityMatchInfo(auth_name='EPSG', code='4328', confidence=100)]
If the CRS is bound, you can get an authority from
the source CRS:
>>> from pyproj import CRS
>>> ccs = CRS("+proj=geocent +datum=WGS84 +towgs84=0,0,0")
>>> ccs.list_authority()
[]
>>> ccs.source_crs.list_authority()
[AuthorityMatchInfo(auth_name='EPSG', code='4978', confidence=70)]
>>> ccs == CRS.from_authority('EPSG', '4978')
False
Parameters
----------
auth_name: str, optional
The name of the authority to filter by.
min_confidence: int, default=70
A value between 0-100 where 100 is the most confident.
:ref:`min_confidence`
Returns
-------
list[AuthorityMatchInfo]:
List of authority matches for the CRS.
"""
return self._crs.list_authority(
auth_name=auth_name, min_confidence=min_confidence
)
def to_3d(self, name: Optional[str] = None) -> "CRS":
"""
.. versionadded:: 3.1.0
Convert the current CRS to the 3D version if it makes sense.
New vertical axis attributes:
- ellipsoidal height
- oriented upwards
- metre units
Parameters
----------
name: str, optional
CRS name. Defaults to the name of the original CRS.
Returns
-------
CRS
"""
return self.__class__(self._crs.to_3d(name=name))
def to_2d(self, name: Optional[str] = None) -> "CRS":
"""
.. versionadded:: 3.6.0
Convert the current CRS to the 2D version if it makes sense.
Parameters
----------
name: str, optional
CRS name. Defaults to the name of the original CRS.
Returns
-------
CRS
"""
return self.__class__(self._crs.to_2d(name=name))
@property
def is_geographic(self) -> bool:
"""
This checks if the CRS is geographic.
It will check if it has a geographic CRS
in the sub CRS if it is a compound CRS and will check if
the source CRS is geographic if it is a bound CRS.
Returns
-------
bool:
True if the CRS is in geographic (lon/lat) coordinates.
"""
return self._crs.is_geographic
@property
def is_projected(self) -> bool:
"""
This checks if the CRS is projected.
It will check if it has a projected CRS
in the sub CRS if it is a compound CRS and will check if
the source CRS is projected if it is a bound CRS.
Returns
-------
bool:
True if CRS is projected.
"""
return self._crs.is_projected
@property
def is_vertical(self) -> bool:
"""
.. versionadded:: 2.2.0
This checks if the CRS is vertical.
It will check if it has a vertical CRS
in the sub CRS if it is a compound CRS and will check if
the source CRS is vertical if it is a bound CRS.
Returns
-------
bool:
True if CRS is vertical.
"""
return self._crs.is_vertical
@property
def is_bound(self) -> bool:
"""
Returns
-------
bool:
True if CRS is bound.
"""
return self._crs.is_bound
@property
def is_compound(self) -> bool:
"""
.. versionadded:: 3.1.0
Returns
-------
bool:
True if CRS is compound.
"""
return self._crs.is_compound
@property
def is_engineering(self) -> bool:
"""
.. versionadded:: 2.2.0
Returns
-------
bool:
True if CRS is local/engineering.
"""
return self._crs.is_engineering
@property
def is_geocentric(self) -> bool:
"""
This checks if the CRS is geocentric and
takes into account if the CRS is bound.
Returns
-------
bool:
True if CRS is in geocentric (x/y) coordinates.
"""
return self._crs.is_geocentric
@property
def is_derived(self):
"""
.. versionadded:: 3.2.0
Returns
-------
bool:
True if CRS is a Derived CRS.
"""
return self._crs.is_derived
def __eq__(self, other: Any) -> bool:
return self.equals(other)
def __getstate__(self) -> dict[str, str]:
return {"srs": self.srs}
def __setstate__(self, state: dict[str, Any]):
self.__dict__.update(state)
self._local = CRSLocal()
def __hash__(self) -> int:
return hash(self.to_wkt())
def __str__(self) -> str:
return self.srs
def __repr__(self) -> str:
# get axis information
axis_info_list: list[str] = []
for axis in self.axis_info:
axis_info_list.extend(["- ", str(axis), "\n"])
axis_info_str = "".join(axis_info_list)
# get coordinate system & sub CRS info
source_crs_repr = ""
sub_crs_repr = ""
if self.coordinate_system and self.coordinate_system.axis_list:
coordinate_system_name = str(self.coordinate_system)
elif self.is_bound and self.source_crs:
coordinate_system_name = str(self.source_crs.coordinate_system)
source_crs_repr = f"Source CRS: {self.source_crs.name}\n"
else:
coordinate_system_names = []
sub_crs_repr_list = ["Sub CRS:\n"]
for sub_crs in self.sub_crs_list:
coordinate_system_names.append(str(sub_crs.coordinate_system))
sub_crs_repr_list.extend(["- ", sub_crs.name, "\n"])
coordinate_system_name = "|".join(coordinate_system_names)
sub_crs_repr = "".join(sub_crs_repr_list)
# get coordinate operation repr
coordinate_operation = ""
if self.coordinate_operation:
coordinate_operation = "".join(
[
"Coordinate Operation:\n",
"- name: ",
str(self.coordinate_operation),
"\n- method: ",
self.coordinate_operation.method_name,
"\n",
]
)
# get SRS representation
srs_repr = self.to_string()
srs_repr = srs_repr if len(srs_repr) <= 50 else " ".join([srs_repr[:50], "..."])
axis_info_str = axis_info_str or "- undefined\n"
return (
f"<{self.type_name}: {srs_repr}>\n"
f"Name: {self.name}\n"
f"Axis Info [{coordinate_system_name or 'undefined'}]:\n"
f"{axis_info_str}"
"Area of Use:\n"
f"{self.area_of_use or '- undefined'}\n"
f"{coordinate_operation}"
f"Datum: {self.datum}\n"
f"- Ellipsoid: {self.ellipsoid or 'undefined'}\n"
f"- Prime Meridian: {self.prime_meridian or 'undefined'}\n"
f"{source_crs_repr}"
f"{sub_crs_repr}"
)
| (projparams: Optional[Any] = None, **kwargs) -> None |
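As a quick complement to the constructor documented above, here is a minimal sketch (assuming a recent pyproj and its bundled PROJ database; the printed results are indicative) of a few equivalent input forms:
from pyproj import CRS

crs_a = CRS.from_epsg(4326)                 # EPSG integer code
crs_b = CRS("EPSG:4326")                    # authority string
crs_c = CRS.from_authority("EPSG", 4326)    # (auth_name, code) pair
crs_d = CRS(proj="longlat", datum="WGS84")  # PROJ keyword arguments

print(crs_a == crs_b == crs_c)  # True: three spellings of the same definition
print(crs_a == crs_d)           # typically False: the PROJ-string form drops authority metadata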
22,640 | pyproj.crs.crs | __eq__ | null | def __eq__(self, other: Any) -> bool:
return self.equals(other)
| (self, other: Any) -> bool |
22,641 | pyproj.crs.crs | __getstate__ | null | def __getstate__(self) -> dict[str, str]:
return {"srs": self.srs}
| (self) -> dict[str, str] |
22,642 | pyproj.crs.crs | __hash__ | null | def __hash__(self) -> int:
return hash(self.to_wkt())
| (self) -> int |
22,643 | pyproj.crs.crs | __init__ |
Initialize a CRS class instance with:
- PROJ string
- Dictionary of PROJ parameters
- PROJ keyword arguments for parameters
- JSON string with PROJ parameters
- CRS WKT string
- An authority string [i.e. 'epsg:4326']
- An EPSG integer code [i.e. 4326]
- A tuple of ("auth_name", "auth_code") [i.e. ('epsg', '4326')]
- An object with a `to_wkt` method.
- A :class:`pyproj.crs.CRS` class
Example usage:
>>> from pyproj import CRS
>>> crs_utm = CRS.from_user_input(26915)
>>> crs_utm
<Projected CRS: EPSG:26915>
Name: NAD83 / UTM zone 15N
Axis Info [cartesian]:
- E[east]: Easting (metre)
- N[north]: Northing (metre)
Area of Use:
- name: North America - 96°W to 90°W and NAD83 by country
- bounds: (-96.0, 25.61, -90.0, 84.0)
Coordinate Operation:
- name: UTM zone 15N
- method: Transverse Mercator
Datum: North American Datum 1983
- Ellipsoid: GRS 1980
- Prime Meridian: Greenwich
<BLANKLINE>
>>> crs_utm.area_of_use.bounds
(-96.0, 25.61, -90.0, 84.0)
>>> crs_utm.ellipsoid
ELLIPSOID["GRS 1980",6378137,298.257222101,
LENGTHUNIT["metre",1],
ID["EPSG",7019]]
>>> crs_utm.ellipsoid.inverse_flattening
298.257222101
>>> crs_utm.ellipsoid.semi_major_metre
6378137.0
>>> crs_utm.ellipsoid.semi_minor_metre
6356752.314140356
>>> crs_utm.prime_meridian
PRIMEM["Greenwich",0,
ANGLEUNIT["degree",0.0174532925199433],
ID["EPSG",8901]]
>>> crs_utm.prime_meridian.unit_name
'degree'
>>> crs_utm.prime_meridian.unit_conversion_factor
0.017453292519943295
>>> crs_utm.prime_meridian.longitude
0.0
>>> crs_utm.datum
DATUM["North American Datum 1983",
ELLIPSOID["GRS 1980",6378137,298.257222101,
LENGTHUNIT["metre",1]],
ID["EPSG",6269]]
>>> crs_utm.coordinate_system
CS[Cartesian,2],
AXIS["(E)",east,
ORDER[1],
LENGTHUNIT["metre",1,
ID["EPSG",9001]]],
AXIS["(N)",north,
ORDER[2],
LENGTHUNIT["metre",1,
ID["EPSG",9001]]]
>>> print(crs_utm.coordinate_operation.to_wkt(pretty=True))
CONVERSION["UTM zone 15N",
METHOD["Transverse Mercator",
ID["EPSG",9807]],
PARAMETER["Latitude of natural origin",0,
ANGLEUNIT["degree",0.0174532925199433],
ID["EPSG",8801]],
PARAMETER["Longitude of natural origin",-93,
ANGLEUNIT["degree",0.0174532925199433],
ID["EPSG",8802]],
PARAMETER["Scale factor at natural origin",0.9996,
SCALEUNIT["unity",1],
ID["EPSG",8805]],
PARAMETER["False easting",500000,
LENGTHUNIT["metre",1],
ID["EPSG",8806]],
PARAMETER["False northing",0,
LENGTHUNIT["metre",1],
ID["EPSG",8807]],
ID["EPSG",16015]]
>>> crs = CRS(proj='utm', zone=10, ellps='WGS84')
>>> print(crs.to_wkt(pretty=True))
PROJCRS["unknown",
BASEGEOGCRS["unknown",
DATUM["Unknown based on WGS84 ellipsoid",
ELLIPSOID["WGS 84",6378137,298.257223563,
LENGTHUNIT["metre",1],
ID["EPSG",7030]]],
PRIMEM["Greenwich",0,
ANGLEUNIT["degree",0.0174532925199433],
ID["EPSG",8901]]],
CONVERSION["UTM zone 10N",
METHOD["Transverse Mercator",
ID["EPSG",9807]],
PARAMETER["Latitude of natural origin",0,
ANGLEUNIT["degree",0.0174532925199433],
ID["EPSG",8801]],
PARAMETER["Longitude of natural origin",-123,
ANGLEUNIT["degree",0.0174532925199433],
ID["EPSG",8802]],
PARAMETER["Scale factor at natural origin",0.9996,
SCALEUNIT["unity",1],
ID["EPSG",8805]],
PARAMETER["False easting",500000,
LENGTHUNIT["metre",1],
ID["EPSG",8806]],
PARAMETER["False northing",0,
LENGTHUNIT["metre",1],
ID["EPSG",8807]],
ID["EPSG",16010]],
CS[Cartesian,2],
AXIS["(E)",east,
ORDER[1],
LENGTHUNIT["metre",1,
ID["EPSG",9001]]],
AXIS["(N)",north,
ORDER[2],
LENGTHUNIT["metre",1,
ID["EPSG",9001]]]]
>>> geod = crs.get_geod()
>>> f"+a={geod.a:.0f} +f={geod.f:.8f}"
'+a=6378137 +f=0.00335281'
>>> crs.is_projected
True
>>> crs.is_geographic
False
| def __init__(self, projparams: Optional[Any] = None, **kwargs) -> None:
"""
Initialize a CRS class instance with:
- PROJ string
- Dictionary of PROJ parameters
- PROJ keyword arguments for parameters
- JSON string with PROJ parameters
- CRS WKT string
- An authority string [i.e. 'epsg:4326']
- An EPSG integer code [i.e. 4326]
- A tuple of ("auth_name", "auth_code") [i.e. ('epsg', '4326')]
- An object with a `to_wkt` method.
- A :class:`pyproj.crs.CRS` class
Example usage:
>>> from pyproj import CRS
>>> crs_utm = CRS.from_user_input(26915)
>>> crs_utm
<Projected CRS: EPSG:26915>
Name: NAD83 / UTM zone 15N
Axis Info [cartesian]:
- E[east]: Easting (metre)
- N[north]: Northing (metre)
Area of Use:
- name: North America - 96°W to 90°W and NAD83 by country
- bounds: (-96.0, 25.61, -90.0, 84.0)
Coordinate Operation:
- name: UTM zone 15N
- method: Transverse Mercator
Datum: North American Datum 1983
- Ellipsoid: GRS 1980
- Prime Meridian: Greenwich
<BLANKLINE>
>>> crs_utm.area_of_use.bounds
(-96.0, 25.61, -90.0, 84.0)
>>> crs_utm.ellipsoid
ELLIPSOID["GRS 1980",6378137,298.257222101,
LENGTHUNIT["metre",1],
ID["EPSG",7019]]
>>> crs_utm.ellipsoid.inverse_flattening
298.257222101
>>> crs_utm.ellipsoid.semi_major_metre
6378137.0
>>> crs_utm.ellipsoid.semi_minor_metre
6356752.314140356
>>> crs_utm.prime_meridian
PRIMEM["Greenwich",0,
ANGLEUNIT["degree",0.0174532925199433],
ID["EPSG",8901]]
>>> crs_utm.prime_meridian.unit_name
'degree'
>>> crs_utm.prime_meridian.unit_conversion_factor
0.017453292519943295
>>> crs_utm.prime_meridian.longitude
0.0
>>> crs_utm.datum
DATUM["North American Datum 1983",
ELLIPSOID["GRS 1980",6378137,298.257222101,
LENGTHUNIT["metre",1]],
ID["EPSG",6269]]
>>> crs_utm.coordinate_system
CS[Cartesian,2],
AXIS["(E)",east,
ORDER[1],
LENGTHUNIT["metre",1,
ID["EPSG",9001]]],
AXIS["(N)",north,
ORDER[2],
LENGTHUNIT["metre",1,
ID["EPSG",9001]]]
>>> print(crs_utm.coordinate_operation.to_wkt(pretty=True))
CONVERSION["UTM zone 15N",
METHOD["Transverse Mercator",
ID["EPSG",9807]],
PARAMETER["Latitude of natural origin",0,
ANGLEUNIT["degree",0.0174532925199433],
ID["EPSG",8801]],
PARAMETER["Longitude of natural origin",-93,
ANGLEUNIT["degree",0.0174532925199433],
ID["EPSG",8802]],
PARAMETER["Scale factor at natural origin",0.9996,
SCALEUNIT["unity",1],
ID["EPSG",8805]],
PARAMETER["False easting",500000,
LENGTHUNIT["metre",1],
ID["EPSG",8806]],
PARAMETER["False northing",0,
LENGTHUNIT["metre",1],
ID["EPSG",8807]],
ID["EPSG",16015]]
>>> crs = CRS(proj='utm', zone=10, ellps='WGS84')
>>> print(crs.to_wkt(pretty=True))
PROJCRS["unknown",
BASEGEOGCRS["unknown",
DATUM["Unknown based on WGS84 ellipsoid",
ELLIPSOID["WGS 84",6378137,298.257223563,
LENGTHUNIT["metre",1],
ID["EPSG",7030]]],
PRIMEM["Greenwich",0,
ANGLEUNIT["degree",0.0174532925199433],
ID["EPSG",8901]]],
CONVERSION["UTM zone 10N",
METHOD["Transverse Mercator",
ID["EPSG",9807]],
PARAMETER["Latitude of natural origin",0,
ANGLEUNIT["degree",0.0174532925199433],
ID["EPSG",8801]],
PARAMETER["Longitude of natural origin",-123,
ANGLEUNIT["degree",0.0174532925199433],
ID["EPSG",8802]],
PARAMETER["Scale factor at natural origin",0.9996,
SCALEUNIT["unity",1],
ID["EPSG",8805]],
PARAMETER["False easting",500000,
LENGTHUNIT["metre",1],
ID["EPSG",8806]],
PARAMETER["False northing",0,
LENGTHUNIT["metre",1],
ID["EPSG",8807]],
ID["EPSG",16010]],
CS[Cartesian,2],
AXIS["(E)",east,
ORDER[1],
LENGTHUNIT["metre",1,
ID["EPSG",9001]]],
AXIS["(N)",north,
ORDER[2],
LENGTHUNIT["metre",1,
ID["EPSG",9001]]]]
>>> geod = crs.get_geod()
>>> f"+a={geod.a:.0f} +f={geod.f:.8f}"
'+a=6378137 +f=0.00335281'
>>> crs.is_projected
True
>>> crs.is_geographic
False
"""
projstring = ""
if projparams:
if isinstance(projparams, _CRS):
projstring = projparams.srs
elif _is_epsg_code(projparams):
projstring = _prepare_from_epsg(projparams)
elif isinstance(projparams, str):
projstring = _prepare_from_string(projparams)
elif isinstance(projparams, dict):
projstring = _prepare_from_dict(projparams)
elif isinstance(projparams, (list, tuple)) and len(projparams) == 2:
projstring = _prepare_from_authority(*projparams)
elif hasattr(projparams, "to_wkt"):
projstring = projparams.to_wkt() # type: ignore
else:
raise CRSError(f"Invalid CRS input: {projparams!r}")
if kwargs:
projkwargs = _prepare_from_dict(kwargs, allow_json=False)
projstring = _prepare_from_string(" ".join((projstring, projkwargs)))
self.srs = projstring
self._local = CRSLocal()
if isinstance(projparams, _CRS):
self._local.crs = projparams
else:
self._local.crs = _CRS(self.srs)
| (self, projparams: Optional[Any] = None, **kwargs) -> NoneType |
22,644 | pyproj.crs.crs | __repr__ | null | def __repr__(self) -> str:
# get axis information
axis_info_list: list[str] = []
for axis in self.axis_info:
axis_info_list.extend(["- ", str(axis), "\n"])
axis_info_str = "".join(axis_info_list)
# get coordinate system & sub CRS info
source_crs_repr = ""
sub_crs_repr = ""
if self.coordinate_system and self.coordinate_system.axis_list:
coordinate_system_name = str(self.coordinate_system)
elif self.is_bound and self.source_crs:
coordinate_system_name = str(self.source_crs.coordinate_system)
source_crs_repr = f"Source CRS: {self.source_crs.name}\n"
else:
coordinate_system_names = []
sub_crs_repr_list = ["Sub CRS:\n"]
for sub_crs in self.sub_crs_list:
coordinate_system_names.append(str(sub_crs.coordinate_system))
sub_crs_repr_list.extend(["- ", sub_crs.name, "\n"])
coordinate_system_name = "|".join(coordinate_system_names)
sub_crs_repr = "".join(sub_crs_repr_list)
# get coordinate operation repr
coordinate_operation = ""
if self.coordinate_operation:
coordinate_operation = "".join(
[
"Coordinate Operation:\n",
"- name: ",
str(self.coordinate_operation),
"\n- method: ",
self.coordinate_operation.method_name,
"\n",
]
)
# get SRS representation
srs_repr = self.to_string()
srs_repr = srs_repr if len(srs_repr) <= 50 else " ".join([srs_repr[:50], "..."])
axis_info_str = axis_info_str or "- undefined\n"
return (
f"<{self.type_name}: {srs_repr}>\n"
f"Name: {self.name}\n"
f"Axis Info [{coordinate_system_name or 'undefined'}]:\n"
f"{axis_info_str}"
"Area of Use:\n"
f"{self.area_of_use or '- undefined'}\n"
f"{coordinate_operation}"
f"Datum: {self.datum}\n"
f"- Ellipsoid: {self.ellipsoid or 'undefined'}\n"
f"- Prime Meridian: {self.prime_meridian or 'undefined'}\n"
f"{source_crs_repr}"
f"{sub_crs_repr}"
)
| (self) -> str |
22,645 | pyproj.crs.crs | __setstate__ | null | def __setstate__(self, state: dict[str, Any]):
self.__dict__.update(state)
self._local = CRSLocal()
| (self, state: dict[str, typing.Any]) |
22,646 | pyproj.crs.crs | __str__ | null | def __str__(self) -> str:
return self.srs
| (self) -> str |
22,647 | pyproj.crs.crs | cs_to_cf |
.. versionadded:: 3.0.0
This converts all coordinate systems (cs) in the CRS
to a list of Climate and Forecast (CF) Version 1.8 dicts.
:ref:`build_crs_cf`
Returns
-------
list[dict]:
CF-1.8 version of the coordinate systems.
| def cs_to_cf(self) -> list[dict]:
"""
.. versionadded:: 3.0.0
This converts all coordinate systems (cs) in the CRS
to a list of Climate and Forecast (CF) Version 1.8 dicts.
:ref:`build_crs_cf`
Returns
-------
list[dict]:
CF-1.8 version of the coordinate systems.
"""
cf_axis_list = []
def rotated_pole(crs):
try:
return (
crs.coordinate_operation
and crs.coordinate_operation.method_name.lower()
in _INVERSE_GEOGRAPHIC_GRID_MAPPING_NAME_MAP
)
except KeyError:
return False
if self.type_name == "Temporal CRS" and self.datum:
datum_json = self.datum.to_json_dict()
origin = datum_json.get("time_origin", "1875-05-20").strip().rstrip("zZ")
if len(origin) == 4:
origin = f"{origin}-01-01"
axis = self.axis_info[0]
cf_temporal_axis = {
"standard_name": "time",
"long_name": "time",
"calendar": (
datum_json.get("calendar", "proleptic_gregorian")
.lower()
.replace(" ", "_")
),
"axis": "T",
}
unit_name = axis.unit_name.lower().replace("calendar", "").strip()
# no units for TemporalDateTime
if unit_name:
cf_temporal_axis["units"] = f"{unit_name} since {origin}"
cf_axis_list.append(cf_temporal_axis)
if self.coordinate_system:
cf_axis_list.extend(
self.coordinate_system.to_cf(rotated_pole=rotated_pole(self))
)
elif self.is_bound and self.source_crs and self.source_crs.coordinate_system:
cf_axis_list.extend(
self.source_crs.coordinate_system.to_cf(
rotated_pole=rotated_pole(self.source_crs)
)
)
else:
for sub_crs in self.sub_crs_list:
cf_axis_list.extend(sub_crs.cs_to_cf())
return cf_axis_list
| (self) -> list[dict] |
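A small usage sketch for cs_to_cf; the exact keys in each axis dict depend on the pyproj/PROJ version, so the comments below are indicative:
from pyproj import CRS

for axis in CRS("EPSG:4326").cs_to_cf():
    # Each dict describes one axis; expect keys such as
    # 'standard_name' ('latitude'/'longitude'), 'units' and 'axis' ('Y'/'X').
    print(axis.get("standard_name"), axis.get("units"), axis.get("axis"))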
22,648 | pyproj.crs.crs | equals |
.. versionadded:: 2.5.0
Check if the CRS objects are equivalent.
Parameters
----------
other: Any
Check if the other object is equivalent to this object.
If the other object is not a CRS, it will try to create one.
On Failure, it will return False.
ignore_axis_order: bool, default=False
If True, it will compare the CRSs while ignoring axis order.
Returns
-------
bool
| def equals(self, other: Any, ignore_axis_order: bool = False) -> bool:
"""
.. versionadded:: 2.5.0
Check if the CRS objects are equivalent.
Parameters
----------
other: Any
Check if the other object is equivalent to this object.
If the other object is not a CRS, it will try to create one.
On Failure, it will return False.
ignore_axis_order: bool, default=False
If True, it will compare the CRS class and ignore the axis order.
Returns
-------
bool
"""
try:
other = CRS.from_user_input(other)
except CRSError:
return False
return self._crs.equals(other._crs, ignore_axis_order=ignore_axis_order)
| (self, other: Any, ignore_axis_order: bool = False) -> bool |
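The ignore_axis_order flag is the main reason to prefer equals() over ==; a sketch (the OGC:CRS84 identifier is assumed to be present in the local PROJ database):
from pyproj import CRS

wgs84 = CRS("EPSG:4326")  # axis order: latitude, longitude
crs84 = CRS("OGC:CRS84")  # same datum, axis order: longitude, latitude

print(wgs84.equals(crs84))                          # False: axis order differs
print(wgs84.equals(crs84, ignore_axis_order=True))  # True: otherwise equivalent
print(wgs84.equals("not a CRS"))                    # False: input cannot be parsed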
22,649 | pyproj.crs.crs | from_cf |
.. versionadded:: 2.2.0
.. versionadded:: 3.0.0 ellipsoidal_cs, cartesian_cs, vertical_cs
This converts a Climate and Forecast (CF) Grid Mapping Version 1.8
dict to a :obj:`pyproj.crs.CRS` object.
:ref:`build_crs_cf`
Parameters
----------
in_cf: dict
CF version of the projection.
ellipsoidal_cs: Any, optional
Input to create an Ellipsoidal Coordinate System.
Anything accepted by :meth:`pyproj.crs.CoordinateSystem.from_user_input`
or an Ellipsoidal Coordinate System created from :ref:`coordinate_system`.
cartesian_cs: Any, optional
Input to create a Cartesian Coordinate System.
Anything accepted by :meth:`pyproj.crs.CoordinateSystem.from_user_input`
or :class:`pyproj.crs.coordinate_system.Cartesian2DCS`.
vertical_cs: Any, optional
Input to create a Vertical Coordinate System accepted by
:meth:`pyproj.crs.CoordinateSystem.from_user_input`
or :class:`pyproj.crs.coordinate_system.VerticalCS`
Returns
-------
CRS
| @staticmethod
def from_cf(
in_cf: dict,
ellipsoidal_cs: Optional[Any] = None,
cartesian_cs: Optional[Any] = None,
vertical_cs: Optional[Any] = None,
) -> "CRS":
"""
.. versionadded:: 2.2.0
.. versionadded:: 3.0.0 ellipsoidal_cs, cartesian_cs, vertical_cs
This converts a Climate and Forecast (CF) Grid Mapping Version 1.8
dict to a :obj:`pyproj.crs.CRS` object.
:ref:`build_crs_cf`
Parameters
----------
in_cf: dict
CF version of the projection.
ellipsoidal_cs: Any, optional
Input to create an Ellipsoidal Coordinate System.
Anything accepted by :meth:`pyproj.crs.CoordinateSystem.from_user_input`
or an Ellipsoidal Coordinate System created from :ref:`coordinate_system`.
cartesian_cs: Any, optional
Input to create a Cartesian Coordinate System.
Anything accepted by :meth:`pyproj.crs.CoordinateSystem.from_user_input`
or :class:`pyproj.crs.coordinate_system.Cartesian2DCS`.
vertical_cs: Any, optional
Input to create a Vertical Coordinate System accepted by
:meth:`pyproj.crs.CoordinateSystem.from_user_input`
or :class:`pyproj.crs.coordinate_system.VerticalCS`
Returns
-------
CRS
"""
# pylint: disable=too-many-branches
unknown_names = ("unknown", "undefined")
if "crs_wkt" in in_cf:
return CRS(in_cf["crs_wkt"])
if "spatial_ref" in in_cf: # for previous supported WKT key
return CRS(in_cf["spatial_ref"])
grid_mapping_name = in_cf.get("grid_mapping_name")
if grid_mapping_name is None:
raise CRSError("CF projection parameters missing 'grid_mapping_name'")
# build datum if possible
datum = _horizontal_datum_from_params(in_cf)
# build geographic CRS
try:
geographic_conversion_method: Optional[
Callable
] = _GEOGRAPHIC_GRID_MAPPING_NAME_MAP[grid_mapping_name]
except KeyError:
geographic_conversion_method = None
geographic_crs_name = in_cf.get("geographic_crs_name")
if datum:
geographic_crs: CRS = GeographicCRS(
name=geographic_crs_name or "undefined",
datum=datum,
ellipsoidal_cs=ellipsoidal_cs,
)
elif geographic_crs_name and geographic_crs_name not in unknown_names:
geographic_crs = CRS(geographic_crs_name)
if ellipsoidal_cs is not None:
geographic_crs_json = geographic_crs.to_json_dict()
geographic_crs_json[
"coordinate_system"
] = CoordinateSystem.from_user_input(ellipsoidal_cs).to_json_dict()
geographic_crs = CRS(geographic_crs_json)
else:
geographic_crs = GeographicCRS(ellipsoidal_cs=ellipsoidal_cs)
if grid_mapping_name == "latitude_longitude":
return geographic_crs
if geographic_conversion_method is not None:
return DerivedGeographicCRS(
base_crs=geographic_crs,
conversion=geographic_conversion_method(in_cf),
ellipsoidal_cs=ellipsoidal_cs,
)
# build projected CRS
try:
conversion_method = _GRID_MAPPING_NAME_MAP[grid_mapping_name]
except KeyError:
raise CRSError(
f"Unsupported grid mapping name: {grid_mapping_name}"
) from None
projected_crs = ProjectedCRS(
name=in_cf.get("projected_crs_name", "undefined"),
conversion=conversion_method(in_cf),
geodetic_crs=geographic_crs,
cartesian_cs=cartesian_cs,
)
# build bound CRS if exists
bound_crs = None
if "towgs84" in in_cf:
bound_crs = BoundCRS(
source_crs=projected_crs,
target_crs="WGS 84",
transformation=ToWGS84Transformation(
projected_crs.geodetic_crs, *_try_list_if_string(in_cf["towgs84"])
),
)
if "geopotential_datum_name" not in in_cf:
return bound_crs or projected_crs
# build Vertical CRS
vertical_crs = VerticalCRS(
name="undefined",
datum=in_cf["geopotential_datum_name"],
geoid_model=in_cf.get("geoid_name"),
vertical_cs=vertical_cs,
)
# build compound CRS
return CompoundCRS(
name="undefined", components=[bound_crs or projected_crs, vertical_crs]
)
| (in_cf: dict, ellipsoidal_cs: Optional[Any] = None, cartesian_cs: Optional[Any] = None, vertical_cs: Optional[Any] = None) -> pyproj.crs.crs.CRS |
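A minimal from_cf sketch using an illustrative Lambert conformal conic grid mapping; the parameter values are invented for the example:
from pyproj import CRS

cf_grid_mapping = {
    "grid_mapping_name": "lambert_conformal_conic",
    "standard_parallel": [25.0, 60.0],
    "longitude_of_central_meridian": -100.0,
    "latitude_of_projection_origin": 42.5,
    "false_easting": 0.0,
    "false_northing": 0.0,
    "semi_major_axis": 6378137.0,
    "inverse_flattening": 298.257222101,
}
crs = CRS.from_cf(cf_grid_mapping)
print(crs.is_projected)                      # True
print(crs.coordinate_operation.method_name)  # e.g. 'Lambert Conic Conformal (2SP)'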
22,650 | pyproj.crs.crs | get_geod |
Returns
-------
pyproj.geod.Geod:
Geod object based on the ellipsoid.
| def get_geod(self) -> Optional[Geod]:
"""
Returns
-------
pyproj.geod.Geod:
Geod object based on the ellipsoid.
"""
if self.ellipsoid is None:
return None
return Geod(
a=self.ellipsoid.semi_major_metre,
rf=self.ellipsoid.inverse_flattening,
b=self.ellipsoid.semi_minor_metre,
)
| (self) -> Optional[pyproj.geod.Geod] |
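get_geod makes the CRS's ellipsoid directly usable for geodesic measurements; a sketch with illustrative coordinates:
from pyproj import CRS

geod = CRS("EPSG:4326").get_geod()
# Geod.inv takes lon/lat pairs and returns forward azimuth,
# back azimuth and the geodesic distance in metres.
az12, az21, dist = geod.inv(2.35, 48.86, -0.13, 51.51)  # Paris -> London
print(f"{dist / 1000:.0f} km")  # roughly 340 km on the WGS 84 ellipsoid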
22,651 | pyproj.crs.crs | is_exact_same |
Check if the CRS objects are the exact same.
Parameters
----------
other: Any
Check if the other CRS is exactly the same as this object.
If the other object is not a CRS, it will try to create one.
On Failure, it will return False.
Returns
-------
bool
| def is_exact_same(self, other: Any) -> bool:
"""
Check if the CRS objects are the exact same.
Parameters
----------
other: Any
Check if the other CRS is exactly the same as this object.
If the other object is not a CRS, it will try to create one.
On Failure, it will return False.
Returns
-------
bool
"""
try:
other = CRS.from_user_input(other)
except CRSError:
return False
return self._crs.is_exact_same(other._crs)
| (self, other: Any) -> bool |
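is_exact_same is stricter than equals()/==; a sketch contrasting the two (results can vary slightly across PROJ versions):
from pyproj import CRS

a = CRS.from_epsg(4326)
b = CRS("EPSG:4326")
print(a.is_exact_same(b))        # True: identical definitions
print(a.is_exact_same("bogus"))  # False: input cannot be parsed as a CRS
c = CRS("+proj=longlat +datum=WGS84")
print(a == c)                    # typically False: c carries no authority metadata
print(a.is_exact_same(c))        # False: the definitions differ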
22,652 | pyproj.crs.crs | list_authority |
.. versionadded:: 3.2.0
Return the authority names and codes best matching the CRS.
Example:
>>> from pyproj import CRS
>>> ccs = CRS("EPSG:4328")
>>> ccs.list_authority()
[AuthorityMatchInfo(auth_name='EPSG', code='4328', confidence=100)]
If the CRS is bound, you can get an authority from
the source CRS:
>>> from pyproj import CRS
>>> ccs = CRS("+proj=geocent +datum=WGS84 +towgs84=0,0,0")
>>> ccs.list_authority()
[]
>>> ccs.source_crs.list_authority()
[AuthorityMatchInfo(auth_name='EPSG', code='4978', confidence=70)]
>>> ccs == CRS.from_authority('EPSG', '4978')
False
Parameters
----------
auth_name: str, optional
The name of the authority to filter by.
min_confidence: int, default=70
A value between 0-100 where 100 is the most confident.
:ref:`min_confidence`
Returns
-------
list[AuthorityMatchInfo]:
List of authority matches for the CRS.
| def list_authority(
self, auth_name: Optional[str] = None, min_confidence: int = 70
) -> list[AuthorityMatchInfo]:
"""
.. versionadded:: 3.2.0
Return the authority names and codes best matching the CRS.
Example:
>>> from pyproj import CRS
>>> ccs = CRS("EPSG:4328")
>>> ccs.list_authority()
[AuthorityMatchInfo(auth_name='EPSG', code='4328', confidence=100)]
If the CRS is bound, you can get an authority from
the source CRS:
>>> from pyproj import CRS
>>> ccs = CRS("+proj=geocent +datum=WGS84 +towgs84=0,0,0")
>>> ccs.list_authority()
[]
>>> ccs.source_crs.list_authority()
[AuthorityMatchInfo(auth_name='EPSG', code='4978', confidence=70)]
>>> ccs == CRS.from_authority('EPSG', '4978')
False
Parameters
----------
auth_name: str, optional
The name of the authority to filter by.
min_confidence: int, default=70
A value between 0-100 where 100 is the most confident.
:ref:`min_confidence`
Returns
-------
list[AuthorityMatchInfo]:
List of authority matches for the CRS.
"""
return self._crs.list_authority(
auth_name=auth_name, min_confidence=min_confidence
)
| (self, auth_name: Optional[str] = None, min_confidence: int = 70) -> list[importlib._bootstrap.AuthorityMatchInfo] |
22,653 | pyproj.crs.crs | to_2d |
.. versionadded:: 3.6.0
Convert the current CRS to the 2D version if it makes sense.
Parameters
----------
name: str, optional
CRS name. Defaults to the name of the original CRS.
Returns
-------
CRS
| def to_2d(self, name: Optional[str] = None) -> "CRS":
"""
.. versionadded:: 3.6.0
Convert the current CRS to the 2D version if it makes sense.
Parameters
----------
name: str, optional
CRS name. Defaults to the name of the original CRS.
Returns
-------
CRS
"""
return self.__class__(self._crs.to_2d(name=name))
| (self, name: Optional[str] = None) -> pyproj.crs.crs.CRS |
22,654 | pyproj.crs.crs | to_3d |
.. versionadded:: 3.1.0
Convert the current CRS to the 3D version if it makes sense.
New vertical axis attributes:
- ellipsoidal height
- oriented upwards
- metre units
Parameters
----------
name: str, optional
CRS name. Defaults to the name of the original CRS.
Returns
-------
CRS
| def to_3d(self, name: Optional[str] = None) -> "CRS":
"""
.. versionadded:: 3.1.0
Convert the current CRS to the 3D version if it makes sense.
New vertical axis attributes:
- ellipsoidal height
- oriented upwards
- metre units
Parameters
----------
name: str, optional
CRS name. Defaults to the name of the original CRS.
Returns
-------
CRS
"""
return self.__class__(self._crs.to_3d(name=name))
| (self, name: Optional[str] = None) -> pyproj.crs.crs.CRS |
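A sketch of the 2D/3D round trip (to_2d requires pyproj >= 3.6; expected results are noted in the comments):
from pyproj import CRS

crs2d = CRS("EPSG:4326")
crs3d = crs2d.to_3d()
print(len(crs3d.axis_info))           # 3: latitude, longitude, ellipsoidal height
print(crs3d.axis_info[-1].unit_name)  # 'metre', oriented up
print(crs3d.to_2d().equals(crs2d))    # expected True: back to the 2D definition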
22,655 | pyproj.crs.crs | to_authority |
.. versionadded:: 2.2.0
Return the authority name and code best matching the CRS
or None if a match is not found.
Example:
>>> from pyproj import CRS
>>> ccs = CRS("EPSG:4328")
>>> ccs.to_authority()
('EPSG', '4328')
If the CRS is bound, you can get an authority from
the source CRS:
>>> from pyproj import CRS
>>> ccs = CRS("+proj=geocent +datum=WGS84 +towgs84=0,0,0")
>>> ccs.to_authority()
>>> ccs.source_crs.to_authority()
('EPSG', '4978')
>>> ccs == CRS.from_authority('EPSG', '4978')
False
Parameters
----------
auth_name: str, optional
The name of the authority to filter by.
min_confidence: int, default=70
A value between 0-100 where 100 is the most confident.
:ref:`min_confidence`
Returns
-------
tuple(str, str) or None:
The best matching (<auth_name>, <code>) for the confidence level.
| def to_authority(self, auth_name: Optional[str] = None, min_confidence: int = 70):
"""
.. versionadded:: 2.2.0
Return the authority name and code best matching the CRS
or None if a match is not found.
Example:
>>> from pyproj import CRS
>>> ccs = CRS("EPSG:4328")
>>> ccs.to_authority()
('EPSG', '4328')
If the CRS is bound, you can get an authority from
the source CRS:
>>> from pyproj import CRS
>>> ccs = CRS("+proj=geocent +datum=WGS84 +towgs84=0,0,0")
>>> ccs.to_authority()
>>> ccs.source_crs.to_authority()
('EPSG', '4978')
>>> ccs == CRS.from_authority('EPSG', '4978')
False
Parameters
----------
auth_name: str, optional
The name of the authority to filter by.
min_confidence: int, default=70
A value between 0-100 where 100 is the most confident.
:ref:`min_confidence`
Returns
-------
tuple(str, str) or None:
The best matching (<auth_name>, <code>) for the confidence level.
"""
return self._crs.to_authority(
auth_name=auth_name, min_confidence=min_confidence
)
| (self, auth_name: Optional[str] = None, min_confidence: int = 70) |
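A sketch of to_authority and the min_confidence knob; loose matches depend on the local PROJ database, so the second result is only indicative:
from pyproj import CRS

print(CRS("EPSG:4326").to_authority())  # ('EPSG', '4326')
# A CRS built from a bare PROJ string matches only loosely, if at all:
loose = CRS("+proj=longlat +datum=WGS84")
print(loose.to_authority(min_confidence=20))  # e.g. ('OGC', 'CRS84'); None if nothing matches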
22,656 | pyproj.crs.crs | to_cf |
.. versionadded:: 2.2.0
This converts a :obj:`pyproj.crs.CRS` object
to a Climate and Forecast (CF) Grid Mapping Version 1.8 dict.
:ref:`build_crs_cf`
Parameters
----------
wkt_version: str or pyproj.enums.WktVersion
Version of WKT supported by CRS.to_wkt.
Default is :attr:`pyproj.enums.WktVersion.WKT2_2019`.
errcheck: bool, default=False
If True, will warn when parameters are ignored.
Returns
-------
dict:
CF-1.8 version of the projection.
| def to_cf(
self,
wkt_version: Union[WktVersion, str] = WktVersion.WKT2_2019,
errcheck: bool = False,
) -> dict:
"""
.. versionadded:: 2.2.0
This converts a :obj:`pyproj.crs.CRS` object
to a Climate and Forecast (CF) Grid Mapping Version 1.8 dict.
:ref:`build_crs_cf`
Parameters
----------
wkt_version: str or pyproj.enums.WktVersion
Version of WKT supported by CRS.to_wkt.
Default is :attr:`pyproj.enums.WktVersion.WKT2_2019`.
errcheck: bool, default=False
If True, will warn when parameters are ignored.
Returns
-------
dict:
CF-1.8 version of the projection.
"""
# pylint: disable=too-many-branches,too-many-return-statements
cf_dict: dict[str, Any] = {"crs_wkt": self.to_wkt(wkt_version)}
# handle bound CRS
if (
self.is_bound
and self.coordinate_operation
and self.coordinate_operation.towgs84
and self.source_crs
):
sub_cf: dict[str, Any] = self.source_crs.to_cf(
wkt_version=wkt_version,
errcheck=errcheck,
)
sub_cf.pop("crs_wkt")
cf_dict.update(sub_cf)
cf_dict["towgs84"] = self.coordinate_operation.towgs84
return cf_dict
# handle compound CRS
if self.is_compound:
for sub_crs in self.sub_crs_list:
sub_cf = sub_crs.to_cf(wkt_version=wkt_version, errcheck=errcheck)
sub_cf.pop("crs_wkt")
cf_dict.update(sub_cf)
return cf_dict
# handle vertical CRS
if self.is_vertical:
vert_json = self.to_json_dict()
if "geoid_model" in vert_json:
cf_dict["geoid_name"] = vert_json["geoid_model"]["name"]
if self.datum:
cf_dict["geopotential_datum_name"] = self.datum.name
return cf_dict
# write out datum parameters
if self.ellipsoid:
cf_dict.update(
semi_major_axis=self.ellipsoid.semi_major_metre,
semi_minor_axis=self.ellipsoid.semi_minor_metre,
inverse_flattening=self.ellipsoid.inverse_flattening,
)
cf_dict["reference_ellipsoid_name"] = self.ellipsoid.name
if self.prime_meridian:
cf_dict["longitude_of_prime_meridian"] = self.prime_meridian.longitude
cf_dict["prime_meridian_name"] = self.prime_meridian.name
# handle geographic CRS
if self.geodetic_crs:
cf_dict["geographic_crs_name"] = self.geodetic_crs.name
if self.geodetic_crs.datum:
cf_dict["horizontal_datum_name"] = self.geodetic_crs.datum.name
if self.is_geographic:
if self.coordinate_operation:
if (
self.coordinate_operation.method_name.lower()
not in _INVERSE_GEOGRAPHIC_GRID_MAPPING_NAME_MAP
):
if errcheck:
warnings.warn(
"Unsupported coordinate operation: "
f"{self.coordinate_operation.method_name}"
)
return {"crs_wkt": cf_dict["crs_wkt"]}
cf_dict.update(
_INVERSE_GEOGRAPHIC_GRID_MAPPING_NAME_MAP[
self.coordinate_operation.method_name.lower()
](self.coordinate_operation)
)
else:
cf_dict["grid_mapping_name"] = "latitude_longitude"
return cf_dict
# handle projected CRS
coordinate_operation = None
if not self.is_bound and self.is_projected:
coordinate_operation = self.coordinate_operation
cf_dict["projected_crs_name"] = self.name
coordinate_operation_name = (
None
if not coordinate_operation
else coordinate_operation.method_name.lower().replace(" ", "_")
)
if coordinate_operation_name not in _INVERSE_GRID_MAPPING_NAME_MAP:
if errcheck:
if coordinate_operation:
warnings.warn(
"Unsupported coordinate operation: "
f"{coordinate_operation.method_name}"
)
else:
warnings.warn("Coordinate operation not found.")
return {"crs_wkt": cf_dict["crs_wkt"]}
cf_dict.update(
_INVERSE_GRID_MAPPING_NAME_MAP[coordinate_operation_name](
coordinate_operation
)
)
return cf_dict
| (self, wkt_version: Union[pyproj.enums.WktVersion, str] = <WktVersion.WKT2_2019: 'WKT2_2019'>, errcheck: bool = False) -> dict |
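A sketch converting a projected CRS to its CF grid mapping; the keys shown are the usual ones for a Transverse Mercator based CRS, though exact contents depend on the pyproj version:
from pyproj import CRS

cf = CRS("EPSG:32615").to_cf()  # WGS 84 / UTM zone 15N
print(cf["grid_mapping_name"])  # 'transverse_mercator'
print(cf["false_easting"])      # 500000.0
print("crs_wkt" in cf)          # True: the full WKT is always carried along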
22,657 | pyproj.crs.crs | to_dict |
.. versionadded:: 2.2.0
Converts the CRS to dictionary of PROJ parameters.
.. warning:: You will likely lose important projection
information when converting to a PROJ string from
another format. See: https://proj.org/faq.html#what-is-the-best-format-for-describing-coordinate-reference-systems # noqa: E501
Returns
-------
dict:
PROJ params in dict format.
| def to_dict(self) -> dict:
"""
.. versionadded:: 2.2.0
Converts the CRS to dictionary of PROJ parameters.
.. warning:: You will likely lose important projection
information when converting to a PROJ string from
another format. See: https://proj.org/faq.html#what-is-the-best-format-for-describing-coordinate-reference-systems # noqa: E501
Returns
-------
dict:
PROJ params in dict format.
"""
proj_string = self.to_proj4()
if proj_string is None:
return {}
def _parse(val):
if val.lower() == "true":
return True
if val.lower() == "false":
return False
try:
return int(val)
except ValueError:
pass
try:
return float(val)
except ValueError:
pass
return _try_list_if_string(val)
proj_dict = {}
for param in _RE_PROJ_PARAM.finditer(proj_string):
key, value = param.groups()
if value is not None:
value = _parse(value)
if value is not False:
proj_dict[key] = value
return proj_dict
| (self) -> dict |
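A sketch of to_dict; per the warning above, the PROJ-dict round trip is lossy, so treat the result as approximate:
from pyproj import CRS

print(CRS("EPSG:32615").to_dict())
# Something like (exact contents vary with the PROJ version):
# {'proj': 'utm', 'zone': 15, 'datum': 'WGS84', 'units': 'm', 'no_defs': None}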
22,658 | pyproj.crs.crs | to_epsg |
Return the EPSG code best matching the CRS
or None if a match is not found.
Example:
>>> from pyproj import CRS
>>> ccs = CRS("EPSG:4328")
>>> ccs.to_epsg()
4328
If the CRS is bound, you can attempt to get an epsg code from
the source CRS:
>>> from pyproj import CRS
>>> ccs = CRS("+proj=geocent +datum=WGS84 +towgs84=0,0,0")
>>> ccs.to_epsg()
>>> ccs.source_crs.to_epsg()
4978
>>> ccs == CRS.from_epsg(4978)
False
Parameters
----------
min_confidence: int, default=70
A value between 0-100 where 100 is the most confident.
:ref:`min_confidence`
Returns
-------
Optional[int]:
The best matching EPSG code matching the confidence level.
| def to_epsg(self, min_confidence: int = 70) -> Optional[int]:
"""
Return the EPSG code best matching the CRS
or None if a match is not found.
Example:
>>> from pyproj import CRS
>>> ccs = CRS("EPSG:4328")
>>> ccs.to_epsg()
4328
If the CRS is bound, you can attempt to get an epsg code from
the source CRS:
>>> from pyproj import CRS
>>> ccs = CRS("+proj=geocent +datum=WGS84 +towgs84=0,0,0")
>>> ccs.to_epsg()
>>> ccs.source_crs.to_epsg()
4978
>>> ccs == CRS.from_epsg(4978)
False
Parameters
----------
min_confidence: int, default=70
A value between 0-100 where 100 is the most confident.
:ref:`min_confidence`
Returns
-------
Optional[int]:
The best matching EPSG code matching the confidence level.
"""
return self._crs.to_epsg(min_confidence=min_confidence)
| (self, min_confidence: int = 70) -> Optional[int] |
22,659 | pyproj.crs.crs | to_json |
.. versionadded:: 2.4.0
Convert the object to a JSON string.
Parameters
----------
pretty: bool, default=False
If True, it will set the output to be a multiline string.
indentation: int, default=2
If pretty is True, it will set the width of the indentation.
Returns
-------
str
| def to_json(self, pretty: bool = False, indentation: int = 2) -> str:
"""
.. versionadded:: 2.4.0
Convert the object to a JSON string.
Parameters
----------
pretty: bool, default=False
If True, it will set the output to be a multiline string.
indentation: int, default=2
If pretty is True, it will set the width of the indentation.
Returns
-------
str
"""
proj_json = self._crs.to_json(pretty=pretty, indentation=indentation)
if proj_json is None:
raise CRSError("CRS cannot be converted to a PROJ JSON string.")
return proj_json
| (self, pretty: bool = False, indentation: int = 2) -> str |
22,660 | pyproj.crs.crs | to_json_dict |
.. versionadded:: 2.4.0
Convert the object to a JSON dictionary.
Returns
-------
dict
| def to_json_dict(self) -> dict:
"""
.. versionadded:: 2.4.0
Convert the object to a JSON dictionary.
Returns
-------
dict
"""
return self._crs.to_json_dict()
| (self) -> dict |
22,661 | pyproj.crs.crs | to_proj4 |
Convert the projection to a PROJ string.
.. warning:: You will likely lose important projection
information when converting to a PROJ string from
another format. See:
https://proj.org/faq.html#what-is-the-best-format-for-describing-coordinate-reference-systems # noqa: E501
Parameters
----------
version: pyproj.enums.ProjVersion
The version of the PROJ string output.
Default is :attr:`pyproj.enums.ProjVersion.PROJ_4`.
Returns
-------
str
| def to_proj4(self, version: Union[ProjVersion, int] = ProjVersion.PROJ_5) -> str:
"""
Convert the projection to a PROJ string.
.. warning:: You will likely lose important projection
information when converting to a PROJ string from
another format. See:
https://proj.org/faq.html#what-is-the-best-format-for-describing-coordinate-reference-systems # noqa: E501
Parameters
----------
version: pyproj.enums.ProjVersion
The version of the PROJ string output.
Default is :attr:`pyproj.enums.ProjVersion.PROJ_4`.
Returns
-------
str
"""
proj = self._crs.to_proj4(version=version)
if proj is None:
raise CRSError("CRS cannot be converted to a PROJ string.")
return proj
| (self, version: Union[pyproj.enums.ProjVersion, int] = <ProjVersion.PROJ_5: 5>) -> str |
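A usage sketch for `to_proj4`; the lossiness warning in the docstring applies, and pyproj may also emit a UserWarning at runtime:

```python
from pyproj import CRS

# Indicative output: '+proj=longlat +datum=WGS84 +no_defs +type=crs'
print(CRS("EPSG:4326").to_proj4())
```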
22,662 | pyproj.crs.crs | to_string |
.. versionadded:: 2.2.0
Convert the CRS to a string.
It first attempts to convert the CRS to an authority string
(e.g. 'EPSG:4326'). Otherwise, it falls back to the string
the CRS was created from.
Returns
-------
str
| def to_string(self) -> str:
"""
.. versionadded:: 2.2.0
Convert the CRS to a string.
It first attempts to convert the CRS to an authority string
(e.g. 'EPSG:4326'). Otherwise, it falls back to the string
the CRS was created from.
Returns
-------
str
"""
auth_info = self.to_authority(min_confidence=100)
if auth_info:
return ":".join(auth_info)
return self.srs
| (self) -> str |
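A quick sketch of the two branches of `to_string` (authority match vs. fallback to the source string):

```python
from pyproj import CRS

print(CRS.from_epsg(4326).to_string())   # 'EPSG:4326'
# A bare PROJ string usually fails the min_confidence=100 authority match,
# so to_string() likely falls back to the srs the CRS was created from:
print(CRS("+proj=longlat +datum=WGS84").to_string())
```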
22,663 | pyproj.crs.crs | to_wkt |
Convert the projection to a WKT string.
Version options:
- WKT2_2015
- WKT2_2015_SIMPLIFIED
- WKT2_2019
- WKT2_2019_SIMPLIFIED
- WKT1_GDAL
- WKT1_ESRI
.. versionadded:: 3.6.0 output_axis_rule
Parameters
----------
version: pyproj.enums.WktVersion, optional
The version of the WKT output.
Default is :attr:`pyproj.enums.WktVersion.WKT2_2019`.
pretty: bool, default=False
If True, it will set the output to be a multiline string.
output_axis_rule: bool, optional, default=None
If True, it will set the axis rule on any case. If false, never.
None for AUTO, that depends on the CRS and version.
Returns
-------
str
| def to_wkt(
self,
version: Union[WktVersion, str] = WktVersion.WKT2_2019,
pretty: bool = False,
output_axis_rule: Optional[bool] = None,
) -> str:
"""
Convert the projection to a WKT string.
Version options:
- WKT2_2015
- WKT2_2015_SIMPLIFIED
- WKT2_2019
- WKT2_2019_SIMPLIFIED
- WKT1_GDAL
- WKT1_ESRI
.. versionadded:: 3.6.0 output_axis_rule
Parameters
----------
version: pyproj.enums.WktVersion, optional
The version of the WKT output.
Default is :attr:`pyproj.enums.WktVersion.WKT2_2019`.
pretty: bool, default=False
If True, it will set the output to be a multiline string.
output_axis_rule: bool, optional, default=None
If True, it will set the axis rule on any case. If false, never.
None for AUTO, that depends on the CRS and version.
Returns
-------
str
"""
wkt = self._crs.to_wkt(
version=version, pretty=pretty, output_axis_rule=output_axis_rule
)
if wkt is None:
raise CRSError(
f"CRS cannot be converted to a WKT string of a '{version}' version. "
"Select a different version of a WKT string or edit your CRS."
)
return wkt
| (self, version: Union[pyproj.enums.WktVersion, str] = <WktVersion.WKT2_2019: 'WKT2_2019'>, pretty: bool = False, output_axis_rule: Optional[bool] = None) -> str |
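A usage sketch for `to_wkt`, exercising the `version` and `pretty` parameters:

```python
from pyproj import CRS

wkt = CRS.from_epsg(4326).to_wkt(version="WKT2_2019", pretty=True)
print(wkt.splitlines()[0])   # first line, e.g. 'GEOGCRS["WGS 84",'
```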
22,665 | salem.wrftools | geogrid_simulator | Emulates geogrid.exe, which is useful when defining new WRF domains.
Parameters
----------
fpath: str
path to a namelist.wps file
do_maps: bool
whether the simulator should also return maps of the grids
map_kwargs: dict
kwargs to pass to salem.Map()
Returns
-------
(grids, maps) with:
- grids: a list of Grids corresponding to the domains
defined in the namelist
- maps: a list of maps corresponding to the grids (if do_maps==True)
| def geogrid_simulator(fpath, do_maps=True, map_kwargs=None):
"""Emulates geogrid.exe, which is useful when defining new WRF domains.
Parameters
----------
fpath: str
path to a namelist.wps file
do_maps: bool
whether the simulator should also return maps of the grids
map_kwargs: dict
kwargs to pass to salem.Map()
Returns
-------
(grids, maps) with:
- grids: a list of Grids corresponding to the domains
defined in the namelist
- maps: a list of maps corresponding to the grids (if do_maps==True)
"""
with open(fpath) as f:
lines = f.readlines()
pargs = dict()
for l in lines:
s = l.split('=')
if len(s) < 2:
continue
s0 = s[0].strip().upper()
s1 = list(filter(None, s[1].strip().replace('\n', '').split(',')))
if s0 == 'PARENT_ID':
parent_id = [int(s) for s in s1]
if s0 == 'PARENT_GRID_RATIO':
parent_ratio = [int(s) for s in s1]
if s0 == 'I_PARENT_START':
i_parent_start = [int(s) for s in s1]
if s0 == 'J_PARENT_START':
j_parent_start = [int(s) for s in s1]
if s0 == 'E_WE':
e_we = [int(s) for s in s1]
if s0 == 'E_SN':
e_sn = [int(s) for s in s1]
if s0 == 'DX':
dx = float(s1[0])
if s0 == 'DY':
dy = float(s1[0])
if s0 == 'MAP_PROJ':
map_proj = s1[0].replace("'", '').strip().upper()
if s0 == 'REF_LAT':
pargs['lat_0'] = float(s1[0])
if s0 == 'REF_LON':
pargs['ref_lon'] = float(s1[0])
if s0 == 'TRUELAT1':
pargs['lat_1'] = float(s1[0])
if s0 == 'TRUELAT2':
pargs['lat_2'] = float(s1[0])
if s0 == 'STAND_LON':
pargs['lon_0'] = float(s1[0])
# Sometimes files are not complete
pargs.setdefault('lon_0', pargs['ref_lon'])
# define projection
if map_proj == 'LAMBERT':
pwrf = '+proj=lcc +lat_1={lat_1} +lat_2={lat_2} ' \
'+lat_0={lat_0} +lon_0={lon_0} ' \
'+x_0=0 +y_0=0 +a=6370000 +b=6370000'
pwrf = pwrf.format(**pargs)
elif map_proj == 'MERCATOR':
pwrf = '+proj=merc +lat_ts={lat_1} +lon_0={lon_0} ' \
'+x_0=0 +y_0=0 +a=6370000 +b=6370000'
pwrf = pwrf.format(**pargs)
elif map_proj == 'POLAR':
pwrf = '+proj=stere +lat_ts={lat_1} +lat_0=90.0 +lon_0={lon_0} ' \
'+x_0=0 +y_0=0 +a=6370000 +b=6370000'
pwrf = pwrf.format(**pargs)
else:
raise NotImplementedError('WRF proj not implemented yet: '
'{}'.format(map_proj))
pwrf = gis.check_crs(pwrf)
# get easting and northings from dom center (probably unnecessary here)
e, n = gis.transform_proj(wgs84, pwrf, pargs['ref_lon'], pargs['lat_0'])
# LL corner
nx, ny = e_we[0]-1, e_sn[0]-1
x0 = -(nx-1) / 2. * dx + e # -2 because of staggered grid
y0 = -(ny-1) / 2. * dy + n
# parent grid
grid = gis.Grid(nxny=(nx, ny), x0y0=(x0, y0), dxdy=(dx, dy), proj=pwrf)
# child grids
out = [grid]
for ips, jps, pid, ratio, we, sn in zip(i_parent_start, j_parent_start,
parent_id, parent_ratio,
e_we, e_sn):
if ips == 1:
continue
ips -= 1
jps -= 1
we -= 1
sn -= 1
nx = we / ratio
ny = sn / ratio
if nx != (we // ratio):  # floor division: the nest must divide evenly
raise RuntimeError('e_we and ratios are incompatible: '
'(e_we - 1) / ratio must be integer!')
if ny != (sn // ratio):  # floor division: the nest must divide evenly
raise RuntimeError('e_sn and ratios are incompatible: '
'(e_sn - 1) / ratio must be integer!')
prevgrid = out[pid - 1]
xx, yy = prevgrid.corner_grid.x_coord, prevgrid.corner_grid.y_coord
dx = prevgrid.dx / ratio
dy = prevgrid.dy / ratio
grid = gis.Grid(nxny=(we, sn),
x0y0=(xx[ips], yy[jps]),
dxdy=(dx, dy),
pixel_ref='corner',
proj=pwrf)
out.append(grid.center_grid)
maps = None
if do_maps:
from salem import Map
import shapely.geometry as shpg
if map_kwargs is None:
map_kwargs = {}
maps = []
for i, g in enumerate(out):
m = Map(g, **map_kwargs)
for j in range(i+1, len(out)):
cg = out[j]
left, right, bottom, top = cg.extent
s = np.array([(left, bottom), (right, bottom),
(right, top), (left, top)])
l1 = shpg.LinearRing(s)
m.set_geometry(l1, crs=cg.proj, linewidth=(len(out)-j),
zorder=5)
maps.append(m)
return out, maps
| (fpath, do_maps=True, map_kwargs=None) |
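A minimal sketch of calling `geogrid_simulator`; `'namelist.wps'` is a hypothetical path to your own WPS namelist, and the top-level import is assumed to work as in the salem docs:

```python
from salem import geogrid_simulator

# do_maps=False returns (grids, None) and avoids the matplotlib dependency
grids, maps = geogrid_simulator('namelist.wps', do_maps=False)
for g in grids:
    print(g.nx, g.ny, g.dx)   # one Grid per WRF domain
```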
22,666 | salem | get_cmap | null | def get_cmap():
raise ImportError('requires matplotlib')
| () |
22,667 | salem.utils | get_demo_file | Returns the path to the desired demo file. | def get_demo_file(fname):
"""Returns the path to the desired demo file."""
d = download_demo_files()
if fname in d:
return d[fname]
else:
return None
| (fname) |
22,669 | salem.gis | googlestatic_mercator_grid | Mercator map centered on a specified point (google API conventions).
Mostly useful for google maps.
| def googlestatic_mercator_grid(center_ll=None, nx=640, ny=640, zoom=12, scale=1):
"""Mercator map centered on a specified point (google API conventions).
Mostly useful for google maps.
"""
# Number of pixels in an image with a zoom level of 0.
google_pix = 256 * scale
# The equatorial radius of the Earth assuming WGS-84 ellipsoid.
google_earth_radius = 6378137.0
# Make a local proj
lon, lat = center_ll
projloc = check_crs('epsg:3857')
# The size of the image is multiplied by the scaling factor
nx *= scale
ny *= scale
# Meter per pixel
mpix = (2 * np.pi * google_earth_radius) / google_pix / (2**zoom)
xx = nx * mpix
yy = ny * mpix
e, n = transform_proj(wgs84, projloc, lon, lat)
corner = (-xx / 2. + e, yy / 2. + n)
dxdy = (xx / nx, - yy / ny)
return Grid(proj=projloc, x0y0=corner,
nxny=(nx, ny), dxdy=dxdy,
pixel_ref='corner')
| (center_ll=None, nx=640, ny=640, zoom=12, scale=1) |
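A small sketch; the center point is an arbitrary example:

```python
from salem.gis import googlestatic_mercator_grid

g = googlestatic_mercator_grid(center_ll=(11.38, 47.26), nx=640, ny=640, zoom=10)
print(g.nx, g.ny)   # 640 640 (multiplied by `scale` if scale > 1)
print(g.extent)     # [left, right, bottom, top] in Web Mercator meters
```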
22,670 | salem.sio | grid_from_dataset | Find out if the dataset contains enough info for Salem to understand.
``ds`` can be an xarray dataset or a NetCDF dataset, or anything
that resembles it.
Returns a :py:class:`~salem.Grid` if successful, ``None`` otherwise
| def grid_from_dataset(ds):
"""Find out if the dataset contains enough info for Salem to understand.
``ds`` can be an xarray dataset or a NetCDF dataset, or anything
that resembles it.
Returns a :py:class:`~salem.Grid` if successful, ``None`` otherwise
"""
# try if it is a salem file
out = _salem_grid_from_dataset(ds)
if out is not None:
return out
# maybe it's a WRF file?
if hasattr(ds, 'MOAD_CEN_LAT') or hasattr(ds, 'PROJ_ENVI_STRING'):
# WRF and HAR have some special attributes
return _wrf_grid_from_dataset(ds)
# Try out plate carree
return _lonlat_grid_from_dataset(ds)
| (ds) |
22,672 | salem | lazy_property | Decorator that makes a property lazy-evaluated. | def lazy_property(fn):
"""Decorator that makes a property lazy-evaluated."""
attr_name = '_lazy_' + fn.__name__
@property
@wraps(fn)
def _lazy_property(self):
if not hasattr(self, attr_name):
setattr(self, attr_name, fn(self))
return getattr(self, attr_name)
return _lazy_property
| (fn) |
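A self-contained sketch of the decorator: the wrapped method runs once and the result is cached on the instance:

```python
from salem import lazy_property

class Circle:
    def __init__(self, radius):
        self.radius = radius

    @lazy_property
    def area(self):
        print('computing...')              # runs on first access only
        return 3.14159 * self.radius ** 2

c = Circle(2.0)
c.area   # prints 'computing...' and caches the value in _lazy_area
c.area   # served from the cache, no print
```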
22,673 | os | makedirs | makedirs(name [, mode=0o777][, exist_ok=False])
Super-mkdir; create a leaf directory and all intermediate ones. Works like
mkdir, except that any intermediate path segment (not just the rightmost)
will be created if it does not exist. If the target directory already
exists, raise an OSError if exist_ok is False. Otherwise no exception is
raised. This is recursive.
| def makedirs(name, mode=0o777, exist_ok=False):
"""makedirs(name [, mode=0o777][, exist_ok=False])
Super-mkdir; create a leaf directory and all intermediate ones. Works like
mkdir, except that any intermediate path segment (not just the rightmost)
will be created if it does not exist. If the target directory already
exists, raise an OSError if exist_ok is False. Otherwise no exception is
raised. This is recursive.
"""
head, tail = path.split(name)
if not tail:
head, tail = path.split(head)
if head and tail and not path.exists(head):
try:
makedirs(head, exist_ok=exist_ok)
except FileExistsError:
# Defeats race condition when another thread created the path
pass
cdir = curdir
if isinstance(tail, bytes):
cdir = bytes(curdir, 'ASCII')
if tail == cdir: # xxx/newdir/. exists if xxx/newdir exists
return
try:
mkdir(name, mode)
except OSError:
# Cannot rely on checking for EEXIST, since the operating system
# could give priority to other errors like EACCES or EROFS
if not exist_ok or not path.isdir(name):
raise
| (name, mode=511, exist_ok=False) |
22,674 | salem.gis | mercator_grid | Local (transverse) mercator map centered on a specified point.
Parameters
----------
center_ll : (float, float)
tuple of lon, lat coordinates where the map will be centered.
extent : (float, float)
tuple of eastings, northings giving the extent (in m) of the map
ny : int
number of y grid points wanted to cover the map (default: 600)
nx : int
number of x grid points wanted to cover the map (mutually exclusive
with ny)
origin : str
'lower-left' or 'upper-left'
transverse : bool
whether to use a transverse or a regular Mercator. The default should
have been False, but for backwards-compatibility reasons it is kept at True
| def mercator_grid(center_ll=None, extent=None, ny=600, nx=None,
origin='lower-left', transverse=True):
"""Local (transverse) mercator map centered on a specified point.
Parameters
----------
center_ll : (float, float)
tuple of lon, lat coordinates where the map will be centered.
extent : (float, float)
tuple of eastings, northings giving the extent (in m) of the map
ny : int
number of y grid points wanted to cover the map (default: 600)
nx : int
number of x grid points wanted to cover the map (mutually exclusive
with ny)
origin : str
'lower-left' or 'upper-left'
transverse : bool
whether to use a transverse or a regular Mercator. The default should
have been False, but for backwards-compatibility reasons it is kept at True
"""
# Make a local proj
pname = 'tmerc' if transverse else 'merc'
lon, lat = center_ll
proj_params = dict(proj=pname, lat_0=0., lon_0=lon,
k=0.9996, x_0=0, y_0=0, datum='WGS84')
projloc = pyproj.Proj(proj_params)
# Define a spatial resolution
xx = extent[0]
yy = extent[1]
if nx is None:
nx = ny * xx / yy
else:
ny = nx * yy / xx
nx = np.rint(nx)
ny = np.rint(ny)
e, n = transform_proj(wgs84, projloc, lon, lat)
if origin == 'upper-left':
corner = (-xx / 2. + e, yy / 2. + n)
dxdy = (xx / nx, - yy / ny)
else:
corner = (-xx / 2. + e, -yy / 2. + n)
dxdy = (xx / nx, yy / ny)
return Grid(proj=projloc, x0y0=corner, nxny=(nx, ny), dxdy=dxdy,
pixel_ref='corner')
| (center_ll=None, extent=None, ny=600, nx=None, origin='lower-left', transverse=True) |
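A usage sketch: a 40 km x 30 km local map around an arbitrary example point; nx is derived from ny and the aspect ratio:

```python
from salem import mercator_grid

g = mercator_grid(center_ll=(10.76, 46.79), extent=(40000., 30000.))
print(g.ny, g.nx)   # 600 (the default) and 800 (= 600 * 40000 / 30000)
```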
22,677 | salem.sio | open_metum_dataset | Wrapper to Met Office Unified Model files (experimental)
This is needed because these files are a little messy.
Parameters
----------
file : str
the path to the MetUM file
pole_longitude: optional
Pole longitude position, in unrotated degrees. Defaults to the one
found in the file (if found) and errors otherwise.
pole_latitude: optional
Pole latitude position, in unrotated degrees. Defaults to the one
found in the file (if found) and errors otherwise.
central_rotated_longitude: optional
Longitude rotation about the new pole, in degrees. Defaults to the one
found in the file (if found) and 0 otherwise.
**kwargs : optional
Additional arguments passed on to ``xarray.open_dataset``.
Returns
-------
an xarray Dataset
| def open_metum_dataset(file, pole_longitude=None, pole_latitude=None,
central_rotated_longitude=0., **kwargs):
"""Wrapper to Met Office Unified Model files (experimental)
This is needed because these files are a little messy.
Parameters
----------
file : str
the path to the MetUM file
pole_longitude: optional
Pole longitude position, in unrotated degrees. Defaults to the one
found in the file (if found) and errors otherwise.
pole_latitude: optional
Pole latitude position, in unrotated degrees. Defaults to the one
found in the file (if found) and errors otherwise.
central_rotated_longitude: optional
Longitude rotation about the new pole, in degrees. Defaults to the one
found in the file (if found) and 0 otherwise.
**kwargs : optional
Additional arguments passed on to ``xarray.open_dataset``.
Returns
-------
an xarray Dataset
"""
if not is_rotated_proj_working():
raise RuntimeError('open_metum_dataset currently does not '
'work with certain PROJ versions: '
'https://github.com/pyproj4/pyproj/issues/424')
# open with xarray
ds = xr.open_dataset(file, **kwargs)
# Correct for lons
vn_list = ['grid_longitude_t', 'grid_longitude_uv', 'rlon']
for vn in vn_list:
if vn in ds.coords:
v = ds[vn]
ds[vn] = v.where(v <= 180, v - 360)
# get pyproj string
if pole_longitude is None or pole_latitude is None:
# search for specific attributes names
n_lon = 'grid_north_pole_longitude'
n_lat = 'grid_north_pole_latitude'
# first in dataset
pole_longitude = ds.attrs.get(n_lon, None)
pole_latitude = ds.attrs.get(n_lat, None)
# then as variable attribute
if pole_longitude is None or pole_latitude is None:
for k, v in ds.variables.items():
if n_lon in v.attrs:
pole_longitude = v.attrs[n_lon]
if n_lat in v.attrs:
pole_latitude = v.attrs[n_lat]
if pole_longitude is not None and pole_latitude is not None:
break
srs = ('+ellps=WGS84 +proj=ob_tran +o_proj=latlon '
'+to_meter=0.0174532925199433 '
'+o_lon_p={o_lon_p} +o_lat_p={o_lat_p} +lon_0={lon_0} +no_defs')
params = {
'o_lon_p': central_rotated_longitude,
'o_lat_p': pole_latitude,
'lon_0': 180 + pole_longitude,
}
srs = srs.format(**params)
# add pyproj string everywhere
ds.attrs['pyproj_srs'] = srs
for v in ds.data_vars:
ds[v].attrs['pyproj_srs'] = srs
return ds
| (file, pole_longitude=None, pole_latitude=None, central_rotated_longitude=0.0, **kwargs) |
22,678 | salem.sio | open_mf_wrf_dataset | Open multiple WRF files as a single WRF dataset.
Requires dask to be installed. Note that if your files are sliced by time,
certain diagnostic variables computed from accumulated variables (e.g.
PRCP) won't be available, because they cannot be computed lazily.
This code is adapted from xarray's open_mfdataset function. The xarray
license is reproduced in the salem/licenses directory.
Parameters
----------
paths : str or sequence
Either a string glob in the form `path/to/my/files/*.nc` or an
explicit list of files to open.
chunks : int or dict, optional
Dictionary with keys given by dimension names and values given by chunk
sizes. In general, these should divide the dimensions of each dataset.
If int, chunk each dimension by ``chunks`` .
By default, chunks will be chosen to load entire input files into
memory at once. This has a major impact on performance: please see
xarray's full documentation for more details.
compat : {'identical', 'equals', 'broadcast_equals', 'no_conflicts'}, optional
String indicating how to compare variables of the same name for
potential conflicts when merging:
- 'broadcast_equals': all values must be equal when variables are
broadcast against each other to ensure common dimensions.
- 'equals': all values and dimensions must be the same.
- 'identical': all values, dimensions and attributes must be the
same.
- 'no_conflicts': only values which are not null in both datasets
must be equal. The returned dataset then contains the combination
of all non-null values.
preprocess : callable, optional
If provided, call this function on each dataset prior to concatenation.
lock : False, True or threading.Lock, optional
This argument is passed on to :py:func:`dask.array.from_array`. By
default, a per-variable lock is used when reading data from netCDF
files with the netcdf4 and h5netcdf engines to avoid issues with
concurrent access when using dask's multithreaded backend.
Returns
-------
xarray.Dataset
| def open_mf_wrf_dataset(paths, chunks=None, compat='no_conflicts', lock=None,
preprocess=None):
"""Open multiple WRF files as a single WRF dataset.
Requires dask to be installed. Note that if your files are sliced by time,
certain diagnostic variables computed from accumulated variables (e.g.
PRCP) won't be available, because they cannot be computed lazily.
This code is adapted from xarray's open_mfdataset function. The xarray
license is reproduced in the salem/licenses directory.
Parameters
----------
paths : str or sequence
Either a string glob in the form `path/to/my/files/*.nc` or an
explicit list of files to open.
chunks : int or dict, optional
Dictionary with keys given by dimension names and values given by chunk
sizes. In general, these should divide the dimensions of each dataset.
If int, chunk each dimension by ``chunks`` .
By default, chunks will be chosen to load entire input files into
memory at once. This has a major impact on performance: please see
xarray's full documentation for more details.
compat : {'identical', 'equals', 'broadcast_equals', 'no_conflicts'}, optional
String indicating how to compare variables of the same name for
potential conflicts when merging:
- 'broadcast_equals': all values must be equal when variables are
broadcast against each other to ensure common dimensions.
- 'equals': all values and dimensions must be the same.
- 'identical': all values, dimensions and attributes must be the
same.
- 'no_conflicts': only values which are not null in both datasets
must be equal. The returned dataset then contains the combination
of all non-null values.
preprocess : callable, optional
If provided, call this function on each dataset prior to concatenation.
lock : False, True or threading.Lock, optional
This argument is passed on to :py:func:`dask.array.from_array`. By
default, a per-variable lock is used when reading data from netCDF
files with the netcdf4 and h5netcdf engines to avoid issues with
concurrent access when using dask's multithreaded backend.
Returns
-------
xarray.Dataset
"""
if isinstance(paths, basestring):
paths = sorted(glob(paths))
if not paths:
raise IOError('no files to open')
if lock is None:
lock = NETCDF4_PYTHON_LOCK
try:
datasets = [open_wrf_dataset(p, chunks=chunks or {}, lock=lock)
for p in paths]
except TypeError as err:
if 'lock' not in str(err):
raise
# New xarray backends
datasets = [open_wrf_dataset(p, chunks=chunks or {}) for p in paths]
orig_datasets = datasets
def ds_closer():
for ods in orig_datasets:
ods.close()
if preprocess is not None:
datasets = [preprocess(ds) for ds in datasets]
try:
combined = xr.combine_nested(datasets, combine_attrs='drop_conflicts',
concat_dim='time', compat=compat)
except ValueError:
# Older xarray
combined = xr.combine_nested(datasets, concat_dim='time',
compat=compat)
except AttributeError:
# Even older
combined = xr.auto_combine(datasets, concat_dim='time', compat=compat)
combined.attrs = datasets[0].attrs
try:
combined.set_close(ds_closer)
except AttributeError:
from xarray.backends.api import _MultiFileCloser
mfc = _MultiFileCloser([ods._file_obj for ods in orig_datasets])
combined._file_obj = mfc
# drop accumulated vars if needed (TODO: make this not hard coded)
vns = ['PRCP', 'PRCP_C', 'PRCP_NC']
vns = [vn for vn in vns if vn in combined.variables]
try:
combined = combined.drop_vars(vns)
except AttributeError:
combined = combined.drop(vns)
return combined
| (paths, chunks=None, compat='no_conflicts', lock=None, preprocess=None) |
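A hedged sketch; the glob pattern is hypothetical and dask must be installed:

```python
from salem import open_mf_wrf_dataset

ds = open_mf_wrf_dataset('wrfout_d01_*.nc')
print(ds.time.size)   # all files concatenated along 'time'
ds.close()            # closes every underlying file
```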
22,679 | salem.sio | open_wrf_dataset | Wrapper around xarray's open_dataset to make WRF files a bit better.
This is needed because variables often do not have enough georef attrs
to be understood alone, and datasets tend to lose their attrs with
operations...
Parameters
----------
file : str
the path to the WRF file
**kwargs : optional
Additional arguments passed on to ``xarray.open_dataset``.
Returns
-------
an xarray Dataset
| def open_wrf_dataset(file, **kwargs):
"""Wrapper around xarray's open_dataset to make WRF files a bit better.
This is needed because variables often do not have enough georef attrs
to be understood alone, and datasets tend to lose their attrs with
operations...
Parameters
----------
file : str
the path to the WRF file
**kwargs : optional
Additional arguments passed on to ``xarray.open_dataset``.
Returns
-------
an xarray Dataset
"""
nc = netCDF4.Dataset(file)
nc.set_auto_mask(False)
# Change staggered variables to unstaggered ones
for vn, v in nc.variables.items():
if wrftools.Unstaggerer.can_do(v):
nc.variables[vn] = wrftools.Unstaggerer(v)
# Check if we can add diagnostic variables to the pot
for vn in wrftools.var_classes:
cl = getattr(wrftools, vn)
if vn not in nc.variables and cl.can_do(nc):
nc.variables[vn] = cl(nc)
# trick xarray with our custom netcdf
ds = xr.open_dataset(NetCDF4DataStore(nc), **kwargs)
# remove time dimension to lon lat
for vn in ['XLONG', 'XLAT']:
try:
v = ds[vn].isel(Time=0)
ds[vn] = xr.DataArray(v.values, dims=['south_north', 'west_east'])
except (ValueError, KeyError):
pass
# Convert time (if necessary)
if 'Time' in ds.dims:
time = netcdf_time(ds)
if time is not None:
ds['Time'] = time
ds = ds.rename({'Time': 'time'})
tr = {'Time': 'time', 'XLAT': 'lat', 'XLONG': 'lon', 'XTIME': 'xtime'}
tr = {k: tr[k] for k in tr.keys() if k in ds.variables}
ds = ds.rename(tr)
# drop ugly vars
vns = ['Times', 'XLAT_V', 'XLAT_U', 'XLONG_U', 'XLONG_V']
vns = [vn for vn in vns if vn in ds.variables]
try:
ds = ds.drop_vars(vns)
except AttributeError:
ds = ds.drop(vns)
# add cartesian coords
ds['west_east'] = ds.salem.grid.x_coord
ds['south_north'] = ds.salem.grid.y_coord
# add pyproj string everywhere
ds.attrs['pyproj_srs'] = ds.salem.grid.proj.srs
for v in ds.data_vars:
ds[v].attrs['pyproj_srs'] = ds.salem.grid.proj.srs
return ds
| (file, **kwargs) |
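A hedged sketch; the file name follows the usual WRF output convention and is purely illustrative:

```python
from salem import open_wrf_dataset

ds = open_wrf_dataset('wrfout_d01_2008-10-26_12:00:00')  # hypothetical file
print(ds.attrs['pyproj_srs'])   # georeferencing attached by salem
print('time' in ds.dims)        # True: 'Time' has been renamed to 'time'
```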
22,680 | salem.sio | open_xr_dataset | Thin wrapper around xarray's open_dataset.
This is needed because variables often do not have enough georef attrs
to be understood alone, and datasets tend to lose their attrs with
operations...
Returns
-------
an xarray Dataset
| def open_xr_dataset(file):
"""Thin wrapper around xarray's open_dataset.
This is needed because variables often do not have enough georef attrs
to be understood alone, and datasets tend to lose their attrs with
operations...
Returns
-------
an xarray Dataset
"""
# if geotiff, use Salem
p, ext = os.path.splitext(file)
if (ext.lower() == '.tif') or (ext.lower() == '.tiff'):
from salem import GeoTiff
geo = GeoTiff(file)
# TODO: currently everything is loaded in memory (baaad)
da = xr.DataArray(geo.get_vardata(),
coords={'x': geo.grid.center_grid.x_coord,
'y': geo.grid.center_grid.y_coord},
dims=['y', 'x'])
ds = xr.Dataset()
ds.attrs['pyproj_srs'] = geo.grid.proj.srs
ds['data'] = da
ds['data'].attrs['pyproj_srs'] = geo.grid.proj.srs
return ds
# otherwise rely on xarray
ds = xr.open_dataset(file)
# did we get the grid? If not no need to go further
grid = ds.salem.grid
# add cartesian coords for WRF
if 'west_east' in ds.dims:
ds['west_east'] = ds.salem.grid.x_coord
ds['south_north'] = ds.salem.grid.y_coord
# add pyproj string everywhere
ds.attrs['pyproj_srs'] = grid.proj.srs
for v in ds.data_vars:
ds[v].attrs['pyproj_srs'] = grid.proj.srs
return ds
| (file) |
22,685 | salem.gis | proj_is_latlong | Shortcut function because of deprecation. | def proj_is_latlong(proj):
"""Shortcut function because of deprecation."""
try:
return proj.is_latlong()
except AttributeError:
return proj.crs.is_geographic
| (proj) |
22,686 | salem.gis | proj_is_same | Checks if two pyproj projections are equal.
See https://github.com/jswhit/pyproj/issues/15#issuecomment-208862786
Parameters
----------
p1 : pyproj.Proj
first projection
p2 : pyproj.Proj
second projection
| def proj_is_same(p1, p2):
"""Checks is two pyproj projections are equal.
See https://github.com/jswhit/pyproj/issues/15#issuecomment-208862786
Parameters
----------
p1 : pyproj.Proj
first projection
p2 : pyproj.Proj
second projection
"""
if has_gdal:
# this is more robust, but gdal is a pain
osr.UseExceptions()
s1 = osr.SpatialReference()
s1.ImportFromProj4(p1.srs)
s2 = osr.SpatialReference()
s2.ImportFromProj4(p2.srs)
return s1.IsSame(s2) == 1 # IsSame returns 1 or 0
else:
# at least we can try to sort it
p1 = '+'.join(sorted(p1.srs.split('+')))
p2 = '+'.join(sorted(p2.srs.split('+')))
return p1 == p2
| (p1, p2) |
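A sketch of the GDAL-free fallback path: equivalent PROJ strings with reordered parameters should compare equal:

```python
import pyproj
from salem.gis import proj_is_same

p1 = pyproj.Proj('+proj=longlat +datum=WGS84 +no_defs')
p2 = pyproj.Proj('+datum=WGS84 +no_defs +proj=longlat')
print(proj_is_same(p1, p2))   # expected True: parameter order is irrelevant
```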
22,687 | salem.gis | proj_to_cartopy | Converts a pyproj.Proj to a cartopy.crs.Projection
Parameters
----------
proj: pyproj.Proj
the projection to convert
Returns
-------
a cartopy.crs.Projection object
| def proj_to_cartopy(proj):
"""Converts a pyproj.Proj to a cartopy.crs.Projection
Parameters
----------
proj: pyproj.Proj
the projection to convert
Returns
-------
a cartopy.crs.Projection object
"""
import cartopy
import cartopy.crs as ccrs
proj = check_crs(proj)
if proj_is_latlong(proj):
return ccrs.PlateCarree()
srs = proj.srs
if has_gdal:
# this is more robust, as srs could be anything (espg, etc.)
from osgeo import osr
s1 = osr.SpatialReference()
s1.ImportFromProj4(proj.srs)
if s1.ExportToProj4():
srs = s1.ExportToProj4()
km_proj = {'lon_0': 'central_longitude',
'lat_0': 'central_latitude',
'x_0': 'false_easting',
'y_0': 'false_northing',
'lat_ts': 'latitude_true_scale',
'o_lon_p': 'central_rotated_longitude',
'o_lat_p': 'pole_latitude',
'k': 'scale_factor',
'zone': 'zone',
}
km_globe = {'a': 'semimajor_axis',
'b': 'semiminor_axis',
}
km_std = {'lat_1': 'lat_1',
'lat_2': 'lat_2',
}
kw_proj = dict()
kw_globe = dict()
kw_std = dict()
for s in srs.split('+'):
s = s.split('=')
if len(s) != 2:
continue
k = s[0].strip()
v = s[1].strip()
try:
v = float(v)
except ValueError:
pass
if k == 'proj':
if v == 'tmerc':
cl = ccrs.TransverseMercator
kw_proj['approx'] = True
if v == 'lcc':
cl = ccrs.LambertConformal
if v == 'merc':
cl = ccrs.Mercator
if v == 'utm':
cl = ccrs.UTM
if v == 'stere':
cl = ccrs.Stereographic
if v == 'ob_tran':
cl = ccrs.RotatedPole
if k in km_proj:
if k == 'zone':
v = int(v)
kw_proj[km_proj[k]] = v
if k in km_globe:
kw_globe[km_globe[k]] = v
if k in km_std:
kw_std[km_std[k]] = v
globe = None
if kw_globe:
globe = ccrs.Globe(ellipse='sphere', **kw_globe)
if kw_std:
kw_proj['standard_parallels'] = (kw_std['lat_1'], kw_std['lat_2'])
# mercatoooor
if cl.__name__ == 'Mercator':
kw_proj.pop('false_easting', None)
kw_proj.pop('false_northing', None)
if Version(cartopy.__version__) < Version('0.15'):
kw_proj.pop('latitude_true_scale', None)
elif cl.__name__ == 'Stereographic':
kw_proj.pop('scale_factor', None)
if 'latitude_true_scale' in kw_proj:
kw_proj['true_scale_latitude'] = kw_proj['latitude_true_scale']
kw_proj.pop('latitude_true_scale', None)
elif cl.__name__ == 'RotatedPole':
if 'central_longitude' in kw_proj:
kw_proj['pole_longitude'] = kw_proj['central_longitude'] - 180
kw_proj.pop('central_longitude', None)
else:
kw_proj.pop('latitude_true_scale', None)
try:
return cl(globe=globe, **kw_proj)
except TypeError:
del kw_proj['approx']
return cl(globe=globe, **kw_proj)
| (proj) |
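A usage sketch (requires cartopy); the Lambert conformal PROJ string mirrors the WRF setup used elsewhere in this section:

```python
import pyproj
from salem.gis import proj_to_cartopy

p = pyproj.Proj('+proj=lcc +lat_1=30 +lat_2=60 +lat_0=45 +lon_0=10 '
                '+x_0=0 +y_0=0 +a=6370000 +b=6370000')
crs = proj_to_cartopy(p)
print(type(crs).__name__)   # LambertConformal
```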