...
 
Commits (4)
......@@ -158,7 +158,7 @@ class AbstractFeature(cerbere.feature.feature.Feature):
for field, values in content.items():
if field not in GEOCOORDINATES:
if isinstance(values, Field):
data_vars[values.name] = values.xarray()
data_vars[values.name] = values.to_dataarray()
elif isinstance(values, (xr.DataArray, numpy.ndarray)):
data_vars[field] = (('lon', 'lat',), values)
else:
......@@ -656,7 +656,7 @@ class AbstractFeature(cerbere.feature.feature.Feature):
* a :class:`xarray.DataArray`
"""
if isinstance(values, Field):
xrdata = values.xarray()
xrdata = values.to_dataarray()
elif isinstance(values, xr.DataArray):
xrdata = values
......
......@@ -185,7 +185,7 @@ class Field(newfield.Field):
"""
return Field(data=xrdata)
def xarray(self):
def to_dataarray(self):
"""Return the field values a xarray DataArray"""
if self.mapper is None:
return self.array
......
......@@ -258,7 +258,7 @@ class Dataset(ABC):
if isinstance(self._url, list) and mode in [WRITE_NEW, READ_WRITE]:
raise ValueError("A list of files is read only")
self.open(**kwargs)
self._open(**kwargs)
elif isinstance(dataset, xr.Dataset):
self.dataset = dataset
......@@ -307,16 +307,16 @@ class Dataset(ABC):
for var in data.keys():
if isinstance(data[var], Field):
data[var] = to_dict(data[var].xarray)
data[var] = to_dict(data[var].to_dataarray)
if 'coords' in data.keys():
for var, value in data['coords'].items():
if isinstance(value, Field):
data[var] = to_dict(value.xarray)
data[var] = to_dict(value.to_dataarray)
if 'data_vars' in data.keys():
for var, value in data['data_vars'].items():
if isinstance(value, Field):
data[var] = to_dict(value.xarray)
data[var] = to_dict(value.to_dataarray)
# create a dataset
self.dataset = xr.Dataset.from_dict(data)
......@@ -359,7 +359,7 @@ class Dataset(ABC):
raise NotImplementedError
@classmethod
def exists(cls, url):
def exists(cls, url: str) -> bool:
"""tests if `url` is an existing resource"""
try:
result = urlparse(url)
......@@ -509,7 +509,7 @@ class Dataset(ABC):
else:
return xr.Dataset()
def open(self, **kwargs):
def _open(self, **kwargs):
if self.is_opened():
logging.warning("A file is already opened : {}".format(
self._url)
......@@ -653,35 +653,26 @@ class Dataset(ABC):
return tuple(self.get_field_dims(fieldname).values())
@property
def _varnames(self):
"""Returns the names of all the fields of the dataset.
Returns:
list<string>: list of field names
def _varnames(self) -> List[str]:
"""List names of all the fields (including coordinates) of the dataset.
"""
if isinstance(self.dataset, Dataset):
return self.dataset._varnames
return list(self.dataset.variables.keys())
@property
def coordnames(self):
"""Returns the names of the coordinate fields of the dataset.
Returns:
list<string>: list of field names
"""
def coordnames(self) -> List[str]:
"""List of names of the coordinate fields of the dataset."""
if isinstance(self.dataset, Dataset):
return self.dataset._coordnames
return list(self.dataset.coords.keys())
@property
def geocoords(self):
"""
Return the geolocation fields
"""
def geocoords(self) -> List['xarray.DataArray']:
"""List of geolocation coordinates (as DataArray)"""
if isinstance(self.dataset, Dataset):
return self.dataset.geocoords
return self.dataset.geocoords
return [
_ for _ in self.dataset.coords if _ in GEOCOORDINATES
]
......@@ -736,7 +727,7 @@ class Dataset(ABC):
.format(field.name)
)
self.dataset = self.dataset.assign(
{field.name: field.xarray}
{field.name: field.to_dataarray}
)
field._attach_dataset(self)
......
......@@ -39,17 +39,16 @@ class Field(object):
"""A Field describes a scientific data array. It contains data and
metadata attributes.
This is an extension of xarray's DataArray with more strict requirements
on attributes.
This is an extension of xarray's DataArray with stricter requirements on
attributes.
A ``Field`` object can be constructed with:
A :class:`Field` object can be constructed with:
* a :class:`xarray.DataArray` object, provided in ``array``
A `Field`` object can be attached to a `mapper` object (of any class
inherited from a :class:`cerbere.mapper.AbstractMapper`, provided with the
``mapper`` argument.
* a xarray :class:`~xarray.DataArray` object, provided in ``array``
A :class:`Field` object can be attached to a :class:`~cerbere.dataset.dataset.Dataset`
object (of any class inherited from a :class:`~cerbere.dataset.dataset.Dataset`,
provided with the ``dataset`` argument.
Args:
......@@ -93,7 +92,7 @@ class Field(object):
authority (str): naming authority referencing the provided
standard name
standardname (str): standard label for a phenomenon, with
standard_name (optional, str): standard label for a phenomenon, with
respect to the convention stated in `authority` argument.
This corresponds to a standard_name attribute in a CF compliant
NetCDF file.
......@@ -116,7 +115,7 @@ class Field(object):
units: Optional[str] = None,
quality_vars: Optional[List[str]] = None,
attrs: Optional[Mapping[str, Any]] = None,
**kwargs):
**kwargs) -> None:
"""
"""
if name is not None and not isinstance(name, str):
......@@ -153,29 +152,24 @@ class Field(object):
data = data
# instantiate the xarray representation
kwargs['dims'] = list(dims)
kwargs['attrs'] = attrs
self.array = xr.DataArray(
data,
dims=list(dims),
attrs=attrs,
name=name
# dims=list(dims),
# attrs=attrs,
name=name,
**kwargs
)
# Overrides DataArray object when conflicts with the superceding
# arguments
if name is not None:
self.name = name
if standard_name is not None:
self.standard_name = standard_name
if description is not None:
self.description = description
# _FillValue
if fillvalue is not None:
self.array.attrs['_FillValue'] = fillvalue
# quality info arrays
if quality_vars is not None:
self.array.attrs['quality_vars'] = quality_vars
self.name = name
self.standard_name = standard_name
self.description = description
self.fill_value = fillvalue
self.units = units
self.array.attrs['quality_vars'] = quality_vars
# components for complex fields
if fields is not None:
......@@ -198,14 +192,14 @@ class Field(object):
self.array.encoding['cerbere_status'] = "changed"
@classmethod
def to_field(cls, xrdata: 'xarray.DataArray') -> 'Field':
def to_field(cls, data: xr.DataArray) -> 'Field':
"""Cast a xarray DataArray to a
:class:`cerbere.datamodel.field.Field` object
"""
return Field(data=xrdata)
return Field(data=data)
@property
def xarray(self):
def to_dataarray(self):
"""Return the field values a xarray DataArray"""
if self.dataset is None:
return self.array
......@@ -328,34 +322,34 @@ class Field(object):
self.array.attrs['valid_max'] = value
@property
def units(self):
"""return the field units"""
def units(self) -> str:
"""return the field units (``units`` CF attribute)"""
try:
return self.array.attrs['units']
except KeyError:
return None
return
@units.setter
def units(self, units):
"""set the variable units"""
def units(self, units: str):
"""set the variable units (``units`` CF attribute)"""
self.array.attrs['units'] = units
@property
def description(self):
"""return the field description"""
"""return the field description (``long_name`` CF attribute)"""
try:
return self.array.attrs['long_name']
except KeyError:
return None
@description.setter
def description(self, description):
"""set the field description"""
def description(self, description: str) -> None:
"""set the field description (``long_name`` CF attribute)"""
self.array.attrs['long_name'] = description
@property
def standard_name(self):
"""return the field standard name"""
def standard_name(self) -> str:
"""return the field standard name (``standard_name`` CF attribute)"""
try:
return (
self.array.attrs['standard_name'],
......@@ -365,8 +359,8 @@ class Field(object):
return None
@standard_name.setter
def standard_name(self, standard_name):
"""set the standard_name"""
def standard_name(self, standard_name: str) -> None:
"""set the standard_name (``standard_name`` CF attribute)"""
if isinstance(standard_name, tuple):
self.array.attrs['standard_name'] = standard_name[0]
self.array.attrs['authority'] = standard_name[1]
......@@ -460,8 +454,8 @@ class Field(object):
Return the field values as a :class:`numpy.ma.MaskedArray` object.
Args:
index: any kind of xarray indexing compatible with
:func:`xarray.DataArray.isel` selection method.
index: any kind of xarray indexing compatible with xarray
:func:`~xarray.DataArray.isel` selection method.
padding: pad the result with fill values where slices are out of the
field dimension limits. Default is False.
......@@ -518,9 +512,9 @@ class Field(object):
def _read_dataarray(
cls,
xrdata,
index=None,
padding=False,
as_masked_array=True,
index: Mapping[Hashable, Any]=None,
padding: bool=False,
as_masked_array: bool=True,
**kwargs
):
"""
......@@ -533,7 +527,7 @@ class Field(object):
xrdata (:class:`xarray.DataArray`): the xarray ``DataArray`` object
from which to extract the values.
index (optional): any kind of xarray indexing compatible with
index (dict, optional): any kind of xarray indexing compatible with
``isel`` selection method.
as_masked_array (bool): return the result as a numpy masked array
......@@ -560,8 +554,8 @@ class Field(object):
@classmethod
def _pad_data(
cls,
array: 'xarray.core.dataset.Dataset',
subset: 'xarray.core.dataset.Dataset',
array: 'xarray.core.dataset.Dataset',
subset: 'xarray.core.dataset.Dataset',
index: Optional[Mapping[str, slice]]
) -> 'numpy.ndarray':
"""
......@@ -647,7 +641,11 @@ class Field(object):
return False
return self.handler.is_saved()
def bitmask_or(self, meanings, index=None, **kwargs):
def bitmask_or(
self,
meanings,
index: Mapping[Hashable, Any]=None,
**kwargs):
"""helper function to get a boolean mask from a bit field.
Bit (or flag) fields are arrays of integers where each bit has a
......@@ -700,61 +698,63 @@ class Field(object):
return self.get_values(slices=index) & int(masksum) != 0
@classmethod
def compute(cls, operator, field1, field2=None, variable=None):
"""Perform an operation and returns the result as a field
def compute(
cls,
func,
field1: 'Field', field2: 'Field'=None,
**kwargs) -> 'Field':
"""Apply a function to a field (possibly combining with a second one)
and returns the result as a new field.
The operator may be for instance a numpy MaskedArray operator
The function may be for instance a numpy MaskedArray operator
such as numpy.ma.anom, numpy.ma.corr,...
To be used with caution.
Args:
operator (function) : the function to be called (ex: numpy.ma.anom)
field1 (Field) : the field argument to the operator
field2 (Field) : an optional 2nd field argument to the operator
variable (Variable) : variable of the returned module field. If not
provided, the returned field is created with a basic variable
definition.
func (function) : the function to be called (ex: numpy.ma.anom)
field1 (Field) : the field argument to the function
field2 (Field, optional) : an optional 2nd field argument to the
function
kwargs : any argument to Field creation further describing the
returned Field (units, name, ...).
Returns:
Field: the result field
"""
if variable is None:
varname = kwargs.pop('name', 'result')
variable = Variable(varname)
if field2 is None:
values = partial(operator(field1))
values = func(field1.get_values())
else:
values = partial(operator(field1, field2))
field = Field(variable,
dims=copy.copy(field1.dimensions),
values = func(field1.get_values(), field2.get_values())
field = Field(data=values,
name=varname,
dims=copy.copy(field1.dims),
datatype=field1.datatype,
fillvalue=field1.fillvalue,
values=values,
units=field1.units)
fillvalue=field1.fill_value,
**kwargs)
return field
def clone(
self,
index: Optional[Mapping[str, slice]] = None,
padding: Optional[bool] = False,
prefix: Optional[str] = None,
**kwargs):
"""Create a copy of a field, limiting to a set of slices or indices, and
index: Mapping[Hashable, Any] = None,
padding: bool = False,
prefix: str = None,
**kwargs) -> 'Field':
"""Create a copy of a field, or a subset defined by index, and
padding out as required.
The returned field does not contain any attachment to the source file
attached to the original field, if any.
Args
fieldname (str): The name of the field to extract.
index (xarray index type, optional): any kind of xarray indexing
Args:
index (dict, optional):any kind of xarray indexing compatible with
xarray :func:`~xarray.DataArray.isel` selection method.
padding (bool, optional): True to pad out feature with fill values
to the extent of the dimensions.
prefix (str, optional): add a prefix string to the field names of
the extracted subset.
the extracted subset.
"""
if index is None:
new_field = Field(data=self.array.copy(deep=True))
......@@ -777,11 +777,11 @@ class Field(object):
new_field.set_name(prefix + new_field.name)
return new_field
def rename(self, newname: str):
"""Rename the field.
def rename(self, newname: str) -> None:
"""Rename the field inplace.
Args:
newname: new name of the field
newname (str): new name of the field
"""
if self._mapper is not None:
......@@ -792,13 +792,13 @@ class Field(object):
self.name = newname
def __add__(self, other):
def __add__(self, other: 'Field') -> 'Field':
"""Return a new field with the sum of current and an other field."""
res = Field.convert_from_xarray(self.xrdata + other.xrdata)
res.xrdata.name = "{}_{}_sum".format(self.name, other.name)
return res
def __sub__(self, other):
def __sub__(self, other: 'Field') -> 'Field':
"""Return a new field with the difference of current and an other
field.
"""
......@@ -834,7 +834,7 @@ def module(u, v, variable=None):
field = Field(variable,
dims=copy.copy(u.dimensions),
datatype=u.datatype,
fillvalue=u.fillvalue,
fillvalue=u.fill_value,
values=values,
units=u.units)
return field
......@@ -46,9 +46,9 @@ class GHRSSTNCDataset(NCDataset):
self.__collection_id = None
def open(self, **kwargs):
def _open(self, **kwargs):
# url needs to be opened first in order to guess default datamodel
super().open(**kwargs)
super()._open(**kwargs)
if self._mode != 'w':
......
......@@ -82,7 +82,7 @@ class Feature(Dataset):
* a :class:`xarray.DataArray`
"""
if isinstance(values, Field):
xrdata = values.xarray()
xrdata = values.to_dataarray()
elif isinstance(values, xr.DataArray):
xrdata = values
......
......@@ -43,11 +43,11 @@ class ECMWF05NCFile(NCFile):
super(ECMWF05NCFile, self).__init__(url=url,center_on_greenwhich=True, **kwargs)
return
def open(self,
view=None,
datamodel=None,
datamodel_geolocation_dims=None):
handler = super(ECMWF05NCFile, self).open(view=view,datamodel='Grid',datamodel_geolocation_dims=datamodel_geolocation_dims)
def _open(self,
view=None,
datamodel=None,
datamodel_geolocation_dims=None):
handler = super(ECMWF05NCFile, self)._open(view=view, datamodel='Grid', datamodel_geolocation_dims=datamodel_geolocation_dims)
return handler
def get_fieldnames(self):
......
......@@ -44,9 +44,9 @@ class GHRSSTNCFile(Dataset):
self.__collection_id = None
def open(self, **kwargs):
def _open(self, **kwargs):
# url needs to be opened first in order to guess default datamodel
super().open(**kwargs)
super()._open(**kwargs)
# recompose and unpack time variable if required
self._unsplit_time_coord(split_time=('time', 'sst_dtime'), unpack=True)
......
......@@ -78,7 +78,7 @@ class GribFile(AbstractMapper):
logging.debug("opening in mode : %s", self._mode)
logging.debug("url : %s", url)
# print "START", url
self._handler = pygrib.open(url)
self._handler = pygrib.open(url)
# print "OPEN"
# get list of fields
self._handler.seek(0)
......
......@@ -82,7 +82,7 @@ class GribFile(AbstractMapper):
logging.debug("opening in mode : %s", self._mode)
logging.debug("url : %s", url)
# print "START", url
self._handler = pygrib.open(url)
self._handler = pygrib.open(url)
# print "OPEN"
# get list of fields
self._handler.seek(0)
......
......@@ -48,7 +48,7 @@ logging.info("")
logging.info("Open file")
f = classreader(url=infile)
f.open()
f._open()
logging.info("OK")
logging.info("")
......
......@@ -61,7 +61,7 @@ def test_lonlat(hh):
# sensinle_heat = hh.read_values('shtfl')
def recup_var_names_with_pygrib(ff):
grbs=pygrib.open(ff)
grbs=pygrib.open(ff)
print dir(grbs)
grbs.message(6).data()[1].shape
for dodo in range(grbs.messages):
......
......@@ -22,7 +22,7 @@ elif 'S3A_OL_2_WRR' in fname or 'S3A_OL_2_WFR' in fname:
ncf = SAFEOLFile(url=fname)
print 'OPEN'
ncf.open()
ncf._open()
print '\n\nFIELDS :'
fields = ncf.get_fieldnames()
......
......@@ -61,7 +61,7 @@ else:
print 'OPEN'
ncf.open()
ncf._open()
print '\n\nFIELDS :'
fields = ncf.get_fieldnames()
......
......@@ -8,30 +8,68 @@ This page provides a summary of cerbere's API. For more details
and examples, refer to the relevant chapters in the main part of the
documentation.
See also: :ref:`public api`
Dataset
=======
The base class for ``Dataset`` objects, than can be imported from
``cerbere.dataset.dataset`` module. All other classes in ``cerbere.dataset``
package are derived from this class.
Creating a dataset
------------------
.. currentmodule:: cerbere.dataset.dataset
.. autosummary::
:toctree: generated/
Dataset
Dataset
Attributes
----------
.. autosummary::
:toctree: generated
Dataset.dims
Dataset.sizes
Dataset.geocoords
Dataset.coordnames
Dataset.attrs
Other built-in datasets
-----------------------
.. currentmodule:: cerbere.dataset
.. autosummary::
:toctree: generated/
Dataset.dims
Dataset.sizes
Dataset.coords
Dataset.attrs
ncdataset.NCDataset
ghrsstncdataset.GHRSSTNCDataset
Field
=======
Creating a field
----------------
.. currentmodule:: cerbere.dataset.field
.. autosummary::
:toctree: generated
dataset.field.Field
Feature
=======
Creating a feature
------------------
.. currentmodule:: cerbere.feature
.. autosummary::
:toctree: generated
feature.Feature
trajectory.Trajectory
grid.Grid
swath.Swath
pointcollection.PointCollection
# Makefile for Sphinx documentation
#
# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = _build
# User-friendly check for sphinx-build
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
endif
# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
help:
@echo "Please use \`make <target>' where <target> is one of"
@echo " html to make standalone HTML files"
@echo " dirhtml to make HTML files named index.html in directories"
@echo " singlehtml to make a single large HTML file"
@echo " pickle to make pickle files"
@echo " json to make JSON files"
@echo " htmlhelp to make HTML files and a HTML help project"
@echo " qthelp to make HTML files and a qthelp project"
@echo " devhelp to make HTML files and a Devhelp project"
@echo " epub to make an epub"
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
@echo " latexpdf to make LaTeX files and run them through pdflatex"
@echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
@echo " text to make text files"
@echo " man to make manual pages"
@echo " texinfo to make Texinfo files"
@echo " info to make Texinfo files and run them through makeinfo"
@echo " gettext to make PO message catalogs"
@echo " changes to make an overview of all changed/added/deprecated items"
@echo " xml to make Docutils-native XML files"
@echo " pseudoxml to make pseudoxml-XML files for display purposes"
@echo " linkcheck to check all external links for integrity"
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
clean:
rm -rf $(BUILDDIR)/*
html:
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
dirhtml:
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
singlehtml:
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
@echo
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
pickle:
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
@echo
@echo "Build finished; now you can process the pickle files."
json:
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
@echo
@echo "Build finished; now you can process the JSON files."
htmlhelp:
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
@echo
@echo "Build finished; now you can run HTML Help Workshop with the" \
".hhp project file in $(BUILDDIR)/htmlhelp."
qthelp:
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
@echo
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/cerbere.qhcp"
@echo "To view the help file:"
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/cerbere.qhc"
devhelp:
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
@echo
@echo "Build finished."
@echo "To view the help file:"
@echo "# mkdir -p $$HOME/.local/share/devhelp/cerbere"
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/cerbere"
@echo "# devhelp"
epub:
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
@echo
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
latex:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
@echo "Run \`make' in that directory to run these through (pdf)latex" \
"(use \`make latexpdf' here to do that automatically)."
latexpdf:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through pdflatex..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
latexpdfja:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through platex and dvipdfmx..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
text:
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
@echo
@echo "Build finished. The text files are in $(BUILDDIR)/text."
man:
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
@echo
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
texinfo:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
@echo "Run \`make' in that directory to run these through makeinfo" \
"(use \`make info' here to do that automatically)."
info:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo "Running Texinfo files through makeinfo..."
make -C $(BUILDDIR)/texinfo info
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
gettext:
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
@echo
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
changes:
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
@echo
@echo "The overview file is in $(BUILDDIR)/changes."
linkcheck:
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
@echo
@echo "Link check complete; look for any errors in the above output " \
"or in $(BUILDDIR)/linkcheck/output.txt."
doctest:
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
@echo "Testing of doctests in the sources finished, look at the " \
"results in $(BUILDDIR)/doctest/output.txt."
xml:
$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
@echo
@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
pseudoxml:
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
@echo
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
cerbere.dataset
===============
.. rubric:: Description
.. automodule:: cerbere.dataset
.. currentmodule:: cerbere.dataset
cerbere.feature
===============
.. rubric:: Description
.. automodule:: cerbere.feature
.. currentmodule:: cerbere.feature
.. cerbere documentation master file, created by
sphinx-quickstart on Tue Dec 10 15:47:17 2013.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
Welcome to cerbere's reference documentation!
=============================================
Contents:
.. toctree::
:maxdepth: 4
cerbere
setup
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
@ECHO OFF
REM Command file for Sphinx documentation
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
set BUILDDIR=_build
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
set I18NSPHINXOPTS=%SPHINXOPTS% .
if NOT "%PAPER%" == "" (
set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
)
if "%1" == "" goto help
if "%1" == "help" (
:help
echo.Please use `make ^<target^>` where ^<target^> is one of
echo. html to make standalone HTML files
echo. dirhtml to make HTML files named index.html in directories
echo. singlehtml to make a single large HTML file
echo. pickle to make pickle files
echo. json to make JSON files
echo. htmlhelp to make HTML files and a HTML help project
echo. qthelp to make HTML files and a qthelp project
echo. devhelp to make HTML files and a Devhelp project
echo. epub to make an epub
echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
echo. text to make text files
echo. man to make manual pages
echo. texinfo to make Texinfo files
echo. gettext to make PO message catalogs
echo. changes to make an overview over all changed/added/deprecated items
echo. xml to make Docutils-native XML files
echo. pseudoxml to make pseudoxml-XML files for display purposes
echo. linkcheck to check all external links for integrity
echo. doctest to run all doctests embedded in the documentation if enabled
goto end
)
if "%1" == "clean" (
for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
del /q /s %BUILDDIR%\*
goto end
)
%SPHINXBUILD% 2> nul
if errorlevel 9009 (
echo.
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
echo.installed, then set the SPHINXBUILD environment variable to point
echo.to the full path of the 'sphinx-build' executable. Alternatively you
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.http://sphinx-doc.org/
exit /b 1
)
if "%1" == "html" (
%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The HTML pages are in %BUILDDIR%/html.
goto end
)
if "%1" == "dirhtml" (
%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
goto end
)
if "%1" == "singlehtml" (
%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
goto end
)
if "%1" == "pickle" (
%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can process the pickle files.
goto end
)
if "%1" == "json" (
%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can process the JSON files.
goto end
)
if "%1" == "htmlhelp" (
%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can run HTML Help Workshop with the ^
.hhp project file in %BUILDDIR%/htmlhelp.
goto end
)
if "%1" == "qthelp" (
%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can run "qcollectiongenerator" with the ^
.qhcp project file in %BUILDDIR%/qthelp, like this:
echo.^> qcollectiongenerator %BUILDDIR%\qthelp\cerbere.qhcp
echo.To view the help file:
echo.^> assistant -collectionFile %BUILDDIR%\qthelp\cerbere.ghc
goto end
)
if "%1" == "devhelp" (
%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
if errorlevel 1 exit /b 1
echo.
echo.Build finished.
goto end
)
if "%1" == "epub" (
%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The epub file is in %BUILDDIR%/epub.
goto end
)
if "%1" == "latex" (
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
if errorlevel 1 exit /b 1
echo.
echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
goto end
)
if "%1" == "latexpdf" (
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
cd %BUILDDIR%/latex
make all-pdf
cd %BUILDDIR%/..
echo.
echo.Build finished; the PDF files are in %BUILDDIR%/latex.
goto end
)
if "%1" == "latexpdfja" (
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
cd %BUILDDIR%/latex
make all-pdf-ja
cd %BUILDDIR%/..
echo.
echo.Build finished; the PDF files are in %BUILDDIR%/latex.
goto end
)
if "%1" == "text" (
%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The text files are in %BUILDDIR%/text.
goto end
)
if "%1" == "man" (
%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The manual pages are in %BUILDDIR%/man.
goto end
)
if "%1" == "texinfo" (
%SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
goto end
)
if "%1" == "gettext" (
%SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
goto end
)
if "%1" == "changes" (
%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
if errorlevel 1 exit /b 1
echo.
echo.The overview file is in %BUILDDIR%/changes.
goto end
)
if "%1" == "linkcheck" (
%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
if errorlevel 1 exit /b 1
echo.
echo.Link check complete; look for any errors in the above output ^
or in %BUILDDIR%/linkcheck/output.txt.
goto end
)
if "%1" == "doctest" (
%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
if errorlevel 1 exit /b 1
echo.
echo.Testing of doctests in the sources finished, look at the ^
results in %BUILDDIR%/doctest/output.txt.
goto end
)
if "%1" == "xml" (
%SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The XML files are in %BUILDDIR%/xml.
goto end
)
if "%1" == "pseudoxml" (
%SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
goto end
)
:end
......@@ -11,9 +11,23 @@
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import pathlib
import sys
sys.path.insert(0, os.path.abspath('..'))
#sys.path.insert(0, os.path.abspath('..'))
# make sure the source version is preferred (#3567)
root = pathlib.Path(__file__).absolute().parent.parent
os.environ["PYTHONPATH"] = str(root)
sys.path.insert(0, str(root))
import cerbere # isort:skip
allowed_failures = set()
print("python exec:", sys.executable)
print("sys.path:", sys.path)
print("PYTHONPATH:", os.environ["PYTHONPATH"])
# -- Project information -----------------------------------------------------
......@@ -27,21 +41,37 @@ release = '2'
# -- General configuration ---------------------------------------------------
autoclass_content = "both" # include both class docstring and __init__
autodoc_default_flags = [
# Make sure that any autodoc declarations show the right members
"members",
"inherited-members",
"show-inheritance",
]
# Default autodoc behaviour for every documented object: include
# members, inherited members, and the inheritance diagram line.
autodoc_default_options = {
# Make sure that any autodoc declarations show the right members
"members": True,
"inherited-members": True,
"show-inheritance": True,
}
# Render type hints inside the signature, not in the description body.
autodoc_typehints = "signature"
autosummary_generate = True # Make _autosummary files and include them
napoleon_numpy_docstring = False # Force consistency, leave only Google
napoleon_use_rtype = False # More legible
numpydoc_class_members_toctree = True
# Let autosummary (not numpydoc) render class member listings.
numpydoc_show_class_members = False
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.autosummary', 'sphinx.ext.doctest', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode', 'sphinx.ext.graphviz', 'sphinx.ext.napoleon', 'sphinx_autodoc_typehints']
# Sphinx extension modules enabled for this build. Each extension must
# appear exactly once; 'sphinx.ext.napoleon' was previously listed twice
# (once quoted with ' and once with "), which is redundant.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.autosummary',
    'sphinx.ext.doctest',
    'sphinx.ext.todo',
    'sphinx.ext.coverage',
    'sphinx.ext.ifconfig',
    'sphinx.ext.viewcode',
    'sphinx.ext.graphviz',
    'sphinx.ext.napoleon',  # Google-style docstring support
    'sphinx_autodoc_typehints',
]
......@@ -60,9 +90,23 @@ exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinxdoc'
html_theme = 'sphinx_rtd_theme'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Example configuration for intersphinx: refer to the Python standard library.
# Intersphinx inventories: project name -> (docs root URL, inventory).
# A None inventory means "fetch objects.inv from the root URL".
intersphinx_mapping = {
    "python": ("https://docs.python.org/3/", None),
    "pandas": ("https://pandas.pydata.org/pandas-docs/stable", None),
    # NOTE(review): numpy/scipy/xarray/matplotlib moved their canonical
    # doc hosts; the old docs.scipy.org/doc/numpy and
    # xarray.pydata.org roots no longer reliably serve objects.inv.
    "numpy": ("https://numpy.org/doc/stable", None),
    "scipy": ("https://docs.scipy.org/doc/scipy", None),
    "numba": ("https://numba.pydata.org/numba-doc/latest", None),
    "matplotlib": ("https://matplotlib.org/stable", None),
    "dask": ("https://docs.dask.org/en/latest", None),
    "cftime": ("https://unidata.github.io/cftime", None),
    "xarray": ("https://docs.xarray.dev/en/stable", None),
}
\ No newline at end of file
......@@ -6,9 +6,11 @@ This is the home of **cerbere** package. The objective of **cerbere** is to
provide different free and open source python modules for the reading,
interpretation, and writing of earth observation data (oceanographic by the designers' inclination, but any other domain as well).
It is now based on **xarray** but extends it to read more data formats and adds
It is now based on xarray_ but extends it to read more data formats and adds
a typology of observation objects (called **features**).
.. _NumPy: http://www.numpy.org
.. _xarray: http://xarray.pydata.org
Contents
========
......@@ -50,7 +52,7 @@ Reference
.. toctree::
:maxdepth: 1
./api/index.rst
api
Recipes
=======
......@@ -66,3 +68,9 @@ Indices and tables
* :ref:`modindex`
* :ref:`search`
License
-------
cerbere is available under the open source `GPL License`__.
__ https://www.gnu.org/licenses/gpl-3.0.en.html
\ No newline at end of file
......@@ -10,7 +10,7 @@ f = '/home/cerdata/provider/ncep/model/cfsr/monthly/1999/03/pgbhnl.gdas.199903.g
gribm = mapper.gribfile.GribFile(url=f)
gribm.open()
gribm._open()
attrs = gribm.read_field_attributes('msl')
for k in attrs:
......
......@@ -278,8 +278,8 @@ class TestXArrayDataset(unittest.TestCase):
subset.get_values('test_var').shape, (360, 160)
)
self.assertIsNot(
subset.get_field('test_var').xarray,
dst.get_field('test_var').xarray
subset.get_field('test_var').to_dataarray,
dst.get_field('test_var').to_dataarray
)
def test_extract_as_view(self):
......@@ -290,8 +290,8 @@ class TestXArrayDataset(unittest.TestCase):
subset.get_values('test_var').shape, (360, 160)
)
self.assertIs(
subset.get_field('test_var').xarray,
dst.get_field('test_var').xarray
subset.get_field('test_var').to_dataarray,
dst.get_field('test_var').to_dataarray
)
def test_extract_rename(self):
......@@ -302,8 +302,8 @@ class TestXArrayDataset(unittest.TestCase):
subset.get_values('new_test_var').shape, (360, 160)
)
self.assertIsNot(
subset.get_field('new_test_var').xarray,
dst.get_field('test_var').xarray
subset.get_field('new_test_var').to_dataarray,
dst.get_field('test_var').to_dataarray
)
def test_extract_subset(self):
......@@ -316,8 +316,8 @@ class TestXArrayDataset(unittest.TestCase):
subset.get_values('test_var').shape, (5, 10)
)
self.assertIsNot(
subset.get_field('test_var').xarray,
dst.get_field('test_var').xarray
subset.get_field('test_var').to_dataarray,
dst.get_field('test_var').to_dataarray
)
def test_extract_subset_padding(self):
......@@ -332,8 +332,8 @@ class TestXArrayDataset(unittest.TestCase):
subset.get_values('test_var').shape, (10, 10)
)
self.assertIsNot(
subset.get_field('test_var').xarray,
dst.get_field('test_var').xarray
subset.get_field('test_var').to_dataarray,
dst.get_field('test_var').to_dataarray
)
self.assertEqual(subset.get_values('test_var').count(), 50)
self.assertEqual(subset.get_values('test_var').size, 100)
\ No newline at end of file
......@@ -25,6 +25,20 @@ class TestField(unittest.TestCase):
data = np.ma.zeros((100, 200))
return Field(data, name='test', dims=('x', 'y'))
def test_create_field_from_numpy2(self):
    """A Field can be created from an empty tuple of data."""
    # The np.ma.zeros array the original built here was never used:
    # the constructor is deliberately given empty data.
    field = Field(data=(), name='test')
    self.assertIsInstance(field, Field)

def test_create_field_from_dataarray(self):
    """A Field can be created from an xarray.DataArray."""
    # Renamed: this method duplicated test_create_field_from_numpy2
    # and silently shadowed it, so only one of the two ever ran.
    data = np.ma.zeros((100, 200))
    field = Field(data=xr.DataArray(data), name='test')
    self.assertIsInstance(field, Field)
def test_create_field(self):
    # Smoke test: the helper builds a Field from a masked numpy array
    # and the result is indeed a Field instance.
    field = self.create_field_from_numpy()
    self.assertIsInstance(field, Field)
......@@ -32,7 +46,7 @@ class TestField(unittest.TestCase):
def test_get_xarray(self):
field = self.create_field_from_numpy()
xarr = field.xarray
xarr = field.to_dataarray
self.assertIsInstance(xarr, xr.DataArray)
print(xarr.dims)
self.assertTrue('x' in xarr.dims)
......@@ -86,4 +100,11 @@ class TestField(unittest.TestCase):
self.assertIsInstance(values, np.ma.MaskedArray)
self.assertEqual(values.shape, (20, 5))
self.assertEqual(values.count(), 50)
self.assertEqual(values[10:, :].count(), 0)
\ No newline at end of file
self.assertEqual(values[10:, :].count(), 0)
def test_apply_func(self):
    """Field.compute applies a numpy function element-wise to fields.

    The original test computed a reference result, discarded it, and
    asserted nothing; it also named the product ``fsum``/"SUM".
    """
    field1 = self.create_field_from_numpy()
    field2 = self.create_field_from_numpy()
    expected = np.ma.multiply(field1.get_values(), field2.get_values())
    result = Field.compute(np.ma.multiply, field1, field2)
    # NOTE(review): assumes Field.compute returns a Field wrapping the
    # element-wise result -- confirm against Field.compute's contract.
    self.assertTrue(np.ma.allequal(expected, result.get_values()))
\ No newline at end of file