Commit 3dedc620 authored by Jeff Piollé

updated mapper for SLSTR - added met fields

parent 0ddf0b8b
......@@ -285,19 +285,18 @@ class AbstractMapper(object):
only used by the classes from :mod:`~cerbere.datamodel`
package. Can be 'Grid', 'Swath', etc...
datamodel_geolocation_dims (list, optional): list of the name of the
geolocation dimensions defining the data model to be read in
the file. Optional argument, only used by the datamodel
datamodel_geolocation_dims (list, optional): list of the name of
the geolocation dimensions defining the data model to be read
in the file. Optional argument, only used by the datamodel
classes, in case the mapper class can store different types of
data models.
Returns:
a handler on the opened file
"""
self.view = view
if datamodel is not None:
self._feature_type = datamodel
return None
@abstractmethod
def close(self):
......
......@@ -975,8 +975,8 @@ class NCFile(AbstractMapper):
else:
raise IndexError(
str(
"Your input data shape {} is not compatible with your "
"field shape {}."
"Your input data shape {} is not compatible with "
"your field shape {}."
).format(
values.shape, ncvar_real.shape
)
......@@ -993,6 +993,7 @@ class NCFile(AbstractMapper):
(ncvar.shape[0] == 1 or ncvar.shape[0] == 0)):
# case of grids
ncvar[0, :] = values
else:
raise IndexError(
str(
......@@ -1156,10 +1157,16 @@ class NCFile(AbstractMapper):
return geophyvars
def get_dimensions(self, fieldname=None):
"""
return the (standard) dimension names of a file or field.
"""Return the dimension's standard names of a file or a field in the
file.
Standardization of the dimension name is applied when possible.
Args:
fieldname (str): the name of the field from which to get the
dimensions. For a geolocation field, use the cerbere standard
name (time, lat, lon), though native field name will work too.
Returns:
tuple<str>: the standard dimensions of the field or file.
"""
if not fieldname:
dims = self.get_handler().dimensions
......
......@@ -704,10 +704,10 @@ class SAFESLFile(AbstractMapper):
storage type.
"""
native_name = self.__get_native_fieldname(fieldname)
rowslice = None
if slices is not None:
rowslice = [slices[0]]
if fieldname == 'time':
rowslice = None
if slices is not None:
rowslice = [slices[0]]
suffix = self.__sltype
SCANSYNC = self.__time_handler.read_values('SCANSYNC')[0]
PIXSYNC_i = self.__time_handler.read_values(
......
......@@ -16,7 +16,7 @@ from cerplot.mapping import CerMap
SLSTR_MAPPERS = ['SAFESLIRFile', 'SAFESL500AFile',
'SAFESL500BFile', 'SAFESL500TDIFile']
TEST_FIELD = {
'SAFESLIRFile': 'N3_sea_surface_temperature',
'SAFESLIRFile': ['N3_sea_surface_temperature', 'S9_BT_in'],
'SAFESL500AFile': 'S2_radiance_an',
'SAFESL500BFile': 'S4_radiance_bo',
'SAFESL500TDIFile': 'S4_radiance_cn'
......@@ -51,7 +51,13 @@ else:
)
ncf = source_mapper(url=fname)
testfield = TEST_FIELD[mapper]
if type(TEST_FIELD[mapper]) is list:
for testfield in TEST_FIELD[mapper]:
if testfield in ncf.get_fieldnames():
break
else:
testfield = TEST_FIELD[mapper]
print 'OPEN'
......@@ -106,23 +112,25 @@ print swath.get_fieldnames()
for fieldname in swath.get_fieldnames():
print swath.get_field(fieldname)
ROW = 200
print "\nREAD VALUES"
values_i = ncf2.read_values('lat')
print values_i.shape
values_o = ncf2.read_values(OBLIQUE_LAT_FIELD[mapper])
print values_o.shape
print "Values at row 1000 :"
print values_i[1000, 550:600]
print values_o[1000, 550:600]
print "Values at row : ", ROW
print values_i[ROW, 550:600]
print values_o[ROW, 550:600]
diff = (values_o - values_i)
print diff.min(), diff.max()
print "\nREAD SUBSET 1"
values_i = swath.get_lat(slices={'row': slice(1000, 1001),
values_i = swath.get_lat(slices={'row': slice(ROW, ROW+5),
'cell': slice(550, 600)},
cache=False)
values_o = swath.get_values(OBLIQUE_LAT_FIELD[mapper],
slices={'row': slice(1000, 1001),
slices={'row': slice(ROW, ROW+5),
'cell': slice(550, 600)},
cache=False)
......@@ -130,11 +138,11 @@ print values_i, values_i.shape
print values_o, values_o.shape
print "\nREAD SUBSET 2"
values_i = swath.get_lat(slices={'row': slice(1000, 1001),
values_i = swath.get_lat(slices={'row': slice(ROW, ROW+5),
'cell': slice(1200, 1250)},
cache=False)
values_o = swath.get_values(OBLIQUE_LAT_FIELD[mapper],
slices={'row': slice(1000, 1001),
slices={'row': slice(ROW, ROW+5),
'cell': slice(1200, 1250)},
cache=False)
......@@ -142,11 +150,11 @@ print "Nadir :", values_i, values_i.shape
print "Oblique : ", values_o, values_o.shape
print "\nREAD SUBSET 3"
values_i = swath.get_lat(slices={'row': slice(1000, 1001),
values_i = swath.get_lat(slices={'row': slice(ROW, ROW+5),
'cell': slice(0, 50)},
cache=False)
values_o = swath.get_values(OBLIQUE_LAT_FIELD[mapper],
slices={'row': slice(1000, 1001),
slices={'row': slice(ROW, ROW+5),
'cell': slice(0, 50)},
cache=False)
......@@ -154,11 +162,11 @@ print "Nadir :", values_i, values_i.shape
print "Oblique : ", values_o, values_o.shape
print "\nREAD SUBSET 4"
values_i = swath.get_lat(slices={'row': slice(1000, 1001),
values_i = swath.get_lat(slices={'row': slice(ROW, ROW+5),
'cell': slice(1000, 1300)},
cache=False)
values_o = swath.get_values(OBLIQUE_LAT_FIELD[mapper],
slices={'row': slice(1000, 1001),
slices={'row': slice(ROW, ROW+5),
'cell': slice(1000, 1300)},
cache=False)
......@@ -166,7 +174,7 @@ print "Nadir :", values_i, values_i.shape
print "Oblique : ", values_o, values_o.shape
# extract subset and save
subset = swath.extract_subset(slices={'row': slice(1000, 1001),
subset = swath.extract_subset(slices={'row': slice(ROW, ROW+5),
'cell': slice(500, 550)})
# save subset
print("Save subset")
......
......@@ -40,7 +40,7 @@ if os.path.exists('.git') and os.path.isdir('.git'):
, 'HEAD'
, '--count']).decode('utf-8').strip()
with open(version_path, 'w') as f:
f.write('{}.{}\n'.format(major_minor_version, commits))
f.write('{}.{}'.format(major_minor_version, commits))
with open(version_path, 'r') as f:
version = f.read()
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.