...
 
@@ -75,3 +75,7 @@ tests/data/GW_L2P_ALT_ENVI_GDR_20110101_061209_20110101_070215_098_078.nc
tests/data/QS_S2B54289.20093261508
tests/data/WMO51101_20110101T0050_20110131T2350_Lat_24.32N_Lon_162.06W.nc
tests/data/polarfront_2008-09-09T050025_2008-10-17T114909_HiWASE_2008_FORE.nc
.settings/org.eclipse.core.resources.prefs
.idea/
from cerbere.mapper.ncfile import NCFile
from .. import READ_ONLY, WRITE_NEW
class AltimetersL2MergedQuefelouNCFile(NCFile):
def __init__(self, url=None, mode=READ_ONLY,ncformat='NETCDF3_CLASSIC', **kwargs):
super(AltimetersL2MergedQuefelouNCFile,self).__init__(url=url,mode=mode,ncformat=ncformat,depths=True,center_on_greenwhich=False,**kwargs)
if self._handler is None:
self._handler = self.get_handler()
return
def get_standard_dimname(self, geodimname):
"""
Returns the equivalent standard dimension name for a
dimension in the native format.
This is a translation of the native names to standard ones. It is used
for internal purpose and should not be called directly.
To be derived when creating an inherited data mapper class. This is
mandatory for geolocation dimensions which must be standard.
Args:
dimname (string): native dimension name
Return:
str: the (translated) standard name for the dimension. Return
`dimname` if the input dimension has no standard name.
See Also:
see :func:`get_matching_dimname` for the reverse operation
"""
# cell/row are reversed
matching = {
'mes':'time'
# 'time': 'time',
# 'lon':'x',
# 'lat':'y',
# 'idlat':'y',
# 'idlon':'x'
}
if geodimname in matching:
return matching[geodimname]
else:
return None
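# Usage sketch (hypothetical file path) showing the translation in both
# directions; 'mes' is the native record dimension of these merged files:
#     nc = AltimetersL2MergedQuefelouNCFile(url='/path/to/merged_alti.nc')
#     nc.get_standard_dimname('mes')   # -> 'time'
#     nc.get_matching_dimname('time')  # -> 'mes'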
def get_matching_dimname(self, geodimname):
"""Return the equivalent name in the native format for a standard
dimension.
This is a translation of the standard names to native ones. It is used
for internal purpose only and should not be called directly.
The standard dimension names are:
* x, y, time for :class:`~cerbere.datamodel.grid.Grid`
* row, cell, time for :class:`~cerbere.datamodel.swath.Swath` or
:class:`~cerbere.datamodel.image.Image`
To be derived when creating an inherited data mapper class. This is
mandatory for geolocation dimensions which must be standard.
Args:
dimname (str): standard dimension name.
Returns:
str: return the native name for the dimension. Return `dimname` if
the input dimension has no standard name.
See Also:
see :func:`get_standard_dimname` for the reverse operation
"""
dims = self._handler.dimensions.keys()
dim_matching = {"time":'mes'}
# if 'idlon' in dims:
# dim_matching = {
# 'time': 'time',
# 'x': 'idlon',
# 'y': 'idlat',
# }
# else:
# dim_matching = {
# 'time': 'time',
# 'x': 'lon',
# 'y': 'lat',
# }
return dim_matching.get(geodimname, geodimname)
\ No newline at end of file
from cerbere.mapper.ncfile import NCFile
from cerbere.datamodel.field import Field
from cerbere.datamodel.variable import Variable
from collections import OrderedDict
import netCDF4
from .. import READ_ONLY, WRITE_NEW
import numpy as np
import sys
import logging
sys.path.append('/home/losafe/users/agrouaze/PROGRAMMES/ROUTINE_PYTHON') # to be removed if this code is made operational for cfosat
sys.path.append('/home/losafe/users/agrouaze/git/datavore-web/datavoreweb')
from Mapper_AMSR2_L2_NN_solab import MapperClass
DEFAULT_TIME_UNITS = 'seconds since 2010-01-01 00:00:00'
VIRTUALFIELD_DESCR = {'wind_speed': 'wind speed',
'wind_direction': 'wind direction (where the wind is blowing)'
}
VIRTUALFIELD_STDNAME = {'wind_speed': 'wind_speed',
'wind_direction': 'wind_to_direction'
}
VIRTUALFIELD_UNITS = {
'wind_speed': 'm s-1',
'wind_direction': 'degrees'
}
# variable = Variable(
# shortname='wind_speed',
# description='wind speed',
# authority=None,
# standardname='wind_speed'
# )
# dimensions = OrderedDict([('time', 1),('y', shapa[0]),('x', shapa[1])])
# field = Field(
# variable,
# dimensions,
# datatype=numpy.dtype(numpy.float32),
# units='m.s-1',
# values = values
# )
# all_the_fields = OrderedDict([('wind_speed',field)])
class AMSR2L2NNSOLABNCFile(NCFile):
def __init__(self, url=None, mode=READ_ONLY,ncformat='NETCDF4', **kwargs):
super(AMSR2L2NNSOLABNCFile,self).__init__(url=url,mode=mode,ncformat=ncformat,depths=True,center_on_greenwhich=False,**kwargs)
self._mapped_data = None
if self._handler is None:
self._handler = self.get_handler()
return
def get_fieldnames(self):
'''
Returns the list of geophysical fields stored for the feature.
Excludes the geolocation fields in the returned list.
'''
geophyvars = super(AMSR2L2NNSOLABNCFile, self).get_fieldnames()
geophyvars.extend(["wind_speed", "wind_direction"])
#geophyvars.remove('height')
return geophyvars
def read_values(self, fieldname, slices=None):
if fieldname == 'wind_speed':
fieldname = 'wind_speed_hf'
if self._mapped_data:
# guard against slices=None: indexing an array with None would add an axis
data = self._mapped_data[fieldname]
res = data[slices] if slices is not None else data
else:
mapperancil = MapperClass()
# if fieldname=='time':
# res = mapperancil.getTime()
# else:
# mapperancil.open(ancill)
# mapperancil.run()
plotmethod, lat_ancil, lons_ancil, values = mapperancil.getScalar( self._url, fieldname, cfg=None, fixmeridian=True, bbox=None, plot_conf=None)
mapped_data = {}
mapped_data['plotmethod'] = plotmethod
time_ancill = mapperancil.getTime()
shapa = mapperancil.getOriginalSwathShape()
swath_def, time_ancill, lat_ancil, lons_ancil, _ = mapperancil.getSwath('wind_speed_hf')
# print 'swath_def',swath_def
# print 'shape swath',shapa
# print 'coloc vetns forts| AMSR2',lat_ancil.shape,lons_ancil.shape,type(time_ancill),time_ancill
values = np.reshape(values,shapa,order='F')
lat_ancil = np.reshape(lat_ancil,shapa,order='F')
lons_ancil = np.reshape(lons_ancil,shapa,order='F')
time_ancill= np.reshape(time_ancill,shapa,order='F')
lat_ancil = np.array(lat_ancil)
lons_ancil = np.array(lons_ancil)
time_ancill = np.array(time_ancill)
logging.debug('val %s lat %s lon %s',values.shape,lat_ancil.shape,lons_ancil.shape)
mapped_data['swath_shape'] = shapa
mapped_data['lat'] = lat_ancil.squeeze()
mapped_data['lon'] = lons_ancil.squeeze()
logging.debug('maper amsr2 mapped_data["lon"]: %s',mapped_data["lon"].shape)
time_ancill = netCDF4.date2num(time_ancill,DEFAULT_TIME_UNITS)
mapped_data['time'] = time_ancill.squeeze()
mapped_data[fieldname] = values.squeeze()
self._mapped_data = mapped_data
res = self._mapped_data[fieldname][slices] if slices is not None else self._mapped_data[fieldname]
res = res.squeeze()
logging.debug('mapper amsr2 res: %s %s',fieldname,res.shape)
return res
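# The SOLAB reader presumably returns flat vectors in column-major order,
# hence the order='F' reshapes above. A minimal illustration of that ordering:
#     >>> np.reshape(np.arange(6), (2, 3), order='F')
#     array([[0, 2, 4],
#            [1, 3, 5]])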
def get_matching_dimname(self, geodimname):
"""
Return the equivalent name in the current data mapper for a standard
feature dimension, or None if not found.
"""
matching = {
'row': 'rows',
'cell': 'cols',
'time': 'time',
# 'vec_1': 'vec',
# 'vec_2': 'vec',
}
if geodimname in matching:
return matching[geodimname]
else:
return None
def read_field(self, fieldname):
"""
Return the field, without its values.
Actual values can be retrieved with read_values() method.
"""
if self._mapped_data:
shapa = self._mapped_data['swath_shape']
else:
self.read_values('wind_speed')
shapa = self._mapped_data['swath_shape']
if fieldname in ['wind_speed', 'wind_direction']:
# create a virtual field
variable = Variable(
shortname=fieldname,
description=VIRTUALFIELD_DESCR[fieldname],
authority=self.get_naming_authority(),
standardname=VIRTUALFIELD_STDNAME[fieldname]
)
field = Field(
variable,
OrderedDict([#('time', 1),
#('z', 1),
('y', shapa[0]),
('x', shapa[1])
]),
datatype=np.dtype(np.float32),
units=VIRTUALFIELD_UNITS[fieldname]
)
field.attach_storage(self.get_field_handler(fieldname))
elif fieldname == 'time':
field = NCFile.read_field(self, fieldname)
vava=field.variable
field = Field(
vava,
OrderedDict([
('y', shapa[0]),
('x', shapa[1])
]),
datatype=np.dtype(np.float32),
units=DEFAULT_TIME_UNITS
)
field.attach_storage(self.get_field_handler(fieldname))
else:
field = NCFile.read_field(self, fieldname)
vava=field.variable
uni=field.units
# if 'z' in field.get_dimnames():
field = Field(
vava,
OrderedDict([#('time', 1),
#('z', self.get_dimsize('z')),
('y', shapa[0]),
('x', shapa[1])
]),
datatype=np.dtype(np.float32),
units=uni
)
field.attach_storage(self.get_field_handler(fieldname))
return field
def get_bbox(self):
if self._mapped_data is None:
self.read_values('lon')
# lons =
lonmin = self._mapped_data['lon'].min()
latmin = self._mapped_data['lat'].min()
lonmax = self._mapped_data['lon'].max()
latmax = self._mapped_data['lat'].max()
return lonmin,latmin,lonmax,latmax
\ No newline at end of file
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
cerbere.mapper.cryosat2awincfile
=============================
Mapper for CryoSat-2 Altimeter files from AWI monthly composite
:copyright: Copyright 2013 Ifremer / Cersat.
:license: Released under GPL v3 license, see :ref:`license`.
.. sectionauthor::Antoine Grouazel <antoine.grouazel@ifremer.fr>
"""
from datetime import datetime
import numpy
import logging
# from .ncfile import NCFile
from .. import READ_ONLY, WRITE_NEW
from cerbere.mapper.ncfile import NCFile
import os
# from cerbere.datamodel.grid import Grid
from cerbere.datamodel.variable import Variable
from cerbere.datamodel.field import Field
from collections import OrderedDict
import netCDF4
VIRTUALFIELD_DESCR = {
'time': 'time',
}
VIRTUALFIELD_STDNAME = {
'time': 'time',
}
VIRTUALFIELD_UNITS = {
'time': 'seconds since 2000-01-01 00:00:00',
}
class Cryosat2AWINCFile(NCFile):
"""
Mapper for CryoSat-2 Altimeter files from NOAA RADS 4.0.
Overrides the NCFile mapper to take into account some specific
attributes naming
"""
def __init__(self, url=None, mode=READ_ONLY,ncformat='NETCDF3_CLASSIC', **kwargs):
super(Cryosat2AWINCFile,self).__init__(url=url,mode=mode,ncformat=ncformat,depths=False,center_on_greenwhich=False,**kwargs)
if self._handler is None:
self._handler = self.get_handler()
if self._url is None:
self._url = url
return
def get_start_time(self):
"""Returns the minimum date of the file temporal coverage"""
# handler = self.get_handler()
# attrdate = handler.getncattr('first_meas_time')
logging.debug('url = %s',self._url)
base = os.path.basename(self._url)
dt = datetime.strptime(base,'CryoSat_product_nh_%Y%m.nc')
dt = dt.replace(day=15)
return dt
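# e.g. datetime.strptime('CryoSat_product_nh_201301.nc',
# 'CryoSat_product_nh_%Y%m.nc') gives datetime(2013, 1, 1, 0, 0), which is
# then re-centred on the 15th as a nominal mid-month date for the composite.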
def get_end_time(self):
"""Returns the maximum date of the file temporal coverage"""
return self.get_start_time()
def read_values(self, fieldname, slices=None):
if fieldname == 'time':
tmp = self.read_times(slices=slices)
else:
# print 'fieldname',fieldname
# if fieldname == "wind_speed":
# u10 = super(Cryosat2AWINCFile, self).read_values('u10m', slices)
# v10 = super(Cryosat2AWINCFile, self).read_values('v10m', slices)
# u = numpy.sqrt(u10 * u10 + v10 * v10)
# shaap=numpy.shape(u)
# u=numpy.reshape(u,(1,shaap[2],shaap[3]))
# return u
# elif fieldname == "wind_direction":
# u10 = super(Cryosat2AWINCFile, self).read_values('u10m', slices)
# v10 = super(Cryosat2AWINCFile, self).read_values('v10m', slices)
# tmp=uv2dir(u10, v10)
# shaap=numpy.shape(tmp)
# tmp=numpy.reshape(tmp,(1,shaap[2],shaap[3]))
# return tmp
# if fieldname in ['lat','lon']:
tmp = super(Cryosat2AWINCFile, self).read_values(fieldname, slices)
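# mask NaN samples so that downstream data models receive a masked array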
tmp = numpy.ma.masked_where(numpy.isnan(tmp),tmp)
# shaap=numpy.shape(tmp)
# tmp=numpy.reshape(tmp,(1,shaap[2],shaap[3]))
return tmp
# else: #case geolocation fields
# return super(Cryosat2AWINCFile, self).read_values(fieldname, slices)
def read_field(self, fieldname):
if fieldname in ['lat','lon']:
# print 'read field',fieldname
res = super(Cryosat2AWINCFile, self).read_field(fieldname)
elif fieldname in ['time']:
variable = Variable(
shortname=fieldname,
description=VIRTUALFIELD_DESCR[fieldname],
authority=self.get_naming_authority(),
standardname=VIRTUALFIELD_STDNAME[fieldname]
)
res = Field(
variable,
OrderedDict([('time', 1),
# ('y', self.get_dimsize('y')),
# ('x', self.get_dimsize('x'))
]),
datatype=numpy.dtype(numpy.float32),
units=VIRTUALFIELD_UNITS[fieldname]
)
res.attach_storage(self.get_field_handler(fieldname))
else:
res = super(Cryosat2AWINCFile, self).read_field(fieldname)
return res
def read_times(self, slices=None):
"""
Read time values of a file or file series.
"""
if self._urlseries is not None:
raise Exception("Not yet implemented")
elif self._url is not None:
times = datetime.strptime(os.path.basename(self._url).split('_')[3].replace('.nc',''),'%Y%m')
# CryoSat_product_nh_201301.nc
return times
def get_standard_dimname(self,geodimname):
"""
Returns the equivalent standard dimension name for a
dimension in the native format.
This is a translation of the native names to standard ones. It is used
for internal purpose and should not be called directly.
To be derived when creating an inherited data mapper class. This is
mandatory for geolocation dimensions which must be standard.
:arg dimname: native dimension name
:type dimname: str
:rtype: str
:return: return the (translated) standard name for the dimension.
return `dimname` if the input dimension has no standard name.
*See*
see :func:`get_matching_dimname` for the reverse operation
"""
matching = {
'time': 'time',
'xc': 'x',
'yc': 'y',
}
if geodimname in matching:
return matching[geodimname]
else:
return None
def get_matching_dimname(self, geodimname):
"""Return the equivalent name in the native format for a standard
dimension.
This is a translation of the standard names to native ones. It is used
for internal purpose only and should not be called directly.
The standard dimension names are:
* x, y, time for :class:`~cerbere.datamodel.grid.Grid`
* row, cell, time for :class:`~cerbere.datamodel.swath.Swath` or
:class:`~cerbere.datamodel.image.Image`
To be derived when creating an inherited data mapper class. This is
mandatory for geolocation dimensions which must be standard.
Args:
dimname (str): standard dimension name.
Returns:
str: return the native name for the dimension. Return `dimname` if
the input dimension has no standard name.
See Also:
see :func:`get_standard_dimname` for the reverse operation
"""
dim_matching = {
'time': 'time',
'x': 'xc',
'y': 'yc',
# 'bnds':'bnds',
}
return dim_matching.get(geodimname, geodimname)
def get_fieldnames(self):
'''
Returns the list of geophysical fields stored for the feature.
Excludes the geolocation fields in the returned list.
'''
geophyvars = super(Cryosat2AWINCFile, self).get_fieldnames()
geophyvars.append('time')
return geophyvars
def get_geolocation_field(self, fieldname):
if fieldname == 'time':
res = 'time'
else:
res = super(Cryosat2AWINCFile, self).get_geolocation_field( fieldname)
return res
\ No newline at end of file
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
cerbere.mapper.cryosat2ncfile
=============================
Mapper for CryoSat-2 Altimeter files from NOAA RADS 4.0
:copyright: Copyright 2013 Ifremer / Cersat.
:license: Released under GPL v3 license, see :ref:`license`.
.. sectionauthor:: Jeff Piolle <jfpiolle@ifremer.fr>
.. codeauthor:: Jeff Piolle <jfpiolle@ifremer.fr>
"""
from datetime import datetime
import numpy
import logging
# from .ncfile import NCFile
from .. import READ_ONLY, WRITE_NEW
from cerbere.mapper.ncfile import NCFile
import os
# from cerbere.datamodel.grid import Grid
from cerbere.datamodel.variable import Variable
from cerbere.datamodel.field import Field
from collections import OrderedDict
import netCDF4
VIRTUALFIELD_DESCR = {
'time': 'time',
}
VIRTUALFIELD_STDNAME = {
'time': 'time',
}
VIRTUALFIELD_UNITS = {
'time': 'seconds since 2000-01-01 00:00:00',
}
class Cryosat2NCFile(NCFile):
"""
Mapper for CryoSat-2 Altimeter files from NOAA RADS 4.0.
Overrides the NCFile mapper to take into account some specific
attributes naming
"""
def __init__(self, url=None, mode=READ_ONLY,ncformat='NETCDF3_CLASSIC', **kwargs):
super(Cryosat2NCFile,self).__init__(url=url,mode=mode,ncformat=ncformat,depths=False,center_on_greenwhich=False,**kwargs)
if self._handler is None:
self._handler = self.get_handler()
if self._url is None:
self._url = url
return
def get_start_time(self):
"""Returns the minimum date of the file temporal coverage"""
# handler = self.get_handler()
# attrdate = handler.getncattr('first_meas_time')
logging.debug('url = %s',self._url)
base = os.path.basename(self._url)
dt = datetime.strptime(base,'CryoSat_product_nh_%Y%m.nc')
dt = dt.replace(day=15)
return dt
def get_end_time(self):
"""Returns the maximum date of the file temporal coverage"""
return self.get_start_time()
def read_values(self, fieldname, slices=None):
if fieldname == 'time':
tmp = self.read_times(slices=slices)
else:
logging.debug('fieldname %s', fieldname)
# if fieldname == "wind_speed":
# u10 = super(Cryosat2NCFile, self).read_values('u10m', slices)
# v10 = super(Cryosat2NCFile, self).read_values('v10m', slices)
# u = numpy.sqrt(u10 * u10 + v10 * v10)
# shaap=numpy.shape(u)
# u=numpy.reshape(u,(1,shaap[2],shaap[3]))
# return u
# elif fieldname == "wind_direction":
# u10 = super(Cryosat2NCFile, self).read_values('u10m', slices)
# v10 = super(Cryosat2NCFile, self).read_values('v10m', slices)
# tmp=uv2dir(u10, v10)
# shaap=numpy.shape(tmp)
# tmp=numpy.reshape(tmp,(1,shaap[2],shaap[3]))
# return tmp
# if fieldname in ['lat','lon']:
tmp = super(Cryosat2NCFile, self).read_values(fieldname, slices)
tmp = numpy.ma.masked_where(numpy.isnan(tmp),tmp)
# shaap=numpy.shape(tmp)
# tmp=numpy.reshape(tmp,(1,shaap[2],shaap[3]))
return tmp
# else: #case geolocation fields
# return super(Cryosat2NCFile, self).read_values(fieldname, slices)
def read_field(self, fieldname):
if fieldname in ['lat','lon']:
logging.debug('read field %s', fieldname)
res = super(Cryosat2NCFile, self).read_field(fieldname)
elif fieldname in ['time']:
variable = Variable(
shortname=fieldname,
description=VIRTUALFIELD_DESCR[fieldname],
authority=self.get_naming_authority(),
standardname=VIRTUALFIELD_STDNAME[fieldname]
)
res = Field(
variable,
OrderedDict([('time', 1),
# ('y', self.get_dimsize('y')),
# ('x', self.get_dimsize('x'))
]),
datatype=numpy.dtype(numpy.float32),
units=VIRTUALFIELD_UNITS[fieldname]
)
res.attach_storage(self.get_field_handler(fieldname))
else:
res = super(Cryosat2NCFile, self).read_field(fieldname)
return res
def read_times(self, slices=None):
"""
Read time values of a file or file series.
"""
if self._urlseries is not None:
raise Exception("Not yet implemented")
elif self._url is not None:
times = datetime.strptime(os.path.basename(self._url).split('_')[3].replace('.nc',''),'%Y%m')
# CryoSat_product_nh_201301.nc
return times
def get_standard_dimname(self,geodimname):
"""
Returns the equivalent standard dimension name for a
dimension in the native format.
This is a translation of the native names to standard ones. It is used
for internal purpose and should not be called directly.
To be derived when creating an inherited data mapper class. This is
mandatory for geolocation dimensions which must be standard.
:arg dimname: native dimension name
:type dimname: str
:rtype: str
:return: return the (translated) standard name for the dimension.
return `dimname` if the input dimension has no standard name.
*See*
see :func:`get_matching_dimname` for the reverse operation
"""
matching = {
'time': 'time',
'xc': 'x',
'yc': 'y',
}
if geodimname in matching:
return matching[geodimname]
else:
return None
def get_matching_dimname(self, geodimname):
"""Return the equivalent name in the native format for a standard
dimension.
This is a translation of the standard names to native ones. It is used
for internal purpose only and should not be called directly.
The standard dimension names are:
* x, y, time for :class:`~cerbere.datamodel.grid.Grid`
* row, cell, time for :class:`~cerbere.datamodel.swath.Swath` or
:class:`~cerbere.datamodel.image.Image`
To be derived when creating an inherited data mapper class. This is
mandatory for geolocation dimensions which must be standard.
Args:
dimname (str): standard dimension name.
Returns:
str: return the native name for the dimension. Return `dimname` if
the input dimension has no standard name.
See Also:
see :func:`get_standard_dimname` for the reverse operation
"""
dim_matching = {
'time': 'time',
'x': 'xc',
'y': 'yc',
# 'bnds':'bnds',
}
return dim_matching.get(geodimname, geodimname)
def get_fieldnames(self):
'''
Returns the list of geophysical fields stored for the feature.
Excludes the geolocation fields in the returned list.
'''
geophyvars = super(Cryosat2NCFile, self).get_fieldnames()
geophyvars.append('time')
return geophyvars
\ No newline at end of file
"""
mapper GPCP v22 PML composit monthly grid
Antoine Grouazel
"""
import netCDF4
import numpy
import ast
from datetime import datetime
from collections import OrderedDict
#from .ncfile import NCFile
from .. import READ_ONLY, WRITE_NEW
from cerbere.mapper.ncfile import NCFile
from cerbere.datamodel.grid import Grid
from ..datamodel.field import Field
from ..datamodel.variable import Variable
class GPCPNCFile(NCFile):
def __init__(self, url=None, mode=READ_ONLY,ncformat='NETCDF3_CLASSIC', **kwargs):
super(GPCPNCFile,self).__init__(url=url,mode=mode,ncformat=ncformat,depths=True,center_on_greenwhich=False,**kwargs)
if self._handler is None:
self._handler = self.get_handler()
return
def read_values(self, fieldname, slices=None):
if fieldname in ['Precip','Error']:
res =super(GPCPNCFile, self).read_values(fieldname, slices).squeeze().T
else: #case geolocation fields
res = super(GPCPNCFile, self).read_values(fieldname, slices)
return res
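# note: the native Precip/Error arrays appear to be stored transposed
# relative to the (x, y) dimension order declared in read_field below,
# hence the squeeze().T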
def read_field(self, fieldname):
"""
Return the field, without its values.
Actual values can be retrieved with read_values() method.
"""
if fieldname in ['Precip','Error']:
# create a virtual field
tmp = super(GPCPNCFile, self).read_field(fieldname)
didi = OrderedDict([('x', self.get_dimsize('x')),('y', self.get_dimsize('y'))
])
res = Field(variable=tmp.variable,dimensions=didi,datatype=numpy.dtype(numpy.float32),units = tmp.units)
res.attach_storage(self.get_field_handler(fieldname))
# res.set_dimensions = OrderedDict(
# [
# # ('time', 1),
# ('y', self.get_dimsize('y')),
# ('x', self.get_dimsize('x'))
# ]),
# print res
# variable = Variable(
# shortname=fieldname,
# description=VIRTUALFIELD_DESCR[fieldname],
# authority=self.get_naming_authority(),
# standardname=VIRTUALFIELD_STDNAME[fieldname]
# )
# field = Field(
# variable,
# OrderedDict([('time', 1),
# #('z', 1),
# ('y', self.get_dimsize('y')),
# ('x', self.get_dimsize('x'))
# ]),
# datatype=numpy.dtype(numpy.float32),
# units=VIRTUALFIELD_UNITS[fieldname]
# )
# field.attach_storage(self.get_field_handler(fieldname))
else:
res = super(GPCPNCFile, self).read_field(fieldname)
return res
\ No newline at end of file
import netCDF4
import numpy
from datetime import datetime
from collections import OrderedDict
from .. import READ_ONLY, WRITE_NEW
from cerbere.mapper.ncfile import NCFile
from cerbere.datamodel.grid import Grid
from ..datamodel.field import Field
from ..datamodel.variable import Variable
VIRTUALFIELD_DESCR = {'sea_surface_temperature': 'sea surface temperature',
# 'wind_direction': 'wind direction (where the wind is blowing)'
}
VIRTUALFIELD_STDNAME = {'sea_surface_temperature': 'sst',
# 'wind_direction': 'wind_to_direction'
}
VIRTUALFIELD_UNITS = {
'sea_surface_temperature': 'kelvin',
# 'wind_direction': 'degrees'
}
class REYNOLDSAVHRRDailyNCFile(NCFile):
def __init__(self, url=None, mode=READ_ONLY,ncformat='NETCDF4', **kwargs):
super(REYNOLDSAVHRRDailyNCFile,self).__init__(url=url,mode=mode,ncformat=ncformat,center_on_greenwhich=True,**kwargs)
if self._handler is None:
self._handler = self.get_handler()
return
def open(self,
view=None,
datamodel=None,
datamodel_geolocation_dims=None):
handler = super(REYNOLDSAVHRRDailyNCFile,self).open(view=view,datamodel='Grid',datamodel_geolocation_dims=None)
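# note: the datamodel argument is ignored here and forced to 'Grid', as
# Reynolds daily files are always mapped as grids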
return handler
def get_fieldnames(self):
'''
Returns the list of geophysical fields stored for the feature.
Excludes the geolocation fields in the returned list.
'''
geophyvars = super(REYNOLDSAVHRRDailyNCFile, self).get_fieldnames()
geophyvars.extend(["sea_surface_temperature"])
#geophyvars.remove('height')
return geophyvars
def read_values(self, fieldname, slices=None):
if fieldname == "sea_surface_temperature":
values = super(REYNOLDSAVHRRDailyNCFile, self).read_values('sst', slices)
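# the native 'sst' variable is in degrees Celsius; add 273.15 to match
# VIRTUALFIELD_UNITS['sea_surface_temperature'] (kelvin)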
values = values + 273.15
else:
values = super(REYNOLDSAVHRRDailyNCFile, self).read_values(fieldname, slices)
values = values.squeeze()
return values
def read_field(self, fieldname):
"""
Return the field, without its values.
Actual values can be retrieved with read_values() method.
"""
if fieldname in ['sea_surface_temperature']:
# create a virtual field
variable = Variable(
shortname=fieldname,
description=VIRTUALFIELD_DESCR[fieldname],
authority=self.get_naming_authority(),
standardname=VIRTUALFIELD_STDNAME[fieldname]
)
field = Field(
variable,
OrderedDict([('time', 1),
#('z', 1),
('y', self.get_dimsize('y')),
('x', self.get_dimsize('x'))
]),
datatype=numpy.dtype(numpy.float32),
units=VIRTUALFIELD_UNITS[fieldname]
)
field.attach_storage(self.get_field_handler(fieldname))
else:
field = NCFile.read_field(self, fieldname)
vava=field.variable
uni=field.units
field = Field(
vava,
OrderedDict([('time', 1),
#('z', self.get_dimsize('z')),
('y', self.get_dimsize('y')),
('x', self.get_dimsize('x'))
]),
datatype=numpy.dtype(numpy.float32),
units=uni
)
field.attach_storage(self.get_field_handler(fieldname))
return field
@@ -183,7 +183,7 @@ class SAFEGeoTiffFile(AbstractMapper):
'digital_number', 'gamma0_lut', 'beta0_lut', 'sigma0_lut',
'gamma0', 'beta0', 'sigma0', 'complex', 'azimuth_time',
'slant_range_time', 'doppler_centroid','sigma0_corrected','noise',
'gamma0_corrected']
'gamma0_corrected','noise_linear_equivalent']
def read_field_attributes(self, fieldname):
"""
@@ -543,16 +543,44 @@ class SAFEGeoTiffFile(AbstractMapper):
kx=1, ky=1)
elif fieldname == 'noise':
ipf_version = self.get_IPF_version()
logging.debug('version IPF is = %s',ipf_version)
if ipf_version == '002.91' and False: # there was a bug with noise vectors, Nuno suggested using a 36.8 dB noise value instead
logging.debug('version IPF is = %s %s %s',ipf_version,len(ipf_version),ipf_version == '002.91')
if '002.9' in ipf_version: # there was a bug with noise vectors, Nuno suggested using a 36.8 dB noise value instead (see https://jira-projects.cls.fr/browse/MPCS-1978 )
logging.debug('version IPF is special = %s',ipf_version)
# logging.info('%s',self._url)
if 'wv1' in self._url:
logging.debug('wv1')
cst = 10**(36.3/10) # this is a guess from Huimin, we are not sure this is the proper way to convert to linear the constant given by Nuno (August 2019)
cst = 10**(37.9/10)
else:
cst = 10**(39.1/10) #wv2
logging.debug('wv2')
sat = os.path.basename(self._url)[0:3].upper()
start_date = datetime.strptime(os.path.basename(self._url).split('-')[4],'%Y%m%dt%H%M%S')
if sat=='S1A':
if start_date>=datetime(2019,2,28) and start_date<=datetime(2019,3,12):
cst = 10**(37./10) #wv2 optimised
else:
# cst = 10**(39.1/10) #wv2
cst = 10**(36.8/10) #wv2
elif sat=='S1B':
if start_date>=datetime(2019,5,14) and start_date<=datetime(2019,5,28):
cst = 10**(37./10) #wv2 optimised
else:
cst = 10**(36.8/10) #wv2
else:
raise Exception('satellite not yet taken into account for calibration')
luts = self._attributes['noise']
xval, yval = luts['line'], luts['pixel']
logging.debug('cst value noise %s',cst)
values = np.ones((xval.size,yval.size))*cst
#sigma0 = self.read_values('sigma0')
#values = np.ones(sigma0.shape)*cst #correction Janvier 2020 suite conversation P. Vincent
val = luts[fieldname]
logging.debug('lut val noise before interp = %s',val.mean())
values = self._interpolate_values(xval, yval, val, slices,
intmethod='RectBivariateSpline',
kx=1, ky=1)/cst # multiplying by the constant should be enough; Feb 2020: trying division instead because multiplying is too strong
else:
luts = self._attributes['noise']
xval, yval = luts['line'], luts['pixel']
@@ -563,6 +591,10 @@ class SAFEGeoTiffFile(AbstractMapper):
values = self._interpolate_values(xval, yval, val, slices,
intmethod='RectBivariateSpline',
kx=1, ky=1)
elif fieldname == 'noise_linear_equivalent': #added by agrouaze to be able to compare it to the sigma0
noise = self.read_values('noise', slices, blocksize)
lut = self.read_values('sigma0_lut', slices=slices)
values = noise.astype('float32') / lut**2 # January 2020: returns values like 0.001972573, equivalent to -27 dB
elif fieldname == 'gamma0':
dnum = self.read_values('digital_number', slices=slices)
lut = self.read_values('gamma0_lut', slices=slices)
......
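# Hedged side note (illustrative helper, not part of the mapper): the
# per-satellite noise constants above are quoted in dB and applied as linear
# power via 10**(dB/10); 'noise_linear_equivalent' likewise scales the noise
# by the squared sigma0 LUT, mirroring sigma0 = digital_number**2 / lut**2.
import numpy as np

def db_to_linear(db):
    # decibels -> linear power ratio
    return 10.0 ** (np.asarray(db, dtype='float64') / 10.0)

def linear_to_db(lin):
    # linear power ratio -> decibels
    return 10.0 * np.log10(lin)

# round trip on the 36.8 dB constant used above:
assert abs(linear_to_db(db_to_linear(36.8)) - 36.8) < 1e-9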
"""
author: Antoine Grouazel
purpose: mapper fo TDS-1 reflectometry L1b data (receiver+tracks)
date: 08/11/2016
not finished yet
"""
import numpy
from datetime import datetime
from collections import OrderedDict
import netCDF4
#from .ncfile import NCFile
from .. import READ_ONLY, WRITE_NEW
from cerbere.datamodel.grid import Grid
from cerbere.mapper.abstractmapper import AbstractMapper
from ..datamodel.field import Field
from ..datamodel.variable import Variable
import os
import logging
import sys
# sys.path.append('~agrouaze/workspace_leste/Regalo_gnss/')
sys.path.append('/home3/homedir7/perso/agrouaze/workspace_leste/Regalo_gnss/')
from agrouaze_reader_tds1 import TDS1_track_L1b,TDS1_get_handler,TDS1_map_data,DEFAULT_TIME_UNITS
VIRTUALFIELD_DESCR = {'azimuth': 'azimuth angle of the sensor',
"AttitudeUncertainty":"AttitudeUncertainty",
'lon':'longitudes -180 +180',
'lat':'latitudes -90 +90',
'time':'dates of the data',
'LandType':'Type of surface: Waterbodies=0 other=1 '
# 'wind_direction': 'wind direction (where the wind is blowing)'
}
VIRTUALFIELD_STDNAME = {'azimuth': 'azimuth',
"AttitudeUncertainty":"AttitudeUncertainty",
'lon':'lon',
'lat':'lat',
'time':'time',
'LandType':'landtype'
# 'wind_direction': 'wind_to_direction'
}
VIRTUALFIELD_UNITS = {
'azimuth': 'degree',
'lon': 'degree',
'lat': 'degree',
'time': DEFAULT_TIME_UNITS,
'AttitudeUncertainty':'uncertainty_unit',
'LandType':'no unit'
# 'wind_direction': 'degrees'
}
DEFAULT_UNIT = 'unknown_unit'
class TDS1L1bTIFFFile(AbstractMapper):
def __init__(self, url=None, mode=READ_ONLY, **kwargs):
pathToTDS = os.path.dirname(os.path.dirname(os.path.dirname(url)))
collection = os.path.basename(os.path.dirname(url))
track_number = os.path.basename(url).strip('.xml').strip('TD')
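# e.g. a url ending in .../collection/TD0123.xml (hypothetical layout) gives
# collection='collection' and track_number='0123'; note that str.strip()
# removes characters, not substrings, so this relies on the TDxxxx.xml pattern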
logging.debug('pathToTDS %s',pathToTDS)
logging.debug('collection %s',collection)
logging.debug('track_number %s',track_number)
self._handler = None
self._url = url
self._mapped_data = None
if self._handler is None:
self._handler = TDS1_get_handler(pathToTDS,collection,track_number)
self.read_values('lon')
return
# def open(self, view=None, datamodel=None, datamodel_geolocation_dims=None):
def close(self):
self._handler = None
def create_dim(self, dimname, size=None):
raise NotImplementedError
def create_field(self):
raise NotImplementedError
def get_bbox(self):
lons = self.read_values('lon')
lats = self.read_values('lat')
lonmin = lons.min()
latmin = lats.min()
lonmax = lons.max()
latmax = lats.max()
return lonmin,latmin,lonmax,latmax
def get_dimensions(self):
res = OrderedDict([('time',len(self.read_values('lon'))),
('capteur_referentiel_dim',self.get_dimsize('capteur_referentiel_dim')),
('temperatureSensor',self.get_dimsize('temperatureSensor')),
('range_footprint', self.get_dimsize('range_footprint')),
('azimuth_footprint', self.get_dimsize('azimuth_footprint'))
])
# for dim in ['time','capteur_referentiel_dim','temperatureSensor']:
# val = self.get_dimsize(dim)
# return OrderedDict([('time',len(self.read_values('lon')))])
return res
def get_end_time(self):
times = self.read_values('time')
res = netCDF4.num2date(times[-1],DEFAULT_TIME_UNITS)
return res
def get_start_time(self):
times = self.read_values('time')
res = netCDF4.num2date(times[0],DEFAULT_TIME_UNITS)
return res
def read_field_attributes(self):
raise NotImplementedError
def read_fillvalue(self):
raise NotImplementedError
def read_global_attribute(self,name):
if name == 'title':
res = 'TDS1-L1b'
else:
res = name
return res
# raise NotImplemented
def read_global_attributes(self):
return ['title']
def write_field(self):
raise NotImplementedError
def write_global_attributes(self):
raise NotImplementedError
def get_standard_dimname(self,geodimname):
# dimension names are already standard in this mapper
return geodimname
def get_geolocation_field(self,fieldname):
# geolocation fields are stored under their standard names
return fieldname
def get_matching_dimname(self,dimname):
return dimname
def open(self,view=None,
datamodel=None,
datamodel_geolocation_dims=None):
return self._handler
def get_fieldnames(self):
'''
Returns the list of geophysical fields stored for the feature.
Excludes the geolocation fields in the returned list.
'''
# geophyvars = super(TDS1L1bTIFFFile, self).get_fieldnames()
geophyvars = []
for tt in self._mapped_data:
if tt not in ['lon','lat','time']:
geophyvars.append(tt)
# geophyvars = ['azimuth']
# geophyvars.extend(["azimuth"])
#geophyvars.remove('height')
return geophyvars
def get_dimsize(self,dimension_name):
if dimension_name == 'time':
res = len(self.read_values('lon'))
elif dimension_name == 'capteur_referentiel_dim':
res = 3
elif dimension_name == 'temperatureSensor':
res = 4
elif dimension_name == 'range_footprint':
res = 128
elif dimension_name == 'azimuth_footprint':
res = 20
else:
raise ValueError('unknown dimension %s' % dimension_name)
return res
def read_values(self, fieldname, slices=None):
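# TDS1_map_data parses the whole track once; the result is cached in
# self._mapped_data so repeated read_values calls are cheap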
if self._mapped_data:
mapped_data = self._mapped_data
else:
logging.debug('go for reading %s',fieldname)
mapped_data = TDS1_map_data(self._handler)
# if mapped_data[fieldname].dtype == 'O':
# logging.debug('chnge dtype')
# mapped_data[fieldname] = mapped_data[fieldname].astype('float64')
# if fieldname == 'time':
# mapped_data[fieldname] = netCDF4.date2num(mapped_data[fieldname],DEFAULT_TIME_UNITS)
self._mapped_data = mapped_data
if fieldname not in self._mapped_data:
raise Exception('Field %s does not exist' % fieldname)
return mapped_data[fieldname]
def read_field(self, fieldname):
"""
Return the field, without its values.
Actual values can be retrieved with read_values() method.
"""
if fieldname in VIRTUALFIELD_DESCR:
desc = VIRTUALFIELD_DESCR[fieldname]
stdname = VIRTUALFIELD_STDNAME[fieldname]
unit = VIRTUALFIELD_UNITS[fieldname]
else:
desc = fieldname
stdname = fieldname
unit = DEFAULT_UNIT
# if fieldname in ['azimuth','lon','lat','time']:
# create a virtual field
variable = Variable(
shortname=fieldname,
description=desc,
authority=self.get_naming_authority(),
standardname=stdname
)
if fieldname in ['AttitudeUncertainty','SPx','Rv','Tx','Tv','Rx','Attitude']:
dims = OrderedDict([('time', self.get_dimsize('time')),
('capteur_referentiel_dim', self.get_dimsize('capteur_referentiel_dim'))])
elif fieldname in ['InternalTemperatures']:
dims = OrderedDict([('time', self.get_dimsize('time')),
('temperatureSensor', self.get_dimsize('temperatureSensor'))])
# elif fieldname in ['LandType']:
elif fieldname in ['DDM']:
dims = OrderedDict([('time', self.get_dimsize('time')),
('range_footprint', self.get_dimsize('range_footprint')),
('azimuth_footprint', self.get_dimsize('azimuth_footprint')),
])
else:
dims = OrderedDict([('time', self.get_dimsize('time'))])
field = Field(
variable,
dims,
datatype=numpy.dtype(numpy.float32),
units=unit
)
field.attach_storage(self.get_field_handler(fieldname))
# else:
# field = OrderedDict([])
# field = NCFile.read_field(self, fieldname)
# vava=field.variable
# uni=field.units
# if 'z' in field.get_dimnames():
# field = Field(
# vava,
# OrderedDict([('time', 1),
# #('z', self.get_dimsize('z')),
# ('y', self.get_dimsize('y')),
# ('x', self.get_dimsize('x'))
# ]),
# datatype=numpy.dtype(numpy.float32),
# units=uni
# )
# field.attach_storage(self.get_field_handler(fieldname))
return field
\ No newline at end of file
from cerbere.mapper.amsr2l2nnsolabncfile import AMSR2L2NNSOLABNCFile
from matplotlib import pyplot as plt
import logging
from cerbere.datamodel.swath import Swath
from mpl_toolkits.basemap import Basemap
import numpy as np
logging.basicConfig(level=logging.DEBUG)
ff ='/home/cercache/project/smosstorm/data/smosstorm/l2/amsr2/2016/07/25/SOLAB_AMSR2_L2_NN_20160725_130201_20160725_135131_067_A_v1.nc'
print ff
nc = AMSR2L2NNSOLABNCFile(ff)
print nc.get_fieldnames()
va = nc.read_values('wind_speed')
print 'values',va.shape,va
time_field = nc.read_field('time')
print time_field
print 'times',time_field.get_values()
print 'bbox,',nc.get_bbox()
lons = nc.read_values('lon')
print 'lons',lons,lons.shape
lats = nc.read_values('lat')
plot = False
if plot:
plt.pcolor(va.squeeze())
plt.show()
mW=-179
mE=179
mN=70
mS=-60
plt.hold(True)
plt.title('')
m = Basemap(projection='merc',llcrnrlat=mS,urcrnrlat=mN,llcrnrlon=mW, urcrnrlon=mE,resolution='c')
m.drawmapboundary(fill_color='#85A6D9')
m.drawcoastlines(color='#6D5F47', linewidth=.4)
m.fillcontinents(color='grey',lake_color='#85A6D9')
# m.drawcountries(color='r')
#m.shadedrelief()
meridians = np.arange(mW,mE,np.around((mE-mW)/10.0))
m.drawmeridians(meridians,labels=[0,0,0,1],fontsize=10,fmt='%i')
parallels = np.arange(mS,mN,np.around((mN-mS)/10.0))
m.drawparallels(parallels,labels=[1,0,0,0],fontsize=10,fmt='%i')
markersize = 3
x,y = m(lons.ravel(),lats.ravel())
m.scatter(x,y,s=7,c=va.squeeze(),lw=0)
plt.colorbar()
plt.show()
mys = Swath()
mys.load(nc)
print mys.get_lon()
print mys.get_field('wind_speed')
\ No newline at end of file
from cerbere.mapper.aquariusl3podaacncfilev5 import AquariusL3PODAACNCFileV5
from cerbere.datamodel.grid import Grid
from matplotlib import pyplot as plt
ff ='/home/datawork-cersat-public/archive/provider/podaac/satellite/l3/aquarius/v5/sci/monthly/2012/Q20121832012213.L3m_MO_SCI_V5.0_SSS_1deg'
nc = AquariusL3PODAACNCFileV5(ff)
dims = nc.get_dimensions('SSS')
print dims
fields = nc.get_fieldnames()
SSS = nc.read_values('SSS')
sssfield = nc.read_field('SSS')
print sssfield
lons = nc.read_values('lon')
lats = nc.read_values('lat')
time = nc.read_values('time')
timefield = nc.read_field('time')
magrid = Grid(longitudes=lons,latitudes=lats,times=timefield)
print magrid
print sssfield
print SSS.shape
for dd in nc.get_dimensions():
print dd
# print nc.get_full_dimensions(dd)
print nc.get_dimsize(dd)
plt.figure()
plt.pcolor(SSS)
plt.colorbar()
output = '/tmp/sss.png'
plt.savefig(output)
print output
\ No newline at end of file
"""
cryosat 2 alti meters AWI composite
"""
# from cerbere.mapper.cryosat2ncfile import Cryosat2NCFile
import matplotlib
# matplotlib.use('Agg')
from cerbere.mapper.cryosat2awincfile import Cryosat2AWINCFile
import cerbere
print cerbere.__file__
from cerbere.datamodel.image import Image
from cerbere.datamodel.grid import Grid
from matplotlib import pyplot as plt
import numpy
import logging
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
ff = '/home3/amzer/Fanny/DATA/SEAICETHICK/DATA_CRYOSAT_2011-2013/2013/CryoSat_product_nh_201301.nc'
nc = Cryosat2AWINCFile(url=ff)
lons = nc.read_values('lon')
fifi=nc.read_field('lon')
print 'field lon',fifi
print nc.get_fieldnames()
# lons = numpy.ma.masked_where(numpy.isnan(lons),lons)
print lons
ice_concentration = nc.read_values('ice_concentration')
print 'longitudes test shape'
print 'shape lon',lons.shape
print
print 'times',nc.read_times()
print 'times field',nc.read_field('time')
for hh in ['lat','time','lon']:
print hh,nc.get_geolocation_field(hh)
# plt.pcolor(lons)
# plt.show()
# momo = Image()
momo = Grid()
momo.load(nc)
ice = momo.get_field('ice_concentration').get_values()
# plt.pcolor(ice_concentration)
print nc.get_start_time()
nc.close()
plt.pcolor(ice)
plt.colorbar()
plt.show()
\ No newline at end of file
from matplotlib import pyplot as plt
from cerbere.mapper.ecmwf0125ncfile import ECMWF0125NCFile
from cerbere.datamodel.grid import Grid
import logging
logging.basicConfig(level=logging.DEBUG)
ff = '/home/cerdata/provider/ecmwf/model/forecast_0125/netcdf/2016/244/METEOFRANCE_CEP_20160831T03Z.nc'
nc = ECMWF0125NCFile(ff)
mygrid = Grid()
mygrid.load(nc)
print mygrid
titi = mygrid.get_fieldnames()
print type(titi),titi
if False:
ws = nc.read_values('wind_speed').squeeze()
print 'wv',ws.shape
# nc.close()
plt.pcolor(ws)
plt.colorbar()
plt.show()
msl = mygrid.get_values('wind_direction')
print 'wind_direction',msl.shape
plt.pcolor(msl)
plt.colorbar()
plt.show()
from matplotlib import pyplot as plt
from cerbere.mapper.ecmwf05ncfile import ECMWF05NCFile
from cerbere.datamodel.grid import Grid
ff = '/home/cercache/users/mirror/home2/bagan/private/data/gridded/ecmwf_analysis_05/netcdf/2016/021/201601210000.nc'
nc = ECMWF05NCFile(ff)
ws = nc.read_values('wind_speed')
print 'wv',ws.shape
# nc.close()
plt.pcolor(ws)
plt.colorbar()
plt.show()
mygrid = Grid()
mygrid.load(nc)
msl = mygrid.get_values('msl')
print 'msl',msl.shape
plt.pcolor(msl)
plt.colorbar()
plt.show()
titi = mygrid.get_fieldnames()
print type(titi),titi
"""
author: Antoine Grouazel
"""
from cerbere.mapper.ncfile import NCFile
from cerbere.mapper.gpcpv22ncfile import GPCPNCFile
from cerbere.datamodel.grid import Grid
ff = '/home/cerdata/provider/pml/blended/l3/gpcp_v2.2/data/netcdf/2013/gpcp_v22.201302.nc'
nc= GPCPNCFile(ff)
print nc.get_fieldnames()
for ll in ['lon',"lat"]:
print ll,nc.read_values(ll)[0],nc.read_values(ll)[-1],nc.read_values(ll).shape
tmp =nc.read_values('Precip')
print tmp.mean(),tmp.shape
gg = Grid()
gg.load(nc)
print gg
\ No newline at end of file
'''
test reading KNMI BUFR reformatted into NetCDF (DCF) METOP ASCAT nc file
'''
#setenv PYTHONPATH ~/git/cerbere/:/opt/lib/python2.6/site-packages/
#setenv BUFR_TABLES /opt/lib/emos/bufrtables/
import time
import logging
from cerbere.mapper.knmil2ifrncfile import KNMIL2IFRNCFile
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
start = time.time()
#from cerbereutils.plot.mapping import Map
f = '/home/cercache/users/mirror/home/messat/exploit/Data/Swath/ASCATB_KNMI/NetCDF/12.5_coastal/2017/06/11/24546.nc'
mapper_handler = KNMIL2IFRNCFile(url=f)
mapper_handler.open()
print "\nFIELD NAMES"
fieldnames = mapper_handler.get_fieldnames()
for fieldname in fieldnames:
print 'var:',fieldname
print "\nATTRIBUTES"
attrs = mapper_handler.read_global_attributes()
# for att in attrs:
# print att, bufrfile.read_global_attribute(att)
print "\nFIELDS"
# for fieldname in fieldnames:
# print bufrfile.read_field(fieldname)
print "\nSUBSAMPLING"
#print bufrfile.read_values('lat', slices=(slice(0,10), slice(0,10)))
#print bufrfile.read_values('time', slices=(slice(0,10), slice(0,10)))
print "\nTIME COVERAGE"
print mapper_handler.get_start_time()
print mapper_handler.get_end_time()
#load lat field from the mapper:
latfield0 = mapper_handler.get_geolocation_field('lat')
print "latfield0", latfield0
latfield = mapper_handler.read_field('lat')
print latfield
# LOAD THROUGH MODEL
print "\n\nLOAD SWATH MODEL"
from cerbere.datamodel.swath import Swath
swath = Swath()
swath.load(mapper_handler,checkstorage=False)
data = swath.get_lat()
print "#####", data.min(), data.max(), data.count(), data
data = swath.get_lon()
print "#####", data.min(), data.max(), data.count(), data
print swath.get_bbox()
print 'lecture time elapsed',time.time()-start
special_plot = True
if special_plot:
import numpy
import pdb
import matplotlib.pyplot as plt
# from mpl_toolkits.mplot3d import Axes3D
from mpl_toolkits.basemap import Basemap
limS=-89
limN=89
limW=-179
limE=179
# projection = Basemap(projection='ortho',lon_0=-110,lat_0=-30,resolution='l')
# projection = Basemap(projection='merc',llcrnrlat=limS,urcrnrlat=limN,llcrnrlon=limW, urcrnrlon=limE,resolution='i')
projection = Basemap(projection='npstere',boundinglat=10,lon_0=270,resolution='l')
projection.drawcoastlines(linewidth=0.25)
# projection.drawcountries(linewidth=0.25)
# projection.drawmapboundary(fill_color='aqua')
# projection.fillcontinents(color='yellow',lake_color='aqua')
# fig = plt.figure()
# pdb.set_trace()
lon0 = swath.get_lon()
lat0 = swath.get_lat()
lon = numpy.ma.ravel(lon0)