......@@ -53,7 +53,7 @@ REGISTER = 'cat {} | felyx-metric-index --stdin -g 5000'
# command to extract the locations of S-3 matchups with in situ data
# used for multi-colocation with third-party products
MATCHUP_LOCATION = 'extract_matchup_locations -i "{}" --mdb {} -o {}'
MATCHUP_LOCATION = 'extract_matchup_locations -i "{}" --mdb {} -o {} '
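# illustrative only (hypothetical paths): the placeholders are filled with
# str.format, e.g.
#   MATCHUP_LOCATION.format('/data/s3a/2018/152/*.nc', 'mdb_drifter', '/tmp/locations.nc')
# would yield:
#   extract_matchup_locations -i "/data/s3a/2018/152/*.nc" --mdb mdb_drifter -o /tmp/locations.nc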
# command for matchups assembling and collation
ASSEMBLE = (
......@@ -112,9 +112,12 @@ def manifest(f, dataset, root, shift=0, check=False):
"""
manifest_output = config.get('felyx', 'manifest_output')
patternroot = '/' + os.path.join(*[
_ for _ in root.split('/') if '%' not in _]
)
manif = "{}.manifest".format(
f.replace(
root.rstrip('/'),
patternroot.rstrip('/'),
os.path.join(manifest_output, dataset)
)
)
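# sketch of the substitution above, with hypothetical values:
#   root = '/data/sst/l2p/%Y/%j' -> patternroot = '/data/sst/l2p'
#   f = '/data/sst/l2p/2018/152/granule.nc'
#   manif = '<manifest_output>/<dataset>/2018/152/granule.nc.manifest'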
......@@ -287,9 +290,10 @@ def make_miniprod_and_metrics(
progress_report['input_files'] = input_files
for dataset in datasets:
for d in history_dates:
full_path = os.path.join(datasets[dataset],
d.strftime("%Y/%j"),
"*")
full_path = os.path.join(
d.strftime(datasets[dataset]),
"*"
)
logger.debug("Search pattern for {}: {}"
.format(dataset, full_path))
files = glob.glob(full_path)
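# assumed example: if datasets[dataset] = '/data/sst/l2p/%Y/%j' and
# d = 2018-06-01, d.strftime() expands the date directives so that
# full_path = '/data/sst/l2p/2018/152/*' before globbing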
......@@ -736,7 +740,7 @@ def ingest_trajectories2(config, csv_traj, cleaning=True):
return 0
def assemble(platform_type, start, end, mdb_prefix, cfgfile):
def assemble(platform_type, start, end, mdb_prefix, config, cfgfile):
"""build the command for match-up assembling"""
reference = config.get('processing', 'reference')
datasets = [_ for _, val in config.items('data')]
......@@ -1135,6 +1139,7 @@ if __name__ == "__main__":
if options.ancillary or options.all:
# reference dataset (against which all other datasets are matched)
reference = config.get('processing', 'reference')
prefix = config.get('processing', 'site_prefix')
location_output = config.get('felyx', 'matchups_location_output')
# extract and register first the match-up locations (temporary)
......@@ -1162,6 +1167,10 @@ if __name__ == "__main__":
"{}_{}".format(mdb_prefix, platform_type),
loc_file
)
command += ' --prefix {}'.format(mdb_prefix)
if prefix is not None:
command += ' --replaced-prefix {}'.format(prefix)
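# the resulting command would look like (hypothetical values):
#   extract_matchup_locations -i "/data/s3a/2018/152/*.nc" --mdb mdb_drifter_S3A \
#       -o /tmp/locations.nc --prefix mdb_drifter --replaced-prefix cmems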
logger.debug(command)
res = os.system(command)
if res != 0 or not os.path.exists(loc_file):
......@@ -1212,7 +1221,7 @@ if __name__ == "__main__":
if deltaT == 1:
logger.info("...assemble the for day {}"
.format(d))
assemble(platform_type, start, end, mdb_prefix, options.config)
assemble(platform_type, start, end, mdb_prefix, config, options.config)
else:
curend = start + datetime.timedelta(days=deltaT)
while start < end:
......@@ -1221,7 +1230,7 @@ if __name__ == "__main__":
start.strftime("%H:%M"),
curend.strftime("%H:%M")
))
assemble(platform_type, start, curend, mdb_prefix)
assemble(platform_type, start, curend, mdb_prefix, config, options.config)
start = curend
curend = start + datetime.timedelta(days=deltaT)
......
......@@ -23,7 +23,7 @@ import dateutil.parser
from felyx_work.utils.matchup import build
LOGGER = logging.getLogger(__name__)
logger = logging.getLogger(__name__)
def read_configuration(cfgfile):
......@@ -164,18 +164,18 @@ def felyx_assemble_matchups(cli_args=None):
# Set up verbosity option.
if arguments.verbose:
LOGGER.setLevel(logging.DEBUG)
logger.setLevel(logging.DEBUG)
elif arguments.quiet:
LOGGER.setLevel(logging.WARNING)
logger.setLevel(logging.WARNING)
elif arguments.silent:
LOGGER.setLevel(logging.FATAL)
logger.setLevel(logging.FATAL)
else:
LOGGER.setLevel(logging.INFO)
logger.setLevel(logging.INFO)
# set log file
if arguments.log is not None:
hdlr = logging.FileHandler(arguments.log)
LOGGER.addHandler(hdlr)
logger.addHandler(hdlr)
# time selection
try:
......
......@@ -350,8 +350,8 @@ def process_locally(inputs, args, miniprod_dir, static, dynamic,
logger.info('Processing {}'.format(input_path))
_source_file = SourceFile(input_path, args.dataset_id)
static_results = {}
dynamic_results = {}
static_results = None
dynamic_results = None
with _source_file.load() as source_file:
if static is True:
try:
......@@ -363,6 +363,8 @@ def process_locally(inputs, args, miniprod_dir, static, dynamic,
signature,
allowed_collections,
allowed_sites)
except felyx_work.extraction.NoMiniprod:
logger.warning('No colocation found for {}'.format(source_file.input_url))
except felyx_work.extraction.NoExtraction:
has_filter = ((allowed_collections is not None) or
(allowed_sites is not None))
......@@ -384,6 +386,8 @@ def process_locally(inputs, args, miniprod_dir, static, dynamic,
signature,
allowed_collections,
allowed_sites)
except felyx_work.extraction.NoMiniprod:
logger.warning('No colocation found for {}'.format(source_file.input_url))
except felyx_work.extraction.NoExtraction:
has_filter = ((allowed_collections is not None) or
(allowed_sites is not None))
......@@ -396,6 +400,15 @@ def process_locally(inputs, args, miniprod_dir, static, dynamic,
'selected with --allowed-collections or'
' --allowed-sites. Ignoring the error.')
_ok = True
if ((static_results is None) and (dynamic_results is None)):
logger.warning('No miniproduct generated for this input file')
continue
if static_results is None:
static_results = {}
if dynamic_results is None:
dynamic_results = {}
_ok = export_miniprods(input_path, static_results, dynamic_results,
args.manifest_dir, args.index_results)
ok = ok and _ok
......
......@@ -32,6 +32,18 @@ class MissingMiniprodFields(Exception):
super(Exception, self).__init__(_msg, *args, **kwargs)
def _parse_datetime(str_value):
""""""
patterns = ('%Y%m%dT%H%M%S', '%Y%m%dT%H%M%SZ')
for pattern in patterns:
try:
dt = datetime.datetime.strptime(str_value, pattern)
return dt
except ValueError:
pass
raise ValueError('unrecognized datetime format: {}'.format(str_value))
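# usage sketch: both attribute conventions seen in the miniprods are accepted
#   _parse_datetime('20200601T120000')   -> datetime(2020, 6, 1, 12, 0)
#   _parse_datetime('20200601T120000Z')  -> datetime(2020, 6, 1, 12, 0)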
def dict_from_Data(miniprod):
"""
"""
......@@ -68,19 +80,16 @@ def dict_from_Data(miniprod):
percentage_coverage = float(_percentage_coverage)
_date_created = get_attr('date_created')
date_created = datetime.datetime.strptime(_date_created, '%Y%m%dT%H%M%SZ')
date_created = _parse_datetime(_date_created)
_date_modified = get_attr('date_modified')
date_modified = datetime.datetime.strptime(_date_modified,
'%Y%m%dT%H%M%SZ')
date_modified = _parse_datetime(_date_modified)
_time_coverage_start = get_attr('time_coverage_start')
time_coverage_start = datetime.datetime.strptime(_time_coverage_start,
'%Y%m%dT%H%M%SZ')
time_coverage_start = _parse_datetime(_time_coverage_start)
_time_coverage_stop = get_attr('time_coverage_end')
time_coverage_stop = datetime.datetime.strptime(_time_coverage_stop,
'%Y%m%dT%H%M%SZ')
time_coverage_stop = _parse_datetime(_time_coverage_stop)
result = {'site': site_id,
'source': source,
......@@ -102,8 +111,7 @@ def dict_from_Data(miniprod):
dynamic_lat = float(get_attr('dynamic_target_latitude'))
_dynamic_time = get_attr('dynamic_target_time')
dynamic_time = datetime.datetime.strptime(_dynamic_time,
'%Y%m%dT%H%M%S')
dynamic_time = _parse_datetime(_dynamic_time)
result['dynamic_target_longitude'] = dynamic_lon
result['dynamic_target_latitude'] = dynamic_lat
......@@ -151,19 +159,16 @@ def dict_from_netcdf4(nc_handler):
percentage_coverage = float(_percentage_coverage)
_date_created = nc_handler.date_created
date_created = datetime.datetime.strptime(_date_created, '%Y%m%dT%H%M%SZ')
date_created = _parse_datetime(_date_created)
_date_modified = nc_handler.date_modified
date_modified = datetime.datetime.strptime(_date_modified,
'%Y%m%dT%H%M%SZ')
date_modified = _parse_datetime(_date_modified)
_time_coverage_start = nc_handler.time_coverage_start
time_coverage_start = datetime.datetime.strptime(_time_coverage_start,
'%Y%m%dT%H%M%SZ')
time_coverage_start = _parse_datetime(_time_coverage_start)
_time_coverage_stop = nc_handler.time_coverage_end
time_coverage_stop = datetime.datetime.strptime(_time_coverage_stop,
'%Y%m%dT%H%M%SZ')
time_coverage_stop = _parse_datetime(_time_coverage_stop)
result = {'site': site_id,
'source': source,
......@@ -185,8 +190,7 @@ def dict_from_netcdf4(nc_handler):
dynamic_lat = float(nc_handler.dynamic_target_latitude)
_dynamic_time = nc_handler.dynamic_target_time
dynamic_time = datetime.datetime.strptime(_dynamic_time,
'%Y%m%dT%H%M%S')
dynamic_time = _parse_datetime(_dynamic_time)
result['dynamic_target_longitude'] = dynamic_lon
result['dynamic_target_latitude'] = dynamic_lat
......
......@@ -32,13 +32,8 @@ def static_sites(source_file, site_collections, allowed_sites=None):
for code, shape in sites.items():
if (allowed_sites is not None) and code not in allowed_sites:
continue
if boundary_shape.use_adjusted or shape.use_adjusted:
if not boundary_shape.adjusted_shape.disjoint(
shape.adjusted_shape):
result[code] = shape
else:
if not boundary_shape.disjoint(shape):
result[code] = shape
if not boundary_shape.disjoint(shape):
result[code] = shape
if result == {}:
continue
revised_collections[collection] = result
......@@ -143,17 +138,15 @@ def dynamic_sites(source_file, site_collections, allowed_sites=None):
(lat_min, lat_min, lat_max, lat_max, lat_min)
)
shape = GeographicPolygon(
numpy.array((lon_boundary, lat_boundary)).T
[(lon_boundary,
lat_boundary,
(lons[int(len(lons)/2)], lats[int(len(lats)/2)]))]
)
if boundary_shape is None:
boundary_shape = source_file.data_shape
if boundary_shape.use_adjusted or shape.use_adjusted:
if not boundary_shape.adjusted_shape.disjoint(
shape.adjusted_shape):
result[site] = shape
elif boundary_shape.intersects(shape):
if boundary_shape.intersects(shape):
result[site] = shape
if result == {}:
......
......@@ -23,6 +23,55 @@ from felyx_work.miniprod.shape import GeographicLine, GeographicPolygon
EARTH_MEAN_RADIUS = 6.37123E6 # meters
def get_swath_lonlat(lon, lat):
"""
@TODO: will not work well with missing lat/lon values
@TODO: the number of vertices should be chosen in a more optimal way (fine tuning)
"""
nbvertices = 50
step0 = int(lon.shape[0] / nbvertices)
step1 = int(lon.shape[1] / nbvertices)
lon_boundary = numpy.concatenate(
(
lon[0, ::step1].compressed(),
lon[::step0, -1].compressed(),
lon[-1, ::-step1].compressed(),
lon[::-step0, 0].compressed()
)
)
lat_boundary = numpy.concatenate(
(
lat[0, ::step1].compressed(),
lat[::step0, -1].compressed(),
lat[-1, ::-step1].compressed(),
lat[::-step0, 0].compressed()
)
)
# find duplicates
uniques, uniq_idx, counts = numpy.unique(lon_boundary, return_index=True, return_counts=True)
duplicates_lon = uniq_idx[numpy.where(counts >= 2)]
uniques, uniq_idx, counts = numpy.unique(lat_boundary, return_index=True, return_counts=True)
duplicates_lat = uniq_idx[numpy.where(counts >= 2)]
duplicates = set(duplicates_lon).intersection(duplicates_lat)
lon_boundary = numpy.delete(lon_boundary, list(duplicates))
lat_boundary = numpy.delete(lat_boundary, list(duplicates))
inside_coords = tuple(int(ti / 2) for ti in lat.shape)
inside_pix = (
lon[inside_coords],
lat[inside_coords]
)
return (lon_boundary, lat_boundary, inside_pix)
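# the returned (lon_boundary, lat_boundary, inside_pix) tuple matches the
# entry format expected by GeographicPolygon below, e.g. (illustrative):
#   GeographicPolygon([get_swath_lonlat(lon[0, :, :], lat[0, :, :])])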
def distance(lat1, lon1, lat2, lon2):
"Returns the distance in meter between two points"
lat1 = numpy.radians(lat1)
......@@ -52,41 +101,37 @@ def extract_swath_boundary(lon, lat):
if lat.ndim == lon.ndim == 3:
lon_boundary = numpy.concatenate(
(
lon[0, 0, ::5].compressed(),
lon[0, ::-5, -1].compressed(),
lon[0, -1, ::-5].compressed(),
lon[0, ::5, 0].compressed()
)
# split the swath in two to cope with the self-wrapping of some orbits
# @TODO should be improved: the split is only needed in a few cases...
# @TODO should be improved to cut along the along-track dimension;
# here the split index is a guess
splitidx = lon.shape[1] // 2
footprint1 = get_swath_lonlat(
lon[0, :splitidx, :], lat[0, :splitidx, :]
)
lat_boundary = numpy.concatenate(
(
lat[0, 0, ::5].compressed(),
lat[0, ::-5, -1].compressed(),
lat[0, -1, ::-5].compressed(),
lat[0, ::5, 0].compressed()
)
footprint2 = get_swath_lonlat(
lon[0, splitidx - 1:, :], lat[0, splitidx - 1:, :]
)
else:
lon_boundary = numpy.concatenate(
(
lon[0, ::5].compressed(),
lon[::5, -1].compressed(),
lon[-1, ::-5].compressed(),
lon[::-5, 0].compressed()
)
# split the swath in two to cope with the self-wrapping of some orbits
# @TODO should be improved: the split is only needed in a few cases...
# @TODO should be improved to cut along the along-track dimension;
# here the split index is a guess
splitidx = lon.shape[0] // 2
footprint1 = get_swath_lonlat(
lon[:splitidx, :], lat[:splitidx, :]
)
lat_boundary = numpy.concatenate(
(
lat[0, ::5].compressed(),
lat[::5, -1].compressed(),
lat[-1, ::-5].compressed(),
lat[::-5, 0].compressed()
)
footprint2 = get_swath_lonlat(
lon[splitidx - 2:, :], lat[splitidx - 2:, :]
)
return lon_boundary, lat_boundary
return [footprint1, footprint2]
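# the two partial footprints are later combined into a single MultiPolygon
# by GeographicPolygon in get_data_shape below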
def get_data_shape(datamodel):
......@@ -104,14 +149,10 @@ def get_data_shape(datamodel):
elif isinstance(datamodel, Swath):
lon, lat = datamodel.get_lon(), datamodel.get_lat()
lon_boundary, lat_boundary = extract_swath_boundary(lon, lat)
swath_shape = extract_swath_boundary(lon, lat)
try:
boundary_shape = GeographicPolygon(
numpy.array(
(lon_boundary, lat_boundary)
).T
)
boundary_shape = GeographicPolygon(swath_shape)
except ValueError:
# Recalculate a joint mask (for rare occasions when the longitude
# and latitude have mismatching missing data.
......@@ -119,17 +160,7 @@ def get_data_shape(datamodel):
lon.mask = fixed_mask
lat.mask = fixed_mask
if not lon_boundary.any() and not lat_boundary.any():
raise ValueError(
'The input file does not have a valid geospatial boundary.'
)
lon_boundary, lat_boundary = extract_swath_boundary(lon, lat)
boundary_shape = GeographicPolygon(
numpy.array(
(lon_boundary, lat_boundary)
).T
)
boundary_shape = GeographicPolygon(swath_shape)
elif isinstance(datamodel, Grid):
lon, lat = datamodel.get_lon(), datamodel.get_lat()
......@@ -139,9 +170,9 @@ def get_data_shape(datamodel):
(lat[0], lat[0], lat[-1], lat[-1], lat[0]))
boundary_shape = GeographicPolygon(
numpy.array(
(lon_boundary.compressed(), lat_boundary.compressed())
).T
[(lon_boundary,
lat_boundary,
(lon[int(len(lon) / 2)], lat[int(len(lat) / 2)]))]
)
else:
......
......@@ -30,7 +30,7 @@ from scipy.ndimage.measurements import find_objects, label
from scipy.ndimage import generate_binary_structure
from scipy.ndimage.morphology import binary_dilation
from shapely.geometry.multipoint import MultiPoint
from shapely.geometry.polygon import Polygon
from shapely.geometry import Polygon, MultiPolygon
def get_coverage(lats, lons, site_shape):
......@@ -135,7 +135,7 @@ def slicer(
returned_coverages = []
returned_centres = []
if not isinstance(shape, Polygon):
if not isinstance(shape, (Polygon, MultiPolygon)):
raise TypeError('The shape is not valid')
lon_min, lat_min, lon_max, lat_max = shape.bounds
......@@ -160,22 +160,29 @@ def slicer(
#Bail out if no pixels are in the site.
return None
# Get the x and y slices
y_slice = find_objects(y_labels)[0][0]
x_slice = find_objects(x_labels)[0][0]
# Get the return values
returned_slice = (x_slice, y_slice)
returned_mask = mask[returned_slice]
if returned_mask.all():
returned_coverage = 100.0
else:
returned_coverage = get_coverage(
full_lat[returned_slice][returned_mask],
full_lon[returned_slice][returned_mask],
shape)
if returned_coverage is None:
return None
for x in range(x_count):
for y in range(y_count):
# Get the x and y slices
y_slice = find_objects(y_labels)[y][0]
x_slice = find_objects(x_labels)[x][0]
# Get the return values
returned_slice = (x_slice, y_slice)
returned_mask = mask[returned_slice]
if returned_mask.all():
returned_coverage = 100.0
else:
returned_coverage = get_coverage(
full_lat[returned_slice][returned_mask],
full_lon[returned_slice][returned_mask],
shape)
if returned_coverage is not None:
break
if returned_coverage is not None:
break
if returned_coverage is None:
return None
returned_slices = [returned_slice]
returned_masks = [returned_mask]
......
......@@ -332,7 +332,7 @@ class LocalFelyxData(FelyxServerData):
return collection[0]['name']
def site_collections(self, dataset, dynamic=False,
only_site_collection=None):
only_site_collection=None):
"""
Return a dictionary representation of the site collections, defined
by the contents of miniprod processing.
......@@ -391,8 +391,16 @@ class LocalFelyxData(FelyxServerData):
collection['site_collection']
)
result[group][collection['id']] = {}
# fix for inconsistent local cache (bug workaround)
unfound_sites = []
for site in collection['site']:
site_info = self.site(site_id=site)
try:
site_info = self.site(site_id=site)
except KeyError:
unfound_sites.append(site)
continue
wkt = site_info['shape']
if ';' in wkt:
# avoid issues with "SRID=4326;POLYGON ((..."
......@@ -403,6 +411,12 @@ class LocalFelyxData(FelyxServerData):
{site_info['code']: felyx_shape}
)
# remove unknown sites from collections
for site in unfound_sites:
LOGGER.error("Site {} was not found in local cache".format(site))
# remove from collection
collection['site'].remove(site)
return result
def constraints(self, dataset):
......@@ -443,7 +457,16 @@ class LocalFelyxData(FelyxServerData):
if result[site]['miniprod_size'] < chain['miniprod_size']:
result[site]['miniprod_size'] = chain['miniprod_size']
return {self.site(key)['code']: value for key, value in result.items()}
res = {}
for key, value in result.items():
try:
site = self.site(key)
res[site['code']] = value
except KeyError:
LOGGER.error('Site {} was not found'.format(key))
continue
return res
def miniprod_size(self, dataset, site_collection_id):
"""Return the single miniprod size for a dataset, collection pair.
......
......@@ -153,7 +153,7 @@ def install(root_path, es_info, web_info, force=False):
plugins_meta_dir = os.path.join(plugins_dir, '.metadata')
default_api_url = '{}/api/v1'.format(web_url)
miniprods_url = '{}/miniprods'.format(web_url)
insitu_mappings_dir = os.path.join(confi_dir, 'insitu_mappings')
insitu_mappings_dir = os.path.join(conf_dir, 'insitu_mappings')
cfg = DEFAULT.copy()
cfg['shared_workspace_root'] = work_dir
......
......@@ -34,8 +34,9 @@ def get_from_file(miniprod_path):
if dataset is None:
raise Exception('No felyx_dataset_name in the miniprod global '
'attributes')
_type = miniprod.read_global_attribute('cdm_feature_type')
if _type is None:
try:
_type = miniprod.read_global_attribute('cdm_feature_type')
except ValueError:
_type = miniprod.read_global_attribute('cdm_data_type')
if _type is None:
raise Exception('No cdm_feature_type nor cdm_data_type defined in the '
......
......@@ -166,13 +166,9 @@ def produce_miniprods(source_file, dynamic, disable_padding, miniprod_dir,
# local alias
get_extractions = felyx_work.extraction.get_extractions
try:
extractions = get_extractions(source_file, dynamic, disable_padding,
non_empty_field, allowed_collections,
allowed_sites)
except felyx_work.extraction.NoMiniprod:
LOGGER.warning('No colocation found for {}'.format(source_file.input_url))
return results
extractions = get_extractions(source_file, dynamic, disable_padding,
non_empty_field, allowed_collections,
allowed_sites)
for subset, site, site_info, collection in extractions['public']:
ok, miniprod_name, full_path = create_miniprod(source_file, subset,
......
......@@ -17,7 +17,9 @@ optimisations.
import numpy
import logging
from shapely.geometry import Polygon, LineString
from shapely.geometry import Polygon, MultiPolygon, LineString
from spherical_geometry.polygon import SphericalPolygon
from shapely.ops import cascaded_union
LOGGER = logging.getLogger(__name__)
......@@ -105,11 +107,14 @@ class GeographicShape(object):
return self._adjusted_shape
if isinstance(self, GeographicPolygon):
lons, lats = self.boundary.xy
lons = numpy.array(lons)
lons[lons < 0] += 360
coords = numpy.array((lons, lats)).T
self._adjusted_shape = GeographicPolygon(coords)
if len(self.geoms) == 1:
lons, lats = self.geoms[0].boundary.xy
lons = numpy.array(lons)
lons[lons < 0] += 360
coords = numpy.array((lons, lats)).T
self._adjusted_shape = GeographicPolygon([Polygon(coords)])
else:
self._adjusted_shape = self
if isinstance(self, GeographicLine):
lons, lats = self.xy
......@@ -189,7 +194,7 @@ class GeographicShape(object):
return self._use_adjusted
class GeographicPolygon(GeographicShape, Polygon):
class GeographicPolygon(GeographicShape, MultiPolygon):
"""
A class inheriting from GeographicShape and MultiPolygon.
......@@ -199,10 +204,57 @@ class GeographicPolygon(GeographicShape, Polygon):
def __init__(self, *args, **kwargs):
"""
Initialise a GeographicLine
Initialise a GeographicPolygon
"""
# noinspection PyArgumentList,PyArgumentList
super(GeographicPolygon, self).__init__(*args, **kwargs)
if not isinstance(args[0], list):
raise ValueError("Bad input args for GeographicPolygon")
geoms = []
for shape in args[0]:
# transform into spherical polygon
lons, lats, inside_pix = shape
sp = SphericalPolygon.from_lonlat(
lon=lons,
lat=lats,
center=inside_pix
)
e = 1e-6 # the spherical geometry library runs into assertions if using 90 or 180 exactly
# e = 0.
east = SphericalPolygon.from_lonlat(
lon=[0, 0, 180 - e, 180 - e, 0],
lat=[90 - e, -90 + e, -90 + e, 90 - e, 90 - e],
center=(15, 0)
)
west = SphericalPolygon.from_lonlat(
lon=[-180 + e, -180 + e, 0, 0, -180 + e],
lat=[90 - e, -90 + e, -90 + e, 90 - e, 90 - e],
center=(-150, 0)
)
west_int = sp.intersection(west)
east_int = sp.intersection(east)
for val in west_int.to_lonlat():
plon, plat = val
plon[plon > 180] -= 360
poly = Polygon(numpy.array((plon, plat)).T).buffer(0)
if poly.is_valid:
geoms.append(poly)
for val in east_int.to_lonlat():
plon, plat = val
plon[plon > 180] -= 360
poly = Polygon(numpy.array((plon, plat)).T).buffer(0)
if poly.is_valid:
geoms.append(poly)
footprint = cascaded_union(geoms)
if isinstance(footprint, Polygon):
super(GeographicPolygon, self).__init__([footprint], **kwargs)
else:
super(GeographicPolygon, self).__init__(footprint, **kwargs)
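# usage sketch (hypothetical coordinates): a swath footprint crossing the
# antimeridian is cut into its eastern and western halves on the sphere and
# re-assembled as a MultiPolygon, so that planar shapely predicates such as
# disjoint() or intersects() work without longitude wrapping artefacts
#   GeographicPolygon([(lons, lats, (centre_lon, centre_lat))])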
class GeographicLine(LineString, GeographicShape):
......
......@@ -65,7 +65,11 @@ class SiteCollection(object):
# build dictionary of sites id <-> code
self.sites = {}
for site_code in collection['site']:
site_id = str(server.site(site_id=site_code)['code'])
try:
site_id = str(server.site(site_id=site_code)['code'])
except KeyError:
logger.error('Site {} not found. Ignored'.format(site_code))
continue
self.sites[site_id] = site_code
# try first access to local database if existing, then remote url if
......
......@@ -207,7 +207,7 @@ def get_es_server_url(es_config=None):
if not es_config.endswith('/'):
es_config = '{}/'.format(es_config)
try:
if requests.head(es_config, timeout=5).ok:
if requests.head(es_config, timeout=60).ok:
return es_config
# Not a 200 code....
......@@ -248,7 +248,7 @@ def get_es_server_url(es_config=None):
if not config.endswith('/'):
config = '{}/'.format(config)
try:
if requests.head(config, timeout=5).ok:
if requests.head(config, timeout=60).ok:
return config
# Not a 200 code....
......
......@@ -16,6 +16,7 @@ import sys
import json
import copy
import numpy
import errno
import logging
import operator
import calendar
......@@ -94,6 +95,13 @@ def save(matchups,
output_dir='.'
):
"""save container of match-ups into a file"""
if not os.path.exists(output_dir):
try:
os.makedirs(output_dir)
except OSError as e:
if errno.EEXIST != e.errno:
raise
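# note: on Python 3 only, os.makedirs(output_dir, exist_ok=True) would be an
# equivalent one-liner; the errno guard above keeps Python 2 compatibility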
outfilename = '{}_{}_{}_{}.nc'.format(
reference_dataset,
site_collections[0],
......
......@@ -67,6 +67,7 @@ setup(
'requests>=2.7.0',
'simplejson>=3.3.0',
'requests-cache>=0.4.3',
'spherical-geometry<1.2.12', # incompatible with Python2 after 1.2.11
'cerform @ git+https://git.cersat.fr/cerbere/cerform.git@master#egg=cerform-0.1.0',
'cerinterp @ git+https://git.cersat.fr/cerbere/cerinterp.git@master#egg=cerinterp-0.1.0',
'ceraux @ git+https://git.cersat.fr/cerbere/ceraux.git@master#egg=ceraux-0.1.0',
......