diff --git a/plugins/python_providers/resources/amda.py b/plugins/python_providers/resources/amda.py
index 353176c..9d350b9 100644
--- a/plugins/python_providers/resources/amda.py
+++ b/plugins/python_providers/resources/amda.py
@@ -27,8 +27,8 @@ def get_sample(metadata,start,stop):
             elif value == 'multicomponent':
                 ts_type = pysciqlopcore.MultiComponentTimeSerie
                 default_ctor_args = (0,2)
-        tstart=datetime.datetime.fromtimestamp(start, tz=timezone.utc)
-        tend=datetime.datetime.fromtimestamp(stop, tz=timezone.utc)
+        tstart=datetime.fromtimestamp(start, tz=timezone.utc)
+        tend=datetime.fromtimestamp(stop, tz=timezone.utc)
         df = amda.get_parameter(start_time=tstart, stop_time=tend, parameter_id=param_id, method="REST")
         t = np.array([d.timestamp() for d in df.index])
         values = df.values
@@ -50,7 +50,7 @@ for name,component in amda.component.items():
 
 products = []
 for key,parameter in parameters.items():
-    path = f"/AMDA/{parameter['mission']}/{parameter['instrument']}/{parameter['dataset']}/{parameter['name']}"
+    path = f"/AMDA/{parameter['mission']}/{parameter.get('observatory','')}/{parameter['instrument']}/{parameter['dataset']}/{parameter['name']}"
     components = [component['name'] for component in parameter.get('components',[])]
     metadata = [ (key,item) for key,item in parameter.items() if key is not 'components' ]
     n_components = parameter.get('size',0)
diff --git a/plugins/python_providers/resources/test.py b/plugins/python_providers/resources/test.py
index d236313..cf5a6dc 100644
--- a/plugins/python_providers/resources/test.py
+++ b/plugins/python_providers/resources/test.py
@@ -4,42 +4,84 @@ import PythonProviders
 import pysciqlopcore
 import numpy as np
 import math
+from spwc.cache import _cache
+from spwc.common.datetime_range import DateTimeRange
+from functools import partial
+from datetime import datetime, timedelta, timezone
 
 someglobal = 1
 
 def make_scalar(x):
     y = np.cos(x/10.)
-    return pysciqlopcore.ScalarTimeSerie(x,y)
+    return pds.DataFrame(index=[datetime.fromtimestamp(t, tz=timezone.utc) for t in x], data=y)
 
 def make_vector(x):
     v=np.ones((len(x),3))
     for i in range(3):
-        v.transpose()[:][i] = np.cos(x/10. + float(i))
-    return pysciqlopcore.VectorTimeSerie(x,v)
+        v.transpose()[:][i] = np.cos(x/10. + float(i)) + (100. * np.cos(x/10000. + float(i)))
+    return pds.DataFrame(index=[datetime.fromtimestamp(t, tz=timezone.utc) for t in x], data=v)
 
 def make_multicomponent(x):
     v=np.ones((len(x),4))
     for i in range(4):
         v.transpose()[:][i] = float(i+1) * np.cos(x/10. + float(i))
-    return pysciqlopcore.MultiComponentTimeSerie(x,v)
+    return pds.DataFrame(index=[datetime.fromtimestamp(t, tz=timezone.utc) for t in x], data=v)
 
-def get_data(metadata,start,stop):
+def _get_data(p_type, start, stop):
+    if type(start) is datetime:
+        start = start.timestamp()
+        stop = stop.timestamp()
     x = np.arange(math.ceil(start), math.floor(stop))
-    for key,value in metadata:
-        if key == 'xml:id':
-            param_id = value
-        elif key == 'type':
-            if value == 'vector':
-                return make_vector(x)
-            elif value == 'multicomponent':
-                return make_multicomponent(x)
-    return make_scalar(x)
-
-
+    if p_type == 'scalar':
+        return make_scalar(x)
+    if p_type == 'vector':
+        return make_vector(x)
+    if p_type == 'multicomponent':
+        return make_multicomponent(x)
+    return None
 
+def get_data(metadata,start,stop):
+    ts_type = pysciqlopcore.ScalarTimeSerie
+    default_ctor_args = 1
+    use_cache = False
+    p_type = 'scalar'
+    try:
+        for key,value in metadata:
+            if key == 'type':
+                p_type = value
+                if value == 'vector':
+                    ts_type = pysciqlopcore.VectorTimeSerie
+                elif value == 'multicomponent':
+                    ts_type = pysciqlopcore.MultiComponentTimeSerie
+                    default_ctor_args = (0,2)
+            if key == 'cache' and value == 'true':
+                use_cache = True
+        if use_cache:
+            cache_product = f"tests/{p_type}"
+            df = _cache.get_data(cache_product, DateTimeRange(datetime.fromtimestamp(start, tz=timezone.utc), datetime.fromtimestamp(stop, tz=timezone.utc)),
+                                 partial(_get_data, p_type),
+                                 fragment_hours=24)
+        else:
+            print("No Cache")
+            df = _get_data(p_type, start, stop)
+        t = np.array([d.timestamp() for d in df.index])
+        values = df.values
+        return ts_type(t,values)
+    except Exception as e:
+        print(traceback.format_exc())
+        print("Error in test.py ",str(e))
+        return ts_type(default_ctor_args)
 
 
 
 
 
-PythonProviders.register_product([("/tests/scalar",[],[("type","scalar")]), ("/tests/vector",[],[("type","vector")]), ("/tests/multicomponent",[],[("type","multicomponent"),('size','4')])],get_data)
+products = [
+    ("/tests/without_cache/scalar",[],[("type","scalar")]),
+    ("/tests/without_cache/vector",[],[("type","vector")]),
+    ("/tests/without_cache/multicomponent",[],[("type","multicomponent"),('size','4')]),
+    ("/tests/with_cache/scalar",[],[("type","scalar"), ("cache","true")]),
+    ("/tests/with_cache/vector",[],[("type","vector"), ("cache","true")]),
+    ("/tests/with_cache/multicomponent",[],[("type","multicomponent"),('size','4'), ("cache","true")])
+    ]
+PythonProviders.register_product(products ,get_data)
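For reference, the cache branch added to get_data() above follows the spwc pattern already visible in the patch: hand _cache.get_data a product key, a DateTimeRange and a fetch callback, and let it pull only the missing fragments. The snippet below is a minimal standalone sketch of that same call, not part of the patch; the product key "tests/demo_scalar" and the fetch() helper are made up for illustration, while the _cache.get_data and DateTimeRange signatures match their use in test.py.

    import numpy as np
    import pandas as pds
    from functools import partial
    from datetime import datetime, timezone
    from spwc.cache import _cache
    from spwc.common.datetime_range import DateTimeRange

    def fetch(p_type, start, stop):
        # the cache layer calls back with datetime bounds for each missing fragment
        if type(start) is datetime:
            start, stop = start.timestamp(), stop.timestamp()
        x = np.arange(int(start), int(stop))
        return pds.DataFrame(index=[datetime.fromtimestamp(t, tz=timezone.utc) for t in x],
                             data=np.cos(x / 10.))

    # same call shape as in get_data(): product key, time range, callback, fragment size
    df = _cache.get_data("tests/demo_scalar",
                         DateTimeRange(datetime(2019, 1, 1, tzinfo=timezone.utc),
                                       datetime(2019, 1, 2, tzinfo=timezone.utc)),
                         partial(fetch, 'scalar'),
                         fragment_hours=24)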