@@ -1,38 +1,38 @@
import traceback
from SciQLopBindings import PyDataProvider, Product, VectorTimeSerie, ScalarTimeSerie, DataSeriesType
import numpy as np
from speasy.cache import _cache
from speasy.common.datetime_range import DateTimeRange
from datetime import datetime, timedelta, timezone
from speasy.common.variable import SpeasyVariable
from speasy.amda import AMDA

amda = AMDA()


def vp_make_scalar(var=None):
    if var is None:
        return (((np.array([]), np.array([])), np.array([])), DataSeriesType.SCALAR)
    else:
        return (((var.time, np.array([])), var.data), DataSeriesType.SCALAR)


class DemoVP(PyDataProvider):
    def __init__(self):
        super().__init__()
        self.register_products([Product("/VP/thb_fgm_gse_mod", [], {"type": "scalar"})])

    def get_data(self, metadata, start, stop):
        try:
            tstart = datetime.fromtimestamp(start, tz=timezone.utc)
            tend = datetime.fromtimestamp(stop, tz=timezone.utc)
            thb_bs = amda.get_parameter(start_time=tstart, stop_time=tend, parameter_id='thb_bs', method="REST")
            # Compute the modulus of the vector parameter.
            thb_bs.data = np.sqrt((thb_bs.data * thb_bs.data).sum(axis=1))
            return vp_make_scalar(thb_bs)
        except Exception as e:
            print(traceback.format_exc())
            print(f"Error in {__file__} ", str(e))
            # Fall back to an empty scalar series on failure.
            return vp_make_scalar()


t = DemoVP()
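get_data hands SciQLop a nested tuple of the form (((time, y), data), DataSeriesType). As a rough sanity check (not part of the patch, and assuming the module loads inside SciQLop's embedded interpreter), vp_make_scalar can be exercised with a stub standing in for a SpeasyVariable:

import numpy as np
from types import SimpleNamespace

# SimpleNamespace is only a stand-in: vp_make_scalar reads just .time and .data.
fake = SimpleNamespace(time=np.arange(5.), data=np.cos(np.arange(5.) / 10.))
((times, _y), values), series_type = vp_make_scalar(fake)
assert len(times) == len(values) == 5  # same tuple layout SciQLop consumes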
@@ -1,128 +1,128 @@
import traceback
import os
from datetime import datetime, timedelta, timezone
from SciQLopBindings import PyDataProvider, Product, VectorTimeSerie, ScalarTimeSerie, DataSeriesType
import numpy as np
import requests
import copy
from speasy.amda import AMDA

amda = AMDA()


def amda_make_scalar(var=None):
    if var is None:
        return (((np.array([]), np.array([])), np.array([])), DataSeriesType.SCALAR)
    else:
        return (((var.time, np.array([])), var.data), DataSeriesType.SCALAR)


def amda_make_vector(var=None):
    if var is None:
        return (((np.array([]), np.array([])), np.array([])), DataSeriesType.VECTOR)
    else:
        return (((var.time, np.array([])), var.data), DataSeriesType.VECTOR)


def amda_make_multi_comp(var=None):
    if var is None:
        return (((np.array([]), np.array([])), np.array([])), DataSeriesType.MULTICOMPONENT)
    else:
        return (((var.time, np.array([])), var.data), DataSeriesType.MULTICOMPONENT)


def amda_make_spectro(var=None):
    if var is None:
        return (((np.array([]), np.array([])), np.array([])), DataSeriesType.SPECTROGRAM)
    else:
        min_sampling = float(var.meta.get("DATASET_MIN_SAMPLING", "nan"))
        max_sampling = float(var.meta.get("DATASET_MAX_SAMPLING", "nan"))
        if var.y is None and len(var.data):
            # No frequency/energy table provided: fall back to a decreasing log-spaced axis.
            var.y = np.logspace(1, 3, var.data.shape[1])[::-1]
        return (((var.time, var.y), var.data), DataSeriesType.SPECTROGRAM)
        # return pysciqlopcore.SpectrogramTimeSerie(var.time,y,var.data,min_sampling,max_sampling,True)


def amda_get_sample(metadata, start, stop):
    ts_type = amda_make_scalar
    try:
        param_id = None
        for key, value in metadata:
            if key == 'xml:id':
                param_id = value
            elif key == 'type':
                if value == 'vector':
                    ts_type = amda_make_vector
                elif value == 'multicomponent':
                    ts_type = amda_make_multi_comp
                elif value == 'spectrogram':
                    ts_type = amda_make_spectro
        tstart = datetime.fromtimestamp(start, tz=timezone.utc)
        tend = datetime.fromtimestamp(stop, tz=timezone.utc)
        var = amda.get_parameter(start_time=tstart, stop_time=tend, parameter_id=param_id, method="REST")
        return ts_type(var)
    except Exception as e:
        print(traceback.format_exc())
        print("Error in amda.py ", str(e))
        return ts_type()


class AmdaProvider(PyDataProvider):
    def __init__(self):
        super(AmdaProvider, self).__init__()
        if len(amda.component) == 0:
            amda.update_inventory()
        parameters = copy.deepcopy(amda.parameter)
        for name, component in amda.component.items():
            if 'components' in parameters[component['parameter']]:
                parameters[component['parameter']]['components'].append(component)
            else:
                parameters[component['parameter']]['components'] = [component]

        products = []
        for key, parameter in parameters.items():
            mission_name = amda.mission[parameter['mission']]['name']
            observatory_name = parameter.get('observatory', '')
            if observatory_name != '':
                observatory_name = amda.observatory[observatory_name]['name']
            instrument_name = amda.instrument[parameter['instrument']]['name']
            dataset_name = amda.dataset[parameter['dataset']]['name']
            path = f"/AMDA/{mission_name}/{observatory_name}/{instrument_name}/{dataset_name}/{parameter['name']}"
            components = [component['name'] for component in parameter.get('components', [])]
            metadata = {key: item for key, item in parameter.items() if key != 'components'}
            n_components = parameter.get('size', 0)
            if n_components == '3':
                metadata["type"] = "vector"
            elif parameter.get('display_type', '') == "spectrogram":
                metadata["type"] = "spectrogram"
            elif n_components != 0:
                metadata["type"] = "multicomponent"
            else:
                metadata["type"] = "scalar"
            products.append(Product(path, components, metadata))
        self.register_products(products)
        for _, mission in amda.mission.items():
            if ('target' in mission) and (mission['xml:id'] != 'Ephemerides') and (mission['target'] != 'Earth'):
                self.set_icon(f'/AMDA/{mission["name"]}', 'satellite')

    def get_data(self, metadata, start, stop):
        ts_type = amda_make_scalar
        try:
            param_id = metadata['xml:id']
            ts_type_str = metadata['type']
            if ts_type_str == 'vector':
                ts_type = amda_make_vector
            elif ts_type_str == 'multicomponent':
                ts_type = amda_make_multi_comp
            elif ts_type_str == 'spectrogram':
                ts_type = amda_make_spectro
            tstart = datetime.fromtimestamp(start, tz=timezone.utc)
            tend = datetime.fromtimestamp(stop, tz=timezone.utc)
            var = amda.get_parameter(start_time=tstart, stop_time=tend, parameter_id=param_id, method="REST")
            return ts_type(var)
        except Exception as e:
            print(traceback.format_exc())
            print("Error in amda.py ", str(e))
            return ts_type()


_amda = AmdaProvider()
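The if/elif chains that map the 'type' metadata entry to one of the amda_make_* factories could equally be written as a lookup table; a sketch of that variant (not in the patch, reusing the factories defined above) would be:

# Hypothetical table-driven form of the dispatch used in AmdaProvider.get_data.
_FACTORIES = {
    'scalar': amda_make_scalar,
    'vector': amda_make_vector,
    'multicomponent': amda_make_multi_comp,
    'spectrogram': amda_make_spectro,
}

def pick_factory(metadata):
    # Unknown or missing types fall back to the scalar factory, as in the current code.
    return _FACTORIES.get(metadata.get('type', 'scalar'), amda_make_scalar)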
@@ -1,93 +1,93 @@
import traceback
from SciQLopBindings import PyDataProvider, Product, VectorTimeSerie, ScalarTimeSerie, DataSeriesType
import numpy as np
import math
from speasy.cache import _cache
from speasy.common.datetime_range import DateTimeRange
from functools import partial
from datetime import datetime, timedelta, timezone
from speasy.common.variable import SpeasyVariable


def make_scalar(x):
    y = np.cos(x / 10.)
    return SpeasyVariable(time=x, data=y)


def make_vector(x):
    v = np.ones((len(x), 3))
    for i in range(3):
        v.transpose()[:][i] = np.cos(x / 10. + float(i)) + (100. * np.cos(x / 10000. + float(i)))
    return SpeasyVariable(time=x, data=v)


def make_multicomponent(x):
    v = np.ones((len(x), 4))
    for i in range(4):
        v.transpose()[:][i] = float(i + 1) * np.cos(x / 10. + float(i))
    return SpeasyVariable(time=x, data=v)


def make_spectrogram(x):
    v = np.ones((len(x), 32))
    for i in range(32):
        v.transpose()[:][i] = 100. * (2. + float(i + 1) * np.cos(x / 1024. + float(i)))
    return SpeasyVariable(time=x, data=v)


def _get_data(p_type, start, stop):
    if type(start) is datetime:
        start = start.timestamp()
        stop = stop.timestamp()
    x = np.arange(math.ceil(start), math.floor(stop)) * 1.
    if p_type == 'scalar':
        return make_scalar(x)
    if p_type == 'vector':
        return make_vector(x)
    if p_type == 'multicomponent':
        return make_multicomponent(x)
    if p_type == 'spectrogram':
        return make_spectrogram(np.arange(math.ceil(start), math.floor(stop), 15.))
    return None


class MyProvider(PyDataProvider):
    def __init__(self):
        super(MyProvider, self).__init__()
        self.register_products([Product("/tests/without_cache/scalar", [], {"type": "scalar"}),
                                Product("/tests/without_cache/vector", [], {"type": "vector"}),
                                Product("/tests/without_cache/multicomponent", [], {"type": "multicomponent", 'size': '4'}),
                                Product("/tests/without_cache/spectrogram", [], {"type": "spectrogram", 'size': '32'}),
                                Product("/tests/with_cache/scalar", [], {"type": "scalar", "cache": "true"}),
                                Product("/tests/with_cache/vector", [], {"type": "vector", "cache": "true"}),
                                Product("/tests/with_cache/multicomponent", [], {"type": "multicomponent", 'size': '4', "cache": "true"})
                                ])

    def get_data(self, metadata, start, stop):
        ts_type = DataSeriesType.SCALAR
        default_ctor_args = 1
        use_cache = False
        p_type = 'scalar'
        try:
            for key, value in metadata.items():
                if key == 'type':
                    p_type = value
                    if value == 'vector':
                        ts_type = DataSeriesType.VECTOR
                    elif value == 'multicomponent':
                        ts_type = DataSeriesType.MULTICOMPONENT
                    elif value == 'spectrogram':
                        ts_type = DataSeriesType.SPECTROGRAM
                if key == 'cache' and value == 'true':
                    use_cache = True
            if use_cache:
                cache_product = f"tests/{p_type}"
                var = _cache.get_data(cache_product,
                                      DateTimeRange(datetime.fromtimestamp(start, tz=timezone.utc),
                                                    datetime.fromtimestamp(stop, tz=timezone.utc)),
                                      partial(_get_data, p_type), fragment_hours=24)
            else:
                var = _get_data(p_type, start, stop)
            return (((var.time, np.array([])), var.data), ts_type)
        except Exception as e:
            print(traceback.format_exc())
            print("Error in test.py ", str(e))
            return (((np.array([]), np.array([])), np.array([])), ts_type)


t = MyProvider()
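_get_data produces one synthetic sample per second between start and stop (one every 15 s for spectrograms). A quick illustration of what it returns, assuming SpeasyVariable keeps the arrays it is constructed with, as the functions above rely on:

from datetime import datetime, timezone

start = datetime(2021, 1, 1, 0, 0, tzinfo=timezone.utc)
stop = datetime(2021, 1, 1, 0, 1, tzinfo=timezone.utc)
var = _get_data('vector', start, stop)
# One minute at 1 Hz with 3 components.
print(var.time.shape, var.data.shape)  # expected: (60,) (60, 3)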
@@ -1,31 +1,31 @@
#!/bin/bash
# Assumes CentOS 7 as the build host.
yum install -y gtk3 openssl-devel.x86_64 ncurses-devel.x86_64 sqlite-devel.x86_64 tkinter.x86_64 readline-devel.x86_64 xz-devel.x86_64 gdbm-devel.x86_64 bzip2-devel.x86_64 tk-devel.x86_64 libffi-devel.x86_64 make
HERE="$(dirname "$(readlink -f "${0}")")"
SCIQLOP_SCR=$HERE/../../
SCIQLOP_BUILD=$SCIQLOP_SCR/build/
SCIQLOP_APPDIR=$SCIQLOP_BUILD/AppDir
mkdir $SCIQLOP_BUILD
cd $SCIQLOP_BUILD
# Build Python from source so it can be installed/relocated inside the AppImage.
wget https://www.python.org/ftp/python/3.7.3/Python-3.7.3.tgz
tar -xf Python-3.7.3.tgz
cd Python-3.7.3
# Optimisation builds are very slow; maybe enable them later.
./configure --enable-shared --prefix=/usr
make -j
DESTDIR=$SCIQLOP_BUILD/AppDir make install
cd ..
cp $HERE/AppRun $SCIQLOP_APPDIR/
chmod +x $SCIQLOP_APPDIR/AppRun
# Tweak pkg-config so the custom Python is found from the build dir.
sed "s|/usr|$SCIQLOP_APPDIR/usr|" -i $SCIQLOP_APPDIR/usr/lib/pkgconfig/python3.pc
LD_PRELOAD=$SCIQLOP_APPDIR/usr/lib/libpython3.7m.so.1.0 PATH=$SCIQLOP_APPDIR/usr/bin/:/usr/bin/ LD_LIBRARY_PATH=AppDir/usr/lib/:AppDir/usr/lib/python3.7/ $SCIQLOP_APPDIR/usr/bin/python3 $SCIQLOP_APPDIR/usr/bin/pip3 install speasy
LD_LIBRARY_PATH=$SCIQLOP_APPDIR/usr/lib/ PKG_CONFIG_PATH=$SCIQLOP_APPDIR/usr/lib/pkgconfig/:$PKG_CONFIG_PATH PATH=$SCIQLOP_APPDIR/usr/bin/:$PATH meson --prefix=/usr ..
ninja
DESTDIR=$SCIQLOP_APPDIR ninja install
cp -rf $SCIQLOP_APPDIR/usr/lib64/* $SCIQLOP_APPDIR/usr/lib/
rm -rf $SCIQLOP_APPDIR/usr/lib64/
wget https://github.com/probonopd/linuxdeployqt/releases/download/continuous/linuxdeployqt-continuous-x86_64.AppImage
chmod +x linuxdeployqt-continuous-x86_64.AppImage && ./linuxdeployqt-continuous-x86_64.AppImage --appimage-extract
LD_LIBRARY_PATH=$SCIQLOP_APPDIR/usr/lib:$SCIQLOP_APPDIR/usr/lib/SciQLop/:$SCIQLOP_APPDIR/usr/lib/python3.7/site-packages/numpy/.libs/ ./squashfs-root/AppRun $SCIQLOP_APPDIR/usr/share/applications/*.desktop -appimage -extra-plugins=iconengines,platformthemes/libqgtk3.so
@@ -1,15 +1,15 @@
#!/bin/bash
# On OS X only meson 0.49.1 works :(
pip3 install --upgrade --user meson==0.49.1
HERE=$( cd "$(dirname "$0")" ; pwd -P )
mkdir build
~/Library/Python/3.7/bin/meson -Dcpp_args='-DQT_STATICPLUGIN' -Ddefault_library=static --prefix=/tmp/SciQLOP.app --bindir=Contents/MacOS build
cd build
ninja
ninja install
~/Library/Python/3.7/bin/virtualenv --always-copy /tmp/SciQLOP.app
~/Library/Python/3.7/bin/virtualenv --relocatable /tmp/SciQLOP.app
source /tmp/SciQLOP.app/bin/activate
/tmp/SciQLOP.app/bin/pip install speasy
cp $HERE/SciQLOP_wrapper /tmp/SciQLOP.app/Contents/MacOS/
chmod +x /tmp/SciQLOP.app/Contents/MacOS/SciQLOP_wrapper