@@ -1,38 +1,38 @@
|
1 | 1 | import traceback |
|
2 | 2 | from SciQLopBindings import PyDataProvider, Product, VectorTimeSerie, ScalarTimeSerie, DataSeriesType |
|
3 | 3 | import numpy as np |
|
4 | from sp |
|
5 | from sp |
|
4 | from speasy.cache import _cache | |
|
5 | from speasy.common.datetime_range import DateTimeRange | |
|
6 | 6 | from datetime import datetime, timedelta, timezone |
|
7 | from sp |
|
8 | from sp |
|
7 | from speasy.common.variable import SpeasyVariable | |
|
8 | from speasy.amda import AMDA | |
|
9 | 9 | |
|
10 | 10 | amda = AMDA() |
|
11 | 11 | |
|
12 | 12 | |
|
13 | 13 | def vp_make_scalar(var=None): |
|
14 | 14 | if var is None: |
|
15 | 15 | return (((np.array([]), np.array([])), np.array([])), DataSeriesType.SCALAR) |
|
16 | 16 | else: |
|
17 | 17 | return (((var.time, np.array([])), var.data), DataSeriesType.SCALAR) |
|
18 | 18 | |
|
19 | 19 | class DemoVP(PyDataProvider): |
|
20 | 20 | def __init__(self): |
|
21 | 21 | super().__init__() |
|
22 | 22 | self.register_products([Product("/VP/thb_fgm_gse_mod",[],{"type":"scalar"})]) |
|
23 | 23 | |
|
24 | 24 | def get_data(self,metadata,start,stop): |
|
25 | 25 | try: |
|
26 | 26 | tstart = datetime.fromtimestamp(start, tz=timezone.utc) |
|
27 | 27 | tend = datetime.fromtimestamp(stop, tz=timezone.utc) |
|
28 | 28 | thb_bs = amda.get_parameter(start_time=tstart, stop_time=tend, parameter_id='thb_bs', method="REST") |
|
29 | 29 | thb_bs.data = np.sqrt((thb_bs.data*thb_bs.data).sum(axis=1)) |
|
30 | 30 | return vp_make_scalar(thb_bs) |
|
31 | 31 | except Exception as e: |
|
32 | 32 | print(traceback.format_exc()) |
|
33 | 33 | print(f"Error in {__file__} ",str(e)) |
|
34 | 34 | return (((np.array([]), np.array([])), np.array([])), DataSeriesType.SCALAR) |
|
35 | 35 | |
|
36 | 36 | |
|
37 | 37 | t=DemoVP() |
|
38 | 38 |
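
For reference, the magnitude computation this provider wraps can be checked outside SciQLop. A minimal sketch, assuming the same speasy 0.x API this file imports (speasy.amda.AMDA and get_parameter); the time range is an arbitrary example:

    from datetime import datetime, timezone
    import numpy as np
    from speasy.amda import AMDA

    amda = AMDA()
    # Arbitrary one-hour example range.
    tstart = datetime(2008, 1, 1, 0, 0, tzinfo=timezone.utc)
    tend = datetime(2008, 1, 1, 1, 0, tzinfo=timezone.utc)
    # Same request the provider issues, with explicit datetimes.
    thb_bs = amda.get_parameter(start_time=tstart, stop_time=tend,
                                parameter_id='thb_bs', method="REST")
    # Row-wise Euclidean norm of the three field components, as in get_data above.
    magnitude = np.sqrt((thb_bs.data * thb_bs.data).sum(axis=1))
    print(thb_bs.time.shape, magnitude.shape)
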
@@ -1,128 +1,128 @@
|
1 | 1 | import traceback |
|
2 | 2 | import os |
|
3 | 3 | from datetime import datetime, timedelta, timezone |
|
4 | 4 | from SciQLopBindings import PyDataProvider, Product, VectorTimeSerie, ScalarTimeSerie, DataSeriesType |
|
5 | 5 | import numpy as np |
|
6 | 6 | import requests |
|
7 | 7 | import copy |
|
8 | from sp |
|
8 | from speasy.amda import AMDA | |
|
9 | 9 | |
|
10 | 10 | amda = AMDA() |
|
11 | 11 | |
|
12 | 12 | |
|
13 | 13 | def amda_make_scalar(var=None): |
|
14 | 14 | if var is None: |
|
15 | 15 | return (((np.array([]), np.array([])), np.array([])), DataSeriesType.SCALAR) |
|
16 | 16 | else: |
|
17 | 17 | return (((var.time, np.array([])), var.data), DataSeriesType.SCALAR) |
|
18 | 18 | |
|
19 | 19 | |
|
20 | 20 | def amda_make_vector(var=None): |
|
21 | 21 | if var is None: |
|
22 | 22 | return (((np.array([]), np.array([])), np.array([])), DataSeriesType.VECTOR) |
|
23 | 23 | else: |
|
24 | 24 | return (((var.time, np.array([])), var.data), DataSeriesType.VECTOR) |
|
25 | 25 | |
|
26 | 26 | |
|
27 | 27 | def amda_make_multi_comp(var=None): |
|
28 | 28 | if var is None: |
|
29 | 29 | return (((np.array([]), np.array([])), np.array([])), DataSeriesType.MULTICOMPONENT) |
|
30 | 30 | else: |
|
31 | 31 | return (((var.time, np.array([])), var.data), DataSeriesType.MULTICOMPONENT) |
|
32 | 32 | |
|
33 | 33 | |
|
34 | 34 | def amda_make_spectro(var=None): |
|
35 | 35 | if var is None: |
|
36 | 36 | return (((np.array([]), np.array([])), np.array([])), DataSeriesType.SPECTROGRAM) |
|
37 | 37 | else: |
|
38 | 38 | min_sampling = float(var.meta.get("DATASET_MIN_SAMPLING", "nan")) |
|
39 | 39 | max_sampling = float(var.meta.get("DATASET_MAX_SAMPLING", "nan")) |
|
40 | 40 | if var.y is None and len(var.data): |
|
41 | 41 | var.y = np.logspace(1, 3, var.data.shape[1])[::-1] |
|
42 | 42 | return (((var.time, var.y), var.data), DataSeriesType.SPECTROGRAM) |
|
43 | 43 | #return pysciqlopcore.SpectrogramTimeSerie(var.time,y,var.data,min_sampling,max_sampling,True) |
|
44 | 44 | |
|
45 | 45 | |
|
46 | 46 | def amda_get_sample(metadata, start, stop): |
|
47 | 47 | ts_type = amda_make_scalar |
|
48 | 48 | try: |
|
49 | 49 | param_id = None |
|
50 | 50 | for key, value in metadata: |
|
51 | 51 | if key == 'xml:id': |
|
52 | 52 | param_id = value |
|
53 | 53 | elif key == 'type': |
|
54 | 54 | if value == 'vector': |
|
55 | 55 | ts_type = amda_make_vector |
|
56 | 56 | elif value == 'multicomponent': |
|
57 | 57 | ts_type = amda_make_multi_comp |
|
58 | 58 | elif value == 'spectrogram': |
|
59 | 59 | ts_type = amda_make_spectro |
|
60 | 60 | tstart = datetime.fromtimestamp(start, tz=timezone.utc) |
|
61 | 61 | tend = datetime.fromtimestamp(stop, tz=timezone.utc) |
|
62 | 62 | var = amda.get_parameter(start_time=tstart, stop_time=tend, parameter_id=param_id, method="REST") |
|
63 | 63 | return ts_type(var) |
|
64 | 64 | except Exception as e: |
|
65 | 65 | print(traceback.format_exc()) |
|
66 | 66 | print("Error in amda.py ", str(e)) |
|
67 | 67 | return ts_type() |
|
68 | 68 | |
|
69 | 69 | |
|
70 | 70 | class AmdaProvider(PyDataProvider): |
|
71 | 71 | def __init__(self): |
|
72 | 72 | super(AmdaProvider, self).__init__() |
|
73 | 73 | if len(amda.component) == 0: |
|
74 | 74 | amda.update_inventory() |
|
75 | 75 | parameters = copy.deepcopy(amda.parameter) |
|
76 | 76 | for name, component in amda.component.items(): |
|
77 | 77 | if 'components' in parameters[component['parameter']]: |
|
78 | 78 | parameters[component['parameter']]['components'].append(component) |
|
79 | 79 | else: |
|
80 | 80 | parameters[component['parameter']]['components']=[component] |
|
81 | 81 | |
|
82 | 82 | products = [] |
|
83 | 83 | for key, parameter in parameters.items(): |
|
84 | 84 | mission_name = amda.mission[parameter['mission']]['name'] |
|
85 | 85 | observatory_name = parameter.get('observatory','') |
|
86 | 86 | if observatory_name != '': |
|
87 | 87 | observatory_name = amda.observatory[observatory_name]['name'] |
|
88 | 88 | instrument_name = amda.instrument[parameter['instrument']]['name'] |
|
89 | 89 | dataset_name = amda.dataset[parameter['dataset']]['name'] |
|
90 | 90 | path = f"/AMDA/{mission_name}/{observatory_name}/{instrument_name}/{dataset_name}/{parameter['name']}" |
|
91 | 91 | components = [component['name'] for component in parameter.get('components',[])] |
|
92 | 92 | metadata = {key: item for key, item in parameter.items() if key != 'components'} |
|
93 | 93 | n_components = parameter.get('size', 0) |
|
94 | 94 | if n_components == '3': |
|
95 | 95 | metadata["type"] = "vector" |
|
96 | 96 | elif parameter.get('display_type', '')=="spectrogram": |
|
97 | 97 | metadata["type"] = "spectrogram" |
|
98 | 98 | elif n_components != 0: |
|
99 | 99 | metadata["type"] = "multicomponent" |
|
100 | 100 | else: |
|
101 | 101 | metadata["type"] = "scalar" |
|
102 | 102 | products.append(Product(path, components, metadata)) |
|
103 | 103 | self.register_products(products) |
|
104 | 104 | for _,mission in amda.mission.items(): |
|
105 | 105 | if ('target' in mission) and (mission['xml:id'] != 'Ephemerides') and (mission['target'] != 'Earth'): |
|
106 | 106 | self.set_icon(f'/AMDA/{mission["name"]}','satellite') |
|
107 | 107 | |
|
108 | 108 | def get_data(self, metadata, start, stop): |
|
109 | 109 | ts_type = amda_make_scalar |
|
110 | 110 | try: |
|
111 | 111 | param_id = metadata['xml:id'] |
|
112 | 112 | ts_type_str = metadata['type'] |
|
113 | 113 | if ts_type_str == 'vector': |
|
114 | 114 | ts_type = amda_make_vector |
|
115 | 115 | elif ts_type_str == 'multicomponent': |
|
116 | 116 | ts_type = amda_make_multi_comp |
|
117 | 117 | elif ts_type_str == 'spectrogram': |
|
118 | 118 | ts_type = amda_make_spectro |
|
119 | 119 | tstart = datetime.fromtimestamp(start, tz=timezone.utc) |
|
120 | 120 | tend = datetime.fromtimestamp(stop, tz=timezone.utc) |
|
121 | 121 | var = amda.get_parameter(start_time=tstart, stop_time=tend, parameter_id=param_id, method="REST") |
|
122 | 122 | return ts_type(var) |
|
123 | 123 | except Exception as e: |
|
124 | 124 | print(traceback.format_exc()) |
|
125 | 125 | print("Error in amda.py ", str(e)) |
|
126 | 126 | return ts_type() |
|
127 | 127 | |
|
128 | 128 | _amda = AmdaProvider() |
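
The if/elif chain in AmdaProvider.get_data maps the 'type' metadata string onto one of the amda_make_* builders above. A hypothetical, behaviour-equivalent sketch of that dispatch as a dict lookup (_BUILDERS and builder_for are illustrative names only, not part of this change):

    # Hypothetical dict-based form of the if/elif dispatch in get_data;
    # reuses the amda_make_* builders defined in this file.
    _BUILDERS = {
        'vector': amda_make_vector,
        'multicomponent': amda_make_multi_comp,
        'spectrogram': amda_make_spectro,
    }

    def builder_for(metadata):
        # Fall back to the scalar builder when 'type' is missing or unknown.
        return _BUILDERS.get(metadata.get('type'), amda_make_scalar)
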
@@ -1,93 +1,93 @@
|
1 | 1 | import traceback |
|
2 | 2 | from SciQLopBindings import PyDataProvider, Product, VectorTimeSerie, ScalarTimeSerie, DataSeriesType |
|
3 | 3 | import numpy as np |
|
4 | 4 | import math |
|
5 | from sp |
|
6 | from sp |
|
5 | from speasy.cache import _cache | |
|
6 | from speasy.common.datetime_range import DateTimeRange | |
|
7 | 7 | from functools import partial |
|
8 | 8 | from datetime import datetime, timedelta, timezone |
|
9 | from sp |
|
9 | from speasy.common.variable import SpeasyVariable | |
|
10 | 10 | |
|
11 | 11 | |
|
12 | 12 | def make_scalar(x): |
|
13 | 13 | y = np.cos(x/10.) |
|
14 | return Sp |
|
14 | return SpeasyVariable(time=x, data=y) | |
|
15 | 15 | |
|
16 | 16 | def make_vector(x): |
|
17 | 17 | v=np.ones((len(x),3)) |
|
18 | 18 | for i in range(3): |
|
19 | 19 | v.transpose()[:][i] = np.cos(x/10. + float(i)) + (100. * np.cos(x/10000. + float(i))) |
|
20 | return Sp |
|
20 | return SpeasyVariable(time=x, data=v) | |
|
21 | 21 | |
|
22 | 22 | |
|
23 | 23 | def make_multicomponent(x): |
|
24 | 24 | v=np.ones((len(x),4)) |
|
25 | 25 | for i in range(4): |
|
26 | 26 | v.transpose()[:][i] = float(i+1) * np.cos(x/10. + float(i)) |
|
27 | return Sp |
|
27 | return SpeasyVariable(time=x, data=v) | |
|
28 | 28 | |
|
29 | 29 | def make_spectrogram(x): |
|
30 | 30 | v=np.ones((len(x),32)) |
|
31 | 31 | for i in range(32): |
|
32 | 32 | v.transpose()[:][i] = 100.*(2.+ float(i+1) * np.cos(x/1024. + float(i))) |
|
33 | return Sp |
|
33 | return SpeasyVariable(time=x, data=v) | |
|
34 | 34 | |
|
35 | 35 | |
|
36 | 36 | def _get_data(p_type, start, stop): |
|
37 | 37 | if type(start) is datetime: |
|
38 | 38 | start = start.timestamp() |
|
39 | 39 | stop = stop.timestamp() |
|
40 | 40 | x = np.arange(math.ceil(start), math.floor(stop))*1. |
|
41 | 41 | if p_type == 'scalar': |
|
42 | 42 | return make_scalar(x) |
|
43 | 43 | if p_type == 'vector': |
|
44 | 44 | return make_vector(x) |
|
45 | 45 | if p_type == 'multicomponent': |
|
46 | 46 | return make_multicomponent(x) |
|
47 | 47 | if p_type == 'spectrogram': |
|
48 | 48 | return make_spectrogram(np.arange(math.ceil(start), math.floor(stop),15.)) |
|
49 | 49 | return None |
|
50 | 50 | |
|
51 | 51 | class MyProvider(PyDataProvider): |
|
52 | 52 | def __init__(self): |
|
53 | 53 | super(MyProvider,self).__init__() |
|
54 | 54 | self.register_products([Product("/tests/without_cache/scalar",[],{"type":"scalar"}), |
|
55 | 55 | Product("/tests/without_cache/vector",[],{"type":"vector"}), |
|
56 | 56 | Product("/tests/without_cache/multicomponent",[],{"type":"multicomponent",'size':'4'}), |
|
57 | 57 | Product("/tests/without_cache/spectrogram",[],{"type":"spectrogram",'size':'32'}), |
|
58 | 58 | Product("/tests/with_cache/scalar",[],{"type":"scalar", "cache":"true"}), |
|
59 | 59 | Product("/tests/with_cache/vector",[],{"type":"vector", "cache":"true"}), |
|
60 | 60 | Product("/tests/with_cache/multicomponent",[],{"type":"multicomponent",'size':'4', "cache":"true"}) |
|
61 | 61 | ]) |
|
62 | 62 | |
|
63 | 63 | def get_data(self,metadata,start,stop): |
|
64 | 64 | ts_type = DataSeriesType.SCALAR |
|
65 | 65 | default_ctor_args = 1 |
|
66 | 66 | use_cache = False |
|
67 | 67 | p_type = 'scalar' |
|
68 | 68 | try: |
|
69 | 69 | for key,value in metadata.items(): |
|
70 | 70 | if key == 'type': |
|
71 | 71 | p_type = value |
|
72 | 72 | if value == 'vector': |
|
73 | 73 | ts_type = DataSeriesType.VECTOR |
|
74 | 74 | elif value == 'multicomponent': |
|
75 | 75 | ts_type = DataSeriesType.MULTICOMPONENT |
|
76 | 76 | elif value == 'spectrogram': |
|
77 | 77 | ts_type = DataSeriesType.SPECTROGRAM |
|
78 | 78 | if key == 'cache' and value == 'true': |
|
79 | 79 | use_cache = True |
|
80 | 80 | if use_cache: |
|
81 | 81 | cache_product = f"tests/{p_type}" |
|
82 | 82 | var = _cache.get_data(cache_product, DateTimeRange(datetime.fromtimestamp(start, tz=timezone.utc), datetime.fromtimestamp(stop, tz=timezone.utc)), partial(_get_data, p_type), fragment_hours=24) |
|
83 | 83 | else: |
|
84 | 84 | var = _get_data(p_type, start, stop) |
|
85 | 85 | return (((var.time, np.array([])),var.data), ts_type) |
|
86 | 86 | except Exception as e: |
|
87 | 87 | print(traceback.format_exc()) |
|
88 | 88 | print("Error in test.py ",str(e)) |
|
89 | 89 | return (((np.array([]), np.array([])), np.array([])), ts_type) |
|
90 | 90 | |
|
91 | 91 | |
|
92 | 92 | t=MyProvider() |
|
93 | 93 |
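
In the cached branch above, speasy's cache is handed a callback that produces any missing 24-hour fragments. A standalone sketch of that same call path, using the imports already present in this file and assuming the _get_data helper defined above (the one-hour range is an arbitrary example):

    from datetime import datetime, timezone
    from functools import partial
    from speasy.cache import _cache
    from speasy.common.datetime_range import DateTimeRange

    start = datetime(2020, 1, 1, 0, 0, tzinfo=timezone.utc)
    stop = datetime(2020, 1, 1, 1, 0, tzinfo=timezone.utc)
    # The cache calls _get_data('scalar', t0, t1) for the parts of the
    # request that are not already stored, in 24-hour fragments.
    var = _cache.get_data("tests/scalar", DateTimeRange(start, stop),
                          partial(_get_data, 'scalar'), fragment_hours=24)
    print(len(var.time))
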
@@ -1,31 +1,31 @@
|
1 | 1 | #!/bin/bash |
|
2 | 2 | # guess using centos 7 as build host |
|
3 | 3 | yum install -y gtk3 openssl-devel.x86_64 ncurses-devel.x86_64 sqlite-devel.x86_64 tkinter.x86_64 readline-devel.x86_64 xz-devel.x86_64 gdbm-devel.x86_64 bzip2-devel.x86_64 tk-devel.x86_64 libffi-devel.x86_64 make |
|
4 | 4 | HERE="$(dirname "$(readlink -f "${0}")")" |
|
5 | 5 | SCIQLOP_SCR=$HERE/../../ |
|
6 | 6 | SCIQLOP_BUILD=$SCIQLOP_SCR/build/ |
|
7 | 7 | SCIQLOP_APPDIR=$SCIQLOP_BUILD/AppDir |
|
8 | 8 | mkdir $SCIQLOP_BUILD |
|
9 | 9 | cd $SCIQLOP_BUILD |
|
10 | 10 | # need to build python to easily install/relocate in AppImage |
|
11 | 11 | wget https://www.python.org/ftp/python/3.7.3/Python-3.7.3.tgz |
|
12 | 12 | tar -xf Python-3.7.3.tgz |
|
13 | 13 | cd Python-3.7.3 |
|
14 | 14 | # Optimisation is damn slow, maybe enable it later |
|
15 | 15 | ./configure --enable-shared --prefix=/usr |
|
16 | 16 | make -j |
|
17 | 17 | DESTDIR=$SCIQLOP_BUILD/AppDir make install |
|
18 | 18 | cd .. |
|
19 | 19 | cp $HERE/AppRun $SCIQLOP_APPDIR/ |
|
20 | 20 | chmod +x $SCIQLOP_APPDIR/AppRun |
|
21 | 21 | # Tweak to find custom python from build dir |
|
22 | 22 | sed "s|/usr|$SCIQLOP_APPDIR/usr|" -i $SCIQLOP_APPDIR/usr/lib/pkgconfig/python3.pc |
|
23 | LD_PRELOAD=$SCIQLOP_APPDIR/usr/lib/libpython3.7m.so.1.0 PATH=$SCIQLOP_APPDIR/usr/bin/:/usr/bin/ LD_LIBRARY_PATH=AppDir/usr/lib/:AppDir/usr/lib/python3.7/ $SCIQLOP_APPDIR/usr/bin/python3 $SCIQLOP_APPDIR/usr/bin/pip3 install |
|
23 | LD_PRELOAD=$SCIQLOP_APPDIR/usr/lib/libpython3.7m.so.1.0 PATH=$SCIQLOP_APPDIR/usr/bin/:/usr/bin/ LD_LIBRARY_PATH=AppDir/usr/lib/:AppDir/usr/lib/python3.7/ $SCIQLOP_APPDIR/usr/bin/python3 $SCIQLOP_APPDIR/usr/bin/pip3 install speasy | |
|
24 | 24 | LD_LIBRARY_PATH=$SCIQLOP_APPDIR/usr/lib/ PKG_CONFIG_PATH=$SCIQLOP_APPDIR/usr/lib/pkgconfig/:$PKG_CONFIG_PATH PATH=$SCIQLOP_APPDIR/usr/bin/:$PATH meson --prefix=/usr .. |
|
25 | 25 | ninja |
|
26 | 26 | DESTDIR=$SCIQLOP_APPDIR ninja install |
|
27 | 27 | cp -rf $SCIQLOP_APPDIR/usr/lib64/* $SCIQLOP_APPDIR/usr/lib/ |
|
28 | 28 | rm -rf $SCIQLOP_APPDIR/usr/lib64/ |
|
29 | 29 | wget https://github.com/probonopd/linuxdeployqt/releases/download/continuous/linuxdeployqt-continuous-x86_64.AppImage |
|
30 | 30 | chmod +x linuxdeployqt-continuous-x86_64.AppImage && ./linuxdeployqt-continuous-x86_64.AppImage --appimage-extract |
|
31 | 31 | LD_LIBRARY_PATH=$SCIQLOP_APPDIR/usr/lib:$SCIQLOP_APPDIR/usr/lib/SciQLop/:$SCIQLOP_APPDIR/usr/lib/python3.7/site-packages/numpy/.libs/ ./squashfs-root/AppRun $SCIQLOP_APPDIR/usr/share/applications/*.desktop -appimage -extra-plugins=iconengines,platformthemes/libqgtk3.so |
@@ -1,15 +1,15 @@
|
1 | 1 | #!/bin/bash |
|
2 | 2 | # On OS X only 0.49.1 works :( |
|
3 | 3 | pip3 install --upgrade --user meson==0.49.1 |
|
4 | 4 | HERE=$( cd "$(dirname "$0")" ; pwd -P ) |
|
5 | 5 | mkdir build |
|
6 | 6 | ~/Library/Python/3.7/bin/meson -Dcpp_args='-DQT_STATICPLUGIN' -Ddefault_library=static --prefix=/tmp/SciQLOP.app --bindir=Contents/MacOS build |
|
7 | 7 | cd build |
|
8 | 8 | ninja |
|
9 | 9 | ninja install |
|
10 | 10 | ~/Library/Python/3.7/bin/virtualenv --always-copy /tmp/SciQLOP.app |
|
11 | 11 | ~/Library/Python/3.7/bin/virtualenv --relocatable /tmp/SciQLOP.app |
|
12 | 12 | source /tmp/SciQLOP.app/bin/activate |
|
13 | /tmp/SciQLOP.app/bin/pip install |
|
13 | /tmp/SciQLOP.app/bin/pip install speasy | |
|
14 | 14 | cp $HERE/SciQLOP_wrapper /tmp/SciQLOP.app/Contents/MacOS/ |
|
15 | 15 | chmod +x /tmp/SciQLOP.app/Contents/MacOS/SciQLOP_wrapper |