From 1f58a0988db65204cdc72f578d09dfb69b6dfb7b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Janko=20Slavi=C4=8D?= Date: Wed, 1 Nov 2023 15:44:16 +0100 Subject: [PATCH] upgrade for uneven data with comments --- data/uneven_comments.lvm | 40 +++++++++++++++++++++++++++ lvm_read.py | 59 ++++++++++++++++++++++++---------------- tests/test_all.py | 28 ++++++++++++++++++- 3 files changed, 103 insertions(+), 24 deletions(-) create mode 100644 data/uneven_comments.lvm diff --git a/data/uneven_comments.lvm b/data/uneven_comments.lvm new file mode 100644 index 0000000..b7d6290 --- /dev/null +++ b/data/uneven_comments.lvm @@ -0,0 +1,40 @@ +LabVIEW Measurement +Writer_Version 2 +Reader_Version 2 +Separator Tab +Decimal_Separator . +Multi_Headings No +X_Columns One +Time_Pref Absolute +Operator dummy +Date 2023/10/23 +Time 15:16:28.319162835 +***End_of_Header*** + +Notes X values guaranteed valid only for TC06 Diffuser +Channels 29 +Samples 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 1 1 1 1 1 +Date 2023/10/26 2023/10/26 2023/10/26 2023/10/26 2023/10/26 2023/10/26 2023/10/26 2023/10/26 2023/10/26 2023/10/26 2023/10/26 2023/10/26 2023/10/26 2023/10/26 2023/10/26 2023/10/26 2023/10/26 2023/10/26 2023/10/26 2023/10/26 2023/10/26 2023/10/26 2023/10/26 2023/10/26 1903/12/31 1903/12/31 1903/12/31 1903/12/31 1903/12/31 +Time 11:20:12.319162835 11:20:12.319162835 11:20:12.319162835 11:20:12.319162835 11:20:12.319162835 11:20:12.319162835 11:20:12.319162835 11:20:12.319162835 11:20:12.319162835 11:20:12.319162835 11:20:12.319162835 11:20:12.319162835 11:20:12.319162835 11:20:12.319162835 11:20:12.319162835 11:20:12.319162835 11:20:12.319162835 11:20:12.319162835 11:20:12.319162835 11:20:12.319162835 11:20:12.319162835 11:20:12.319162835 11:20:12.319162835 11:20:12.319162835 19:00:00 19:00:00 19:00:00 19:00:00 19:00:00 +Y_Unit_Label Deg C Deg C Deg C Deg C Deg C Deg C Deg C Deg C Deg C Deg C Deg C Deg C Deg C Deg C Deg C Deg C Deg C Deg C Deg C Deg C Deg C Deg C Deg C Deg C gpm gpm SLPM NA gpm +X_Dimension Time Time Time Time Time Time Time Time Time Time Time Time Time Time Time Time Time Time Time Time Time Time Time Time Time Time Time Time Time +X0 0.0000000000000000E+0 0.0000000000000000E+0 0.0000000000000000E+0 0.0000000000000000E+0 0.0000000000000000E+0 0.0000000000000000E+0 0.0000000000000000E+0 0.0000000000000000E+0 0.0000000000000000E+0 0.0000000000000000E+0 0.0000000000000000E+0 0.0000000000000000E+0 0.0000000000000000E+0 0.0000000000000000E+0 0.0000000000000000E+0 0.0000000000000000E+0 0.0000000000000000E+0 0.0000000000000000E+0 0.0000000000000000E+0 0.0000000000000000E+0 0.0000000000000000E+0 0.0000000000000000E+0 0.0000000000000000E+0 0.0000000000000000E+0 0.0000000000000000E+0 0.0000000000000000E+0 0.0000000000000000E+0 0.0000000000000000E+0 0.0000000000000000E+0 +Delta_X 0.500000 0.500000 0.500000 0.500000 0.500000 0.500000 0.500000 0.500000 0.500000 0.500000 0.500000 0.500000 0.500000 0.500000 0.500000 0.500000 0.500000 0.500000 0.500000 0.500000 0.500000 0.500000 0.500000 0.500000 1.000000 1.000000 1.000000 1.000000 1.000000 +***End_of_Header*** +X_Value TC06 Diffuser TC16 Tube Top TC19 Tube Top TC32 FRT Outer TC33 SRT Inner TC34 SRT Outer TC36 TRT Outer TC37 SRT Outer TC39 SRT Top1 TC40 SRT Bottom TC44 FRT Top2 TC45 Short Tube TC46 Dome1 TC47 Dome2 TC14 Exhaust TC_15 Cold He RTD_0 C_In RTD_1 C_Out RTD_2 W_In RTD_3 W_Out RTD_4 CDS RTD_5 RTD_6 RTD_7 TotalWarmFlow ColdFlow MethaneFlow Lambda CDS Flow Comment +0.000000 447.224647 2300.783165 2300.783165 2300.783165 2300.783165 2300.783165 
2300.783165 2300.783165 2300.783165 2300.783165 2300.783165 2300.783165 334.307023 321.410507 56.989538 -0.829803 12.752446 11.301499 48.239392 55.206290 1256.027059 1256.115820 -242.027870 -242.027870 6.308925 5.033220 16.512546 1.317933 2.589110 DummyComment +0.500000 447.224647 2300.783165 2300.783165 2300.783165 2300.783165 2300.783165 2300.783165 2300.783165 2300.783165 2300.783165 2300.783165 2300.783165 334.307023 321.410507 56.989538 -0.829803 12.752446 11.301499 48.239392 55.206290 1256.027059 1256.115820 -242.027870 -242.027870 +1.000000 447.251883 2300.796773 2300.796773 2300.796773 2300.796773 2300.796773 2300.796773 2300.796773 2300.796773 2300.796773 2300.796773 2300.796773 334.259636 321.542567 56.990953 -0.822915 12.752446 11.301499 48.239392 55.206290 1256.027059 1256.115820 -242.027870 -242.027870 +1.500000 447.251883 2300.796773 2300.796773 2300.796773 2300.796773 2300.796773 2300.796773 2300.796773 2300.796773 2300.796773 2300.796773 2300.796773 334.259636 321.542567 56.990953 -0.822915 12.752446 11.301499 48.239392 55.206290 1256.027059 1256.115820 -242.027870 -242.027870 +2.000000 447.234561 2300.791359 2300.791359 2300.791359 2300.791359 2300.791359 2300.791359 2300.791359 2300.791359 2300.791359 2300.791359 2300.791359 334.327350 321.409221 56.934749 -0.713145 12.751987 11.300888 48.238619 55.207065 1256.027059 1256.115820 1256.099320 1256.209881 6.308925 5.133501 17.311896 1.318101 2.613835 DummyComment +2.500000 447.234561 2300.791359 2300.791359 2300.791359 2300.791359 2300.791359 2300.791359 2300.791359 2300.791359 2300.791359 2300.791359 2300.791359 334.327350 321.409221 56.934749 -0.713145 12.751987 11.300888 48.238619 55.207065 1256.027059 1256.115820 1256.099320 1256.209881 +3.000000 447.221129 2300.803354 2300.803354 2300.803354 2300.803354 2300.803354 2300.803354 2300.803354 2300.803354 2300.803354 2300.803354 2300.803354 334.422635 321.488441 56.896022 -0.771979 12.751987 11.300888 48.238619 55.207065 1256.027059 1256.115820 1256.099320 1256.209881 +3.500000 447.221129 2300.803354 2300.803354 2300.803354 2300.803354 2300.803354 2300.803354 2300.803354 2300.803354 2300.803354 2300.803354 2300.803354 334.422635 321.488441 56.896022 -0.771979 12.751987 11.300888 48.238619 55.207065 1256.027059 1256.115820 1256.099320 1256.209881 +4.000000 447.197928 2300.807001 2300.807001 2300.807001 2300.807001 2300.807001 2300.807001 2300.807001 2300.807001 2300.807001 2300.807001 2300.807001 334.279190 321.450723 56.870199 -0.690624 12.750456 11.300582 48.239701 55.207065 1256.027059 1256.115820 1256.099320 1256.209881 6.379615 5.232138 16.986235 1.325774 2.604892 DummyComment +4.500000 447.197928 2300.807001 2300.807001 2300.807001 2300.807001 2300.807001 2300.807001 2300.807001 2300.807001 2300.807001 2300.807001 2300.807001 334.279190 321.450723 56.870199 -0.690624 12.750456 11.300582 48.239701 55.207065 1256.027059 1256.115820 1256.099320 1256.209881 +5.000000 447.171711 2300.797727 2300.797727 2300.797727 2300.797727 2300.797727 2300.797727 2300.797727 2300.797727 2300.797727 2300.797727 2300.797727 334.300433 321.542371 56.828470 -0.727676 12.750456 11.300582 48.239701 55.207065 1256.027059 1256.115820 1256.099320 1256.209881 +5.500000 447.171711 2300.797727 2300.797727 2300.797727 2300.797727 2300.797727 2300.797727 2300.797727 2300.797727 2300.797727 2300.797727 2300.797727 334.300433 321.542371 56.828470 -0.727676 12.750456 11.300582 48.239701 55.207065 1256.027059 1256.115820 1256.099320 1256.209881 +6.000000 447.162303 2300.796408 2300.796408 2300.796408 
2300.796408 2300.796408 2300.796408 2300.796408 2300.796408 2300.796408 2300.796408 2300.796408 334.219576 321.628593 56.763746 -0.811820 12.749844 11.299052 48.242021 55.206600 1256.027059 1256.115820 1256.099320 -242.027870 6.358244 5.130213 16.851365 1.329846 2.650660 DummyComment +6.500000 447.162303 2300.796408 2300.796408 2300.796408 2300.796408 2300.796408 2300.796408 2300.796408 2300.796408 2300.796408 2300.796408 2300.796408 334.219576 321.628593 56.763746 -0.811820 12.749844 11.299052 48.242021 55.206600 1256.027059 1256.115820 1256.099320 -242.027870 +7.000000 447.170772 2300.789963 2300.789963 2300.789963 2300.789963 2300.789963 2300.789963 2300.789963 2300.789963 2300.789963 2300.789963 2300.789963 334.202566 321.528237 56.741189 -0.840823 12.749844 11.299052 48.242021 55.206600 1256.027059 1256.115820 1256.099320 -242.027870 +7.500000 447.170772 2300.789963 2300.789963 2300.789963 2300.789963 2300.789963 2300.789963 2300.789963 2300.789963 2300.789963 2300.789963 2300.789963 334.202566 321.528237 56.741189 -0.840823 12.749844 11.299052 48.242021 55.206600 1256.027059 1256.115820 1256.099320 -242.027870 diff --git a/lvm_read.py b/lvm_read.py index 90c05bc..fa4d736 100644 --- a/lvm_read.py +++ b/lvm_read.py @@ -5,9 +5,10 @@ """ from os import path import pickle +import itertools import numpy as np -__version__ = '1.21' +__version__ = '1.22' def _lvm_pickle(filename): """ Reads pickle file (for local use) @@ -43,21 +44,25 @@ def _lvm_dump(lvm_data, filename, protocol=-1): output.close() -def _read_lvm_base(filename): +def _read_lvm_base(filename, read_comments_as_string=False): """ Base lvm reader. Should be called from ``read``, only :param filename: filename of the lvm file + :read_comments_as_string: if True, comments are read as string and returned in dictionary, + otherwise as nan :return lvm_data: lvm dict """ with open(filename, 'r', encoding="utf8", errors='ignore') as f: - lvm_data = read_lines(f) + lvm_data = read_lines(f, read_comments_as_string=read_comments_as_string) return lvm_data -def read_lines(lines): +def read_lines(lines, read_comments_as_string=False): """ Read lines of strings. 
:param lines: lines of the lvm file + :read_comments_as_string: if True, comments are read as string and returned in dictionary, + otherwise as nan :return lvm_data: lvm dict """ lvm_data = dict() @@ -73,7 +78,10 @@ def to_float(a): try: return float(a.replace(lvm_data['Decimal_Separator'], '.')) except: - return np.nan + if read_comments_as_string: + return a + else: + return np.nan for line in lines: line = line.replace('\r', '') line_sp = line.replace('\n', '').split('\t') @@ -108,6 +116,8 @@ def to_float(a): first_column = 1 segment['Channel names'] = line_sp[first_column:(nr_of_columns + 1)] data_channels_comment_reading = False + if segment['Channel names'][-1] == 'Comment': + data_channels_comment_reading = True data_reading = True elif data_channels_comment_reading: key, values = line_sp[0], line_sp[1:(nr_of_columns + 1)] @@ -124,16 +134,30 @@ def to_float(a): del lvm_data[segment_nr - 1] segment_nr -= 1 lvm_data['Segments'] = segment_nr + def to_float2(x, default_value=np.nan): + try: + return float(x) + except ValueError: + return default_value + vfunc = np.vectorize(to_float2) for s in range(segment_nr): - lvm_data[s]['data'] = np.asarray(lvm_data[s]['data']) + lvm_data[s]['data'] = list(itertools.zip_longest(*lvm_data[s]['data'], fillvalue='')) + if data_channels_comment_reading and read_comments_as_string: + lvm_data[s]['comments'] = list(lvm_data[s]['data'][-1]) + lvm_data[s]['data'] = vfunc(lvm_data[s]['data'][:-1]).T + else: + lvm_data[s]['data'] = np.asarray(lvm_data[s]['data']).T + #lvm_data[s]['data'] = np.asarray(lvm_data[s]['data']) return lvm_data -def read_str(str): +def read_str(str, read_comments_as_string=False): """ Parse the string as the content of lvm file. :param str: input string + :read_comments_as_string: if True, comments are read as string and returned in dictionary, + otherwise as nan :return: dictionary with lvm data Examples @@ -147,10 +171,10 @@ def read_str(str): >>> lvm.keys() #explore the dictionary dict_keys(['', 'Date', 'X_Columns', 'Time_Pref', 'Time', 'Writer_Version',... """ - return read_lines(str.splitlines(keepends=True)) + return read_lines(str.splitlines(keepends=True), read_comments_as_string=read_comments_as_string) -def read(filename, read_from_pickle=True, dump_file=True): +def read(filename, read_from_pickle=True, dump_file=True, read_comments_as_string=False): """Read from .lvm file and by default for faster reading save to pickle. 
See also specifications: http://www.ni.com/tutorial/4139/en/ @@ -158,6 +182,8 @@ def read(filename, read_from_pickle=True, dump_file=True): :param filename: file which should be read :param read_from_pickle: if True, it tries to read from pickle :param dump_file: dump file to pickle (significantly increases performance) + :read_comments_as_string: if True, comments are read as string and returned in dictionary, + otherwise as nan :return: dictionary with lvm data Examples @@ -176,20 +202,7 @@ def read(filename, read_from_pickle=True, dump_file=True): if read_from_pickle and lvm_data: return lvm_data else: - lvm_data = _read_lvm_base(filename) + lvm_data = _read_lvm_base(filename, read_comments_as_string=read_comments_as_string) if dump_file: _lvm_dump(lvm_data, filename) return lvm_data - - -if __name__ == '__main__': - import matplotlib.pyplot as plt - - da = read('data/with_comments.lvm',read_from_pickle=False) - #da = read('data\with_empty_fields.lvm',read_from_pickle=False) - print(da.keys()) - print('Number of segments:', da['Segments']) - - plt.plot(da[0]['data']) - plt.show() - diff --git a/tests/test_all.py b/tests/test_all.py index a0d5f9d..c524fb8 100644 --- a/tests/test_all.py +++ b/tests/test_all.py @@ -27,6 +27,31 @@ def test_with_empty_fields_lvm(): data = read('./data/with_empty_fields.lvm', read_from_pickle=False, dump_file=False) np.testing.assert_equal(data[0]['data'][0,7],-0.011923) +def test_with_uneven_comments_as_string(): + data_from_file = np.array([0.000000,447.224647,2300.783165,2300.783165,2300.783165,2300.783165, + 2300.783165,2300.783165,2300.783165,2300.783165,2300.783165,2300.783165, + 2300.783165,334.307023,321.410507,56.989538,-0.829803,12.752446,11.301499, + 48.239392,55.206290,1256.027059,1256.115820,-242.027870,-242.027870, + 6.308925,5.033220,16.512546,1.317933,2.589110]) + data_from_file2 = np.array([6.000000,447.162303,2300.796408,2300.796408,2300.796408,2300.796408, + 2300.796408,2300.796408,2300.796408,2300.796408,2300.796408,2300.796408, + 2300.796408,334.219576,321.628593,56.763746,-0.811820,12.749844, + 11.299052,48.242021,55.206600,1256.027059,1256.115820,1256.099320, + -242.027870,6.358244,5.130213,16.851365,1.329846,2.650660]) + data_from_file3 = np.array([7.500000,447.170772,2300.789963,2300.789963,2300.789963,2300.789963, + 2300.789963,2300.789963,2300.789963,2300.789963,2300.789963,2300.789963, + 2300.789963,334.202566,321.528237,56.741189,-0.840823,12.749844, + 11.299052,48.242021,55.206600,1256.027059,1256.115820,1256.099320, + -242.027870, np.nan, np.nan, np.nan, np.nan, np.nan]) + comments_from_file = ['DummyComment','','','','DummyComment','','','', + 'DummyComment','','','','DummyComment','','',''] + data = read('./data/uneven_comments.lvm', read_from_pickle=False, dump_file=False, read_comments_as_string=True) + np.testing.assert_allclose(data[0]['data'][0],data_from_file) + np.testing.assert_allclose(data[0]['data'][-4],data_from_file2) + np.testing.assert_allclose(data[0]['data'][-1],data_from_file3) + for i in range(len(comments_from_file)): + np.testing.assert_equal(data[0]['comments'][i],comments_from_file[i]) + def test_with_multi_time_column_lvm(): data = read('./data/multi_time_column.lvm', read_from_pickle=False, dump_file=False) np.testing.assert_allclose(data[0]['data'][0],\ @@ -52,5 +77,6 @@ def timing_on_long_short_lvm(): np.testing.run_module_suite() if __name__ == '__main__': - test_several_comments() + test_with_multi_time_column_lvm() + #test_several_comments() #timing_on_long_short_lvm() \ No newline at end of file
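
Usage note (not part of the patch): a minimal sketch of how the new read_comments_as_string keyword can be exercised against the bundled data/uneven_comments.lvm file, assuming it is run from the repository root as the tests are; the expected values in the comments are taken from the new test above, while the print statements themselves are only illustrative.

import numpy as np
import lvm_read

# With read_comments_as_string=True the trailing 'Comment' column is returned
# as strings in a separate 'comments' list instead of being coerced to nan
# inside the numeric 'data' array.
lvm = lvm_read.read('./data/uneven_comments.lvm',
                    read_from_pickle=False,
                    dump_file=False,
                    read_comments_as_string=True)

segment = lvm[0]                                   # first (and only) segment of this file
print(segment['Channel names'][-1])                # 'Comment'
print(segment['data'].shape)                       # (16, 30): 16 rows, X column + 29 channels
print(segment['comments'][:4])                     # ['DummyComment', '', '', '']
print(np.isnan(segment['data'][-1][-5:]).all())    # True: short rows are padded with nan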
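
For readers skimming the diff, the heart of the change in read_lines is how rows of unequal length are handled: the rows are transposed with itertools.zip_longest (padding short rows with empty strings), the last column is split off as comments when the final channel is named 'Comment' and read_comments_as_string is True, and the remaining cells are coerced to float with nan as the fallback. A condensed, self-contained sketch of that approach follows; the example rows and variable names are illustrative only, not taken from lvm_read.

import itertools
import numpy as np

# Illustrative rows: the first and third carry extra columns plus a comment,
# the middle one is shorter, as in uneven_comments.lvm.
rows = [['0.0', '447.2', '6.3', 'DummyComment'],
        ['0.5', '447.2'],
        ['1.0', '447.3', '6.4', 'DummyComment']]

def to_float(x, default=np.nan):
    """Coerce one cell to float; anything non-numeric becomes the default."""
    try:
        return float(x)
    except ValueError:
        return default

# Transpose to columns, padding short rows with '' so all columns have equal length.
columns = list(itertools.zip_longest(*rows, fillvalue=''))

comments = list(columns[-1])                    # last column holds the comments
data = np.vectorize(to_float)(columns[:-1]).T   # numeric block, back in row order

print(comments)       # ['DummyComment', '', 'DummyComment']
print(data)           # the middle row ends in nan where it was padded

Padding with nan (rather than dropping short rows) keeps the returned array rectangular while preserving the alignment between samples acquired at different rates.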