Skip to content

Commit ad2b990

Browse files
committed
add tools which could help with pre-processing of the data collected over USB serial
1 parent 8b96a9f commit ad2b990

File tree

4 files changed

+269
-0
lines changed

4 files changed

+269
-0
lines changed
Lines changed: 31 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,31 @@
1+
# process_nicla_bhy2_log_base64.py
2+
This script is used to convert the log encoded in base64 to ascii
3+
## Usage
4+
```
5+
./process_nicla_bhy2_log_base64.py [log_file_name]
6+
```
7+
## Example
8+
./process_nicla_bhy2_log_base64.py log_nicla_bhy2.txt
9+
10+
11+
# check_for_data_loss.sh
12+
This script is used to check for any potential data loss during the transfer,
13+
and it reports some errors if it does find any data loss
14+
15+
## Usage
16+
```
17+
./check_for_data_loss.sh [OPTION] [log_file_name]
18+
```
19+
20+
## Example
21+
- Example 1
22+
```
23+
./check_for_data_loss.sh -b ./minicom.log
24+
```
25+
The above command checks for data loss using the log "./minicom.log", which is based on base64 encoding.
26+
- Example 2
27+
```
28+
./check_for_data_loss.sh -a ./minicom.log
29+
```
30+
The above command checks for data loss using the log "./minicom.log", which is based on ascii encoding.
31+
Lines changed: 73 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,73 @@
1+
#!/usr/bin/env python
"""Check a CSV log for data loss by verifying the `seq` column increments by 1.

Usage:
    ./check_for_data_loss.py <input_csv> [output_file]

The input must be a CSV with a `seq` column: a per-record sequence counter
expected to increase by one per row, wrapping modulo SEQ_MODULO.  The first
LINE_TO_SKIP lines are ignored so start-up noise in the capture does not
produce false positives.  Error reports go to stdout, or to [output_file]
when one is given.

NOTE(review): the original header comment said this script "resamples the
data to 1/60hz"; that does not match the code, which only checks `seq` for
gaps.
"""

import sys

import pandas as pd

# DEBUG = True
DEBUG = False
SEP_COLUMN = ','

# Number of leading lines to ignore (start-up garbage in the capture).
LINE_TO_SKIP = 500

DOWNSAMPLE_FACTOR = 1  # currently unused; kept for compatibility

# The sequence counter wraps at this value -- TODO confirm against the
# firmware (the base64 decoder unpacks `seq` as an unsigned byte).
SEQ_MODULO = 10


def PDBG(*args):
    """Print a debug message to stderr when DEBUG is enabled."""
    if DEBUG:
        print("DEBUG:", *args, file=sys.stderr)


def main(argv=None):
    """Scan the CSV named in argv[1] and report rows where `seq` skips.

    argv -- argument vector (defaults to sys.argv): [prog, input_csv,
    optional output_file].  Exits with a message when input_csv is missing.
    """
    argv = sys.argv if argv is None else argv
    PDBG('Number of arguments:', len(argv))

    if len(argv) < 2:
        # Was `raise BaseException(...)`; SystemExit is the conventional way
        # for a script to abort with a message.
        sys.exit("missing argument")

    filename_in = argv[1]
    out_stream = sys.stdout
    file_out = None

    if len(argv) > 2:
        # Write reports to the named file instead of redirecting sys.stdout
        # (the original left sys.stdout pointing at a closed file).
        file_out = open(argv[2], 'w')
        out_stream = file_out

    try:
        line_cnt = 1  # first row of the file is the header
        df_in = pd.read_csv(filename_in, sep=SEP_COLUMN)
        seq_last = None

        for _, row in df_in.iterrows():
            line_cnt += 1
            if line_cnt <= LINE_TO_SKIP:
                continue

            seq = row['seq']
            if seq_last is None:
                PDBG("first row")
                seq_last = seq
                continue

            try:
                delta = seq - seq_last
                if seq < seq_last:  # counter wrapped around
                    delta += SEQ_MODULO
                if delta != 1:
                    # The final (possibly truncated) line is ignored.
                    if line_cnt < len(df_in.index):
                        print("error: line: ", line_cnt,
                              " has data missing, delta:", delta, "seq:", seq,
                              file=out_stream)
                    else:
                        PDBG("last line: ignored")
                seq_last = seq
            except Exception:
                # Was a bare `except:`, which also swallowed KeyboardInterrupt
                # and SystemExit.  A non-numeric `seq` raises here on the
                # subtraction/comparison; seq_last is deliberately left
                # unchanged, matching the original behaviour.
                if line_cnt < len(df_in.index):
                    print("error: line: ", line_cnt, " has data missing",
                          file=out_stream)
                else:
                    PDBG("last line: ignored")
    finally:
        # Always release the output file, even if parsing fails.
        if file_out is not None:
            file_out.close()


if __name__ == "__main__":
    main()
Lines changed: 63 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,63 @@
1+
#!/bin/sh
# check_for_data_loss.sh
#
# Normalise a serial capture log into a CSV file and run
# ./check_for_data_loss.py over it to detect missing records.
#
#   $1  -a (ascii log) or -b (base64 log, decoded first)
#   $2  log file name          (default: ./minicom.cap)
#   $3  info included in data  (default: accel+gyro+meta)

log_encoding_is_base64=false

if [ ":$1" = ":-b" ] ; then
    log_encoding_is_base64=true
    echo "log is using base64 encoding"
elif [ ":$1" = ":-a" ] ; then
    log_encoding_is_base64=false
    echo "log is using ascii encoding"
else
    echo "usage: ./check_for_data_loss.sh [OPTION] [log_filename] [info_included_in_data]"
    # printf interprets \t portably; `echo "\t"` prints a literal backslash-t
    # under bash even though the shebang says /bin/sh.
    printf '\t[OPTION]:\n'
    printf '\t\t-a\n'
    printf '\t\t\t:log use ascii encoding\n'
    printf '\t\t-b\n'
    printf '\t\t\t:log use base64 encoding\n'
    printf '\t[info_included_in_data]:\n'
    printf '\t\t"accel" or "accel+meta"\n'
    printf '\t\t"gyro" or "gyro+meta"\n'
    printf '\t\t"accel+gyro" or "accel+gyro+meta"\n'
    printf '\texample: ./check_for_data_loss.sh -b minicom.cap accel+gyro+meta\n'
    # `return` is only valid inside a function or a sourced script; a plain
    # script must use `exit`.
    exit 1
fi

if [ ":$2" = ":" ] ; then
    log_file="./minicom.cap"
else
    log_file="$2"
fi

if [ ":$3" = ":" ] ; then
    info_included_in_data="accel+gyro+meta"
else
    info_included_in_data="$3"
fi

tmp_file="./tmp.csv"
log_file_cp="${log_file}.cp"
log_file_in="${log_file_cp}.tmp"

echo "log_file:$log_file"

if [ "$log_encoding_is_base64" = true ] ; then
    # Work on a copy so the original capture is never modified.
    cp "$log_file" "$log_file_cp"
    ./process_nicla_bhy2_log_base64.py "$log_file_cp" "$info_included_in_data" > "$log_file_in"
else
    cp "$log_file" "$log_file_in"
fi

# Prepend the CSV header, then insert a comma between the trailing
# letter/digit pair of each decoded line so it parses as CSV.
echo "log_id,seq,ax,ay,az,gx,gy,gz" > "$tmp_file"
sed 's/.*\([a-zA-Z]\)\([0-9]\)/\1,\2/g' "$log_file_in" >> "$tmp_file"
./check_for_data_loss.py "$tmp_file"

rm -f "$tmp_file"
rm -f "$log_file_cp"
rm -f "$log_file_in"
Lines changed: 102 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,102 @@
1+
#!/usr/bin/env python
"""Decode a base64-encoded Nicla BHY2 serial log into CSV lines on stdout.

Usage:
    ./process_nicla_bhy2_log_base64.py [log_file_name] [info_included_in_data]

Each usable log line ends with a base64 record (padded with '=') holding a
little-endian packed struct: a one-char log id, an unsigned-byte sequence
counter, and int16 sensor samples.  Incomplete lines are reported as
skipped or as decode/unpack errors.
"""

import base64
import math
import struct
import sys

# configuration
# DEBUG = True
DEBUG = False
LINES_TO_IGNORE = 500  # leading lines of the capture to skip (start-up noise)


def main(argv=None):
    """Decode the log named in argv[1] and print one CSV row per record.

    argv -- argument vector (defaults to sys.argv):
    [prog, log_file_name, info_included_in_data].
    """
    argv = sys.argv if argv is None else argv

    # These defaults should be set according to the firmware settings on the
    # Nicla Sense ME firmware/sketch when no second argument is given.
    data_include_acc = False
    data_include_gyr = False
    data_include_meta_info = False

    log_file_name = argv[1] if len(argv) > 1 else 'log_nicla_bhy2.txt'

    if len(argv) > 2:
        info_included_in_data = argv[2]
        if "accel" in info_included_in_data:
            data_include_acc = True
            if DEBUG:
                print("accel data included")
        if "gyro" in info_included_in_data:
            data_include_gyr = True
            if DEBUG:
                print("gyro data included")
        if "meta" in info_included_in_data:
            data_include_meta_info = True
            if DEBUG:
                print("meta data included")

    # Read everything up front; the original never closed this handle.
    with open(log_file_name, 'r') as file_log:
        lines = file_log.readlines()

    # Payload size before base64 encoding: 2 bytes of meta (id + seq) plus
    # 3 x int16 per enabled sensor.
    record_len_before_encoding = 0
    if data_include_meta_info:
        record_len_before_encoding += 2
    if data_include_acc:
        record_len_before_encoding += 6
    if data_include_gyr:
        record_len_before_encoding += 6

    # base64 expands every 3 payload bytes into 4 output characters.
    record_len = int(math.ceil(record_len_before_encoding / 3)) * 4
    if DEBUG:
        print("record_len:", record_len)

    # NOTE(review): the original reassigned DEBUG = False here, forcibly
    # silencing the per-line debug prints below; preserved via a local flag.
    debug = False

    line_cnt = 0
    for line in lines:
        line_cnt += 1

        if line_cnt <= LINES_TO_IGNORE:
            continue

        line = line.strip()
        len_line = len(line)

        # A complete record is long enough and ends with base64 '=' padding.
        # endswith() is safe on an empty line, unlike line[len_line - 1].
        if (len_line < record_len) or (not line.endswith('=')):
            if line_cnt < len(lines):
                print("line:", line_cnt, " skipped")
            elif debug:
                # the final line of a capture is often truncated; stay quiet
                print("last line:", line_cnt, " skipped")
            continue

        # Keep only the trailing record; anything before it is line noise.
        line = line[-record_len:]
        try:
            data_imu = base64.b64decode(line)
            if debug:
                print(data_imu)
                print(len(data_imu))

            try:
                if data_include_acc and data_include_gyr:
                    (log_id, seq, ax, ay, az, gx, gy, gz) = \
                        struct.unpack("<cBhhhhhh", data_imu)
                    log_id = log_id.decode('ascii')
                    print(log_id.strip(), ",", seq, ",", ax, ",", ay, ",", az,
                          ",", gx, ",", gy, ",", gz, sep='')
                else:
                    # single-sensor record: id, seq, x/y/z samples
                    (log_id, seq, x, y, z) = struct.unpack("<cBhhh", data_imu)
                    log_id = log_id.decode('ascii')
                    print(log_id, ",", seq, ",", x, ",", y, ",", z, sep='')
            except struct.error:
                print("error: line:", line_cnt, " unpack")

        except base64.binascii.Error:
            print("error: line:", line_cnt, " incomplete data")


if __name__ == "__main__":
    main()
102+

0 commit comments

Comments
 (0)