Skip to content

Commit

Permalink
Merge pull request #29 from tilak-io/dev
Browse files Browse the repository at this point in the history
TLOG Parser
  • Loading branch information
HmZyy authored Dec 20, 2022
2 parents 8dd2e7c + c9c12c0 commit f59a34a
Show file tree
Hide file tree
Showing 26 changed files with 301 additions and 38,218 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
/node_modules
/api/venv
/api/__pycache__/
/api/parsers/__pycache__/
/.pnp
.pnp.js

Expand Down
10 changes: 9 additions & 1 deletion api/cesium_entity.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ def __init__(self,
alpha=1,
useRPY=False,
useXYZ=True,
scale=1,
viewModel=None):

self.name = name
Expand All @@ -23,7 +24,7 @@ def __init__(self,
self.useRPY = useRPY
self.useXYZ = useXYZ
self.tracked = tracked
#self.takeoffKey = takeoff_key
self.scale = scale
self.viewModel = viewModel
self.id = CesiumEntity.next_id
CesiumEntity.next_id += 1
Expand Down Expand Up @@ -75,6 +76,11 @@ def fromJson(cls, json):
else:
tracked = True

if "scale" in json:
scale = json['scale']
else:
scale = 1

return cls(
name=name,
color=color,
Expand All @@ -86,6 +92,7 @@ def fromJson(cls, json):
useRPY=useRPY,
useXYZ=useXYZ,
tracked=tracked,
scale=scale,
viewModel=viewModel)

def toJson(self):
Expand All @@ -98,6 +105,7 @@ def toJson(self):
"useXYZ": self.useXYZ,
"useRPY": self.useRPY,
"tracked": self.tracked,
"scale": self.scale,
"position": self.position,
"attitude": self.attitude
})
2 changes: 1 addition & 1 deletion api/communication.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ def listen_for_data(self):
[datadict, json_entities] = self.recv_zipped_pickle(zmq.NOBLOCK)
entities = self.map_entities(json_entities)
print('-> data recieved...')
self.io.emit('entities_loaded', json_entities)
self.io.emit('entities_loaded')
store.Store.get().setStore(datadict, entities)
self.send_zipped_pickle('hi')
except zmq.Again as e:
Expand Down
Empty file added api/parsers/__init__.py
Empty file.
2 changes: 1 addition & 1 deletion api/arduparser.py → api/parsers/arduparser.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import pandas as pd
from cesium_entity import CesiumEntity
from parser import Parser
from .parser import Parser
from pymavlink import mavutil

class ArduParser(Parser):
Expand Down
2 changes: 1 addition & 1 deletion api/csvparser.py → api/parsers/csvparser.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import pandas as pd
import numpy as np
from cesium_entity import CesiumEntity
from parser import Parser
from .parser import Parser

class CSVParser(Parser):
def __init__(self):
Expand Down
3 changes: 1 addition & 2 deletions api/djiparser.py → api/parsers/djiparser.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import math
import pandas as pd
import struct
from parser import Parser
from .parser import Parser
from cesium_entity import CesiumEntity

class DJIParser(Parser):
Expand Down Expand Up @@ -34,7 +34,6 @@ def quaternionToEuler(self,q):

def parse(self,filename):
f = open(filename,'rb')
print(filename)
buffer = f.read()
packets = []
for index in range(128,len(buffer)):
Expand Down
16 changes: 7 additions & 9 deletions api/parser.py → api/parsers/parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,36 +7,34 @@ def __init__(self):
self.name = "generic_parser"
self.entities = []
self.datadict = {}
self.layout = None


def parse(self,filename):
print("Parsing file")
print("Parsing file:" + filename)

def addEntity(self,entity):
self.entities.append(entity)

def setLayout(self, layout):
self.layout = layout

def setDefaultEntities(self):
pass
def setDefaultEntities(self): pass

def initEntities(self):
config_folder = path.expanduser("~/Documents/tiplot/config/")
if not path.exists(config_folder):
makedirs(config_folder)
config_file = config_folder + self.name + ".json"
if (path.exists(config_file)):
print("+ " + self.name + " config found")
print("+ " + config_file)
# print("+ " + self.name + " config found")
# print("+ " + config_file)
file = open(config_file)
entities = json.load(file)
for entity in entities:
mapped_entity = CesiumEntity.fromJson(entity)
self.addEntity(mapped_entity)
else:
print("- " + self.name + " config not found")
print("- " + config_file)
print("- Using default config for: " + self.name)
# print("- " + self.name + " config not found")
# print("- " + config_file)
# print("- Using default config for: " + self.name)
self.setDefaultEntities()
60 changes: 60 additions & 0 deletions api/parsers/tlogparser.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
import pandas as pd
from cesium_entity import CesiumEntity
from .parser import Parser
from pymavlink import mavutil

class TLOGParser(Parser):
    """Parser for MAVLink telemetry log (.tlog) files.

    Reads every MAVLink message from the log, groups messages by their
    type name, and exposes one pandas DataFrame per message type in
    ``self.datadict``. Each row is tagged with a ``timestamp_tiplot``
    column (seconds) derived from the message's ``time_boot_ms`` field;
    messages without ``time_boot_ms`` are skipped entirely.
    """

    def __init__(self):
        super().__init__()
        self.name = "tlog_parser"
        self.initDefaultEntity()
        self.initEntities()

    def parse(self, filename):
        """Parse *filename* and return ``[datadict, entities]``.

        Parameters
        ----------
        filename : str
            Path to a .tlog file readable by pymavlink.

        Returns
        -------
        list
            ``[self.datadict, self.entities]`` where datadict maps
            message-type name -> DataFrame of that type's messages.
        """
        mlog = mavutil.mavlink_connection(filename)
        buf = {}

        while True:
            m = mlog.recv_match()
            if m is None:
                break
            name = m.get_type()
            data = m.to_dict()
            if 'time_boot_ms' not in data:
                # ignore tables with no timestamp
                continue
            # convert ms -> s for the tiplot timeline
            data['timestamp_tiplot'] = data['time_boot_ms'] / 1e3

            # 'mavpackettype' duplicates the buf key; drop it from rows.
            # Bug fix vs. original: the first packet of each type used a
            # fresh m.to_dict() that lacked 'timestamp_tiplot', leaving a
            # NaN in the first row that .bfill() papered over with the
            # NEXT row's timestamp. Using the enriched dict in both
            # branches gives every row its own timestamp.
            data.pop('mavpackettype', None)
            buf.setdefault(name, []).append(data)

        # bfill() fills any leading NaNs (fields absent in early packets)
        # from later rows of the same table.
        self.datadict = {k: pd.DataFrame(v).bfill() for k, v in buf.items()}
        return [self.datadict, self.entities]

    def initDefaultEntity(self):
        """Create the built-in entity used when no user config exists.

        Position comes from LOCAL_POSITION_NED (x/y/z) and attitude from
        ATTITUDE (roll/pitch/yaw), both standard MAVLink message types.
        """
        self.default_entity = CesiumEntity(name='tlog default entity',
                                           useRPY=True,
                                           useXYZ=True,
                                           color="#ffffff",
                                           pathColor="#0000ff",
                                           position={
                                               'table': 'LOCAL_POSITION_NED',
                                               'x': 'x',
                                               'y': 'y',
                                               'z': 'z',
                                           },
                                           attitude={
                                               'table': 'ATTITUDE',
                                               'roll': 'roll',
                                               'pitch': 'pitch',
                                               'yaw': 'yaw',
                                           })

    def setDefaultEntities(self):
        """Register the default entity (called when no config file is found)."""
        self.addEntity(self.default_entity)
2 changes: 1 addition & 1 deletion api/ulgparser.py → api/parsers/ulgparser.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import pyulog
import pandas as pd
from cesium_entity import CesiumEntity
from parser import Parser
from .parser import Parser

class ULGParser(Parser):
def __init__(self):
Expand Down
49 changes: 29 additions & 20 deletions api/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,6 @@
from flask_socketio import SocketIO, emit
from flask_cors import CORS
from threading import Thread
from ulgparser import ULGParser
from csvparser import CSVParser
from djiparser import DJIParser
from arduparser import ArduParser
from time import localtime, strftime
from os import makedirs, path, getcwd
from glob import glob
Expand All @@ -16,6 +12,12 @@
import store
import json

from parsers.ulgparser import ULGParser
from parsers.csvparser import CSVParser
from parsers.djiparser import DJIParser
from parsers.arduparser import ArduParser
from parsers.tlogparser import TLOGParser

app = Flask(__name__)
app.config['SECRET_KEY'] = 'secret!'
CORS(app,resources={r"/*":{"origins":"*"}})
Expand All @@ -31,10 +33,11 @@

thread = Thread()
current_parser = None
current_file = None

def choose_parser(file, logs_dir):
global current_parser
parsers = [ULGParser(), CSVParser(), DJIParser(), ArduParser()]
parsers = [ULGParser(), CSVParser(), DJIParser(), ArduParser(), TLOGParser()]
full_path = logs_dir + file
for p in parsers:
try:
Expand All @@ -44,7 +47,7 @@ def choose_parser(file, logs_dir):
current_parser = p
break
except:
print("~> wrong format")
# print("~> wrong format")
ok = False
return ok

Expand All @@ -58,15 +61,6 @@ def connected():
thread.daemon = True
thread.start()

@socketio.on('get_entities_props')
def get_entities():
global currentTime
currentTime = datetime.now()
props,err = store.Store.get().getEntitiesProps()
if err is not None:
emit('error', err)
emit('entities_props', props)

@socketio.on('get_table_columns')
def get_table_columns(data):
index = data['index']
Expand Down Expand Up @@ -119,18 +113,18 @@ def default_entity():
if current_parser is not None:
default = current_parser.default_entity.toJson()
else:
#setting ulg entity as default
default = ULGParser().default_entity.toJson()
default = {}
return default

@app.route('/write_config', methods=['POST'])
def write_config():
config = request.get_json()
if (current_parser is None):
print("-> unable to write config, please choose a parser first")
return {'ok': False, 'error': 'unable to write config, please choose a parser first'}
name = "custom_parser"
else:
name = current_parser.name
store.Store.get().setEntities(config)
with open(configs_dir + current_parser.name + ".json", "w") as outfile:
with open(configs_dir + name + ".json", "w") as outfile:
outfile.write(json.dumps(config, indent=2))
return {'ok': True}

Expand Down Expand Up @@ -177,11 +171,26 @@ def get_logs():

@app.route('/select_log', methods=['POST'])
def select_log():
global current_file
file = request.get_json()
current_file = file
ok = choose_parser(file[0], logs_dir)
return {"ok": ok}


@app.route('/entities_props')
def get_entities_props():
props, err = store.Store.get().getEntitiesProps()
if err is not None: print(err)
return props

@app.route('/current_file')
def get_current_file():
global current_file
if current_file is None:
return {"msg": "no file selected"}
return {"file": current_file}


@socketio.on("disconnect")
def disconnected():
Expand Down
1 change: 1 addition & 0 deletions api/store.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,7 @@ def getEntitiesProps(self):
data.append({"id": e.id,
"entity_name": e.name,
"alpha": e.alpha,
"scale": e.scale,
"useRPY": e.useRPY,
"useXYZ": e.useXYZ,
"props": renamed,
Expand Down
Loading

0 comments on commit f59a34a

Please sign in to comment.