Commit

Merge pull request #22 from tilak-io/threejs-migration
Threejs migration
HmZyy authored Dec 5, 2022
2 parents 8422573 + 1751169 commit e5c9724
Showing 24 changed files with 10,107 additions and 723 deletions.
3 changes: 3 additions & 0 deletions .gitignore
@@ -30,3 +30,6 @@ yarn-debug.log*
yarn-error.log*
yarn.lock
package-lock.json

# old files
*.old
8 changes: 8 additions & 0 deletions api/cesium_entity.py
@@ -6,13 +6,15 @@ def __init__(self,
attitude,
alpha=1,
useRPY=False,
useXYZ=True,
viewModel=None):

self.name = name
self.position = position
self.attitude = attitude
self.alpha = alpha
self.useRPY = useRPY
self.useXYZ = useXYZ
#self.takeoffKey = takeoff_key
self.viewModel = viewModel
self.id = CesiumEntity.next_id
@@ -34,6 +36,11 @@ def fromJson(cls, json):
else:
useRPY = False

if "useXYZ" in json:
useXYZ = json['useXYZ']
else:
useXYZ = True

if "viewModel" in json:
viewModel = json['viewModel']
else:
@@ -45,5 +52,6 @@ def fromJson(cls, json):
attitude=attitude,
alpha=alpha,
useRPY=useRPY,
useXYZ=useXYZ,
viewModel=viewModel)
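
For context, a minimal sketch of an entity definition that exercises the new useXYZ flag. The field names follow this diff, but the full set of keys fromJson reads is not visible in these hunks, so treat the layout as an assumption rather than the project's documented format:

# Hypothetical config passed to CesiumEntity.fromJson (sketch, not taken from the repository)
entity_json = {
    "name": "example entity",
    "alpha": 1,
    "useRPY": False,
    "useXYZ": True,   # new flag: plot local x/y/z instead of longitude/latitude/altitude
    "position": {
        "table": "vehicle_local_position",
        "x": "x",
        "y": "y",
        "z": "z",
    },
    "attitude": {
        "table": "vehicle_attitude",
        "q0": "q[0]",  # column names here are assumptions
        "q1": "q[1]",
        "q2": "q[2]",
        "q3": "q[3]",
    },
}
entity = CesiumEntity.fromJson(entity_json)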

1 change: 1 addition & 0 deletions api/csvparser.py
@@ -25,6 +25,7 @@ def setDefaultEntities(self):
name='csv default entity',
alpha=1,
useRPY=True,
useXYZ=False,
position={
'table':'data',
'longitude':'lon',
2 changes: 1 addition & 1 deletion api/parser.py
@@ -1,5 +1,5 @@
from cesium_entity import CesiumEntity
from os import path
from os import path, makedirs
import json

class Parser:
19 changes: 15 additions & 4 deletions api/server.py
@@ -1,15 +1,16 @@
from engineio.async_drivers import gevent
from flask import Flask, request
from flask import Flask, request, send_file
from flask_socketio import SocketIO, emit
from flask_cors import CORS
from threading import Thread
from ulgparser import ULGParser
from csvparser import CSVParser
from time import localtime, strftime
from os import makedirs, path
from os import makedirs, path, getcwd
from glob import glob
from communication import Comm
from datetime import datetime
from sys import argv
import store

app = Flask(__name__)
@@ -20,6 +21,7 @@

logs_dir = path.expanduser("~/Documents/tiplot/logs/")
logs_dir = logs_dir.replace("\\", "/")

if not path.exists(logs_dir):
makedirs(logs_dir)

@@ -41,7 +43,7 @@ def choose_parser(file, logs_dir):

@socketio.on("connect")
def connected():
print("-> client has connected " + request.sid)
# print("-> client has connected " + request.sid)
global thread
if not thread.is_alive():
print("-> Starting Communications Thread...")
@@ -75,6 +77,14 @@ def upload_log():
return {'ok': ok}


@app.route('/model')
def model_3d():
if (len(argv) <= 1):
model = getcwd() + "/../obj/main.gltf" # debug mode
else:
model = argv[1]
return send_file(model)



@socketio.on('get_entities_props')
@@ -132,7 +142,8 @@ def get_takeoff_position():

@socketio.on("disconnect")
def disconnected():
print("-> client has disconnected " + request.sid)
# print("-> client has disconnected " + request.sid)
pass

def print_tiplot():
print('''
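
The new /model route returns the glTF file chosen at startup: obj/main.gltf resolved relative to the working directory (getcwd() + "/../obj/main.gltf") when no argument is given (debug mode), or the path passed as the first command-line argument otherwise. A hedged usage sketch follows; the host and port are assumptions, since they are not visible in this diff:

# Fetch the 3D model served by the Flask backend (sketch)
import requests

resp = requests.get("http://localhost:5000/model")  # default Flask port assumed
resp.raise_for_status()
with open("main.gltf", "wb") as f:
    f.write(resp.content)  # bytes of the glTF file returned by send_file()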
36 changes: 22 additions & 14 deletions api/store.py
@@ -40,22 +40,30 @@ def getKeys(self):
def getEntitiesProps(self):
data = []
for e in self.entities:
if (e.position['table'] == e.attitude['table']):
merged = pd.DataFrame.from_dict(self.datadict[e.position['table']])
else:
merged = pd.merge_asof(self.datadict[e.position['table']], self.datadict[e.attitude['table']], on='timestamp_tiplot').bfill()
if e.useXYZ:
position_columns = [e.position['x'], e.position['y'], e.position['z']]
position_columns_mapped = { e.position['x']: 'x', e.position['y']: 'y', e.position['z']: 'z'}
else:
position_columns = [e.position['altitude'], e.position['lattitude'], e.position['longitude']]
position_columns_mapped = { e.position['longitude']: 'longitude', e.position['altitude']: 'altitude', e.position['lattitude']: 'lattitude'}
if e.useRPY:
if (e.position['table'] == e.attitude['table']):
merged = pd.DataFrame.from_dict(self.datadict[e.position['table']])
else:
merged = pd.merge_asof(self.datadict[e.position['table']], self.datadict[e.attitude['table']], on='timestamp_tiplot').bfill()
raw = merged[[e.position['altitude'], e.position['lattitude'], e.position['longitude'],e.attitude['roll'],e.attitude['pitch'],e.attitude['yaw'],'timestamp_tiplot']]
renamed = raw.rename(columns={e.position['longitude']: 'longitude', e.position['altitude']: 'altitude',e.position['lattitude']: 'lattitude', e.attitude['roll'] : 'roll',e.attitude['pitch'] : 'pitch',e.attitude['yaw'] : 'yaw'}).to_dict('records')
data.append({ "id": e.id,"entity_name": e.name,"alpha": e.alpha, "useRPY": e.useRPY,"props": renamed})
attitude_columns = [e.attitude['roll'], e.attitude['pitch'], e.attitude['yaw']]
attitude_columns_mapped = {e.attitude['roll'] : 'roll', e.attitude['pitch'] : 'pitch', e.attitude['yaw'] : 'yaw'}
else:
if (e.position['table'] == e.attitude['table']):
merged = pd.DataFrame.from_dict(self.datadict[e.position['table']])
else:
merged = pd.merge_asof(self.datadict[e.position['table']], self.datadict[e.attitude['table']], on='timestamp_tiplot').bfill()
raw = merged[[e.position['altitude'], e.position['lattitude'], e.position['longitude'],e.attitude['q0'],e.attitude['q1'],e.attitude['q2'], e.attitude['q3'],'timestamp_tiplot']]
renamed = raw.rename(columns={e.position['longitude']: 'longitude', e.position['altitude']: 'altitude',e.position['lattitude']: 'lattitude', e.attitude['q0'] : 'q0',e.attitude['q1'] : 'q1',e.attitude['q2'] : 'q2',e.attitude['q3'] : 'q3'}).to_dict('records')
data.append({"id": e.id,"entity_name": e.name,"alpha": e.alpha, "useRPY": e.useRPY,"props": renamed})
attitude_columns = [e.attitude['q0'],e.attitude['q1'],e.attitude['q2'], e.attitude['q3']]
attitude_columns_mapped = {e.attitude['q0'] : 'q0', e.attitude['q1'] : 'q1', e.attitude['q2'] : 'q2', e.attitude['q3'] : 'q3'}
columns = position_columns + attitude_columns + ['timestamp_tiplot']
mapped_columns = {}
mapped_columns.update(position_columns_mapped)
mapped_columns.update(attitude_columns_mapped)
raw = merged[columns]
renamed = raw.rename(columns=mapped_columns).to_dict('records')
data.append({"id": e.id,"entity_name": e.name,"alpha": e.alpha, "useRPY": e.useRPY, "useXYZ": e.useXYZ,"props": renamed})

return data

def getEntities(self):
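
With this refactor, getEntitiesProps builds the column list and rename map once per entity, switching between x/y/z and altitude/lattitude/longitude based on useXYZ, and between roll/pitch/yaw and q0..q3 based on useRPY, then merges the position and attitude tables on timestamp_tiplot when they differ. For reference, one element of the returned list has roughly this shape for a useXYZ, quaternion entity (values are made up; the keys follow the code above):

# Hypothetical example of a single entry returned by getEntitiesProps()
{
    "id": 1,
    "entity_name": "ulg default entity",
    "alpha": 1,
    "useRPY": False,
    "useXYZ": True,
    "props": [
        {"x": 0.0, "y": 0.0, "z": -0.1,
         "q0": 1.0, "q1": 0.0, "q2": 0.0, "q3": 0.0,
         "timestamp_tiplot": 0.0},
        # ... one record per row of the merged dataframe
    ],
}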
10 changes: 6 additions & 4 deletions api/ulgparser.py
@@ -24,11 +24,13 @@ def parse(self,filename):

def setDefaultEntities(self):
entity = CesiumEntity(name='ulg default entity',
useRPY=False,
useXYZ=True,
position={
'table':'vehicle_global_position',
'longitude':'lon',
'lattitude':'lat',
'altitude':'alt',
'table':'vehicle_local_position',
'x':'x',
'y':'y',
'z':'z',
},
attitude={
'table':'vehicle_attitude',
Binary file added obj/main.gltf
Binary file not shown.
(Diffs for the remaining changed files are not shown.)
