Skip to content

Commit

Permalink
cleanup
Browse files Browse the repository at this point in the history
add compress pickle to reqs

add joblib to reqs

Continuing cleanup and vscode integration

more clean up
  • Loading branch information
misko committed Dec 29, 2023
1 parent 016ba74 commit 48c9598
Show file tree
Hide file tree
Showing 42 changed files with 521 additions and 465 deletions.
Empty file added .env
Empty file.
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
.DS_Store
**/.DS_Store
**/__pycache__
spf/data
test_data
test_data.zip
test_data_dl.zip
test_data.txt
6 changes: 6 additions & 0 deletions .ipynb_checkpoints/polygon_testing-checkpoint.ipynb
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
{
"cells": [],
"metadata": {},
"nbformat": 4,
"nbformat_minor": 5
}
21 changes: 21 additions & 0 deletions .vscode/settings.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
{
"terminal.integrated.env.osx": {
"PYTHONPATH": "${workspaceFolder}/"
},
"terminal.integrated.env.linux": {
"PYTHONPATH": "${workspaceFolder}/"
},
"python.testing.pytestArgs": [
"tests"
],
"python.testing.unittestEnabled": false,
"python.testing.pytestEnabled": true,
"python.testing.cwd": "${workspaceFolder}/",
"[python]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "ms-python.black-formatter",
"editor.codeActionsOnSave": {
"source.organizeImports": "explicit"
},
}
}
Empty file added __init__.py
Empty file.
Binary file removed experiments/.DS_Store
Binary file not shown.
Binary file removed experiments/14/.DS_Store
Binary file not shown.
17 changes: 16 additions & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -4,21 +4,31 @@ async-timeout==4.0.3
attrs==23.1.0
black==23.12.0
click==8.1.7
compress-pickle==2.1.0
contourpy==1.1.0
cycler==0.11.0
exceptiongroup==1.2.0
filelock==3.13.1
flake8==6.1.0
fonttools==4.42.0
frozenlist==1.4.0
fsspec==2023.12.2
idna==3.6
imageio==2.33.1
importlib-resources==6.0.1
iniconfig==2.0.0
Jinja2==3.1.2
joblib==1.3.2
kiwisolver==1.4.4
lazy_loader==0.3
libaio==0.9.1
MarkupSafe==2.1.3
matplotlib==3.7.2
mccabe==0.7.0
mpmath==1.3.0
multidict==6.0.4
mypy-extensions==1.0.0
networkx==3.2.1
numpy==1.25.2
packaging==23.1
pathspec==0.12.1
Expand All @@ -33,10 +43,15 @@ pyparsing==3.0.9
pyserial==3.5
pytest==7.4.3
python-dateutil==2.8.2
scikit-image==0.22.0
scipy==1.11.4
shapely==2.0.2
six==1.16.0
sympy==1.12
tifffile==2023.12.9
tomli==2.0.1
typing-extensions==4.9.0
torch==2.1.2
tqdm==4.66.1
typing_extensions==4.9.0
yarl==1.9.4
zipp==3.16.2
Empty file added spf/__init__.py
Empty file.
10 changes: 4 additions & 6 deletions spf/dataset/spf_dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,20 +4,18 @@

import bisect
import os
import pickle

import matplotlib.pyplot as plt
import numpy as np
import torch
from torch.utils.data import DataLoader, Dataset
from compress_pickle import dump, load
from torch.utils.data import Dataset

from spf.model_training_and_inference.utils.image_utils import (
from spf.dataset.spf_generate import generate_session
from spf.plot.image_utils import (
detector_positions_to_theta_grid,
labels_to_source_images,
radio_to_image,
)
from spf.model_training_and_inference.utils.spf_generate import generate_session
from compress_pickle import dump, load

output_cols = { # maybe this should get moved to the dataset part...
"src_pos": [0, 1],
Expand Down
16 changes: 3 additions & 13 deletions spf/dataset/spf_generate.py
Original file line number Diff line number Diff line change
@@ -1,19 +1,9 @@
import argparse
import os
import pickle
import sys
import bz2

import numpy as np
from joblib import Parallel, delayed
from tqdm import tqdm
from compress_pickle import dump, load
from spf.rf import (
NoiseWrapper,
IQSource,
UCADetector,
ULADetector,
beamformer,
)

from spf.rf import IQSource, NoiseWrapper, UCADetector, ULADetector, beamformer

c = 3e8 # speed of light

Expand Down
18 changes: 12 additions & 6 deletions spf/grbl/grbl_interactive.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
import serial
import time
import sys
import numpy as np
import time

import matplotlib.path as pltpath
import numpy as np
import serial
from scipy.spatial import ConvexHull

home_pA = np.array([3568, 0])
Expand Down Expand Up @@ -63,9 +64,14 @@ def __init__(self, calibration_point, pA, pB, bounding_box):
self.pB = pB
if len(bounding_box) >= 3:
hull = ConvexHull(bounding_box)
if len(np.unique(hull.simplices))!=len(bounding_box):
print("Points do not form a simple hull, most likely non convex")
print("Points in the hull are, " + ",".join(map(str,[ bounding_box[x] for x in np.unique(hull.simplices)])))
if len(np.unique(hull.simplices)) != len(bounding_box):
print("Points do not form a simple hull, most likely non convex")
print(
"Points in the hull are, "
+ ",".join(
map(str, [bounding_box[x] for x in np.unique(hull.simplices)])
)
)
raise ValueError
self.polygon = pltpath.Path(bounding_box)
else:
Expand Down
3 changes: 2 additions & 1 deletion spf/grbl/run_grbl.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import serial
import sys

import serial

if len(sys.argv) != 3:
print("%s device file" % sys.argv[0])
sys.exit(1)
Expand Down
22 changes: 12 additions & 10 deletions spf/grbl_sdr_collect.py
Original file line number Diff line number Diff line change
@@ -1,16 +1,18 @@
from sdrpluto.gather import (
setup_rxtx_and_phase_calibration,
setup_rx_and_tx,
get_avg_phase,
)
from grbl.grbl_interactive import GRBLManager
from spf.rf import beamformer
import argparse
import os
import sys
import threading
import time

import numpy as np
import sys
import os
import argparse
from grbl.grbl_interactive import GRBLManager

from spf.rf import beamformer
from spf.sdrpluto.sdr_controller import (
get_avg_phase,
setup_rx_and_tx,
setup_rxtx_and_phase_calibration,
)


def bounce_grbl(gm):
Expand Down
8 changes: 3 additions & 5 deletions spf/model_training_and_inference/01_generate_data.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,12 @@
import argparse
import os
import pickle
import sys
import bz2

import numpy as np
from compress_pickle import dump, load
from joblib import Parallel, delayed
from tqdm import tqdm
from compress_pickle import dump, load

from utils.spf_generate import generate_session_and_dump, generate_session
from spf.dataset.spf_generate import generate_session, generate_session_and_dump

if __name__ == "__main__":
parser = argparse.ArgumentParser()
Expand Down
29 changes: 9 additions & 20 deletions spf/model_training_and_inference/12_task2_model_training.py
Original file line number Diff line number Diff line change
@@ -1,33 +1,22 @@
import os
import argparse
import time
import random
import os
import pickle
import random
import time

import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import torchvision
from torch import Tensor, nn
from torch.nn import TransformerEncoder, TransformerEncoderLayer
from torch.utils.data import dataset, random_split

from utils.image_utils import labels_to_source_images
from models.models import (
SingleSnapshotNet,
SnapshotNet,
Task1Net,
TransformerEncOnlyModel,
UNet,
)
from utils.spf_dataset import (
SessionsDataset,
from models.models import SingleSnapshotNet, SnapshotNet, Task1Net, UNet

from spf.dataset.spf_dataset import (
SessionsDatasetTask2,
collate_fn,
output_cols,
input_cols,
output_cols,
)
from spf.plot.image_utils import labels_to_source_images

torch.set_printoptions(precision=5, sci_mode=False, linewidth=1000)

Expand Down
30 changes: 6 additions & 24 deletions spf/model_training_and_inference/13_learn_beamformer.py
Original file line number Diff line number Diff line change
@@ -1,35 +1,17 @@
import os
import argparse
import os
import pickle
import random
import time
from functools import cache
import random
import pickle

import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import torchvision
from torch import Tensor, nn
from torch.nn import TransformerEncoder, TransformerEncoderLayer
from torch.utils.data import dataset, random_split

from utils.image_utils import labels_to_source_images
from models.models import (
SingleSnapshotNet,
SnapshotNet,
Task1Net,
TransformerModel,
UNet,
ComplexFFNN,
HybridFFNN,
)
from utils.spf_dataset import (
SessionsDataset,
SessionsDatasetTask2,
collate_fn_beamformer,
)
from models.models import ComplexFFNN, HybridFFNN

from spf.dataset.spf_dataset import SessionsDatasetTask2, collate_fn_beamformer

torch.set_printoptions(precision=5, sci_mode=False, linewidth=1000)

Expand Down
25 changes: 7 additions & 18 deletions spf/model_training_and_inference/14_task3_model_training.py
Original file line number Diff line number Diff line change
@@ -1,33 +1,22 @@
import os
import argparse
import time
import random
import os
import pickle
import random
import time

import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import torchvision
from torch import Tensor, nn
from torch.nn import TransformerEncoder, TransformerEncoderLayer
from torch.utils.data import dataset, random_split
from matplotlib.patches import Ellipse
from models.models import SnapshotNet, TrajectoryNet

from models.models import (
SingleSnapshotNet,
SnapshotNet,
Task1Net,
TransformerEncOnlyModel,
UNet,
TrajectoryNet,
)
from utils.spf_dataset import (
from spf.dataset.spf_dataset import (
SessionsDatasetRealTask2,
SessionsDatasetTask2,
collate_fn_transformer_filter,
output_cols,
input_cols,
output_cols,
)

torch.set_num_threads(8)
Expand Down
6 changes: 2 additions & 4 deletions spf/model_training_and_inference/90_real_session_plotter.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,7 @@
import argparse
import pickle

from utils.plot import filenames_to_gif, plot_full_session
from compress_pickle import dump, load
from utils.spf_dataset import SessionsDatasetReal
from spf.dataset.spf_dataset import SessionsDatasetReal
from spf.plot.plot import filenames_to_gif, plot_full_session

if __name__ == "__main__":
parser = argparse.ArgumentParser()
Expand Down
6 changes: 2 additions & 4 deletions spf/model_training_and_inference/90_session_plotter.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,7 @@
import argparse
import pickle

from utils.plot import filenames_to_gif, plot_full_session
from compress_pickle import dump, load
from utils.spf_dataset import SessionsDatasetTask2
from spf.dataset.spf_dataset import SessionsDatasetTask2
from spf.plot.plot import filenames_to_gif, plot_full_session

if __name__ == "__main__":
parser = argparse.ArgumentParser()
Expand Down
6 changes: 2 additions & 4 deletions spf/model_training_and_inference/91_line_plotter.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,7 @@
import argparse
import pickle

from utils.plot import filenames_to_gif, plot_lines
from compress_pickle import dump, load
from utils.spf_dataset import SessionsDataset
from spf.dataset.spf_dataset import SessionsDataset
from spf.plot.plot import filenames_to_gif, plot_lines

if __name__ == "__main__":
parser = argparse.ArgumentParser()
Expand Down
Loading

0 comments on commit 48c9598

Please sign in to comment.