Skip to content

Commit

Permalink
Add in notebook for simulated data
Browse files — browse the repository at this point in the history
  • Loading branch information
misko committed Dec 29, 2023
1 parent 48c9598 commit 2b16b92
Show file tree
Hide file tree
Showing 5 changed files with 276 additions and 30 deletions.
19 changes: 10 additions & 9 deletions spf/dataset/spf_generate.py
Original file line number | Diff line number | Diff line change
Expand Up @@ -58,9 +58,9 @@ def generate_session(args_and_session_idx):
detector_speed = args.detector_speed
if args.detector_speed < 0:
detector_speed = np.random.uniform(low=0.0, high=-args.detector_speed)
sigma = args.sigma
if args.sigma < 0:
sigma = np.random.uniform(low=0.0, high=-args.sigma)
sigma_noise = args.sigma_noise
if args.sigma_noise < 0:
sigma_noise = np.random.uniform(low=0.0, high=-args.sigma_noise)
detector_noise = args.detector_noise
if args.detector_noise < 0:
detector_noise = np.random.uniform(low=0.0, high=-args.detector_noise)
Expand Down Expand Up @@ -111,13 +111,13 @@ def generate_session(args_and_session_idx):
broadcasting_positions_at_t = np.zeros((args.time_steps, n_sources, 1))
broadcasting_heading_at_t = np.zeros((args.time_steps, n_sources, 1))
receiver_positions_at_t = np.zeros((args.time_steps, args.elements, 2))
source_theta_at_t = np.zeros((args.time_steps, 1, 1))
source_distance_at_t = np.zeros((args.time_steps, 1, 1))
source_theta_at_t = np.zeros((args.time_steps, 1))
source_distance_at_t = np.zeros((args.time_steps, 1))

detector_orientation_at_t = np.ones((args.time_steps, 1))

signal_matrixs_at_t = np.zeros(
(args.time_steps, args.elements, args.samples_per_snapshot),
(args.time_steps, args.elements, args.readings_per_snapshot),
dtype=np.complex64,
)
beam_former_outputs_at_t = np.zeros((args.time_steps, args.beam_former_spacing))
Expand Down Expand Up @@ -213,7 +213,7 @@ def generate_session(args_and_session_idx):
current_source_positions[[tdm_source_idx]], # x, y position
args.carrier_frequency,
),
sigma=sigma,
sigma=sigma_noise,
)
)

Expand Down Expand Up @@ -253,7 +253,7 @@ def generate_session(args_and_session_idx):

signal_matrixs_at_t[t_idx] = d.get_signal_matrix(
start_time=time_stamps[t_idx, 0],
duration=args.samples_per_snapshot / d.sampling_frequency,
duration=args.readings_per_snapshot / d.sampling_frequency,
)
thetas_at_t[t_idx], beam_former_outputs_at_t[t_idx], _ = beamformer_f(
d.all_receiver_pos(with_offset=False),
Expand All @@ -280,12 +280,13 @@ def generate_session(args_and_session_idx):
t_idx
] = 0 # (np.arctan2(diff[[1]],diff[[0]])-d.orientation+np.pi)%(2*np.pi)-np.pi
source_distance_at_t[t_idx] = 0 # np.sqrt(np.power(diff,2).sum())

session = {
"broadcasting_positions_at_t": broadcasting_positions_at_t, # list of (time_steps,sources,1)
"source_positions_at_t": source_positions_at_t, # (time_steps,sources,2[x,y])
"source_velocities_at_t": source_velocities_at_t, # (time_steps,sources,2[x,y])
"receiver_positions_at_t": receiver_positions_at_t, # (time_steps,receivers,2[x,y])
"signal_matrixs_at_t": signal_matrixs_at_t, # (time_steps,receivers,samples_per_snapshot)
"signal_matrixs_at_t": signal_matrixs_at_t, # (time_steps,receivers,readings_per_snapshot)
"beam_former_outputs_at_t": beam_former_outputs_at_t, # (timesteps,thetas_tested_for_steering)
"thetas_at_t": thetas_at_t, # (timesteps,thetas_tested_for_steering)
"detector_position_phase_offsets_at_t": detector_position_phase_offsets_at_t, # (timesteps,1) # phase offset for orbit dynamics
Expand Down
4 changes: 2 additions & 2 deletions spf/model_training_and_inference/01_generate_data.py
Original file line number | Diff line number | Diff line change
Expand Up @@ -46,10 +46,10 @@
)
parser.add_argument("--detector-speed", type=float, required=False, default=10.0)
parser.add_argument("--source-speed", type=float, required=False, default=0.0)
parser.add_argument("--sigma", type=float, required=False, default=1.0)
parser.add_argument("--sigma-noise", type=float, required=False, default=1.0)
parser.add_argument("--time-steps", type=int, required=False, default=100)
parser.add_argument("--time-interval", type=float, required=False, default=0.3)
parser.add_argument("--samples-per-snapshot", type=int, required=False, default=3)
parser.add_argument("--readings-per-snapshot", type=int, required=False, default=3)
parser.add_argument("--sessions", type=int, required=False, default=1024)
parser.add_argument(
"--output", type=str, required=False, default="sessions-default"
Expand Down
8 changes: 4 additions & 4 deletions spf/model_training_and_inference/13_learn_beamformer.py
Original file line number | Diff line number | Diff line change
Expand Up @@ -293,9 +293,9 @@ def plot_loss(
print("init network")
models = []

# signal_matrix ~ (snapshots_per_sample,n_antennas,samples_per_snapshot)
# signal_matrix ~ (snapshots_per_sample,n_antennas,readings_per_snapshot)

_, n_receivers, samples_per_snapshot = ds_train[0]["signal_matrixs_at_t"].shape
_, n_receivers, readings_per_snapshot = ds_train[0]["signal_matrixs_at_t"].shape
_, beam_former_bins = ds_train[0]["beam_former_outputs_at_t"].shape

for snapshots_per_sample in [1]:
Expand All @@ -310,7 +310,7 @@ def plot_loss(
snapshots_per_sample,
),
"model": ComplexFFNN(
d_inputs=n_receivers * samples_per_snapshot + 2,
d_inputs=n_receivers * readings_per_snapshot + 2,
d_outputs=beam_former_bins,
d_hidden=beam_former_bins * 2,
n_layers=n_complex_layers,
Expand All @@ -332,7 +332,7 @@ def plot_loss(
snapshots_per_sample,
),
"model": HybridFFNN(
d_inputs=n_receivers * samples_per_snapshot + 2,
d_inputs=n_receivers * readings_per_snapshot + 2,
d_outputs=beam_former_bins,
n_complex_layers=n_complex_layers,
n_real_layers=8,
Expand Down
33 changes: 18 additions & 15 deletions spf/notebooks/load_real_data_session.ipynb
Original file line number | Diff line number | Diff line change
Expand Up @@ -2,7 +2,7 @@
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"execution_count": 2,
"id": "71dfe5c6-b68f-49a2-8275-728cdb65c529",
"metadata": {},
"outputs": [],
Expand All @@ -13,7 +13,7 @@
},
{
"cell_type": "code",
"execution_count": 2,
"execution_count": 3,
"id": "3a95ed23-b380-4cfd-8b47-60fa14290340",
"metadata": {},
"outputs": [],
Expand All @@ -25,24 +25,19 @@
},
{
"cell_type": "code",
"execution_count": 5,
"execution_count": 4,
"id": "59584a7c-1e24-4e41-9952-23e7298bc583",
"metadata": {},
"outputs": [],
"source": [
"import sys\n",
"sys.path.append(repo_root) # go to parent dir\n",
"import argparse\n",
"import pickle\n",
"\n",
"from spf.plot.plot import filenames_to_gif, plot_full_session\n",
"from compress_pickle import dump, load\n",
"from spf.dataset.spf_dataset import SessionsDatasetReal"
]
},
{
"cell_type": "code",
"execution_count": 6,
"execution_count": 5,
"id": "18b1c02a-10ce-4b47-ae2f-98625f3bdd59",
"metadata": {},
"outputs": [],
Expand All @@ -57,7 +52,7 @@
},
{
"cell_type": "code",
"execution_count": 7,
"execution_count": 6,
"id": "032c99f4-6b81-4082-a4d6-47af09b8009e",
"metadata": {},
"outputs": [
Expand All @@ -84,7 +79,15 @@
},
{
"cell_type": "code",
"execution_count": 8,
"execution_count": null,
"id": "934efc57",
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": 7,
"id": "7084912d-c62f-4383-a25b-79a6a99583f5",
"metadata": {},
"outputs": [],
Expand All @@ -96,17 +99,17 @@
},
{
"cell_type": "code",
"execution_count": 9,
"execution_count": 8,
"id": "21858d41-83af-4adb-b5cb-55e33b81c247",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"<matplotlib.legend.Legend at 0x13dc55f10>"
"<matplotlib.legend.Legend at 0x16395b290>"
]
},
"execution_count": 9,
"execution_count": 8,
"metadata": {},
"output_type": "execute_result"
},
Expand Down Expand Up @@ -153,7 +156,7 @@
},
{
"cell_type": "code",
"execution_count": 10,
"execution_count": 9,
"id": "ab349f95-e442-4b52-b1dc-b18e1748874c",
"metadata": {},
"outputs": [
Expand Down
242 changes: 242 additions & 0 deletions spf/notebooks/load_simulated_data_session.ipynb

Large diffs are not rendered by default.

0 comments on commit 2b16b92

Please sign in to comment.