Skip to content

Commit

Permalink
pylbo: create legacy header and prepare for 2.0 datfile format
Browse files Browse the repository at this point in the history
  • Loading branch information
n-claes committed Mar 2, 2023
1 parent 7cec0e7 commit 5b618da
Show file tree
Hide file tree
Showing 15 changed files with 776 additions and 465 deletions.
519 changes: 193 additions & 326 deletions post_processing/pylbo/data_containers.py

Large diffs are not rendered by default.

188 changes: 88 additions & 100 deletions post_processing/pylbo/file_handler.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,13 @@
import os
import numpy as np
import tkinter as tk
from pathlib import Path
from tkinter import filedialog

import numpy as np
from pylbo.data_containers import LegolasDataSeries, LegolasDataSet
from pylbo.exceptions import InvalidLegolasFile
from pylbo.data_containers import LegolasDataSet, LegolasDataSeries
from pylbo.utilities.toolbox import transform_to_list
from pylbo.utilities.logger import pylboLogger
from pylbo.utilities.toolbox import transform_to_list


def _validate_file(file):
Expand All @@ -33,16 +34,14 @@ def _validate_file(file):
raise InvalidLegolasFile(path_to_file)


def load(datfile, display_info=True):
def load(datfile):
"""
Loads a single Legolas datfile.
Parameters
----------
datfile : str, ~os.PathLike
Path to the datfile.
display_info : bool
If `True`, datfile information is written to terminal.
Raises
------
Expand All @@ -58,29 +57,30 @@ def load(datfile, display_info=True):
raise ValueError("load() takes a single datfile.")
_validate_file(datfile)
ds = LegolasDataSet(datfile)
if display_info:
pylboLogger.info(f"Legolas version : {ds.legolas_version}")
pylboLogger.info(f"file loaded : {ds.datfile.parent} -- {ds.datfile.name}")
pylboLogger.info(f"gridpoints : {ds.gridpoints}")
pylboLogger.info(f"geometry : {ds.geometry} in {ds.x_start, ds.x_end}")
pylboLogger.info(f"equilibrium : {ds.eq_type}")
if ds.header["matrices_written"]:
pylboLogger.info("matrices present in datfile")
if ds.header["eigenfuncs_written"]:
pylboLogger.info("eigenfunctions present in datfile")
if ds.header.get("derived_eigenfuncs_written", False):
pylboLogger.info("derived eigenfunctions present in datfile")
if ds.header.get("eigenfunction_subset_used", False):
saved_efs = len(ds.header["ef_written_idxs"])
total_efs = len(ds.eigenvalues)
pylboLogger.info(
f"subset saved: {saved_efs}/{total_efs} eigenvalues have eigenfunctions"
)
pylboLogger.info("-" * 75)
pylboLogger.info(f"Legolas version : {ds.legolas_version}")
pylboLogger.info(f"file loaded : {ds.datfile.parent} -- {ds.datfile.name}")
pylboLogger.info(f"gridpoints : {ds.gridpoints}")
pylboLogger.info(f"geometry : {ds.geometry} in {ds.x_start, ds.x_end}")
pylboLogger.info(f"equilibrium : {ds.eq_type}")
if ds.has_matrices:
pylboLogger.info("matrices present in datfile")
if ds.has_eigenvectors:
pylboLogger.info("eigenvectors present in datfile")
if ds.has_efs:
pylboLogger.info("eigenfunctions present in datfile")
if ds.has_derived_efs:
pylboLogger.info("derived eigenfunctions present in datfile")
if ds.has_ef_subset:
saved_efs = len(ds.header["ef_written_idxs"])
total_efs = len(ds.eigenvalues)
pylboLogger.info(
f"subset saved: {saved_efs}/{total_efs} eigenvalues have eigenfunctions"
)
pylboLogger.info("-" * 75)
return ds


def load_series(datfiles, display_info=True):
def load_series(datfiles):
"""
Loads multiple Legolas datfiles.
Expand All @@ -89,8 +89,6 @@ def load_series(datfiles, display_info=True):
datfiles : list, numpy.ndarray
Paths to the datfiles that should be loaded, in list/array form. Every element
should be a string or a ~os.PathLike object.
display_info : bool
If `True`, datfile information is written to terminal.
Raises
------
Expand All @@ -109,78 +107,68 @@ def load_series(datfiles, display_info=True):
_validate_file(datfile)
series = LegolasDataSeries(datfiles)

if display_info:
# handle version printing
versions = [ds.legolas_version.parse() for ds in series.datasets]
minversion, maxversion = min(versions), max(versions)
if minversion == maxversion:
info_msg = str(minversion)
else:
info_msg = f"{minversion} --> {maxversion}"
pylboLogger.info(f"Legolas_version : {info_msg}")

# handle file information printing
names = sorted([ds.datfile.name for ds in series.datasets])
pylboLogger.info(f"files loaded : {names[0]} --> {names[-1]}")

# handle gridpoints printing
pts = [ds.gridpoints for ds in series.datasets]
minpts, maxpts = min(pts), max(pts)
if minpts == maxpts:
info_msg = str(minpts)
else:
info_msg = f"{minpts} --> {maxpts}"
pylboLogger.info(f"gridpoints : {info_msg}")

# handle geometry printing
if not isinstance(series.geometry, str) and len(series.geometry) > 1:
pylboLogger.warning("multiple geometries detected!")
else:
pylboLogger.info(f"geometries : {series.geometry}")

# handle equilibrium printing
equils = set([ds.eq_type for ds in series.datasets])
if len(equils) > 1:
pylboLogger.error(f"multiple equilibria detected! -- {equils}")
raise ValueError
else:
pylboLogger.info(f"equilibria : {equils.pop()}")

# check presence of matrices
matrices_present = set(
[ds.header["matrices_written"] for ds in series.datasets]
)
if len(matrices_present) > 1:
pylboLogger.info("matrices present in some datfiles, but not all")
else:
if matrices_present.pop():
pylboLogger.info("matrices present in all datfiles")

# check presence of eigenfunctions
efs_present = set([ds.header["eigenfuncs_written"] for ds in series.datasets])
if len(efs_present) > 1:
pylboLogger.info("eigenfunctions present in some datfiles, but not all")
else:
if efs_present.pop():
pylboLogger.info("eigenfunctions present in all datfiles")

# check presence of derived eigenfunctions
defs_present = set(
[
ds.header.get("derived_eigenfuncs_written", False)
for ds in series.datasets
]
)
if len(defs_present) == 0:
pylboLogger.info("no derived eigenfunctions present")
elif len(defs_present) > 1:
pylboLogger.info(
"derived eigenfunctions present in some datfiles, but not all"
)
else:
if defs_present.pop():
pylboLogger.info("derived eigenfunctions present in all datfiles")
pylboLogger.info("-" * 75)
# handle version printing
versions = [ds.legolas_version.parse() for ds in series.datasets]
minversion, maxversion = min(versions), max(versions)
if minversion == maxversion:
info_msg = str(minversion)
else:
info_msg = f"{minversion} --> {maxversion}"
pylboLogger.info(f"Legolas_version : {info_msg}")

# handle file information printing
names = sorted([ds.datfile.name for ds in series.datasets])
pylboLogger.info(f"files loaded : {names[0]} --> {names[-1]}")

# handle gridpoints printing
pts = [ds.gridpoints for ds in series.datasets]
minpts, maxpts = min(pts), max(pts)
if minpts == maxpts:
info_msg = str(minpts)
else:
info_msg = f"{minpts} --> {maxpts}"
pylboLogger.info(f"gridpoints : {info_msg}")

# handle geometry printing
if not isinstance(series.geometry, str) and len(series.geometry) > 1:
pylboLogger.warning("multiple geometries detected!")
else:
pylboLogger.info(f"geometries : {series.geometry}")

# handle equilibrium printing
equils = set([ds.eq_type for ds in series.datasets])
if len(equils) > 1:
pylboLogger.error(f"multiple equilibria detected! -- {equils}")
raise ValueError
else:
pylboLogger.info(f"equilibria : {equils.pop()}")

# check presence of matrices
matrices_present = set(series.has_matrices)
if len(matrices_present) > 1:
pylboLogger.info("matrices present in some datfiles, but not all")
else:
if matrices_present.pop():
pylboLogger.info("matrices present in all datfiles")

# check presence of eigenfunctions
efs_present = set(series.has_efs)
if len(efs_present) > 1:
pylboLogger.info("eigenfunctions present in some datfiles, but not all")
else:
if efs_present.pop():
pylboLogger.info("eigenfunctions present in all datfiles")

# check presence of derived eigenfunctions
defs_present = set(series.has_derived_efs)
if len(defs_present) == 0:
pylboLogger.info("no derived eigenfunctions present")
elif len(defs_present) > 1:
pylboLogger.info("derived eigenfunctions present in some datfiles, but not all")
else:
if defs_present.pop():
pylboLogger.info("derived eigenfunctions present in all datfiles")
pylboLogger.info("-" * 75)
return series


Expand Down
31 changes: 17 additions & 14 deletions post_processing/pylbo/utilities/datfile_utils.py
Original file line number Diff line number Diff line change
def read_grid(istream, header):
    """
    Reads the base grid from the datfile.

    Parameters
    ----------
    istream : ~io.BufferedReader
        Datfile opened in binary mode.
    header : dict
        The datfile header; byte offsets live under ``header["offsets"]``
        and the number of points under ``header["gridpoints"]`` (2.0
        datfile-format key, previously ``"gridpts"``).

    Returns
    -------
    grid : numpy.ndarray
        The base grid from the datfile.
    """
    istream.seek(header["offsets"]["grid"])
    # one double ("d") per gridpoint; ALIGN is the module-level struct
    # alignment/byte-order prefix defined elsewhere in this file
    fmt = ALIGN + header["gridpoints"] * "d"
    grid = struct.unpack(fmt, istream.read(struct.calcsize(fmt)))
    return np.asarray(grid)

def read_grid_gauss(istream, header):
    """
    Reads the Gaussian grid from the datfile.

    Parameters
    ----------
    istream : ~io.BufferedReader
        Datfile opened in binary mode.
    header : dict
        The datfile header; byte offsets live under ``header["offsets"]``
        and the number of Gaussian points under
        ``header["gauss_gridpoints"]`` (2.0 datfile-format key,
        previously ``"gauss_gridpts"``).

    Returns
    -------
    grid_gauss : numpy.ndarray
        The Gaussian grid from the datfile.
    """
    istream.seek(header["offsets"]["grid_gauss"])
    # one double ("d") per Gaussian gridpoint
    fmt = ALIGN + header["gauss_gridpoints"] * "d"
    grid_gauss = struct.unpack(fmt, istream.read(struct.calcsize(fmt)))
    return np.asarray(grid_gauss)

def read_ef_grid(istream, header):
    """
    Reads the eigenfunction grid from the datfile.

    Parameters
    ----------
    istream : ~io.BufferedReader
        Datfile opened in binary mode.
    header : dict
        The datfile header; byte offsets live under ``header["offsets"]``
        and the number of eigenfunction gridpoints under
        ``header["ef_gridpoints"]`` (2.0 datfile-format key,
        previously ``"ef_gridpts"``).

    Returns
    -------
    ef_grid : numpy.ndarray
        The eigenfunction grid from the datfile.
    """
    istream.seek(header["offsets"]["ef_grid"])
    # one double ("d") per eigenfunction gridpoint
    fmt = ALIGN + header["ef_gridpoints"] * "d"
    ef_grid = struct.unpack(fmt, istream.read(struct.calcsize(fmt)))
    return np.asarray(ef_grid)

def read_eigenvectors(istream, header):
    """
    Reads the eigenvectors from the datfile.

    Parameters
    ----------
    istream : ~io.BufferedReader
        Datfile opened in binary mode.
    header : dict
        The datfile header. NOTE(review): in the 2.0 format the vector
        length and count are stored inside ``header["offsets"]`` under
        ``"eigenvector_length"`` / ``"nb_eigenvectors"`` — confirm this
        matches the header builder, since these are sizes, not offsets.

    Returns
    -------
    eigenvectors : numpy.ndarray(dtype=complex, ndim=2)
        The eigenvectors from the datfile, one in each column.
    """
    offsets = header["offsets"]
    ev_length = offsets["eigenvector_length"]
    nb_evs = offsets["nb_eigenvectors"]
    istream.seek(offsets["eigenvectors"])
    # every complex value is stored as two consecutive doubles (re, im)
    fmt = ALIGN + (2 * "d") * ev_length * nb_evs
    hdr = struct.unpack(fmt, istream.read(struct.calcsize(fmt)))
    reals = hdr[::2]
    imags = hdr[1::2]
    # Fortran (column-major) ordering: data was written column by column
    return np.reshape(
        np.asarray([complex(x, y) for x, y in zip(reals, imags)]),
        (ev_length, nb_evs),
        order="F",
    )

Expand Down Expand Up @@ -468,7 +471,7 @@ def read_eigenvalues(istream, header, omit_large_evs=True):
The eigenvalues from the datfile, with optionally omitted large values.
"""
istream.seek(header["offsets"]["eigenvalues"])
fmt = ALIGN + 2 * header["nb_eigenvals"] * "d"
fmt = ALIGN + 2 * header["nb_eigenvalues"] * "d"
hdr = struct.unpack(fmt, istream.read(struct.calcsize(fmt)))
# hdr is a 1D list with [real, imag, real, imag, real, imag...]
reals = hdr[::2]
def read_equilibrium_arrays(istream, header):
    """
    Reads the equilibrium arrays from the datfile.

    Parameters
    ----------
    istream : ~io.BufferedReader
        Datfile opened in binary mode.
    header : dict
        The datfile header; uses the 2.0 format keys
        ``header["offsets"]["equilibrium_arrays"]``,
        ``header["equilibrium_names"]`` and ``header["gauss_gridpoints"]``.

    Returns
    -------
    equil_arrays : dict
        Dictionary containing the equilibrium arrays, with keys given by
        ``header["equilibrium_names"]``.
    """
    istream.seek(header["offsets"]["equilibrium_arrays"])
    # format is loop-invariant: every equilibrium array holds exactly one
    # double per Gaussian gridpoint, so build it (and its size) once
    fmt = ALIGN + header["gauss_gridpoints"] * "d"
    byte_size = struct.calcsize(fmt)
    equil_arrays = {}
    for name in header["equilibrium_names"]:
        equil_arrays[name] = np.asarray(struct.unpack(fmt, istream.read(byte_size)))
    return equil_arrays
Expand Down Expand Up @@ -530,7 +533,7 @@ def read_eigenfunction(istream, header, ev_index):
eigenvalue, associated with the same `ef_index`.
"""
ef_offset = header["offsets"]["ef_arrays"]
ef_gridpts = header["ef_gridpts"]
ef_gridpts = header["ef_gridpoints"]
nb_eigenfuncs = len(header["ef_written_idxs"])
eigenfunctions = {}

Expand Down Expand Up @@ -587,7 +590,7 @@ def read_derived_eigenfunction(istream, header, ev_index):
eigenvalue, associated with the same `ev_index`.
"""
ef_offset = header["offsets"]["derived_ef_arrays"]
ef_gridpts = header["ef_gridpts"]
ef_gridpts = header["ef_gridpoints"]
nb_eigenfuncs = len(header["ef_written_idxs"])
eigenfunctions = {}

Expand Down Expand Up @@ -636,7 +639,7 @@ def read_matrix_B(istream, header):
the rows and column indices.
"""
istream.seek(header["offsets"]["matrix_B"])
fmt = ALIGN + (2 * "i" + "d") * header["nonzero_B_elements"]
fmt = ALIGN + (2 * "i" + "d") * header["offsets"]["nonzero_B_elements"]
hdr = struct.unpack(fmt, istream.read(struct.calcsize(fmt)))
rows = np.asarray(hdr[::3]) # rows are 1, 4, 7, 10 etc (Fortran indexing)
cols = np.asarray(hdr[1::3]) # columns are 2, 5, 8, 11 etc (Fortran indexing)
Expand Down Expand Up @@ -666,7 +669,7 @@ def read_matrix_A(istream, header):
the row and column indices.
"""
istream.seek(header["offsets"]["matrix_A"])
fmt = ALIGN + (2 * "i" + 2 * "d") * header["nonzero_A_elements"]
fmt = ALIGN + (2 * "i" + 2 * "d") * header["offsets"]["nonzero_A_elements"]
hdr = struct.unpack(fmt, istream.read(struct.calcsize(fmt)))
rows = np.asarray(hdr[::4])
cols = np.asarray(hdr[1::4])
Expand Down
Empty file.
Loading

0 comments on commit 5b618da

Please sign in to comment.