Epicsarch qs #378

Merged
merged 21 commits into from
Apr 4, 2024
Changes from 13 commits
Commits
21 commits
af4d09c
ENH: Adding --update feature to epicsarch-qs (in progress)
c-tsoi Oct 5, 2023
a256c15
changed the helper function to return updated dictionary to a list
c-tsoi Oct 27, 2023
a1612e4
able to update entries with matching aliases and PVs, and check qs fo…
c-tsoi Nov 13, 2023
20eca00
Refining with exceptions
c-tsoi Nov 15, 2023
1c27469
Able to identify duplicates and write correctly to the archfile
c-tsoi Dec 7, 2023
139c817
Able to identify duplicates and write to the correctly to the archfil…
c-tsoi Dec 7, 2023
e7dd922
Adding a method to pull all user objects from the cds tab in the ques…
c-tsoi Jan 23, 2024
52e54c1
Adding softlink feature, written, but still needs to test. Also, move…
c-tsoi Feb 8, 2024
bfaeb73
adding comments for better readability
c-tsoi Feb 8, 2024
cf40af7
tested running from xppopr and soft link feature
c-tsoi Feb 21, 2024
2cbcf40
testing cds items
c-tsoi Mar 13, 2024
9992d9c
Fixing pre-commit errors and updating files
c-tsoi Mar 13, 2024
d4aa2ea
Cleaned test_print_dry and marked with xfail_2.
c-tsoi Mar 19, 2024
fe4c640
Update hutch_python/epics_arch.py
c-tsoi Mar 21, 2024
a537f74
Update hutch_python/epics_arch.py
c-tsoi Mar 21, 2024
cb551d2
Update hutch_python/epics_arch.py
c-tsoi Mar 25, 2024
3e83fbc
Update hutch_python/qs_load.py
c-tsoi Mar 25, 2024
f3a0a9c
Update hutch_python/epics_arch.py
c-tsoi Mar 25, 2024
e5f7037
Update hutch_python/epics_arch.py
c-tsoi Mar 25, 2024
91b2840
Fixing issues addressed in the PR.
c-tsoi Mar 27, 2024
29fa3ce
Removing a single debugging message. Adding more information to help …
c-tsoi Apr 4, 2024
215 changes: 200 additions & 15 deletions hutch_python/epics_arch.py
@@ -2,12 +2,14 @@
import argparse
import logging
import os
import subprocess
import sys

from .constants import EPICS_ARCH_FILE_PATH
from .qs_load import get_qs_client
from .qs_load import get_qs_client, pull_cds_items

logger = logging.getLogger(__name__)
logging.basicConfig(level="INFO")


def _create_parser():
@@ -28,7 +30,15 @@ def _create_parser():

parser.add_argument('--dry-run', action='store_true', default=False,
help='Print to stdout what would be written in the '
'archFIle.')
'archFile.')

parser.add_argument('--level', '-l', required=False, type=str, default="INFO",
help='Set the logging level (e.g. DEBUG, INFO)')

parser.add_argument('--cds-items', nargs=2, action='store', default=None,
help="Pulls all data from CDS tab. E.g.: xppx1003221 run21 X-10032")

parser.add_argument('--link', '-sl', action='store_true', default=None, help="create softlink for experiment")
return parser


@@ -37,10 +47,21 @@ def main():
parser = _create_parser()
parsed_args = parser.parse_args()
kwargs = vars(parsed_args)
logger_setup(parsed_args)
logger.debug("\nepicsarch-qs test script, git")
create_arch_file(**kwargs)


def create_arch_file(experiment, hutch=None, path=None, dry_run=False):
def logger_setup(args):
# Setting up the logger, to show the level when enabled
logging.getLogger().addHandler(logging.NullHandler())
shown_logger = logging.getLogger('epicsarch-qs')
logger.setLevel(args.level)
logger.debug("Logger Level: ", logger.getEffectiveLevel())
logger.debug("Set logging level of %r to %r", shown_logger.name, args.level)


def create_arch_file(experiment, level=None, hutch=None, path=None, dry_run=False, update=False, cds_items=None, link=None):
"""
Create an epicsArch file for the experiment.

@@ -56,6 +77,8 @@ def create_arch_file(experiment, hutch=None, path=None, dry_run=False):
dry_run : bool
To indicate if only print to stdout the data that would be stored
in the epicsArch file and not create the file.
update : bool
To look into the qsdata and update the epicsArch file instead of overwriting it.

Examples
--------
@@ -93,13 +116,138 @@ def create_arch_file(experiment, hutch=None, path=None, dry_run=False):
file_path = path
elif hutch:
file_path = EPICS_ARCH_FILE_PATH.format(hutch.lower())
elif cds_items:
pull_cds_data(experiment, cds_items)
return
elif link:
update_file(exp_name=experiment, path=EPICS_ARCH_FILE_PATH.format(experiment[0:3]))
create_softlink(experiment)
return
else:
file_path = EPICS_ARCH_FILE_PATH.format(experiment[0:3])
create_file(exp_name=experiment, path=file_path)
update_file(exp_name=experiment, path=file_path)
elif dry_run:
print_dry_run(experiment)


def pull_cds_data(exp, run):
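# Pull user objects from the CDS tab of the experiment questionnaire via qs_load.pull_cds_items.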
logger.debug("in client")
pull_cds_items(exp, run)


def create_softlink(experiment):
logger.debug("in softlink")
# remove the old soft link and add a new one (update), *THIS HAS NOT BEEN TESTED YET*
# this removes the softlink in the /cds/group/pcds/dist/pds/{}/misc/
# rm_result = subprocess.run(['unlink', EPICS_ARCH_FILE_PATH.format(experiment[0:3]) + 'epicsArch_' + experiment[0:3].upper() + '_exp_specific.txt'])

# This adds a new softlink in /cds/group/pcds/dist/pds/{}/misc/
subprocess.run(['ln', '-sf', EPICS_ARCH_FILE_PATH.format(experiment[0:3]) + 'epicsArch_' + experiment + '.txt', EPICS_ARCH_FILE_PATH.format(experiment[0:3]) + 'epicsArch_' + experiment[0:3].upper() + '_exp_specific.txt'])
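# e.g. in /cds/group/pcds/dist/pds/xpp/misc/: epicsArch_XPP_exp_specific.txt -> epicsArch_xpplv6818.txt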


def check_for_duplicates(qs_data, af_data):

"""
Check for duplicate PVs in the questionnaire; the code already throws a warning for duplicate aliases.
If duplicates (PV or alias) are found in the questionnaire, throw an error and prompt the user to fix them and re-run. If they are found in the epicsArch file, step through each match and update accordingly.

Parameters
----------
qs_data : list
af_data : list

Examples
--------
>>> epicsarch-qs xpplv6818 --dry-run

Returns
-------
updated_arch_list : list
Updated list containing the sorted aliases and PVs.

"""

"""
Part 1: Parse Data from the questionnaire and the archfile
Part 2: Check the questionnaire for pv duplicates
"""

# PART 1

# Convert lists to dictionaries to sort as key-value pairs while also removing any whitespace in the aliases.

# Questionnaire Data, removing whitespaces and newline chars
qsDict = dict(zip(qs_data[::2], qs_data[1::2]))
qsDict = {k.replace(" ", ""): v for k, v in qsDict.items()}
qsDict = {k.replace("\n", ""): v for k, v in qsDict.items()}
qsDict = {k: v.replace(" ", "") for k, v in qsDict.items()}
qsDict = {k: v.replace("\n", "") for k, v in qsDict.items()}
sorted_qsDict = dict(sorted(qsDict.items()))

# If the archfile is not empty then clean it; if not, skip
if len(af_data) > 0:
# ArchFile Data, removing whitespaces and newline chars
afDict = dict(zip(af_data[::2], af_data[1::2]))
afDict = {k.replace(" ", ""): v for k, v in afDict.items()}
afDict = {k.replace("\n", ""): v for k, v in afDict.items()}
afDict = {k: v.replace(" ", "") for k, v in afDict.items()}
afDict = {k: v.replace("\n", "") for k, v in afDict.items()}
sorted_afDict = dict(sorted(afDict.items()))

# PART 2

# Check the questionnaire for duplicate PVs
# Making reverse multidict to help identify duplicate values in questionnaire.
rev_keyDict = {}
for key, value in sorted_qsDict.items():
rev_keyDict.setdefault(value, list()).append(key)

pvDuplicate = [key for key, values in rev_keyDict.items() if len(values) > 1]
# Looking for duplicates of PVs in the questionnaire
# also print out the alias for the PV; warn the operator to remove the duplicate(s) and re-run
for dup in pvDuplicate:
logger.debug("!Duplicate PV in questionnaire!:" + str(dup))
for value in rev_keyDict[dup][1:]:
logger.debug("Found PV duplicate(s) from questionnaire: " + value + ", " + sorted_qsDict[value])
raise Exception("Please remove duplicates and re-run script!")

# Check to see if the archfile has any data in it
if len(af_data) == 0:
logger.debug("CFD: Case: no archfile given, returning cleaned questionnaire data\n")
cleaned_qs_data = [x for item in sorted_qsDict.items() for x in item]
return cleaned_qs_data

# Once we have cleared any duplicates in the questionnaire, we move on to updating values according to which field matches.

# Checking for matching aliases and PVs in the questionnaire and archfile
# if the alias matches, update its PV; if the PV matches, update the alias by removing the old key and adding the new one
for (k, val) in sorted_qsDict.items():
# this looks up the key in the af Dictionary by finding the value
foundKey = get_key(val, sorted_afDict)
if k in sorted_afDict:
logger.debug("!Alias Match in questionnaire and archfile! Updating PV: " + k + ", " + sorted_qsDict[k])
sorted_afDict[k] = sorted_qsDict[k]
elif foundKey:
del sorted_afDict[foundKey]
sorted_afDict[k] = val
logger.debug("!PV Match in questionnaire and archfile! Updating Alias: " + k + ", " + val)

sorted_afDict = dict(sorted(sorted_afDict.items()))
updated_arch_list = [x for item in sorted_afDict.items() for x in item]
logger.debug("\nUpdated Arch List:\n")
logger.debug(updated_arch_list)
return updated_arch_list


def read_archfile(exp_path):
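# Return the lines of an existing epicsArch file, or raise OSError if it does not exist.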
if os.path.exists(exp_path):
with open(exp_path, "r") as experiment:
lines = experiment.readlines()
return lines
else:
raise OSError('ArchFile not found: %s' % exp_path)


def print_dry_run(exp_name):
"""
Print to stdout the data that would be stored in the epicsArch file.
Expand All @@ -113,9 +261,30 @@ def print_dry_run(exp_name):
--------
>>> epicsarch-qs xpplv6818 --dry-run
"""
data = get_questionnaire_data(exp_name)
for item in data:
print(item)

qs_data = get_questionnaire_data(exp_name)

"""
Updating experiment file.
"""

af_path = EPICS_ARCH_FILE_PATH.format(exp_name[0:3]) + 'epicsArch_' + exp_name + '.txt'
if not os.path.exists(af_path):
raise OSError('print_dry_run, invalid path: %s' % af_path)
af_data = read_archfile(af_path)
updated_archFile = check_for_duplicates(qs_data, af_data)

for item in updated_archFile:
print(item)


def get_key(val, my_dict):
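# Reverse lookup: return the first key in my_dict whose value equals val, or an empty string if no match.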
for k, v in my_dict.items():
if val == v:
return k
strError = ""
return strError


def get_questionnaire_data(exp_name):
@@ -171,7 +340,7 @@ def get_items(exp_name):
return items


def create_file(exp_name, path):
def update_file(exp_name, path):
"""
Create or update a file with aliases and PVs from the questionnaire.

@@ -182,16 +351,32 @@ def create_file(exp_name, path):
path : str
Directory where to create the epicsArch file.
"""
data_list = get_questionnaire_data(exp_name)
if not os.path.exists(path):
raise OSError('Invalid path: %s' % path)
exp_name = str(exp_name)
file_path = ''.join((path, 'epicsArch_', exp_name, '.txt'))
qs_data = get_questionnaire_data(exp_name)

logger.info('Creating epicsArch file for experiment: %s', exp_name)
logger.debug("UpdateFile: qs_data:\n" + str(qs_data))

logger.debug("\nPath: " + str(path))
af_path = str(path) + "epicsArch_" + str(exp_name) + ".txt"
logger.debug("\nAF Path: " + str(af_path))
exp_name = str(exp_name)
file_path = ''.join((str(path), 'epicsArch_', str(exp_name), '.txt'))
if not os.path.exists(str(path)):
raise OSError('Invalid path: %s' % path)
# if the path exists but archfile does not, create af and pull qsd
elif os.path.exists(path) and not os.path.exists(af_path):
logger.debug("UpdateFile: Path is valid, creating archfile\n")
logger.debug('Creating epicsArch file for experiment: %s', exp_name)
cleaned_data = check_for_duplicates(qs_data, {})

# if the path and archfile exists, update af and pull
elif os.path.exists(path) and os.path.exists(af_path):
logger.debug("UpdateFile: Path exists and archfile exists\n")
af_data = read_archfile(af_path)
cleaned_data = check_for_duplicates(qs_data, af_data)

# Write updates to the corresponding file
with open(file_path, 'w') as f:
for data in data_list:
for data in cleaned_data:
try:
f.write(f'{data}\n')
except OSError as ex: