
Commit

Merge pull request #1275 from bernt-matthias/topic/22.05
bump galaxy packages to 22.05
mvdbeek authored Feb 10, 2023
2 parents 2c71d88 + 5992585, commit f55829c
Showing 15 changed files with 16 additions and 24 deletions.
2 changes: 1 addition & 1 deletion planemo/commands/cmd_autoupdate.py
@@ -93,7 +93,7 @@ def cli(ctx, paths, **kwds): # noqa C901

     if any(r.type in {RunnableType.galaxy_tool, RunnableType.directory} for r in runnables):
         # update Galaxy tools
-        for (tool_path, tool_xml) in yield_tool_sources_on_paths(ctx, paths, recursive):
+        for tool_path, tool_xml in yield_tool_sources_on_paths(ctx, paths, recursive):
             if tool_path.split("/")[-1] in tools_to_skip:
                 info("Skipping tool %s" % tool_path)
                 continue
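
The change above is the pattern repeated across most files in this commit. The parentheses around the loop targets are optional in Python, so removing them does not change behaviour; both forms below unpack each pair identically (the names are illustrative, not from planemo):

    pairs = [("tool_a.xml", "<tool/>"), ("tool_b.xml", "<tool/>")]

    # Old style: parenthesized tuple target.
    for (path, xml) in pairs:
        print(path, xml)

    # New style: same unpacking, no parentheses.
    for path, xml in pairs:
        print(path, xml)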
2 changes: 1 addition & 1 deletion planemo/commands/cmd_ci_find_tools.py
@@ -27,7 +27,7 @@ def cli(ctx, paths, **kwds):
     operations over for continuous integration operations.
     """
     tool_paths = []
-    for (tool_path, tool_source) in yield_tool_sources_on_paths(ctx, paths, recursive=True):
+    for tool_path, tool_source in yield_tool_sources_on_paths(ctx, paths, recursive=True):
         if is_tool_load_error(tool_source):
             continue
         tool_paths.append(tool_path)
1 change: 0 additions & 1 deletion planemo/commands/cmd_shed_diff.py
@@ -84,7 +84,6 @@ def cli(ctx, paths, **kwds):
     }

     def diff(realized_repository):
-
         # We create a temporary redirection from kwds's
         # output to our tempfile. This lets us capture the
         # diff and redirect it to their requested location as
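
The truncated comment in this hunk describes a capture-then-copy approach: write the diff to a temporary file, then move it to wherever the caller asked. A rough sketch of that idea, with hypothetical names (run_diff, requested_output) that are not planemo's actual API:

    import shutil
    import tempfile

    def capture_diff(run_diff, kwds, requested_output=None):
        # Temporarily redirect the command's output to a scratch file...
        tmp = tempfile.NamedTemporaryFile(suffix=".diff", delete=False)
        tmp.close()
        kwds["output"] = tmp.name
        exit_code = run_diff(**kwds)
        # ...then copy the captured diff to the location originally requested.
        if requested_output is not None:
            shutil.copyfile(tmp.name, requested_output)
        return exit_code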
8 changes: 3 additions & 5 deletions planemo/conda.py
@@ -87,7 +87,7 @@ def collect_conda_targets(ctx, paths, recursive=False, found_tool_callback=None)
         else:
             real_paths.append(path)

-    for (tool_path, tool_source) in yield_tool_sources_on_paths(
+    for tool_path, tool_source in yield_tool_sources_on_paths(
         ctx, real_paths, recursive=recursive, exclude_deprecated=True
     ):
         if found_tool_callback:
@@ -132,9 +132,7 @@ def collect_conda_target_lists_and_tool_paths(ctx, paths, recursive=False, found
     """
     conda_target_lists = set()
     tool_paths = collections.defaultdict(list)
-    for (tool_path, tool_source) in yield_tool_sources_on_paths(
-        ctx, paths, recursive=recursive, yield_load_errors=False
-    ):
+    for tool_path, tool_source in yield_tool_sources_on_paths(ctx, paths, recursive=recursive, yield_load_errors=False):
         try:
             if found_tool_callback:
                 found_tool_callback(tool_path)
@@ -153,7 +151,7 @@ def collect_conda_target_lists_and_tool_paths(ctx, paths, recursive=False, found

 def tool_source_conda_targets(tool_source):
     """Load CondaTarget object from supplied abstract tool source."""
-    requirements, _ = tool_source.parse_requirements_and_containers()
+    requirements, *_ = tool_source.parse_requirements_and_containers()
     return conda_util.requirements_to_conda_targets(requirements)

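
The last hunk is the one substantive change in this file: with galaxy-tool-util 22.05, parse_requirements_and_containers() apparently returns more than the two values the old "requirements, _ = ..." unpacking expected, so star unpacking keeps the requirements and absorbs the rest. A generic illustration with a made-up stand-in for the tool source method:

    def parse_requirements_and_containers_stub():
        # Stand-in only; pretend the method now returns three values instead of two.
        return ["requirement_a"], ["container_b"], ["resource_c"]

    requirements, *_ = parse_requirements_and_containers_stub()
    print(requirements)  # ['requirement_a'] -- any extra return values are ignored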
2 changes: 1 addition & 1 deletion planemo/engine/interface.py
@@ -75,7 +75,7 @@ def test(self, runnables, test_timeout):
         test_cases = [t for tl in map(cases, runnables) for t in tl]
         test_results = self._collect_test_results(test_cases, test_timeout)
         tests = []
-        for (test_case, run_response) in test_results:
+        for test_case, run_response in test_results:
             test_case_data = test_case.structured_test_data(run_response)
             tests.append(test_case_data)
         test_data = {
1 change: 0 additions & 1 deletion planemo/galaxy/activity.py
@@ -414,7 +414,6 @@ def _get_metadata(self, history_content_type, content_id):
             raise Exception("Unknown history content type encountered [%s]" % history_content_type)

     def collect_outputs(self, ctx, output_directory):
-
         outputs_dict = {}
         # TODO: rather than creating a directory just use
         # Galaxy paths if they are available in this
2 changes: 1 addition & 1 deletion planemo/galaxy/config.py
@@ -26,8 +26,8 @@
     TYPE_CHECKING,
 )

-from galaxy.containers.docker_model import DockerVolume
 from galaxy.tool_util.deps import docker_util
+from galaxy.tool_util.deps.container_volumes import DockerVolume
 from galaxy.util.commands import argv_to_str
 from packaging.version import parse as parse_version

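
DockerVolume moved: it no longer comes from galaxy.containers.docker_model (provided by the galaxy-containers package this commit also drops from requirements.txt), and in 22.05 it lives in galaxy.tool_util.deps.container_volumes. Planemo pins galaxy-tool-util to 22.05+, so the direct import is enough; code that had to support both release lines could fall back roughly like this sketch:

    try:
        # Galaxy 22.05 (galaxy-tool-util 22.5.x) and newer
        from galaxy.tool_util.deps.container_volumes import DockerVolume
    except ImportError:
        # Older releases shipped the class via the separate galaxy-containers package
        from galaxy.containers.docker_model import DockerVolume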
3 changes: 1 addition & 2 deletions planemo/galaxy/workflows.py
@@ -164,7 +164,7 @@ def describe_outputs(runnable, gi=None):
     workflow = _raw_dict(runnable.path)

     outputs = []
-    for (order_index, step) in workflow["steps"].items():
+    for order_index, step in workflow["steps"].items():
         optional = False
         if not step.get("tool_id"):
             # One of the parameter types ... need eliminate this guesswork on the Galaxy side
@@ -370,7 +370,6 @@ def _elements_to_test_def(


 def _job_inputs_template_from_invocation(invocation_id, galaxy_url, galaxy_api_key):
-
     user_gi = gi(url=galaxy_url, key=galaxy_api_key)
     invocation = user_gi.invocations.show_invocation(invocation_id)
     template = {}
1 change: 0 additions & 1 deletion planemo/reports/allure.py
@@ -52,7 +52,6 @@ def process(self, structured_data, file_modication_datetime=None):
         plugin_manager.unregister(plugin=self.logger)

     def process_test_case(self, test_case, file_modication_datetime=None):
-
         with self.lifecycle.schedule_test_case() as test_result:
             test_index = test_case["id"]
             test_data = test_case.get("data") or {}
4 changes: 2 additions & 2 deletions planemo/shed/__init__.py
@@ -963,7 +963,7 @@ def __init__(self, repo_pairs, description=None):
     def __str__(self):
         contents = '<repositories description="%s">' % self.description
         line_template = ' <repository owner="%s" name="%s" />\n'
-        for (owner, name) in self.repo_pairs:
+        for owner, name in self.repo_pairs:
             contents += line_template % (owner, name)
         contents += "</repositories>"
         return contents
@@ -1021,7 +1021,7 @@ def _realize_to(self, ctx, directory, name, multiple, **kwds):
                 continue
             realized_file.realize_to(directory)

-        for (name, contents) in config.get("_files", {}).items():
+        for name, contents in config.get("_files", {}).items():
             path = os.path.join(directory, name)
             with open(path, "w") as f:
                 f.write(contents)
2 changes: 1 addition & 1 deletion planemo/shed_lint.py
@@ -121,7 +121,7 @@ def lint_repository(ctx, realized_repository, **kwds):

 def lint_repository_tools(ctx, realized_repository, lint_ctx, lint_args):
     path = realized_repository.path
-    for (tool_path, tool_source) in yield_tool_sources(ctx, path, recursive=True):
+    for tool_path, tool_source in yield_tool_sources(ctx, path, recursive=True):
         original_path = tool_path.replace(path, realized_repository.real_path)
         info("+Linting tool %s" % original_path)
         if handle_tool_load_error(tool_path, tool_source):
2 changes: 1 addition & 1 deletion planemo/tool_lint.py
@@ -36,7 +36,7 @@ def lint_tools_on_path(ctx, paths, lint_args, **kwds):
     assert_tools = kwds.get("assert_tools", True)
     recursive = kwds.get("recursive", False)
     exit_codes = []
-    for (tool_path, tool_xml) in yield_tool_sources_on_paths(ctx, paths, recursive):
+    for tool_path, tool_xml in yield_tool_sources_on_paths(ctx, paths, recursive):
         if handle_tool_load_error(tool_path, tool_xml):
             exit_codes.append(EXIT_CODE_GENERIC_FAILURE)
             continue
4 changes: 2 additions & 2 deletions planemo/tools.py
@@ -56,7 +56,7 @@ def yield_tool_sources_on_paths(
 ) -> Iterator[Tuple[str, Union[ToolSource, object]]]:
     """Walk paths and yield ToolSource objects discovered."""
     for path in paths:
-        for (tool_path, tool_source) in yield_tool_sources(ctx, path, recursive, yield_load_errors):
+        for tool_path, tool_source in yield_tool_sources(ctx, path, recursive, yield_load_errors):
             if exclude_deprecated and "deprecated" in tool_path:
                 continue
             yield (tool_path, tool_source)
@@ -71,7 +71,7 @@ def yield_tool_sources(
         recursive,
         register_load_errors=True,
     )
-    for (tool_path, tool_source) in tools:
+    for tool_path, tool_source in tools:
         if is_tool_load_error(tool_source):
             if yield_load_errors:
                 yield (tool_path, tool_source)
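
For context, a minimal consumer of the generator touched above; it only uses arguments that already appear in this diff, and the ctx object would come from planemo's CLI machinery (counting tools is purely illustrative):

    from planemo.tools import yield_tool_sources_on_paths

    def count_loadable_tools(ctx, paths):
        # Walk the given paths recursively and count tool sources that load cleanly.
        return sum(
            1
            for _tool_path, _tool_source in yield_tool_sources_on_paths(
                ctx, paths, recursive=True, yield_load_errors=False
            )
        )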
5 changes: 2 additions & 3 deletions requirements.txt
@@ -3,9 +3,8 @@ bioblend>=1.0.0
 click!=8.0.2
 cwltool>=1.0.20191225192155
 ephemeris>=0.10.3
-galaxy-containers<22.5
-galaxy-tool-util>=21.1.1,<22.5
-galaxy-util>=20.5.0,<22.5
+galaxy-tool-util>=22.5.dev2,<23.0
+galaxy-util[template]>=22.5.dev2,<23.0
 glob2
 gxformat2>=0.14.0
 h5py
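
The new pins follow the 22.05 release line (published as 22.5 on PyPI) and stop before 23.0; galaxy-containers is dropped entirely and galaxy-util gains the template extra. A quick, illustrative way to check locally installed versions against these ranges, using the packaging library that planemo already imports elsewhere in this diff:

    from importlib.metadata import version

    from packaging.specifiers import SpecifierSet

    # Pins copied from the updated requirements.txt above.
    pins = {
        "galaxy-tool-util": SpecifierSet(">=22.5.dev2,<23.0"),
        "galaxy-util": SpecifierSet(">=22.5.dev2,<23.0"),
    }
    for name, spec in pins.items():
        installed = version(name)
        status = "ok" if spec.contains(installed, prereleases=True) else "outside pinned range"
        print(f"{name} {installed}: {status}")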
@@ -373,7 +373,6 @@ def _stream_fasta_to_file(fasta_stream, target_directory, sequence_id, close_str
     fasta_base_filename = "%s.fa" % sequence_id
     fasta_filename = os.path.join(target_directory, fasta_base_filename)
     with open(fasta_filename, "wb+") as fasta_writer:
-
         if isinstance(fasta_stream, list) and len(fasta_stream) == 1:
             fasta_stream = fasta_stream[0]


0 comments on commit f55829c
