Merge branch 'master' into topic/reusable-workflows
bernt-matthias authored Nov 17, 2022
2 parents 4de59d7 + e054664 commit 9588a7d
Showing 344 changed files with 187,205 additions and 26,426 deletions.
3 changes: 2 additions & 1 deletion .github/workflows/ci.yaml
@@ -17,6 +17,7 @@ jobs:
max-chunks: 40
secrets:
PAT: ${{ secrets.PAT }}

test:
name: Test tools
needs: setup
@@ -31,4 +31,4 @@ jobs:
galaxy-head-sha: ${{ needs.setup.outputs.galaxy-head-sha }}
# test-timeout: 900
secrets:
PAT: ${{ secrets.PAT }}
PAT: ${{ secrets.PAT }}
3 changes: 3 additions & 0 deletions .github/workflows/pr.yaml
@@ -43,6 +43,8 @@ jobs:
# max-file-size: 1M
# report-level: all
# fail-level: warn


# Planemo test the changed repositories; each chunk creates an artifact
# containing HTML and JSON reports for the executed tests
test:
@@ -73,6 +75,7 @@
TTS_API_KEY: ${{ secrets.TTS_API_KEY }}
TS_API_KEY: ${{ secrets.TS_API_KEY }}


determine-success:
name: Check workflow success
needs: [setup, lint, test]
2 changes: 1 addition & 1 deletion .github/workflows/slash.yaml
@@ -7,7 +7,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Slash Command Dispatch
# TODO if: github.repository_owner == 'galaxyproject'
if: github.repository_owner == 'galaxyproject'
uses: peter-evans/slash-command-dispatch@v3
with:
token: ${{ secrets.PAT }}
16 changes: 15 additions & 1 deletion data_managers/data_manager_mitos/data_manager/data_manager.py
@@ -54,7 +54,21 @@ def url_download(tpe, db, workdir):
src.close()
with tarfile.open(tarfname, "r:bz2") as tar:
dirname = tar.getnames()[0]
tar.extractall(workdir)

def is_within_directory(directory, target):
abs_directory = os.path.abspath(directory)
abs_target = os.path.abspath(target)
prefix = os.path.commonprefix([abs_directory, abs_target])
return prefix == abs_directory

def safe_extract(tar, path=".", members=None, *, numeric_owner=False):
for member in tar.getmembers():
member_path = os.path.join(path, member.name)
if not is_within_directory(path, member_path):
raise Exception("Attempted Path Traversal in Tar File")
tar.extractall(path, members, numeric_owner=numeric_owner)

safe_extract(tar, workdir)
os.remove(tarfname)
return dirname
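
The guard added above (and repeated in the extract_archive hunk below) is the usual mitigation for tarfile path traversal: before extraction, every member's resolved path must stay inside the destination directory. A minimal standalone sketch of that check, with hypothetical paths and not part of this commit:

import os

def is_within_directory(directory, target):
    # Resolve both paths; extraction is only allowed if the destination
    # directory is a prefix of the member's absolute path.
    abs_directory = os.path.abspath(directory)
    abs_target = os.path.abspath(target)
    return os.path.commonprefix([abs_directory, abs_target]) == abs_directory

print(is_within_directory("/tmp/work", os.path.join("/tmp/work", "data/ok.txt")))  # True
print(is_within_directory("/tmp/work", os.path.join("/tmp/work", "../evil.sh")))   # False: escapes the work dir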

@@ -183,7 +183,20 @@ def extract_archive(filepath, ext, db):
archive_content_path = "tmp"
if ext == "tar.gz" or ext == "tgz":
with tarfile.open(filepath) as tar:
tar.extractall(path=archive_content_path)
def is_within_directory(directory, target):
abs_directory = os.path.abspath(directory)
abs_target = os.path.abspath(target)
prefix = os.path.commonprefix([abs_directory, abs_target])
return prefix == abs_directory

def safe_extract(tar, path=".", members=None, *, numeric_owner=False):
for member in tar.getmembers():
member_path = os.path.join(path, member.name)
if not is_within_directory(path, member_path):
raise Exception("Attempted Path Traversal in Tar File")
tar.extractall(path, members, numeric_owner=numeric_owner)

safe_extract(tar, path=archive_content_path)
archive_content_path = find_archive_content_path(archive_content_path)
elif ext == "zip":
with zipfile.ZipFile(filepath, 'r') as zip_ref:
10 changes: 10 additions & 0 deletions data_managers/data_manager_semibin/.shed.yml
@@ -0,0 +1,10 @@
name: data_manager_semibin
owner: iuc
description: "SemiBin: Semi-supervised Metagenomic Binning Using Siamese Neural Networks"
homepage_url: https://semibin.readthedocs.io/en/latest/
long_description: |
Command-line tool for metagenomic binning with semi-supervised deep learning using information from reference genomes
remote_repository_url: "https://github.com/galaxyproject/tools-iuc/tree/master/data_managers/data_manager_semibin"
type: unrestricted
categories:
- Data Managers
@@ -0,0 +1,153 @@
#!/usr/bin/env python
#
# Data manager for reference data for the SemiBin Galaxy tools
import argparse
import json
import subprocess
from datetime import date
from pathlib import Path


# Utility functions for interacting with Galaxy JSON
def read_input_json(json_fp):
"""Read the JSON supplied from the data manager tool
Returns a tuple (param_dict, extra_files_path).
'param_dict' is an arbitrary dictionary of parameters
input into the tool; 'extra_files_path' is the path
to a directory where output files must be put for the
receiving data manager to pick them up.
NB the directory pointed to by 'extra_files_path'
does not exist initially; it is the job of the script
to create it if necessary.
"""
with open(json_fp) as fh:
params = json.load(fh)
return (params['param_dict'],
Path(params['output_data'][0]['extra_files_path']))


# Utility functions for creating data table dictionaries
#
# Example usage:
# >>> d = create_data_tables_dict()
# >>> add_data_table(d, 'my_data')
# >>> add_data_table_entry(d, 'my_data', dict(dbkey='hg19', value='human'))
# >>> add_data_table_entry(d, 'my_data', dict(dbkey='mm9', value='mouse'))
# >>> print(json.dumps(d))
def create_data_tables_dict():
"""Return a dictionary for storing data table information
Returns a dictionary that can be used with 'add_data_table'
and 'add_data_table_entry' to store information about a
data table. It can be converted to JSON to be sent back to
the data manager.
"""
d = {
'data_tables': {}
}
return d


def add_data_table(d, table):
"""Add a data table to the data tables dictionary
Creates a placeholder for a data table called 'table'.
"""
d['data_tables'][table] = []


def add_data_table_entry(d, table, entry):
"""Add an entry to a data table
Appends an entry to the data table 'table'. 'entry'
should be a dictionary where the keys are the names of
columns in the data table.
Raises an exception if the named data table doesn't
exist.
"""
try:
d['data_tables'][table].append(entry)
except KeyError:
raise Exception("add_data_table_entry: no table '%s'" % table)


def download_gtdb(data_tables, table_name, target_dp, test=False):
"""Download GTDB
Creates references to the specified file(s) on the Galaxy
server in the appropriate data table (determined from the
file extension).
The 'data_tables' dictionary should have been created using
the 'create_data_tables_dict' and 'add_data_table' functions.
Arguments:
data_tables: a dictionary containing the data table info
table_name: name of the table
target_dp: directory to put copy or link to the data file
"""
db_dp = target_dp
if not test:
cmd = "SemiBin download_GTDB --reference-db-data-dir %s" % (db_dp)
subprocess.check_call(cmd, shell=True)
dbkey = 'gtdb'
name = "GTDB reference genome generated by MMseqs2 used in SemiBin"
else:
dbkey = 'test'
name = "Test"
empty_fp = db_dp / Path("empty")
empty_fp.touch()
add_data_table_entry(
data_tables,
table_name,
dict(
dbkey=dbkey,
value='%s' % (date.today().strftime("%d%m%Y")),
name=name,
path=str(db_dp)))


if __name__ == "__main__":
print("Starting...")

# Read command line
parser = argparse.ArgumentParser(description='Download reference genomes (GTDB)')
parser.add_argument('--json', help="Path to JSON file")
parser.add_argument('--test', action='store_true', help="Test")
args = parser.parse_args()
print("args : %s" % args)

# Read the input JSON
json_fp = Path(args.json)
params, target_dp = read_input_json(json_fp)

# Make the target directory
print("Making %s" % target_dp)
target_dp.mkdir(parents=True, exist_ok=True)

# Set up data tables dictionary
data_tables = create_data_tables_dict()
add_data_table(data_tables, "gtdb")

# Fetch data from specified data sources
print("Download and build database")
download_gtdb(
data_tables,
"gtdb",
target_dp,
args.test)

# Write output JSON
print("Outputting JSON")
with open(json_fp, 'w') as fh:
json.dump(data_tables, fh, sort_keys=True)
print("Done.")
@@ -0,0 +1,39 @@
<tool id="data_manager_semibin" name="Download reference genomes (GTDB) for SemiBin" tool_type="manage_data" version="@TOOL_VERSION@+galaxy@VERSION_SUFFIX@" profile="@PROFILE@">
<description></description>
<requirements>
<requirement type="package" version="@TOOL_VERSION@">semibin</requirement>
</requirements>
<macros>
<token name="@TOOL_VERSION@">1.1.1</token>
<token name="@VERSION_SUFFIX@">0</token>
<token name="@PROFILE@">21.01</token>
</macros>
<command detect_errors="exit_code"><![CDATA[
python '$__tool_directory__/data_manager_semibin.py'
--json '$out_file'
$type
]]></command>
<inputs>
<param name="type" type="hidden" value="" />
</inputs>
<outputs>
<data name="out_file" format="data_manager_json"/>
</outputs>
<tests>
<test expect_num_outputs="1">
<param name="type" value="--test" />
<output name="out_file">
<assert_contents>
<has_text text="test"/>
<has_text text="Test"/>
</assert_contents>
</output>
</test>
</tests>
<help><![CDATA[
This tool downloads reference genomes (GTDB) for SemiBin
]]></help>
<citations>
<citation type="doi">10.1038/s41467-022-29843-y</citation>
</citations>
</tool>
21 changes: 21 additions & 0 deletions data_managers/data_manager_semibin/data_manager_conf.xml
@@ -0,0 +1,21 @@
<?xml version="1.0"?>
<data_managers>
<data_manager tool_file="data_manager/data_manager_semibin.xml" id="data_manager_semibin" >
<data_table name="gtdb"> <!-- Defines a Data Table to be modified. -->
<output> <!-- Handle the output of the Data Manager Tool -->
<column name="value" /> <!-- columns that are going to be specified by the Data Manager Tool -->
<column name="name" /> <!-- columns that are going to be specified by the Data Manager Tool -->
<column name="dbkey" /> <!-- columns that are going to be specified by the Data Manager Tool -->
<column name="path" output_ref="out_file" >
<move type="directory">
<source>${path}</source>
<target base="${GALAXY_DATA_MANAGER_DATA_PATH}">semibin/data/${dbkey}</target>
</move>
<value_translation>${GALAXY_DATA_MANAGER_DATA_PATH}/semibin/data/${dbkey}</value_translation>
<value_translation type="function">abspath</value_translation>
</column>
</output>
</data_table>
</data_manager>
</data_managers>

3 changes: 3 additions & 0 deletions data_managers/data_manager_semibin/test-data/gtdb.loc
@@ -0,0 +1,3 @@
#file has this format (white space characters are TAB characters)
#dbkey description name /path/to/data
test-date description test /path/to/data
2 changes: 2 additions & 0 deletions data_managers/data_manager_semibin/tool-data/gtdb.loc.sample
@@ -0,0 +1,2 @@
#file has this format (white space characters are TAB characters)
#dbkey description name /path/to/data
@@ -0,0 +1,6 @@
<tables>
<table name="gtdb" comment_char="#">
<columns>value, name, dbkey, path</columns>
<file path="tool-data/gtdb.loc" />
</table>
</tables>
@@ -0,0 +1,6 @@
<tables>
<table name="gtdb" comment_char="#">
<columns>value, name, dbkey, path</columns>
<file path="${__HERE__}/test-data/gtdb.loc" />
</table>
</tables>
6 changes: 5 additions & 1 deletion tools/bandage/bandage_image.xml
@@ -1,4 +1,4 @@
<tool id="bandage_image" name="Bandage Image" version="@TOOL_VERSION@+galaxy3" profile="18.01">
<tool id="bandage_image" name="Bandage Image" version="@TOOL_VERSION@+galaxy4" profile="21.05">
<description>visualize de novo assembly graphs</description>
<macros>
<import>macros.xml</import>
@@ -21,6 +21,9 @@ Bandage
#if $fontsize:
--fontsize '$fontsize'
#end if
#if $nodewidth:
--nodewidth '$nodewidth'
#end if
$names
$lengths
]]></command>
@@ -31,6 +34,7 @@ Bandage
<param argument="--names" type="boolean" truevalue="--names" falsevalue="" label="Node name labels?"/>
<param argument="--lengths" type="boolean" truevalue="--lengths" falsevalue="" label="Node length labels?"/>
<param argument="--fontsize" type="integer" min="5" optional="true" label="Font size" help="Node font size?"/>
<param argument="--nodewidth" type="float" min="5" optional="true" label="Node width" help="Node width for graph image?" />
<param name="output_format" type="select" label="Produce jpg, png or svg file?">
<option selected="True" value="jpg">.jpg</option>
<option value="png">.png</option>
6 changes: 3 additions & 3 deletions tools/blast/macros.xml
@@ -1,11 +1,11 @@
<macros>
<token name="@TOOL_VERSION@">1.6.0</token>
<token name="@VERSION_SUFFIX@">1</token>
<token name="@TOOL_VERSION@">1.7.0</token>
<token name="@VERSION_SUFFIX@">0</token>
<token name="@PROFILE@">20.09</token>
<xml name="requirements">
<requirements>
<requirement type="package" version="@TOOL_VERSION@">magicblast</requirement>
<requirement type="package" version="1.15">samtools</requirement>
<requirement type="package" version="1.16.1">samtools</requirement>
</requirements>
</xml>
<xml name="output_sort_param">
4 changes: 2 additions & 2 deletions tools/blast/magicblast.xml
@@ -317,7 +317,7 @@ $output_options.no_discordant
<param name="subject" value="subject1.fasta.gz" ftype="fasta.gz"/>
<output name="output" ftype="bam">
<assert_contents>
<has_size value="62080" delta="50"/>
<has_size value="61454" delta="50"/>
</assert_contents>
</output>
</test>
@@ -329,7 +329,7 @@ $output_options.no_discordant
<param name="database" value="phiX174"/>
<output name="output" ftype="bam">
<assert_contents>
<has_size value="62079" delta="50"/>
<has_size value="61457" delta="50"/>
</assert_contents>
</output>
</test>