Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 16 additions & 0 deletions .github/actions/nf-test-action/action.yml
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,14 @@ inputs:
paths:
description: "Test paths"
required: true
jfrog_username:
description: "JFrog registry username"
required: false
default: ""
jfrog_password:
description: "JFrog registry password or token"
required: false
default: ""

runs:
using: "composite"
Expand Down Expand Up @@ -72,6 +80,14 @@ runs:
nextflow secrets set ONCOKB_TOKEN $ONCOKB_TOKEN


- name: Login to JFrog Container Registry
if: ${{ inputs.profile == 'docker' && inputs.jfrog_username != '' }}
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3
with:
registry: mskcc.jfrog.io
username: ${{ inputs.jfrog_username }}
password: ${{ inputs.jfrog_password }}

# TODO Skip failing conda tests and document their failures
# https://github.com/nf-core/modules/issues/7017
- name: Run nf-test
Expand Down
4 changes: 3 additions & 1 deletion .github/skip_nf_test.json
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,9 @@
"subworkflows/msk/phylowgs",
"subworkflows/msk/generate_mutated_peptides",
"subworkflows/msk/neoantigen_editing",
"subworkflows/msk/traceback"
"subworkflows/msk/traceback",
"modules/msk/hlahd",
"subworkflows/msk/hlahd_from_bam"
],
"docker": [],
"singularity": []
Expand Down
2 changes: 2 additions & 0 deletions .github/workflows/nf-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -135,6 +135,8 @@ jobs:
shard: ${{ matrix.shard }}
total_shards: ${{ env.TOTAL_SHARDS }}
paths: "${{ join(fromJson(steps.filter.outputs.filtered_paths), ' ') }}"
jfrog_username: ${{ secrets.JFROG_USERNAME }}
jfrog_password: ${{ secrets.JFROG_PASSWORD }}

confirm-pass:
runs-on: ubuntu-latest
Expand Down
7 changes: 7 additions & 0 deletions modules/msk/hlahd/environment.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
---
# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json
channels:
  - conda-forge
  - bioconda
dependencies:
  # FIXME(review): nf-core template placeholder left in — "YOUR-TOOL=HERE" is not
  # a valid conda spec, so this environment cannot be created and the conda
  # profile for this module will fail (conda tests are skipped for this module
  # in .github/skip_nf_test.json). HLA-HD is academically licensed and
  # presumably not distributed on bioconda — confirm, and either pin a real
  # package here or drop conda support for this module.
  - "YOUR-TOOL=HERE"
64 changes: 64 additions & 0 deletions modules/msk/hlahd/main.nf
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
// Run HLA-HD (HLA typing from High-quality Dictionary) on a pair of FASTQ
// files and emit the per-sample typing results.
//
// Inputs:   tuple( meta, fastq_1, fastq_2 ) — paired-end reads.
// Outputs:  result           — <prefix>/result/<prefix>_final.result.txt (best-call alleles)
//           result_per_locus — <prefix>/result/<prefix>_*.est.txt (one file per locus)
//           versions         — versions.yml
// Tunables: task.ext.args  — extra hlahd.sh flags
//           task.ext.args2 — minimum read length (-m), default 100
//           task.ext.prefix — output directory / file prefix, default meta.id
process HLAHD {
    tag "$meta.id"
    label 'process_high'

    conda "${moduleDir}/environment.yml"
    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
        'docker://mskcc.jfrog.io/omicswf-docker-dev-local/mskcc-omics-workflows/hlahd:1.7.1':
        'mskcc.jfrog.io/omicswf-docker-dev-local/mskcc-omics-workflows/hlahd:1.7.1' }"

    input:
    tuple val(meta), path(fastq_1), path(fastq_2)

    output:
    tuple val(meta), path("${prefix}/result/${prefix}_final.result.txt"), emit: result
    tuple val(meta), path("${prefix}/result/${prefix}_*.est.txt"), emit: result_per_locus
    path "versions.yml", emit: versions

    when:
    task.ext.when == null || task.ext.when

    script:
    def args = task.ext.args ?: ''
    // Minimum read length forwarded to hlahd.sh -m.
    def min_read = task.ext.args2 ?: '100'
    // prefix is deliberately unscoped (no 'def'): the output: block above
    // references it when resolving the emitted paths.
    prefix = task.ext.prefix ?: "${meta.id}"
    // HLA-HD installation layout baked into the container image.
    def install_dir = '/opt/hlahd/current'
    """
    # HLA-HD opens many dictionary files at once; try to raise the soft
    # open-file limit. '|| true' keeps a failed raise (e.g. hard limit below
    # 1024) from aborting the whole task under the shell's errexit.
    if [[ \$(ulimit -n) -lt 1024 ]]; then ulimit -n 1024 || true; fi

    # hlahd.sh invokes 'python'; expose python3 under that name on PATH.
    # NOTE(review): assumes /usr/bin/python3 exists in the image — confirm.
    ln -sf /usr/bin/python3 ./python
    export PATH=\$PWD:\$PATH

    mkdir -p ${prefix}

    bash ${install_dir}/bin/hlahd.sh \\
        -t ${task.cpus} \\
        -m ${min_read} \\
        -f ${install_dir}/freq_data \\
        ${args} \\
        ${fastq_1} \\
        ${fastq_2} \\
        ${install_dir}/HLA_gene.split.txt \\
        ${install_dir}/dictionary \\
        ${prefix} \\
        .

    cat <<-END_VERSIONS > versions.yml
    "${task.process}":
        hlahd: \$(bash ${install_dir}/bin/hlahd.sh 2>&1 | grep -oP 'HLA-HD version \\K[0-9.]+' | head -1)
    END_VERSIONS
    """

    stub:
    prefix = task.ext.prefix ?: "${meta.id}"
    // Keep the install path in one place, mirroring the script block.
    def install_dir = '/opt/hlahd/current'
    """
    mkdir -p ${prefix}/result
    touch ${prefix}/result/${prefix}_final.result.txt
    touch ${prefix}/result/${prefix}_A.est.txt

    cat <<-END_VERSIONS > versions.yml
    "${task.process}":
        hlahd: \$(bash ${install_dir}/bin/hlahd.sh 2>&1 | grep -oP 'HLA-HD version \\K[0-9.]+' | head -1)
    END_VERSIONS
    """
}
67 changes: 67 additions & 0 deletions modules/msk/hlahd/meta.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/meta-schema.json
name: "hlahd"
description: HLA typing from paired-end FASTQ reads using HLA-HD
keywords:
  - HLA
  - immunology
  - typing
  - genomics
tools:
  - "hlahd":
      description:
        "HLA-HD (HLA typing from High-quality Dictionary) performs HLA typing
        from paired-end FASTQ reads using bowtie2 alignment against HLA allele dictionaries."
      homepage: "https://w3.genome.med.kyoto-u.ac.jp/HLA-HD/"
      documentation: "https://w3.genome.med.kyoto-u.ac.jp/HLA-HD/"
      licence:
        - "ACADEMIC SOFTWARE LICENSE"
      identifier: ""
input:
  - - meta:
        type: map
        description: |
          Groovy Map containing sample information
          e.g. `[ id:'sample1', single_end:false ]`
    - fastq_1:
        type: file
        description: First read of paired-end FASTQ input
        pattern: "*.{fastq,fastq.gz,fq,fq.gz}"
        ontologies:
          - edam: http://edamontology.org/format_1930
    - fastq_2:
        type: file
        description: Second read of paired-end FASTQ input
        pattern: "*.{fastq,fastq.gz,fq,fq.gz}"
        ontologies:
          - edam: http://edamontology.org/format_1930
output:
  result:
    - - meta:
          type: map
          description: |
            Groovy Map containing sample information
            e.g. `[ id:'sample1', single_end:false ]`
      - ${prefix}/result/${prefix}_final.result.txt:
          type: file
          description: Final HLA typing result file containing best-call alleles for all loci
          pattern: "**/result/*_final.result.txt"
  result_per_locus:
    - - meta:
          type: map
          description: |
            Groovy Map containing sample information
            e.g. `[ id:'sample1', single_end:false ]`
      - ${prefix}/result/${prefix}_*.est.txt:
          type: file
          description: Per-locus HLA estimation files (one file per HLA gene)
          pattern: "**/result/*_*.est.txt"
  versions:
    - versions.yml:
        type: file
        description: File containing software versions
        pattern: "versions.yml"
        ontologies:
          - edam: http://edamontology.org/format_3750
authors:
  - "@johnoooh"
maintainers:
  - "@johnoooh"
64 changes: 64 additions & 0 deletions modules/msk/hlahd/tests/main.nf.test
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
// nf-test suite for the HLAHD process (modules/msk/hlahd/main.nf).
nextflow_process {

    name "Test Process HLAHD"
    script "../main.nf"
    process "HLAHD"

    tag "modules"
    tag "modules_msk"
    tag "hlahd"

    // Full run: paired FASTQs from the params.test_data_mskcc bundle.
    // Snapshots the final result file and versions.yml; the per-locus .est.txt
    // outputs are deliberately left out of the snapshot.
    test("hlahd - fastq pair - result txt") {

        when {
            process {
                """
                input[0] = [
                    [ id:'test_sample', single_end:false ], // meta map
                    file(params.test_data_mskcc['hlahd']['fastq_1'], checkIfExists: true),
                    file(params.test_data_mskcc['hlahd']['fastq_2'], checkIfExists: true)
                ]
                """
            }
        }

        then {
            assertAll(
                { assert process.success },
                { assert snapshot(
                    process.out.result,
                    process.out.versions
                    ).match()
                }
            )
        }

    }

    // Stub run: verifies output channel wiring without executing HLA-HD.
    // Only existence of the result file is checked (stub files are empty),
    // while versions.yml is still snapshotted.
    test("hlahd - fastq pair - stub") {

        options "-stub"

        when {
            process {
                """
                input[0] = [
                    [ id:'test_sample', single_end:false ], // meta map
                    file(params.test_data_mskcc['hlahd']['fastq_1'], checkIfExists: true),
                    file(params.test_data_mskcc['hlahd']['fastq_2'], checkIfExists: true)
                ]
                """
            }
        }

        then {
            assertAll(
                { assert process.success },
                { assert path(process.out.result.get(0).get(1)).exists() },
                { assert snapshot(process.out.versions).match() }
            )
        }

    }

}
35 changes: 35 additions & 0 deletions modules/msk/hlahd/tests/main.nf.test.snap
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
{
"hlahd - fastq pair - stub": {
"content": [
[
"versions.yml:md5,f196d451477cda61837f7cfb2ed3c9b4"
]
],
"timestamp": "2026-03-05T16:10:44.004384",
"meta": {
"nf-test": "0.9.2",
"nextflow": "25.10.4"
}
},
"hlahd - fastq pair - result txt": {
"content": [
[
[
{
"id": "test_sample",
"single_end": false
},
"test_sample_final.result.txt:md5,6f83fc8ac5bd3b9f56853b583595e2a0"
]
],
[
"versions.yml:md5,f196d451477cda61837f7cfb2ed3c9b4"
]
],
"timestamp": "2026-03-09T11:03:42.014639",
"meta": {
"nf-test": "0.9.4",
"nextflow": "25.10.4"
}
}
}
2 changes: 2 additions & 0 deletions modules/msk/hlahd/tests/tags.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
hlahd:
- modules/msk/hlahd/**
77 changes: 77 additions & 0 deletions subworkflows/msk/hlahd_from_bam/main.nf
Original file line number Diff line number Diff line change
@@ -0,0 +1,77 @@
include { SAMTOOLS_VIEW } from '../../../modules/nf-core/samtools/view/main'
include { GATK4_REVERTSAM } from '../../../modules/nf-core/gatk4/revertsam/main'
include { SAMTOOLS_FASTQ } from '../../../modules/nf-core/samtools/fastq/main'
include { HLAHD } from '../../../modules/msk/hlahd/main'

// Extract the HLA region from an indexed BAM, optionally revert it with GATK4
// RevertSam, convert to paired FASTQ, and run HLA-HD typing.
workflow HLAHD_FROM_BAM {

    take:
    ch_bam // channel: [ val(meta), path(bam), path(bai) ]
    skip_revert_sam // val: Boolean

    main:

    ch_versions = Channel.empty()

    //
    // MODULE: Extract HLA region from BAM using samtools view.
    // The caller configures the region to extract via ext.args in modules.config,
    // e.g. ext.args = '-b chr6:28000000-34000000'
    //
    // The empty placeholders stand for the module's optional inputs
    // (reference fasta tuple and the remaining optional args).
    SAMTOOLS_VIEW(
        ch_bam,
        [[],[]],
        [],
        []
    )
    ch_versions = ch_versions.mix(SAMTOOLS_VIEW.out.versions.first())

    //
    // Optional: Revert base quality score recalibration with GATK4 RevertSam.
    // Set skip_revert_sam = true when the BAM has no BQSR applied (e.g. already
    // in OQ-restored state, or produced by a tool that does not perform BQSR).
    //
    if (!skip_revert_sam) {

        GATK4_REVERTSAM(
            SAMTOOLS_VIEW.out.bam
        )
        ch_versions = ch_versions.mix(GATK4_REVERTSAM.out.versions.first())
        ch_for_fastq = GATK4_REVERTSAM.out.bam

    } else {

        ch_for_fastq = SAMTOOLS_VIEW.out.bam

    }

    //
    // MODULE: Convert BAM to paired FASTQ files.
    // SAMTOOLS_FASTQ emits .out.fastq as [ meta, [fq1, fq2] ]; unpack into
    // separate paths so HLAHD receives the three-element tuple it expects.
    // The second argument (false) disables interleaved output.
    //
    SAMTOOLS_FASTQ(
        ch_for_fastq,
        false
    )
    ch_versions = ch_versions.mix(SAMTOOLS_FASTQ.out.versions.first())

    // NOTE(review): this destructure assumes fastqs is always a two-element
    // list (paired-end data, i.e. meta.single_end == false). A single-end
    // sample would leave fq2 null and break HLAHD downstream — confirm
    // callers only feed paired-end BAMs.
    ch_fastq_for_hlahd = SAMTOOLS_FASTQ.out.fastq
        .map { meta, fastqs ->
            def (fq1, fq2) = fastqs
            [meta, fq1, fq2]
        }

    //
    // MODULE: Run HLA-HD to call HLA alleles from paired FASTQ files.
    //
    HLAHD(
        ch_fastq_for_hlahd
    )
    ch_versions = ch_versions.mix(HLAHD.out.versions.first())

    emit:
    result = HLAHD.out.result // channel: [ val(meta), path(result/*_final.result.txt) ]
    result_per_locus = HLAHD.out.result_per_locus // channel: [ val(meta), path(result/*_*.est.txt) ]
    versions = ch_versions // channel: [ path(versions.yml) ]
}
Loading
Loading