Changes from all commits
27 commits
e6cf76a
Add helper to create additional container indexes
bbimber Jan 15, 2026
b7cebee
Allow docker to auto-retry after common failures
bbimber Jan 15, 2026
96d6882
Update implementation of study assignment pivot table
bbimber Jan 16, 2026
be332df
Use query parameters in alignment_summary queries (#371)
bbimber Jan 17, 2026
8b1047d
Bump react-router (#369)
dependabot[bot] Jan 17, 2026
8dad895
Set baseUrl in PredictTcellActivation.R
bbimber Jan 18, 2026
509f6bb
Support --skip-tso-trimming nimble flag
bbimber Jan 20, 2026
c060d95
Remove sequenceanalysis indexes (#373)
bbimber Jan 20, 2026
0ec3f23
Update filename case
bbimber Jan 20, 2026
a847538
Improve SnpEff check for existing index
bbimber Jan 27, 2026
035009d
Support new gene component
bbimber Jan 27, 2026
c74f621
Add check for null objects during Pseudobulking
bbimber Jan 28, 2026
a41120f
Enhance regex filtering in TrimmingTextArea
bbimber Jan 28, 2026
b70e2b8
Support expectedDefaultAssay in merge
bbimber Jan 28, 2026
7ab62f3
Bugfix to expectedDefaultAssay in merge
bbimber Jan 29, 2026
93bb5df
Bugfix when ADT filter specified but no ADT data exist
bbimber Jan 31, 2026
1d4d7a2
Bugfix to JBrowse processing with gff/gtf files that are pre-gzipped
bbimber Jan 31, 2026
19e211e
Fix NPE
bbimber Feb 1, 2026
44335b5
Bump tar in /jbrowse in the npm_and_yarn group across 1 directory (#374)
dependabot[bot] Feb 2, 2026
b199f32
Bump the npm_and_yarn group across 1 directory with 2 updates (#376)
dependabot[bot] Feb 2, 2026
0d1b5f1
Store cdr3WithProductive with response data
bbimber Feb 2, 2026
c152ed6
Fix NPE (#375)
bbimber Feb 3, 2026
bad6efb
Bump jspdf in /jbrowse in the npm_and_yarn group across 1 directory (…
dependabot[bot] Feb 3, 2026
3601972
Bugfix to GLNexus
bbimber Feb 5, 2026
43f4603
Fix to fileSource required for indexing bug
bbimber Feb 5, 2026
f98c8ea
Migrate NT sequence files from flat dir to hashed structure (#378)
bbimber Feb 7, 2026
b259c8d
Merge discvr-25.11 to develop
bbimber Feb 7, 2026
@@ -18,7 +18,6 @@
import htsjdk.samtools.util.StringUtil;
import org.apache.commons.io.IOUtils;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import org.jetbrains.annotations.Nullable;
import org.labkey.api.data.Container;
import org.labkey.api.data.ContainerManager;
@@ -32,8 +31,11 @@
import org.labkey.api.exp.api.ExpData;
import org.labkey.api.exp.api.ExperimentService;
import org.labkey.api.files.FileContentService;
import org.labkey.api.security.Crypt;
import org.labkey.api.security.User;
import org.labkey.api.util.FileUtil;
import org.labkey.api.util.MemTracker;
import org.labkey.api.util.logging.LogHelper;
import org.labkey.api.writer.PrintWriters;

import java.io.File;
@@ -55,7 +57,9 @@
*/
public class RefNtSequenceModel implements Serializable
{
private static final Logger _log = LogManager.getLogger(RefNtSequenceModel.class);
private static final Logger _log = LogHelper.getLogger(RefNtSequenceModel.class, "Messages related to Reference NT Sequences");

public static String BASE_DIRNAME = ".sequences";

private int _rowid;
private String _name;
@@ -414,7 +418,7 @@ public byte[] getSequenceBases()

public void createFileForSequence(User u, String sequence, @Nullable File outDir) throws IOException
{
File output = getExpectedSequenceFile(outDir);
File output = getExpectedSequenceFile();
if (output.exists())
{
output.delete();
@@ -439,9 +443,9 @@ public void createFileForSequence(User u, String sequence, @Nullable File outDir
Table.update(u, ti, this, _rowid);
}

private File getExpectedSequenceFile(@Nullable File outDir) throws IllegalArgumentException
public File getExpectedSequenceFile() throws IllegalArgumentException
{
return new File(getSequenceDir(true, outDir), _rowid + ".txt.gz");
return FileUtil.appendName(getHashedDir(true), _rowid + ".txt.gz");
}

private Container getLabKeyContainer()
@@ -455,33 +459,17 @@ private Container getLabKeyContainer()
return c;
}

private File getSequenceDir(boolean create, @Nullable File outDir) throws IllegalArgumentException
private File getBaseSequenceDir() throws IllegalArgumentException
{
Container c = getLabKeyContainer();
File ret = outDir == null ? getReferenceSequenceDir(c) : outDir;
if (create && !ret.exists())
{
ret.mkdirs();
}

return ret;
}

private File getReferenceSequenceDir(Container c) throws IllegalArgumentException
{
FileContentService fileService = FileContentService.get();
File root = fileService == null ? null : fileService.getFileRoot(c, FileContentService.ContentType.files);
if (root == null)
{
throw new IllegalArgumentException("File root not defined for container: " + c.getPath());
}

return new File(root, ".sequences");
}

public void writeSequence(Writer writer, int lineLength) throws IOException
{
writeSequence(writer, lineLength, null, null);
return FileUtil.appendName(root, BASE_DIRNAME);
}

public void writeSequence(Writer writer, int lineLength, Integer start, Integer end) throws IOException
@@ -562,6 +550,26 @@ public File getOffsetsFile()
return null;
}

return new File(d.getFile().getParentFile(), getRowid() + "_offsets.txt");
return FileUtil.appendName(d.getFile().getParentFile(), getRowid() + "_offsets.txt");
}

private File getHashedDir(boolean create)
{
File baseDir = getBaseSequenceDir();
String digest = Crypt.MD5.digest(String.valueOf(getRowid()));

baseDir = FileUtil.appendName(baseDir, digest.substring(0,4));
baseDir = FileUtil.appendName(baseDir, digest.substring(4,8));
baseDir = FileUtil.appendName(baseDir, digest.substring(8,12));
baseDir = FileUtil.appendName(baseDir, digest.substring(12,20));
baseDir = FileUtil.appendName(baseDir, digest.substring(20,28));
baseDir = FileUtil.appendName(baseDir, digest.substring(28,32));

if (create)
{
baseDir.mkdirs();
}

return baseDir;
}
}
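
Note on the hashed layout introduced above: the MD5 digest of the sequence rowid is split into six fixed-width segments (4, 4, 4, 8, 8 and 4 hex characters), each becoming one directory level under the .sequences root, so no single directory accumulates an unbounded number of sequence files. The standalone sketch below is illustration only and is not part of this PR: it assumes Crypt.MD5.digest returns the 32-character hex digest, and substitutes java.security.MessageDigest and plain java.io.File for the LabKey helpers; the base path and rowid are hypothetical.

import java.io.File;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class HashedSequenceDirSketch
{
    // Mirrors getHashedDir(): split the 32-character MD5 hex digest of the rowid
    // into segments of length 4, 4, 4, 8, 8, 4 and use each as one directory level.
    static File hashedDirFor(File baseSequenceDir, int rowid) throws NoSuchAlgorithmException
    {
        byte[] raw = MessageDigest.getInstance("MD5")
                .digest(String.valueOf(rowid).getBytes(StandardCharsets.UTF_8));

        StringBuilder hex = new StringBuilder(32);
        for (byte b : raw)
        {
            hex.append(String.format("%02x", b & 0xff));
        }
        String digest = hex.toString();

        File dir = baseSequenceDir;
        int[] cuts = {0, 4, 8, 12, 20, 28, 32};
        for (int i = 0; i + 1 < cuts.length; i++)
        {
            dir = new File(dir, digest.substring(cuts[i], cuts[i + 1]));
        }

        return dir;
    }

    public static void main(String[] args) throws NoSuchAlgorithmException
    {
        // Hypothetical file root and rowid; the real base path comes from FileContentService.
        File base = new File("/labkey/files/.sequences");
        System.out.println(new File(hashedDirFor(base, 1234), 1234 + ".txt.gz"));
    }
}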
@@ -1,5 +1,6 @@
package org.labkey.api.sequenceanalysis.run;

import org.apache.commons.collections4.list.UnmodifiableList;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
@@ -34,6 +35,7 @@ public class DockerWrapper extends AbstractCommandWrapper
private boolean _useLocalContainerStorage;
private String _alternateUserHome = null;
private final Map<String, String> _dockerEnvironment = new HashMap<>();
private int _maxRetries = 3;

public DockerWrapper(String containerName, Logger log, PipelineContext ctx)
{
@@ -199,7 +201,7 @@ public void executeWithDocker(List<String> containerArgs, File workDir, Pipeline

localBashScript.setExecutable(true);
dockerBashScript.setExecutable(true);
execute(Arrays.asList("/bin/bash", localBashScript.getPath()));
executeWithRetry(Arrays.asList("/bin/bash", localBashScript.getPath()));

if (_useLocalContainerStorage)
{
@@ -214,6 +216,59 @@ public void executeWithDocker(List<String> containerArgs, File workDir, Pipeline
}
}

public int getMaxRetries()
{
return _maxRetries;
}

public void setMaxRetries(int maxRetries)
{
_maxRetries = maxRetries;
}

// NOTE: when running on a shared/cluster environment with multiple containers initializing concurrently, conflicts can result in these error codes.
// As a convenience, build in auto-retry behavior if one of these occurs
private final List<Integer> ALLOWABLE_FAIL_CODES = new UnmodifiableList<>(Arrays.asList(125, 127));

private void executeWithRetry(final List<String> args) throws PipelineJobException
{
int retries = 0;
while (retries <= getMaxRetries())
{
try
{
execute(args);
break;
}
catch (PipelineJobException e)
{
if (ALLOWABLE_FAIL_CODES.contains(getLastReturnCode()))
{
retries++;
if (retries > getMaxRetries())
{
getLogger().info("Maximum retries exceeded");
throw e;
}

getLogger().info("Exit code " + getLastReturnCode() + ", retrying after 1 sec (" + retries + " of " + getMaxRetries()+ ")");
try
{
Thread.sleep(1000);
}
catch (InterruptedException ex)
{
throw new PipelineJobException(ex);
}
}
else
{
throw e;
}
}
}
}

private String getEffectiveContainerName()
{
return _containerName;
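
The retry behavior added above is specific to DockerWrapper, but the underlying pattern is generic: run the command, and if it exits with one of a small set of codes treated as transient (here 125 and 127, which the comment attributes to concurrent container initialization on a shared node), pause briefly and try again up to a fixed budget, otherwise rethrow. The sketch below is illustration only, not the actual pipeline code — it shows the same idea with a plain ProcessBuilder, and the command, class name, and one-second pause are placeholders mirroring executeWithRetry().

import java.io.IOException;
import java.util.List;

public class RetryOnTransientExitCode
{
    // Exit codes treated as retryable, mirroring ALLOWABLE_FAIL_CODES above.
    private static final List<Integer> TRANSIENT_CODES = List.of(125, 127);

    static void runWithRetry(List<String> command, int maxRetries) throws IOException, InterruptedException
    {
        int attempt = 0;
        while (true)
        {
            Process process = new ProcessBuilder(command).inheritIO().start();
            int code = process.waitFor();
            if (code == 0)
            {
                return;
            }

            if (!TRANSIENT_CODES.contains(code) || ++attempt > maxRetries)
            {
                throw new IOException("Command failed with exit code: " + code);
            }

            Thread.sleep(1000); // brief pause before the next attempt, as in executeWithRetry()
        }
    }

    public static void main(String[] args) throws IOException, InterruptedException
    {
        // Placeholder command; in DockerWrapper the equivalent call wraps the generated bash script.
        runWithRetry(List.of("/bin/bash", "-c", "exit 0"), 3);
    }
}

Keeping the retryable codes in a short whitelist, rather than retrying every non-zero exit, avoids masking genuine tool failures.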
@@ -1,20 +1,7 @@
/*
* Copyright (c) 2012 LabKey Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
PARAMETERS(AnalysisId INTEGER)

select
(CAST(a.analysis_id as varchar) || '<>' || a.lineages) as key,
(CAST(AnalysisId as varchar) || '<>' || a.lineages) as key,
a.analysis_id,
a.lineages,
max(a.totalLineages) as totalLineages,
@@ -25,13 +12,13 @@ select
round(100 * (cast(sum(a.total) as float) / cast(max(a.total_reads) as float)), 2) as percent,
group_concat(distinct a.haplotypesWithAllele) as haplotypesWithAllele,

CAST((select sum(s.total) as total FROM sequenceanalysis.alignment_summary s WHERE s.analysis_id = a.analysis_id AND s.rowid IN (
SELECT distinct asj.alignment_id from sequenceanalysis.alignment_summary_junction asj WHERE asj.ref_nt_id.locus = a.loci and asj.status = true
CAST((select sum(s.total) as total FROM sequenceanalysis.alignment_summary s WHERE s.analysis_id = AnalysisId AND s.rowid IN (
SELECT distinct asj.alignment_id from sequenceanalysis.alignment_summary_junction asj WHERE asj.analysis_id = AnalysisId AND asj.ref_nt_id.locus = a.loci and asj.status = true
)
) as integer) as total_reads_from_locus,

round(100 * (cast(sum(a.total) as float) / cast((select sum(s.total) as total FROM sequenceanalysis.alignment_summary s WHERE s.analysis_id = a.analysis_id AND s.rowid IN (
SELECT distinct asj.alignment_id from sequenceanalysis.alignment_summary_junction asj WHERE asj.ref_nt_id.locus = a.loci and asj.status = true
round(100 * (cast(sum(a.total) as float) / cast((select sum(s.total) as total FROM sequenceanalysis.alignment_summary s WHERE s.analysis_id = AnalysisId AND s.rowid IN (
SELECT distinct asj.alignment_id from sequenceanalysis.alignment_summary_junction asj WHERE asj.analysis_id = AnalysisId AND asj.ref_nt_id.locus = a.loci and asj.status = true
)
) as float)), 2) as percent_from_locus,
group_concat(distinct a.rowid, ',') as rowids
@@ -47,15 +34,16 @@ FROM (
group_concat(distinct coalesce(j.ref_nt_id.locus, j.ref_nt_id.name), chr(10)) as loci,

total,
cast((select sum(total) as total FROM sequenceanalysis.alignment_summary s WHERE s.analysis_id = a.analysis_id) as integer) as total_reads,
cast((select sum(total) as total FROM sequenceanalysis.alignment_summary s WHERE s.analysis_id = AnalysisId) as integer) as total_reads,
group_concat(distinct hs.haplotype, chr(10)) as haplotypesWithAllele

from sequenceanalysis.alignment_summary a
join sequenceanalysis.alignment_summary_junction j ON (j.alignment_id = a.rowid and j.status = true)
join sequenceanalysis.alignment_summary_junction j ON (j.analysis_id = AnalysisId AND j.alignment_id = a.rowid and j.status = true)
left join sequenceanalysis.haplotype_sequences hs ON ((
(hs.name = j.ref_nt_id.lineage AND hs.type = 'Lineage') OR
(hs.name = j.ref_nt_id.name AND hs.type = 'Allele')
) AND hs.haplotype.datedisabled IS NULL)
WHERE a.analysis_id = AnalysisId
group by a.analysis_id, a.rowid, a.total

) a

This file was deleted.

This file was deleted.

@@ -1,18 +1,5 @@
/*
* Copyright (c) 2012 LabKey Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
PARAMETERS(AnalysisId INTEGER)

select
a.analysis_id,
a.alleles,
@@ -36,13 +23,13 @@ select
group_concat(a.rowid, ',') as rowids,
group_concat(distinct a.haplotypesWithAllele) as haplotypesWithAllele,

CAST((select sum(s.total) as total FROM sequenceanalysis.alignment_summary s WHERE s.analysis_id = a.analysis_id AND s.rowid IN (
SELECT distinct asj.alignment_id from sequenceanalysis.alignment_summary_junction asj WHERE asj.ref_nt_id.locus = a.loci and asj.status = true
CAST((select sum(s.total) as total FROM sequenceanalysis.alignment_summary s WHERE s.analysis_id = AnalysisId AND s.rowid IN (
SELECT distinct asj.alignment_id from sequenceanalysis.alignment_summary_junction asj WHERE asj.analysis_id = AnalysisId AND asj.ref_nt_id.locus = a.loci and asj.status = true
)
) as INTEGER) as total_reads_from_locus,

round(100 * (cast(sum(a.total) as float) / CASE WHEN count(a.lineages) = 0 THEN max(a.total_reads) ELSE cast((select sum(s.total) as total FROM sequenceanalysis.alignment_summary s WHERE s.analysis_id = a.analysis_id AND s.rowid IN (
SELECT distinct asj.alignment_id from sequenceanalysis.alignment_summary_junction asj WHERE asj.ref_nt_id.locus = a.loci and asj.status = true
round(100 * (cast(sum(a.total) as float) / CASE WHEN count(a.lineages) = 0 THEN max(a.total_reads) ELSE cast((select sum(s.total) as total FROM sequenceanalysis.alignment_summary s WHERE s.analysis_id = AnalysisId AND s.rowid IN (
SELECT distinct asj.alignment_id from sequenceanalysis.alignment_summary_junction asj WHERE asj.analysis_id = AnalysisId AND asj.ref_nt_id.locus = a.loci and asj.status = true
)
) as float) END), 2) as percent_from_locus,
max(lastModified) as lastModified,
@@ -67,14 +54,15 @@ FROM (
total_forward,
total_reverse,
valid_pairs,
(select sum(total) as total FROM sequenceanalysis.alignment_summary s WHERE s.analysis_id = a.analysis_id) as total_reads,
(select sum(total) as total FROM sequenceanalysis.alignment_summary s WHERE s.analysis_id = AnalysisId) as total_reads,
max(j.modified) as lastModified
from sequenceanalysis.alignment_summary a
left join sequenceanalysis.alignment_summary_junction j ON (j.alignment_id = a.rowid and j.status = true)
left join sequenceanalysis.alignment_summary_junction j ON (j.analysis_id = AnalysisId AND j.alignment_id = a.rowid and j.status = true)
left join sequenceanalysis.haplotype_sequences hs ON ((
(hs.name = j.ref_nt_id.lineage AND hs.type = 'Lineage') OR
(hs.name = j.ref_nt_id.name AND hs.type = 'Allele')
) AND hs.haplotype.datedisabled IS NULL)
WHERE a.analysis_id = AnalysisId
group by a.analysis_id, a.rowid, a.total, total_forward, total_reverse, valid_pairs

) a
@@ -0,0 +1,5 @@
-- This is a reversal of SequenceAnalysis-12.329-12.330.sql:
DROP INDEX IF EXISTS sequenceanalysis.IDX_asj_status_container_alignment_id_ref_nt_id;
DROP INDEX IF EXISTS sequenceanalysis.IDX_haplotypes_name_date;
DROP INDEX IF EXISTS sequenceanalysis.IDX_haplotype_sequences_name_haplotype_type;
DROP INDEX IF EXISTS sequenceanalysis.IDX_alignment_summary_analysis_id_rowid_container_total;
@@ -0,0 +1 @@
SELECT core.executeJavaUpgradeCode('migrateSequenceDirs');
@@ -0,0 +1,5 @@
-- This is a reversal of SequenceAnalysis-12.329-12.330.sql:
DROP INDEX IDX_asj_status_container_alignment_id_ref_nt_id ON sequenceanalysis.alignment_summary_junction;
DROP INDEX IDX_haplotypes_name_date ON sequenceanalysis.haplotypes;
DROP INDEX IDX_haplotype_sequences_name_haplotype_type ON sequenceanalysis.haplotype_sequences;
DROP INDEX IDX_alignment_summary_analysis_id_rowid_container_total ON sequenceanalysis.alignment_summary;
@@ -0,0 +1 @@
EXEC core.executeJavaUpgradeCode 'migrateSequenceDirs';
11 changes: 1 addition & 10 deletions SequenceAnalysis/resources/schemas/sequenceanalysis.xml
@@ -523,17 +523,8 @@
<onClick>SequenceAnalysis.window.RunExportWindow.downloadFilesForAnalysis(dataRegionName);</onClick>
</item>
<item text="Alignment Reports">
<item text="Alignment Summary">
<onClick>SequenceAnalysis.Buttons.viewQuery(dataRegionName, {queryName: 'alignment_summary_grouped'})</onClick>
</item>
<item text="Alignment Summary, By Lineage">
<onClick>SequenceAnalysis.Buttons.viewQuery(dataRegionName, {queryName: 'alignment_summary_by_lineage'})</onClick>
</item>
<item text="Alignment Summary, Pivoted By Lineage">
<onClick>SequenceAnalysis.Buttons.viewAlignmentsPivoted(dataRegionName)</onClick>
</item>
<item text="Matching Haplotypes">
<onClick>SequenceAnalysis.Buttons.viewQuery(dataRegionName, {queryName: 'haplotypeMatches'})</onClick>
<onClick>SequenceAnalysis.Buttons.viewMatchingHaplotypes(dataRegionName)</onClick>
</item>
<item text="View Coverage">
<onClick>SequenceAnalysis.Buttons.viewQuery(dataRegionName, {queryName: 'sequence_coverage'})</onClick>