From e6cf76a0b51a377b068bf0be01096a66e4fbb1af Mon Sep 17 00:00:00 2001
From: bbimber
Date: Thu, 15 Jan 2026 13:07:24 -0800
Subject: [PATCH 01/26] Add helper to create additional container indexes

---
 .../discvrcore/DiscvrCoreController.java      | 32 ++++++++
 .../labkey/discvrcore/DiscvrCoreManager.java  | 73 +++++++++++++++++++
 2 files changed, 105 insertions(+)

diff --git a/discvrcore/src/org/labkey/discvrcore/DiscvrCoreController.java b/discvrcore/src/org/labkey/discvrcore/DiscvrCoreController.java
index f948c987c..667e5769d 100644
--- a/discvrcore/src/org/labkey/discvrcore/DiscvrCoreController.java
+++ b/discvrcore/src/org/labkey/discvrcore/DiscvrCoreController.java
@@ -302,4 +302,36 @@ public URLHelper getRedirectURL(Object o) throws Exception
             return DetailsURL.fromString("admin/manageFileRoot.view", getContainer()).getActionURL();
         }
     }
+
+    @UtilityAction(label = "Add Custom Core.Containers Indexes", description = "Provides a mechanism to add custom indexes to the core.containers table")
+    @RequiresPermission(AdminPermission.class)
+    public static class AddCustomIndexesAction extends ConfirmAction<Object>
+    {
+        @Override
+        public ModelAndView getConfirmView(Object o, BindException errors) throws Exception
+        {
+            setTitle("Add Custom Core.Containers Indexes");
+
+            return HtmlView.of("This action will add custom indexes to core.containers. Only do this if you are absolutely certain about the consequences. Do you want to continue?");
+        }
+
+        @Override
+        public boolean handlePost(Object o, BindException errors) throws Exception
+        {
+            return DiscvrCoreManager.get().addCoreContainersIndexes();
+        }
+
+        @Override
+        public void validateCommand(Object o, Errors errors)
+        {
+
+        }
+
+        @NotNull
+        @Override
+        public URLHelper getSuccessURL(Object o)
+        {
+            return PageFlowUtil.urlProvider(PipelineUrls.class).urlBegin(getContainer());
+        }
+    }
 }
diff --git a/discvrcore/src/org/labkey/discvrcore/DiscvrCoreManager.java b/discvrcore/src/org/labkey/discvrcore/DiscvrCoreManager.java
index 209ebed17..c34f843a3 100644
--- a/discvrcore/src/org/labkey/discvrcore/DiscvrCoreManager.java
+++ b/discvrcore/src/org/labkey/discvrcore/DiscvrCoreManager.java
@@ -16,8 +16,26 @@
 package org.labkey.discvrcore;
 
+import org.apache.commons.lang3.StringUtils;
+import org.apache.logging.log4j.Logger;
+import org.labkey.api.collections.CaseInsensitiveHashSet;
+import org.labkey.api.data.CoreSchema;
+import org.labkey.api.data.SQLFragment;
+import org.labkey.api.data.SqlExecutor;
+import org.labkey.api.data.TableInfo;
+import org.labkey.api.util.logging.LogHelper;
+
+import java.sql.DatabaseMetaData;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Set;
+
 public class DiscvrCoreManager
 {
+    private static final Logger _log = LogHelper.getLogger(DiscvrCoreManager.class, "Messages from DiscvrCoreManager");
+
     private static final DiscvrCoreManager _instance = new DiscvrCoreManager();
 
     private DiscvrCoreManager()
@@ -29,4 +47,59 @@ public static DiscvrCoreManager get()
     {
         return _instance;
     }
+
+    public boolean addCoreContainersIndexes()
+    {
+        try
+        {
+            TableInfo ti = CoreSchema.getInstance().getTableInfoContainers();
+            addCustomIndex(ti, Arrays.asList("EntityId", "RowId", "Type", "Parent"));
+            addCustomIndex(ti, Arrays.asList("Parent", "EntityId", "Type", "RowId"));
+
+            return true;
+        }
+        catch (Exception e)
+        {
+            _log.error("Unable to create container indexes", e);
+            return false;
+        }
+    }
+
+    private void addCustomIndex(TableInfo ti, List<String> columnNames) throws
Exception
+    {
+        String idxName = getIndexName(ti.getName(), columnNames);
+        if (doesIndexExist(ti, idxName))
+        {
+            return;
+        }
+
+        createIndex(ti, idxName, columnNames);
+    }
+
+    private String getIndexName(String tableName, List<String> indexCols)
+    {
+        return "IDX_discvr_" + tableName + "_" + StringUtils.join(indexCols, "_");
+    }
+
+    private boolean doesIndexExist(TableInfo ti, String indexName) throws SQLException
+    {
+        Set<String> indexNames = new CaseInsensitiveHashSet();
+        DatabaseMetaData meta = ti.getSchema().getScope().getConnection().getMetaData();
+        try (ResultSet rs = meta.getIndexInfo(ti.getSchema().getScope().getDatabaseName(), ti.getSchema().getName(), ti.getName(), false, false))
+        {
+            while (rs.next())
+            {
+                indexNames.add(rs.getString("INDEX_NAME"));
+            }
+        }
+
+        return indexNames.contains(indexName);
+    }
+
+    private void createIndex(TableInfo realTable, String indexName, List<String> columns)
+    {
+        _log.info("Creating index on column(s): " + StringUtils.join(columns, ", ") + " for table: " + realTable.getName());
+        SQLFragment sql = new SQLFragment("CREATE NONCLUSTERED INDEX " + indexName + " ON " + realTable.getSelectName() + "(" + StringUtils.join(columns, ", ") + ")");
+        new SqlExecutor(realTable.getSchema()).execute(sql);
+    }
 }
\ No newline at end of file

From b7cebeebc8b57b01d3389601ea5499b1e2a9260 Mon Sep 17 00:00:00 2001
From: bbimber
Date: Thu, 15 Jan 2026 13:13:25 -0800
Subject: [PATCH 02/26] Allow docker to auto-retry after common failures

---
 .../sequenceanalysis/run/DockerWrapper.java   | 52 ++++++++++++++++++-
 1 file changed, 51 insertions(+), 1 deletion(-)

diff --git a/SequenceAnalysis/api-src/org/labkey/api/sequenceanalysis/run/DockerWrapper.java b/SequenceAnalysis/api-src/org/labkey/api/sequenceanalysis/run/DockerWrapper.java
index dfc2bacc1..8163b1f95 100644
--- a/SequenceAnalysis/api-src/org/labkey/api/sequenceanalysis/run/DockerWrapper.java
+++ b/SequenceAnalysis/api-src/org/labkey/api/sequenceanalysis/run/DockerWrapper.java
@@ -1,5 +1,6 @@
 package org.labkey.api.sequenceanalysis.run;
 
+import org.apache.commons.collections4.list.UnmodifiableList;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.logging.log4j.Logger;
@@ -34,6 +35,7 @@ public class DockerWrapper extends AbstractCommandWrapper
     private boolean _useLocalContainerStorage;
     private String _alternateUserHome = null;
     private final Map<String, String> _dockerEnvironment = new HashMap<>();
+    private int _maxRetries = 3;
 
     public DockerWrapper(String containerName, Logger log, PipelineContext ctx)
     {
@@ -199,7 +201,7 @@ public void executeWithDocker(List<String> containerArgs, File workDir, Pipeline
             localBashScript.setExecutable(true);
             dockerBashScript.setExecutable(true);
 
-            execute(Arrays.asList("/bin/bash", localBashScript.getPath()));
+            executeWithRetry(Arrays.asList("/bin/bash", localBashScript.getPath()));
 
             if (_useLocalContainerStorage)
             {
@@ -214,6 +216,54 @@ public void executeWithDocker(List<String> containerArgs, File workDir, Pipeline
         }
     }
 
+    public int getMaxRetries()
+    {
+        return _maxRetries;
+    }
+
+    // NOTE: when running on a shared/cluster environment with multiple containers initializing concurrently, conflicts can result in these error codes.
+    // As a convenience, build in auto-retry behavior if one of these occurs
+    private final List<Integer> ALLOWABLE_FAIL_CODES = new UnmodifiableList<>(Arrays.asList(125, 127));
+
+    private void executeWithRetry(final List<String> args) throws PipelineJobException
+    {
+        int retries = 0;
+        while (retries <= getMaxRetries())
+        {
+            try
+            {
+                execute(args);
+                break;
+            }
+            catch (PipelineJobException e)
+            {
+                if (ALLOWABLE_FAIL_CODES.contains(getLastReturnCode()))
+                {
+                    retries++;
+                    if (retries > getMaxRetries())
+                    {
+                        getLogger().info("Maximum retries exceeded");
+                        throw e;
+                    }
+
+                    getLogger().info("Exit code " + getLastReturnCode() + ", retrying after 1 sec (" + retries + " of " + getMaxRetries() + ")");
+                    try
+                    {
+                        Thread.sleep(1000);
+                    }
+                    catch (InterruptedException ex)
+                    {
+                        throw new PipelineJobException(ex);
+                    }
+                }
+                else
+                {
+                    throw e;
+                }
+            }
+        }
+    }
+
     private String getEffectiveContainerName()
     {
         return _containerName;

From 96d68828dd462f5fb50f93976fb19024e3b11028 Mon Sep 17 00:00:00 2001
From: bbimber
Date: Thu, 15 Jan 2026 21:58:07 -0800
Subject: [PATCH 03/26] Update implementation of study assignment pivot table

---
 Studies/resources/views/studiesDetails.html   |   9 --
 .../web/studies/panel/StudiesFilterType.js    |   5 +-
 .../studies/query/StudiesTableCustomizer.java | 131 +++---------------
 .../studies/query/StudiesUserSchema.java      |  88 +++++++++++-
 4 files changed, 107 insertions(+), 126 deletions(-)
 delete mode 100644 Studies/resources/views/studiesDetails.html

diff --git a/Studies/resources/views/studiesDetails.html b/Studies/resources/views/studiesDetails.html
deleted file mode 100644
index 861748943..000000000
--- a/Studies/resources/views/studiesDetails.html
+++ /dev/null
@@ -1,9 +0,0 @@
-PLACEHOLDER: make a page that accepts a studyId and renders useful detail, including:
-
-studies.studies
-studies.studyCohorts
-studies.expectedTimepoints
-
-summary of data, including total subjects, links to datasets
-
-links to show subject/timepoint mapping. Maybe link to a QC report.
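[Editor's sketch] The executeWithRetry() logic added to DockerWrapper in the patch above is a small reusable pattern: retry a command only when it fails with one of a short allow-list of exit codes, up to a fixed cap, with a pause between attempts. Below is a minimal standalone sketch of that pattern; the Callable-based command, the RetrySketch class, and the plain Exception are illustrative stand-ins, not the module's execute()/PipelineJobException API.

    import java.util.List;
    import java.util.concurrent.Callable;

    public class RetrySketch
    {
        // Exit codes treated as transient, mirroring the allow-list in the patch above
        private static final List<Integer> ALLOWABLE_FAIL_CODES = List.of(125, 127);
        private static final int MAX_RETRIES = 3;

        public static void runWithRetry(Callable<Integer> command) throws Exception
        {
            int retries = 0;
            while (true)
            {
                int exitCode = command.call();
                if (exitCode == 0)
                {
                    return;
                }

                // Only retry allow-listed failures; anything else fails immediately
                if (!ALLOWABLE_FAIL_CODES.contains(exitCode) || ++retries > MAX_RETRIES)
                {
                    throw new Exception("Command failed with exit code: " + exitCode);
                }

                Thread.sleep(1000); // brief pause before the next attempt, as in the patch
            }
        }

        public static void main(String[] args) throws Exception
        {
            // Example usage: retry `docker info` if it exits with an allow-listed code
            runWithRetry(() -> new ProcessBuilder("docker", "info").inheritIO().start().waitFor());
        }
    }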
diff --git a/Studies/resources/web/studies/panel/StudiesFilterType.js b/Studies/resources/web/studies/panel/StudiesFilterType.js index ede4ebf09..85b3f6e28 100644 --- a/Studies/resources/web/studies/panel/StudiesFilterType.js +++ b/Studies/resources/web/studies/panel/StudiesFilterType.js @@ -74,8 +74,9 @@ Ext4.define('Laboratory.panel.StudiesFilterType', { return filterArray; } - var studyName = filters.studies[0]; - filterArray.nonRemovable.push(LABKEY.Filter.create(studyFieldName, studyName, LABKEY.Filter.Types.CONTAINS)); + const studyName = filters.studies[0]; + const projectFieldName = 'allProjectsPivot/' + studyName + '::lastStartDate'; + filterArray.nonRemovable.push(LABKEY.Filter.create(projectFieldName, null, LABKEY.Filter.Types.NONBLANK)); return filterArray; }, diff --git a/Studies/src/org/labkey/studies/query/StudiesTableCustomizer.java b/Studies/src/org/labkey/studies/query/StudiesTableCustomizer.java index 917d512fc..0cb755059 100644 --- a/Studies/src/org/labkey/studies/query/StudiesTableCustomizer.java +++ b/Studies/src/org/labkey/studies/query/StudiesTableCustomizer.java @@ -8,25 +8,21 @@ import org.labkey.api.data.BaseColumnInfo; import org.labkey.api.data.ColumnInfo; import org.labkey.api.data.Container; -import org.labkey.api.data.MutableColumnInfo; import org.labkey.api.data.TableCustomizer; import org.labkey.api.data.TableInfo; import org.labkey.api.ldk.LDKService; import org.labkey.api.query.ExprColumn; import org.labkey.api.query.FieldKey; -import org.labkey.api.query.LookupForeignKey; -import org.labkey.api.query.QueryDefinition; -import org.labkey.api.query.QueryException; +import org.labkey.api.query.QueryForeignKey; import org.labkey.api.query.QueryService; import org.labkey.api.query.UserSchema; import org.labkey.api.study.DatasetTable; -import org.labkey.api.study.Study; import org.labkey.api.study.StudyService; import org.labkey.api.util.logging.LogHelper; +import org.labkey.studies.StudiesSchema; import org.labkey.studies.StudiesServiceImpl; -import java.util.ArrayList; -import java.util.List; +import java.util.Objects; public class StudiesTableCustomizer implements TableCustomizer { @@ -81,58 +77,37 @@ private void doCustomize(AbstractTableInfo ati) addProjectAssignmentColumns(ati); } - private String getSubjectColName(Container c) - { - Study s = StudyService.get().getStudy(c.isWorkbookOrTab() ? c.getParent() : c); - if (s == null) - { - return null; - } - - return s.getSubjectColumnName(); - } - private void addProjectAssignmentColumns(AbstractTableInfo ati) { final String pivotColName = "allProjectsPivot"; if (ati.getColumn(pivotColName) != null) - return; - - List pks = ati.getPkColumns(); - ColumnInfo pk; - if (pks.size() == 1) { - pk = pks.get(0); + return; } - else + + if (!StudiesServiceImpl.get().hasAssignmentDataset(ati.getUserSchema().getContainer())) { - if (! (ati instanceof DatasetTable)) - { - _log.error("Table does not have a single PK column: " + ati.getName()); - return; - } - else - { - pk = pks.get(0); - } + return; } - if (!StudiesServiceImpl.get().hasAssignmentDataset(ati.getUserSchema().getContainer())) + final String subjectColumnName = Objects.requireNonNull(StudyService.get()).getSubjectColumnName(ati.getUserSchema().getContainer().isWorkbookOrTab() ? 
ati.getUserSchema().getContainer().getParent() : ati.getUserSchema().getContainer()); + if (subjectColumnName == null) { + _log.error("Unable to find the study's subjectColumn in StudiesTableCustomizer"); return; } - final String subjectSelectName = getSubjectColName(ati.getUserSchema().getContainer()); - if (subjectSelectName == null) + ColumnInfo subjectCol = ati.getColumn(subjectColumnName); + if (subjectCol == null) { - _log.error("Unable to find subjectSelectName in StudiesTableCustomizer"); + _log.error("Table lacks the column " + subjectColumnName + ", " + ati.getName()); return; } - final String pkColSelectName = pk.getFieldKey().toSQLString(); + Container target = ati.getUserSchema().getContainer().isWorkbookOrTab() ? ati.getUserSchema().getContainer().getParent() : ati.getUserSchema().getContainer(); - final String lookupName = ati.getName() + "_allProjectsPivot"; - BaseColumnInfo col2 = new ExprColumn(ati, FieldKey.fromString(pivotColName), pk.getValueSql(ExprColumn.STR_TABLE_ALIAS), pk.getJdbcType(), pk); + UserSchema studiesUs = QueryService.get().getUserSchema(ati.getUserSchema().getUser(), target, StudiesSchema.NAME); + BaseColumnInfo col2 = new ExprColumn(ati, FieldKey.fromString(pivotColName), subjectCol.getValueSql(ExprColumn.STR_TABLE_ALIAS), subjectCol.getJdbcType(), subjectCol); col2.setLabel("Assignment By Study"); col2.setName(pivotColName); col2.setCalculated(true); @@ -144,80 +119,8 @@ private void addProjectAssignmentColumns(AbstractTableInfo ati) col2.setIsUnselectable(true); col2.setUserEditable(false); col2.setKeyField(false); - col2.setFk(new LookupForeignKey(){ - @Override - public TableInfo getLookupTableInfo() - { - final UserSchema us = ati.getUserSchema(); - Container target = us.getContainer().isWorkbookOrTab() ? us.getContainer().getParent() : us.getContainer(); - QueryDefinition qd = createQueryDef(us, lookupName); - - qd.setSql(getAssignmentPivotSql(target, ati, pkColSelectName, subjectSelectName)); - qd.setIsTemporary(true); - - List errors = new ArrayList<>(); - TableInfo ti = qd.getTable(errors, true); - - if (!errors.isEmpty()){ - _log.error("Problem with table customizer: " + ati.getPublicName()); - for (QueryException e : errors) - { - _log.error(e.getMessage()); - } - } - - if (ti != null) - { - MutableColumnInfo col = (MutableColumnInfo) ti.getColumn(pk.getName()); - col.setKeyField(true); - col.setHidden(true); - - ((MutableColumnInfo)ti.getColumn("lastStartDate")).setLabel("Most Recent Assignment Date"); - } - - return ti; - } - }); + col2.setFk(new QueryForeignKey(studiesUs, null, studiesUs, target, StudiesUserSchema.TABLE_ASSIGNMENT_BY_STUDY, subjectColumnName, subjectColumnName)); ati.addColumn(col2); } - - private String getAssignmentPivotSql(Container source, final AbstractTableInfo ati, String pkColSelectName, String subjectSelectName) - { - return "SELECT\n" + - "s." + pkColSelectName + ",\n" + - "p.study,\n" + - "max(p.date) as lastStartDate\n" + - "\n" + - "FROM " + ati.getPublicSchemaName() + "." + ati.getPublicName() + " s\n" + - "JOIN \"" + source.getPath() + "\".study.assignment p\n" + - "ON (s." + subjectSelectName + " = p." + subjectSelectName + ")\n" + - "WHERE s." + subjectSelectName + " IS NOT NULL\n" + - "\n" + - "GROUP BY s." 
+ pkColSelectName + ", p.study\n" + - "PIVOT lastStartDate by study IN (select distinct studyName from studies.studies)"; - } - - // TODO: move to parent class - protected QueryDefinition createQueryDef(UserSchema us, String queryName) - { - if (!us.getContainer().isWorkbook()) - { - return QueryService.get().createQueryDef(us.getUser(), us.getContainer(), us, queryName); - } - - // The rationale is that if we are querying from a workbook, preferentially translate to the parent US - // However, there are situations like workbook-scoped lists, where that query might not exist on the parent - UserSchema parentUserSchema = QueryService.get().getUserSchema(us.getUser(), us.getContainer().getParent(), us.getSchemaPath()); - assert parentUserSchema != null; - - if (parentUserSchema.getTableNames().contains(queryName)) - { - return QueryService.get().createQueryDef(parentUserSchema.getUser(), parentUserSchema.getContainer(), parentUserSchema, queryName); - } - else - { - return QueryService.get().createQueryDef(us.getUser(), us.getContainer(), us, queryName); - } - } } diff --git a/Studies/src/org/labkey/studies/query/StudiesUserSchema.java b/Studies/src/org/labkey/studies/query/StudiesUserSchema.java index 4db90aaee..8145559c4 100644 --- a/Studies/src/org/labkey/studies/query/StudiesUserSchema.java +++ b/Studies/src/org/labkey/studies/query/StudiesUserSchema.java @@ -1,6 +1,5 @@ package org.labkey.studies.query; -import com.google.gwt.user.client.ui.TabBar; import org.apache.logging.log4j.Logger; import org.labkey.api.collections.CaseInsensitiveHashMap; import org.labkey.api.collections.CaseInsensitiveTreeSet; @@ -9,6 +8,7 @@ import org.labkey.api.data.ContainerFilter; import org.labkey.api.data.DbSchema; import org.labkey.api.data.JdbcType; +import org.labkey.api.data.MutableColumnInfo; import org.labkey.api.data.SQLFragment; import org.labkey.api.data.SchemaTableInfo; import org.labkey.api.data.SimpleFilter; @@ -22,6 +22,7 @@ import org.labkey.api.query.QueryException; import org.labkey.api.query.QueryService; import org.labkey.api.query.SimpleUserSchema; +import org.labkey.api.query.UserSchema; import org.labkey.api.security.User; import org.labkey.api.security.permissions.DeletePermission; import org.labkey.api.security.permissions.InsertPermission; @@ -29,13 +30,16 @@ import org.labkey.api.security.permissions.UpdatePermission; import org.labkey.api.studies.StudiesService; import org.labkey.api.studies.security.StudiesDataAdminPermission; +import org.labkey.api.study.StudyService; import org.labkey.api.util.logging.LogHelper; import org.labkey.studies.StudiesSchema; +import org.labkey.studies.StudiesServiceImpl; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; import static org.labkey.studies.StudiesSchema.TABLE_ANCHOR_EVENTS; @@ -50,6 +54,7 @@ public class StudiesUserSchema extends SimpleUserSchema { private static final Logger _log = LogHelper.getLogger(StudiesUserSchema.class, "Messages related to Studies Service"); private static final String TABLE_EVENT_TYPES = "studyEventTypes"; + public static final String TABLE_ASSIGNMENT_BY_STUDY = "assignmentByStudy"; public StudiesUserSchema(User user, Container container, DbSchema dbschema) { @@ -63,6 +68,11 @@ public Set getTableNames() available.add(TABLE_EVENT_TYPES); available.addAll(getPropertySetNames().keySet()); + if (StudiesServiceImpl.get().hasAssignmentDataset(getContainer())) + { + available.add(TABLE_ASSIGNMENT_BY_STUDY); + } + 
return Collections.unmodifiableSet(available); } @@ -160,6 +170,10 @@ else if (TABLE_EVENT_TYPES.equalsIgnoreCase(name)) { return createEventTypesTable(getContainer()); } + else if (TABLE_ASSIGNMENT_BY_STUDY.equalsIgnoreCase(name)) + { + return createAssignmentByStudyTable(getContainer()); + } //try to find it in propertySets Map> nameMap = getPropertySetNames(); @@ -241,6 +255,76 @@ private LookupSetTable createForPropertySet(StudiesUserSchema us, ContainerFilte return ret.init(); } + private TableInfo createAssignmentByStudyTable(Container c) + { + if (!StudiesServiceImpl.get().hasAssignmentDataset(c)) + { + return null; + } + + final String subjectSelectName = Objects.requireNonNull(StudyService.get()).getSubjectColumnName(getTargetContainer()); + QueryDefinition qd = createQueryDef(TABLE_ASSIGNMENT_BY_STUDY); + qd.setSql(getAssignmentPivotSql(subjectSelectName)); + + List errors = new ArrayList<>(); + TableInfo ti = qd.getTable(errors, true); + if (!errors.isEmpty()){ + _log.error("Problem creating: " + TABLE_ASSIGNMENT_BY_STUDY); + for (QueryException e : errors) + { + _log.error(e.getMessage(), e); + } + } + + if (ti != null) + { + MutableColumnInfo col = (MutableColumnInfo) ti.getColumn(subjectSelectName); + col.setKeyField(true); + col.setHidden(true); + + ((MutableColumnInfo)ti.getColumn("lastStartDate")).setLabel("Most Recent Assignment Date"); + if (ti instanceof AbstractTableInfo ati) + { + ati.setTitle("Assignment By Study"); + } + } + + return ti; + } + + private String getAssignmentPivotSql(final String subjectSelectName) + { + return "SELECT\n" + + "p." + subjectSelectName + ",\n" + + "p.study,\n" + + "max(p.date) as lastStartDate\n" + + "FROM \"" + getTargetContainer().getPath() + "\".study.assignment p\n" + + "GROUP BY p." + subjectSelectName + ", p.study\n" + + "PIVOT lastStartDate by study IN (select distinct studyName from studies.studies)"; + } + + private QueryDefinition createQueryDef(String queryName) + { + if (!getContainer().isWorkbook()) + { + return QueryService.get().createQueryDef(getUser(), getContainer(), this, queryName); + } + + // The rationale is that if we are querying from a workbook, preferentially translate to the parent US + // However, there are situations like workbook-scoped lists, where that query might not exist on the parent + UserSchema parentUserSchema = QueryService.get().getUserSchema(getUser(), getContainer().getParent(), getSchemaPath()); + assert parentUserSchema != null; + + if (parentUserSchema.getTableNames().contains(queryName)) + { + return QueryService.get().createQueryDef(parentUserSchema.getUser(), parentUserSchema.getContainer(), parentUserSchema, queryName); + } + else + { + return QueryService.get().createQueryDef(getUser(), getContainer(), this, queryName); + } + } + private TableInfo createEventTypesTable(Container container) { StringBuilder sql = new StringBuilder("SELECT * FROM ("); @@ -283,4 +367,6 @@ private TableInfo createEventTypesTable(Container container) return ti; } + + } From be332df576f798df016ea3049061765ca9fbaad6 Mon Sep 17 00:00:00 2001 From: bbimber Date: Fri, 16 Jan 2026 16:32:59 -0800 Subject: [PATCH 04/26] Use query parameters in alignment_summary queries (#371) * Use query parameters in alignment_summary queries --- .../alignment_summary_by_lineage.sql | 32 +- ...nment_summary_by_lineage_pivoted.query.xml | 9 - .../alignment_summary_by_lineage_pivoted.sql | 9 - .../alignment_summary_grouped.sql | 30 +- .../resources/schemas/sequenceanalysis.xml | 11 +- .../resources/views/sbtToGeneTable.html | 454 
------------------ .../resources/views/sbtToGeneTable.view.xml | 6 - .../sequenceanalysisButtons.js | 11 +- .../SequenceAnalysisModule.java | 3 - .../analysis/SbtGeneCountHandler.java | 118 ----- .../analysis/SequenceBasedTypingAnalysis.java | 16 +- 11 files changed, 37 insertions(+), 662 deletions(-) delete mode 100644 SequenceAnalysis/resources/queries/sequenceanalysis/alignment_summary_by_lineage_pivoted.query.xml delete mode 100644 SequenceAnalysis/resources/queries/sequenceanalysis/alignment_summary_by_lineage_pivoted.sql delete mode 100644 SequenceAnalysis/resources/views/sbtToGeneTable.html delete mode 100644 SequenceAnalysis/resources/views/sbtToGeneTable.view.xml delete mode 100644 SequenceAnalysis/src/org/labkey/sequenceanalysis/analysis/SbtGeneCountHandler.java diff --git a/SequenceAnalysis/resources/queries/sequenceanalysis/alignment_summary_by_lineage.sql b/SequenceAnalysis/resources/queries/sequenceanalysis/alignment_summary_by_lineage.sql index a89cd1f50..7ab8394a5 100644 --- a/SequenceAnalysis/resources/queries/sequenceanalysis/alignment_summary_by_lineage.sql +++ b/SequenceAnalysis/resources/queries/sequenceanalysis/alignment_summary_by_lineage.sql @@ -1,20 +1,7 @@ -/* - * Copyright (c) 2012 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ +PARAMETERS(AnalysisId INTEGER) + select - (CAST(a.analysis_id as varchar) || '<>' || a.lineages) as key, + (CAST(AnalysisId as varchar) || '<>' || a.lineages) as key, a.analysis_id, a.lineages, max(a.totalLineages) as totalLineages, @@ -25,13 +12,13 @@ select round(100 * (cast(sum(a.total) as float) / cast(max(a.total_reads) as float)), 2) as percent, group_concat(distinct a.haplotypesWithAllele) as haplotypesWithAllele, - CAST((select sum(s.total) as total FROM sequenceanalysis.alignment_summary s WHERE s.analysis_id = a.analysis_id AND s.rowid IN ( - SELECT distinct asj.alignment_id from sequenceanalysis.alignment_summary_junction asj WHERE asj.ref_nt_id.locus = a.loci and asj.status = true + CAST((select sum(s.total) as total FROM sequenceanalysis.alignment_summary s WHERE s.analysis_id = AnalysisId AND s.rowid IN ( + SELECT distinct asj.alignment_id from sequenceanalysis.alignment_summary_junction asj WHERE asj.analysis_id = AnalysisId AND asj.ref_nt_id.locus = a.loci and asj.status = true ) ) as integer) as total_reads_from_locus, - round(100 * (cast(sum(a.total) as float) / cast((select sum(s.total) as total FROM sequenceanalysis.alignment_summary s WHERE s.analysis_id = a.analysis_id AND s.rowid IN ( - SELECT distinct asj.alignment_id from sequenceanalysis.alignment_summary_junction asj WHERE asj.ref_nt_id.locus = a.loci and asj.status = true + round(100 * (cast(sum(a.total) as float) / cast((select sum(s.total) as total FROM sequenceanalysis.alignment_summary s WHERE s.analysis_id = AnalysisId AND s.rowid IN ( + SELECT distinct asj.alignment_id from sequenceanalysis.alignment_summary_junction asj WHERE asj.analysis_id = AnalysisId AND asj.ref_nt_id.locus = a.loci and asj.status = true ) ) as float)), 2) as percent_from_locus, group_concat(distinct a.rowid, ',') as rowids @@ -47,15 +34,16 @@ FROM ( group_concat(distinct coalesce(j.ref_nt_id.locus, j.ref_nt_id.name), chr(10)) as loci, total, - cast((select sum(total) as total FROM sequenceanalysis.alignment_summary s WHERE s.analysis_id = a.analysis_id) as integer) as total_reads, + cast((select sum(total) as total FROM sequenceanalysis.alignment_summary s WHERE s.analysis_id = AnalysisId) as integer) as total_reads, group_concat(distinct hs.haplotype, chr(10)) as haplotypesWithAllele from sequenceanalysis.alignment_summary a - join sequenceanalysis.alignment_summary_junction j ON (j.alignment_id = a.rowid and j.status = true) + join sequenceanalysis.alignment_summary_junction j ON (j.analysis_id = AnalysisId AND j.alignment_id = a.rowid and j.status = true) left join sequenceanalysis.haplotype_sequences hs ON (( (hs.name = j.ref_nt_id.lineage AND hs.type = 'Lineage') OR (hs.name = j.ref_nt_id.name AND hs.type = 'Allele') ) AND hs.haplotype.datedisabled IS NULL) + WHERE a.analysis_id = AnalysisId group by a.analysis_id, a.rowid, a.total ) a diff --git a/SequenceAnalysis/resources/queries/sequenceanalysis/alignment_summary_by_lineage_pivoted.query.xml b/SequenceAnalysis/resources/queries/sequenceanalysis/alignment_summary_by_lineage_pivoted.query.xml deleted file mode 100644 index 2876baf89..000000000 --- a/SequenceAnalysis/resources/queries/sequenceanalysis/alignment_summary_by_lineage_pivoted.query.xml +++ /dev/null @@ -1,9 +0,0 @@ - - - - - Alignment Summary By Lineage -
-
-
-
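[Editor's sketch] The rewritten alignment_summary queries above declare PARAMETERS(AnalysisId INTEGER), so callers must bind the parameter explicitly: the updated JS buttons later in this patch pass it on the URL as query.param.AnalysisId, and the AlignmentGroupCompare change binds it in Java via TableSelector.setNamedParameters(). A minimal sketch of the Java calling pattern, assuming only a User, a Container, and an analysis id (the selected column set and the println are illustrative):

    import java.util.Map;

    import org.labkey.api.data.Container;
    import org.labkey.api.data.SimpleFilter;
    import org.labkey.api.data.TableSelector;
    import org.labkey.api.query.FieldKey;
    import org.labkey.api.query.QueryService;
    import org.labkey.api.security.User;
    import org.labkey.api.util.PageFlowUtil;

    public class ParameterizedQuerySketch
    {
        public static void printGroupedSummary(User u, Container c, int analysisId)
        {
            TableSelector ts = new TableSelector(
                    QueryService.get().getUserSchema(u, c, "sequenceanalysis").getTable("alignment_summary_grouped"),
                    PageFlowUtil.set("alleles", "lineages", "total_reads"),
                    new SimpleFilter(FieldKey.fromString("analysis_id"), analysisId),
                    null);

            // Bind the query's declared AnalysisId parameter before reading any rows
            ts.setNamedParameters(Map.of("AnalysisId", analysisId));
            ts.forEachResults(rs -> System.out.println(rs.getString(FieldKey.fromString("alleles"))));
        }
    }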
diff --git a/SequenceAnalysis/resources/queries/sequenceanalysis/alignment_summary_by_lineage_pivoted.sql b/SequenceAnalysis/resources/queries/sequenceanalysis/alignment_summary_by_lineage_pivoted.sql deleted file mode 100644 index 13bb7d346..000000000 --- a/SequenceAnalysis/resources/queries/sequenceanalysis/alignment_summary_by_lineage_pivoted.sql +++ /dev/null @@ -1,9 +0,0 @@ -SELECT - a.analysis_id, - a.lineages, - sum(a.percent) as percent - -FROM sequenceanalysis.alignment_summary_by_lineage a - -GROUP BY a.analysis_id, a.lineages -PIVOT percent BY lineages \ No newline at end of file diff --git a/SequenceAnalysis/resources/queries/sequenceanalysis/alignment_summary_grouped.sql b/SequenceAnalysis/resources/queries/sequenceanalysis/alignment_summary_grouped.sql index 5bfff44a0..ee61c695e 100644 --- a/SequenceAnalysis/resources/queries/sequenceanalysis/alignment_summary_grouped.sql +++ b/SequenceAnalysis/resources/queries/sequenceanalysis/alignment_summary_grouped.sql @@ -1,18 +1,5 @@ -/* - * Copyright (c) 2012 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ +PARAMETERS(AnalysisId INTEGER) + select a.analysis_id, a.alleles, @@ -36,13 +23,13 @@ select group_concat(a.rowid, ',') as rowids, group_concat(distinct a.haplotypesWithAllele) as haplotypesWithAllele, - CAST((select sum(s.total) as total FROM sequenceanalysis.alignment_summary s WHERE s.analysis_id = a.analysis_id AND s.rowid IN ( - SELECT distinct asj.alignment_id from sequenceanalysis.alignment_summary_junction asj WHERE asj.ref_nt_id.locus = a.loci and asj.status = true + CAST((select sum(s.total) as total FROM sequenceanalysis.alignment_summary s WHERE s.analysis_id = AnalysisId AND s.rowid IN ( + SELECT distinct asj.alignment_id from sequenceanalysis.alignment_summary_junction asj WHERE asj.analysis_id = AnalysisId AND asj.ref_nt_id.locus = a.loci and asj.status = true ) ) as INTEGER) as total_reads_from_locus, - round(100 * (cast(sum(a.total) as float) / CASE WHEN count(a.lineages) = 0 THEN max(a.total_reads) ELSE cast((select sum(s.total) as total FROM sequenceanalysis.alignment_summary s WHERE s.analysis_id = a.analysis_id AND s.rowid IN ( - SELECT distinct asj.alignment_id from sequenceanalysis.alignment_summary_junction asj WHERE asj.ref_nt_id.locus = a.loci and asj.status = true + round(100 * (cast(sum(a.total) as float) / CASE WHEN count(a.lineages) = 0 THEN max(a.total_reads) ELSE cast((select sum(s.total) as total FROM sequenceanalysis.alignment_summary s WHERE s.analysis_id = AnalysisId AND s.rowid IN ( + SELECT distinct asj.alignment_id from sequenceanalysis.alignment_summary_junction asj WHERE asj.analysis_id = AnalysisId AND asj.ref_nt_id.locus = a.loci and asj.status = true ) ) as float) END), 2) as percent_from_locus, max(lastModified) as lastModified, @@ -67,14 +54,15 @@ FROM ( total_forward, total_reverse, valid_pairs, - (select sum(total) as total FROM sequenceanalysis.alignment_summary s WHERE s.analysis_id = a.analysis_id) as total_reads, + (select sum(total) as total FROM 
sequenceanalysis.alignment_summary s WHERE s.analysis_id = AnalysisId) as total_reads, max(j.modified) as lastModified from sequenceanalysis.alignment_summary a - left join sequenceanalysis.alignment_summary_junction j ON (j.alignment_id = a.rowid and j.status = true) + left join sequenceanalysis.alignment_summary_junction j ON (j.analysis_id = AnalysisId AND j.alignment_id = a.rowid and j.status = true) left join sequenceanalysis.haplotype_sequences hs ON (( (hs.name = j.ref_nt_id.lineage AND hs.type = 'Lineage') OR (hs.name = j.ref_nt_id.name AND hs.type = 'Allele') ) AND hs.haplotype.datedisabled IS NULL) + WHERE a.analysis_id = AnalysisId group by a.analysis_id, a.rowid, a.total, total_forward, total_reverse, valid_pairs ) a diff --git a/SequenceAnalysis/resources/schemas/sequenceanalysis.xml b/SequenceAnalysis/resources/schemas/sequenceanalysis.xml index 5e420b38c..322de8124 100644 --- a/SequenceAnalysis/resources/schemas/sequenceanalysis.xml +++ b/SequenceAnalysis/resources/schemas/sequenceanalysis.xml @@ -523,17 +523,8 @@ SequenceAnalysis.window.RunExportWindow.downloadFilesForAnalysis(dataRegionName); - - SequenceAnalysis.Buttons.viewQuery(dataRegionName, {queryName: 'alignment_summary_grouped'}) - - - SequenceAnalysis.Buttons.viewQuery(dataRegionName, {queryName: 'alignment_summary_by_lineage'}) - - - SequenceAnalysis.Buttons.viewAlignmentsPivoted(dataRegionName) - - SequenceAnalysis.Buttons.viewQuery(dataRegionName, {queryName: 'haplotypeMatches'}) + SequenceAnalysis.Buttons.viewMatchingHaplotypes(dataRegionName) SequenceAnalysis.Buttons.viewQuery(dataRegionName, {queryName: 'sequence_coverage'}) diff --git a/SequenceAnalysis/resources/views/sbtToGeneTable.html b/SequenceAnalysis/resources/views/sbtToGeneTable.html deleted file mode 100644 index f506f9472..000000000 --- a/SequenceAnalysis/resources/views/sbtToGeneTable.html +++ /dev/null @@ -1,454 +0,0 @@ - \ No newline at end of file diff --git a/SequenceAnalysis/resources/views/sbtToGeneTable.view.xml b/SequenceAnalysis/resources/views/sbtToGeneTable.view.xml deleted file mode 100644 index e95eaf46b..000000000 --- a/SequenceAnalysis/resources/views/sbtToGeneTable.view.xml +++ /dev/null @@ -1,6 +0,0 @@ - - - - - - \ No newline at end of file diff --git a/SequenceAnalysis/resources/web/SequenceAnalysis/sequenceanalysisButtons.js b/SequenceAnalysis/resources/web/SequenceAnalysis/sequenceanalysisButtons.js index e2cd084f9..df68e755c 100644 --- a/SequenceAnalysis/resources/web/SequenceAnalysis/sequenceanalysisButtons.js +++ b/SequenceAnalysis/resources/web/SequenceAnalysis/sequenceanalysisButtons.js @@ -163,7 +163,7 @@ SequenceAnalysis.Buttons = new function(){ ); }, - viewAlignmentsPivoted: function(dataRegionName){ + viewMatchingHaplotypes: function(dataRegionName){ var dataRegion = LABKEY.DataRegions[dataRegionName]; var checked = dataRegion.getChecked(); if (!checked.length){ @@ -171,11 +171,16 @@ SequenceAnalysis.Buttons = new function(){ return; } + if (checked.length !== 1) { + alert('Only one row at a time can be selected'); + return; + } + window.location = LABKEY.ActionURL.buildURL( 'sequenceanalysis', - 'lineagePivot', + 'haplotypeMatches', dataRegion.containerPath, - {analysisIds: checked.join(';')} + {'query.param.AnalysisId': checked[0]} ); }, diff --git a/SequenceAnalysis/src/org/labkey/sequenceanalysis/SequenceAnalysisModule.java b/SequenceAnalysis/src/org/labkey/sequenceanalysis/SequenceAnalysisModule.java index 3fc642130..e6ddc71b3 100644 --- 
a/SequenceAnalysis/src/org/labkey/sequenceanalysis/SequenceAnalysisModule.java +++ b/SequenceAnalysis/src/org/labkey/sequenceanalysis/SequenceAnalysisModule.java @@ -57,7 +57,6 @@ import org.labkey.sequenceanalysis.analysis.PrintReadBackedHaplotypesHandler; import org.labkey.sequenceanalysis.analysis.RecalculateSequenceMetricsHandler; import org.labkey.sequenceanalysis.analysis.RnaSeqcHandler; -import org.labkey.sequenceanalysis.analysis.SbtGeneCountHandler; import org.labkey.sequenceanalysis.analysis.UnmappedSequenceBasedGenotypeHandler; import org.labkey.sequenceanalysis.analysis.UpdateReadsetFilesHandler; import org.labkey.sequenceanalysis.button.AddSraRunButton; @@ -189,7 +188,6 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; -import java.util.HashSet; import java.util.LinkedList; import java.util.Set; @@ -389,7 +387,6 @@ public static void registerPipelineSteps() SequenceAnalysisService.get().registerFileHandler(new RnaSeqcHandler()); SequenceAnalysisService.get().registerFileHandler(new CombineStarGeneCountsHandler()); SequenceAnalysisService.get().registerFileHandler(new CombineSubreadGeneCountsHandler()); - SequenceAnalysisService.get().registerFileHandler(new SbtGeneCountHandler()); SequenceAnalysisService.get().registerFileHandler(new ProcessVariantsHandler()); SequenceAnalysisService.get().registerFileHandler(new UnmappedReadExportHandler()); SequenceAnalysisService.get().registerFileHandler(new MergeVcfsAndGenotypesHandler()); diff --git a/SequenceAnalysis/src/org/labkey/sequenceanalysis/analysis/SbtGeneCountHandler.java b/SequenceAnalysis/src/org/labkey/sequenceanalysis/analysis/SbtGeneCountHandler.java deleted file mode 100644 index 0ebe5e999..000000000 --- a/SequenceAnalysis/src/org/labkey/sequenceanalysis/analysis/SbtGeneCountHandler.java +++ /dev/null @@ -1,118 +0,0 @@ -package org.labkey.sequenceanalysis.analysis; - -import org.json.JSONObject; -import org.labkey.api.data.Container; -import org.labkey.api.module.Module; -import org.labkey.api.module.ModuleLoader; -import org.labkey.api.pipeline.PipelineJob; -import org.labkey.api.pipeline.PipelineJobException; -import org.labkey.api.pipeline.RecordedAction; -import org.labkey.api.query.DetailsURL; -import org.labkey.api.security.User; -import org.labkey.api.sequenceanalysis.SequenceOutputFile; -import org.labkey.api.sequenceanalysis.pipeline.SequenceAnalysisJobSupport; -import org.labkey.api.sequenceanalysis.pipeline.SequenceOutputHandler; -import org.labkey.api.util.FileType; -import org.labkey.api.view.ActionURL; -import org.labkey.sequenceanalysis.SequenceAnalysisModule; - -import java.io.File; -import java.util.LinkedHashSet; -import java.util.List; - -public class SbtGeneCountHandler implements SequenceOutputHandler -{ - private final FileType _txtType = new FileType(List.of(".txt"), ".txt", false, FileType.gzSupportLevel.NO_GZ); - - public SbtGeneCountHandler() - { - - } - - @Override - public String getName() - { - return "Append SBT To Gene Counts"; - } - - @Override - public String getDescription() - { - return "This will gather SBT data associated with the readsets used to make this combined gene table, and output them as a table suitable to append or analyze in concert with this table."; - } - - @Override - public String getButtonJSHandler() - { - return null; - } - - @Override - public ActionURL getButtonSuccessUrl(Container c, User u, List outputFileIds) - { - return DetailsURL.fromString("/sequenceanalysis/sbtToGeneTable.view?outputFileId=" + 
outputFileIds.iterator().next(), c).getActionURL(); - } - - @Override - public boolean useWorkbooks() - { - return true; - } - - @Override - public Module getOwningModule() - { - return ModuleLoader.getInstance().getModule(SequenceAnalysisModule.class); - } - - @Override - public LinkedHashSet getClientDependencies() - { - return null; - } - - @Override - public boolean canProcess(SequenceOutputFile f) - { - return f.getCategory() != null && f.getCategory().startsWith("Gene Count Table") && (_txtType.isType(f.getFile())); - } - - @Override - public boolean doRunRemote() - { - return false; - } - - @Override - public boolean doRunLocal() - { - return false; - } - - @Override - public SequenceOutputProcessor getProcessor() - { - return new Processor(); - } - - @Override - public boolean doSplitJobs() - { - return false; - } - - private static class Processor implements SequenceOutputProcessor - { - @Override - public void processFilesOnWebserver(PipelineJob job, SequenceAnalysisJobSupport support, List inputFiles, JSONObject params, File outputDir, List actions, List outputsToCreate) throws UnsupportedOperationException, PipelineJobException - { - - } - - @Override - public void processFilesRemote(List inputFiles, JobContext ctx) throws UnsupportedOperationException, PipelineJobException - { - - } - } -} diff --git a/SequenceAnalysis/src/org/labkey/sequenceanalysis/run/analysis/SequenceBasedTypingAnalysis.java b/SequenceAnalysis/src/org/labkey/sequenceanalysis/run/analysis/SequenceBasedTypingAnalysis.java index c3d9343d7..1f7847b5f 100644 --- a/SequenceAnalysis/src/org/labkey/sequenceanalysis/run/analysis/SequenceBasedTypingAnalysis.java +++ b/SequenceAnalysis/src/org/labkey/sequenceanalysis/run/analysis/SequenceBasedTypingAnalysis.java @@ -177,7 +177,7 @@ public static void prepareLineageMapFiles(SequenceAnalysisJobSupport support, Lo continue; } - File lineageMapFile = new File(sourceDirectory, genome.getGenomeId() + "_lineageMap.txt"); + File lineageMapFile = FileUtil.appendName(sourceDirectory, genome.getGenomeId() + "_lineageMap.txt"); try (final CSVWriter writer = new CSVWriter(PrintWriters.getPrintWriter(lineageMapFile), '\t', CSVWriter.NO_QUOTE_CHARACTER)) { log.info("writing lineage map file"); @@ -227,7 +227,7 @@ public Output performAnalysisPerSampleLocal(AnalysisModel model, File inputBam, throw new PipelineJobException("Genome not found: " + model.getLibraryId()); } - File lineageMapFile = new File(getPipelineCtx().getSourceDirectory(), referenceGenome.getGenomeId() + "_lineageMap.txt"); + File lineageMapFile = FileUtil.appendName(getPipelineCtx().getSourceDirectory(), referenceGenome.getGenomeId() + "_lineageMap.txt"); if (lineageMapFile.exists()) { getPipelineCtx().getLogger().debug("deleting lineage map file: " + lineageMapFile.getName()); @@ -264,8 +264,8 @@ public Output performAnalysisPerSampleRemote(Readset rs, File inputBam, Referenc BamIterator bi = new BamIterator(inputBam, referenceGenome.getWorkingFastaFile(), getPipelineCtx().getLogger()); List aggregators = new ArrayList<>(); - File workDir = new File(getPipelineCtx().getSourceDirectory(), FileUtil.getBaseName(inputBam)); - File sbtOutputLog = new File(workDir, FileUtil.getBaseName(inputBam) + ".sbt.txt.gz"); + File workDir = FileUtil.appendName(getPipelineCtx().getSourceDirectory(), FileUtil.getBaseName(inputBam)); + File sbtOutputLog = FileUtil.appendName(workDir, FileUtil.getBaseName(inputBam) + ".sbt.txt.gz"); SequenceBasedTypingAlignmentAggregator agg = new 
SequenceBasedTypingAlignmentAggregator(getPipelineCtx().getLogger(), referenceGenome.getWorkingFastaFile(), avgBaseQualityAggregator, toolParams); if (getProvider().getParameterByName("writeLog").extractValue(getPipelineCtx().getJob(), getProvider(), getStepIdx(), Boolean.class, false)) @@ -277,7 +277,7 @@ public Output performAnalysisPerSampleRemote(Readset rs, File inputBam, Referenc agg.setOutputLog(sbtOutputLog); } - File lineageMapFile = new File(getPipelineCtx().getSourceDirectory(), referenceGenome.getGenomeId() + "_lineageMap.txt"); + File lineageMapFile = FileUtil.appendName(getPipelineCtx().getSourceDirectory(), referenceGenome.getGenomeId() + "_lineageMap.txt"); if (lineageMapFile.exists()) { getPipelineCtx().getLogger().debug("using lineage map: " + lineageMapFile.getName()); @@ -371,7 +371,7 @@ public Output performAnalysisPerSampleRemote(Readset rs, File inputBam, Referenc protected File getSBTSummaryFile(File outputDir, File bam) { - return new File(outputDir, FileUtil.getBaseName(bam) + ".sbt_hits.txt.gz"); + return FileUtil.appendName(outputDir, FileUtil.getBaseName(bam) + ".sbt_hits.txt.gz"); } public static class AlignmentGroupCompare @@ -383,7 +383,9 @@ public AlignmentGroupCompare(final long analysisId, Container c, User u) { this.analysisId = analysisId; - new TableSelector(QueryService.get().getUserSchema(u, c, "sequenceanalysis").getTable("alignment_summary_grouped"), PageFlowUtil.set("analysis_id", "alleles", "lineages", "totalLineages", "total_reads", "total_forward", "total_reverse", "valid_pairs", "rowids"), new SimpleFilter(FieldKey.fromString("analysis_id"), analysisId), null).forEachResults(rs -> { + TableSelector ts = new TableSelector(QueryService.get().getUserSchema(u, c, "sequenceanalysis").getTable("alignment_summary_grouped"), PageFlowUtil.set("analysis_id", "alleles", "lineages", "totalLineages", "total_reads", "total_forward", "total_reverse", "valid_pairs", "rowids"), new SimpleFilter(FieldKey.fromString("analysis_id"), analysisId), null); + ts.setNamedParameters(Map.of("AnalysisId", this.analysisId)); + ts.forEachResults(rs -> { if (rs.getString(FieldKey.fromString("alleles")) == null) { return; From 8b1047da9e4e032743ca27980adfdd8fba751531 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 16 Jan 2026 16:33:56 -0800 Subject: [PATCH 05/26] Bump react-router (#369) Bumps the npm_and_yarn group with 1 update in the /jbrowse directory: [react-router](https://github.com/remix-run/react-router/tree/HEAD/packages/react-router). Updates `react-router` from 6.30.1 to 6.30.3 - [Release notes](https://github.com/remix-run/react-router/releases) - [Changelog](https://github.com/remix-run/react-router/blob/main/CHANGELOG.md) - [Commits](https://github.com/remix-run/react-router/commits/react-router@6.30.3/packages/react-router) --- updated-dependencies: - dependency-name: react-router dependency-version: 6.30.3 dependency-type: indirect dependency-group: npm_and_yarn ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- jbrowse/package-lock.json | 27 +++++++++++++++------------ 1 file changed, 15 insertions(+), 12 deletions(-) diff --git a/jbrowse/package-lock.json b/jbrowse/package-lock.json index b50edc098..8ef817b6b 100644 --- a/jbrowse/package-lock.json +++ b/jbrowse/package-lock.json @@ -3928,9 +3928,10 @@ } }, "node_modules/@remix-run/router": { - "version": "1.23.0", - "resolved": "https://registry.npmjs.org/@remix-run/router/-/router-1.23.0.tgz", - "integrity": "sha512-O3rHJzAQKamUz1fvE0Qaw0xSFqsA/yafi2iqeE0pvdFtCO1viYx8QL6f3Ln/aCCTLxs68SLf0KPM9eSeM8yBnA==", + "version": "1.23.2", + "resolved": "https://registry.npmjs.org/@remix-run/router/-/router-1.23.2.tgz", + "integrity": "sha512-Ic6m2U/rMjTkhERIa/0ZtXJP17QUi2CbWE7cqx4J58M8aA3QTfW+2UlQ4psvTX9IO1RfNVhK3pcpdjej7L+t2w==", + "license": "MIT", "engines": { "node": ">=14.0.0" } @@ -9839,11 +9840,12 @@ } }, "node_modules/react-router": { - "version": "6.30.1", - "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.30.1.tgz", - "integrity": "sha512-X1m21aEmxGXqENEPG3T6u0Th7g0aS4ZmoNynhbs+Cn+q+QGTLt+d5IQ2bHAXKzKcxGJjxACpVbnYQSCRcfxHlQ==", + "version": "6.30.3", + "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.30.3.tgz", + "integrity": "sha512-XRnlbKMTmktBkjCLE8/XcZFlnHvr2Ltdr1eJX4idL55/9BbORzyZEaIkBFDhFGCEWBBItsVrDxwx3gnisMitdw==", + "license": "MIT", "dependencies": { - "@remix-run/router": "1.23.0" + "@remix-run/router": "1.23.2" }, "engines": { "node": ">=14.0.0" @@ -9853,12 +9855,13 @@ } }, "node_modules/react-router-dom": { - "version": "6.30.1", - "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.30.1.tgz", - "integrity": "sha512-llKsgOkZdbPU1Eg3zK8lCn+sjD9wMRZZPuzmdWWX5SUs8OFkN5HnFVC0u5KMeMaC9aoancFI/KoLuKPqN+hxHw==", + "version": "6.30.3", + "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.30.3.tgz", + "integrity": "sha512-pxPcv1AczD4vso7G4Z3TKcvlxK7g7TNt3/FNGMhfqyntocvYKj+GCatfigGDjbLozC4baguJ0ReCigoDJXb0ag==", + "license": "MIT", "dependencies": { - "@remix-run/router": "1.23.0", - "react-router": "6.30.1" + "@remix-run/router": "1.23.2", + "react-router": "6.30.3" }, "engines": { "node": ">=14.0.0" From 8dad8955f5844bd599f407691e7498160dbbf6d2 Mon Sep 17 00:00:00 2001 From: bbimber Date: Sun, 18 Jan 2026 07:42:04 -0800 Subject: [PATCH 06/26] Set baseUrl in PredictTcellActivation.R --- singlecell/resources/chunks/PredictTcellActivation.R | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/singlecell/resources/chunks/PredictTcellActivation.R b/singlecell/resources/chunks/PredictTcellActivation.R index 41bbb6fc5..5efc2ab7b 100644 --- a/singlecell/resources/chunks/PredictTcellActivation.R +++ b/singlecell/resources/chunks/PredictTcellActivation.R @@ -1,3 +1,12 @@ +netRc <- paste0(Sys.getenv('USER_HOME'), '/.netrc') +if (!file.exists(netRc)) { + print(list.files(Sys.getenv('USER_HOME'))) + stop(paste0('Unable to find file: ', netRc)) +} + +invisible(Rlabkey::labkey.setCurlOptions(NETRC_FILE = netRc)) +Rdiscvr::SetLabKeyDefaults(baseUrl = serverBaseUrl, defaultFolder = defaultLabKeyFolder) + for (datasetId in names(seuratObjects)) { printName(datasetId) seuratObj <- readSeuratRDS(seuratObjects[[datasetId]]) From 509f6bb593ae9307833dd00c1fbd17aca3dab4d3 Mon Sep 17 00:00:00 2001 From: bbimber Date: Tue, 20 Jan 2026 09:34:52 -0800 Subject: [PATCH 07/26] Support --skip-tso-trimming nimble flag --- 
.../singlecell/run/NimbleAlignmentStep.java | 5 +- .../labkey/singlecell/run/NimbleAnalysis.java | 2 +- .../run/NimbleBulkAlignmentStep.java | 2 +- .../labkey/singlecell/run/NimbleHelper.java | 71 ++++++------------- 4 files changed, 29 insertions(+), 51 deletions(-) diff --git a/singlecell/src/org/labkey/singlecell/run/NimbleAlignmentStep.java b/singlecell/src/org/labkey/singlecell/run/NimbleAlignmentStep.java index a06484d2d..3126941b2 100644 --- a/singlecell/src/org/labkey/singlecell/run/NimbleAlignmentStep.java +++ b/singlecell/src/org/labkey/singlecell/run/NimbleAlignmentStep.java @@ -89,19 +89,22 @@ public AlignmentOutput performAlignment(Readset rs, List inputFastqs1, @Nu File loupeFile = getCachedLoupeFile(rs, throwIfNotFound); File localBam; + boolean skipTsoTrimming; if (loupeFile == null) { localBam = performCellRangerAlignment(output, rs, inputFastqs1, inputFastqs2, outputDirectory, referenceGenome, basename, readGroupId, platformUnit); + skipTsoTrimming = false; } else { localBam = createNimbleBam(output, rs, inputFastqs1, inputFastqs2); + skipTsoTrimming = true; } // Now run nimble itself: NimbleHelper helper = new NimbleHelper(getPipelineCtx(), getProvider(), getStepIdx()); - helper.doNimbleAlign(localBam, output, rs, basename); + helper.doNimbleAlign(localBam, output, rs, basename, skipTsoTrimming); output.setBAM(localBam); return output; diff --git a/singlecell/src/org/labkey/singlecell/run/NimbleAnalysis.java b/singlecell/src/org/labkey/singlecell/run/NimbleAnalysis.java index b24d546b2..f3e00aeff 100644 --- a/singlecell/src/org/labkey/singlecell/run/NimbleAnalysis.java +++ b/singlecell/src/org/labkey/singlecell/run/NimbleAnalysis.java @@ -56,7 +56,7 @@ public Output performAnalysisPerSampleRemote(Readset rs, File inputBam, Referenc { AnalysisOutputImpl output = new AnalysisOutputImpl(); NimbleHelper helper = new NimbleHelper(getPipelineCtx(), getProvider(), getStepIdx()); - helper.doNimbleAlign(inputBam, output, rs, FileUtil.getBaseName(inputBam)); + helper.doNimbleAlign(inputBam, output, rs, FileUtil.getBaseName(inputBam), false); return output; } diff --git a/singlecell/src/org/labkey/singlecell/run/NimbleBulkAlignmentStep.java b/singlecell/src/org/labkey/singlecell/run/NimbleBulkAlignmentStep.java index 4311d3e8b..d5b9dacd2 100644 --- a/singlecell/src/org/labkey/singlecell/run/NimbleBulkAlignmentStep.java +++ b/singlecell/src/org/labkey/singlecell/run/NimbleBulkAlignmentStep.java @@ -144,7 +144,7 @@ public AlignmentOutput performAlignment(Readset rs, List inputFastqs1, @Nu // Now run nimble itself: NimbleHelper helper = new NimbleHelper(getPipelineCtx(), getProvider(), getStepIdx()); - helper.doNimbleAlign(outputBam, output, rs, basename); + helper.doNimbleAlign(outputBam, output, rs, basename, true); output.setBAM(outputBam); return output; diff --git a/singlecell/src/org/labkey/singlecell/run/NimbleHelper.java b/singlecell/src/org/labkey/singlecell/run/NimbleHelper.java index 7e30115b5..c6c702fba 100644 --- a/singlecell/src/org/labkey/singlecell/run/NimbleHelper.java +++ b/singlecell/src/org/labkey/singlecell/run/NimbleHelper.java @@ -162,7 +162,7 @@ public void prepareGenome(int genomeId) throws PipelineJobException private File getLocalIndexDir(int genomeId, boolean createIfMissing) { - File dir = new File(getPipelineCtx().getSourceDirectory(), "genome." + genomeId); + File dir = FileUtil.appendName(getPipelineCtx().getSourceDirectory(), "genome." 
+ genomeId);
         if (createIfMissing && !dir.exists())
         {
             dir.mkdir();
@@ -187,10 +187,10 @@ private File getGenomeCsv(int genomeId, boolean forceWorkDir) throws PipelineJob
         if (!forceWorkDir && AlignerIndexUtil.hasCachedIndex(getPipelineCtx(), "nimble", rg))
         {
             File indexDir = AlignerIndexUtil.getIndexDir(rg, "nimble");
-            return new File(indexDir, "genome." + genomeId + ".csv");
+            return FileUtil.appendName(indexDir, "genome." + genomeId + ".csv");
         }
 
-        return checkForLegacyGenome(new File(getLocalIndexDir(genomeId, true), "genome." + genomeId + ".csv"));
+        return FileUtil.appendName(getLocalIndexDir(genomeId, true), "genome." + genomeId + ".csv");
     }
 
     private File getGenomeFasta(int genomeId) throws PipelineJobException
@@ -209,43 +209,13 @@ private File getGenomeFasta(int genomeId, boolean forceWorkDir) throws PipelineJ
         if (!forceWorkDir && AlignerIndexUtil.hasCachedIndex(getPipelineCtx(), "nimble", rg))
         {
             File indexDir = AlignerIndexUtil.getIndexDir(rg, "nimble");
-            return new File(indexDir, "genome." + genomeId + ".fasta");
+            return FileUtil.appendName(indexDir, "genome." + genomeId + ".fasta");
         }
 
-        return checkForLegacyGenome(new File(getLocalIndexDir(genomeId, true), "genome." + genomeId + ".fasta"));
+        return FileUtil.appendName(getLocalIndexDir(genomeId, true), "genome." + genomeId + ".fasta");
     }
 
-    // TODO: This should ultimately be removed:
-    private File checkForLegacyGenome(File fileNewLocation) throws PipelineJobException
-    {
-        if (fileNewLocation.exists())
-        {
-            return fileNewLocation;
-        }
-
-        File oldLocation = new File(fileNewLocation.getParentFile().getParentFile(), fileNewLocation.getName());
-        if (oldLocation.exists())
-        {
-            getPipelineCtx().getLogger().debug("Genome file found in old location, moving: " + oldLocation.getPath());
-            if (!fileNewLocation.getParentFile().exists())
-            {
-                fileNewLocation.getParentFile().mkdir();
-            }
-
-            try
-            {
-                FileUtils.moveFile(oldLocation, fileNewLocation);
-            }
-            catch (IOException e)
-            {
-                throw new PipelineJobException(e);
-            }
-        }
-
-        return fileNewLocation;
-    }
-
-    public void doNimbleAlign(File bam, PipelineStepOutput output, Readset rs, String basename) throws UnsupportedOperationException, PipelineJobException
+    public void doNimbleAlign(File bam, PipelineStepOutput output, Readset rs, String basename, boolean skipTsoTrimming) throws UnsupportedOperationException, PipelineJobException
     {
         getPipelineCtx().getJob().setStatus(PipelineJob.TaskStatus.running, "Running Nimble Align");
         List<NimbleGenome> genomes = getGenomes();
@@ -274,7 +244,7 @@ public void doNimbleAlign(File bam, PipelineStepOutput output, Readset rs, Strin
             jsons.add(refJson);
         }
 
-        Map<NimbleGenome, File> resultMap = doAlignment(genomes, jsons, bam, output);
+        Map<NimbleGenome, File> resultMap = doAlignment(genomes, jsons, bam, output, skipTsoTrimming);
         for (NimbleGenome genome : genomes)
         {
             File results = resultMap.get(genome);
@@ -307,7 +277,7 @@ public void doNimbleAlign(File bam, PipelineStepOutput output, Readset rs, Strin
 
     private File prepareReference(File genomeCsv, File genomeFasta, NimbleGenome genome, PipelineStepOutput output) throws PipelineJobException
     {
-        File nimbleJson = new File(getPipelineCtx().getWorkingDirectory(), genome.genomeId + ".json");
+        File nimbleJson = FileUtil.appendName(getPipelineCtx().getWorkingDirectory(), genome.genomeId + ".json");
         runUsingDocker(Arrays.asList("python3", "-m", "nimble", "generate", "--opt-file", genomeFasta.getPath(), "--file", genomeCsv.getPath(), "--output_path", nimbleJson.getPath()), output, "generate-" + genome.genomeId);
         if (!nimbleJson.exists())
         {
@@ -401,7 +371,7 @@ else if
("strict".equals(alignTemplate)) } } - private Map doAlignment(List genomes, List refJsons, File bam, PipelineStepOutput output) throws PipelineJobException + private Map doAlignment(List genomes, List refJsons, File bam, PipelineStepOutput output, boolean skipTsoTrimming) throws PipelineJobException { Map resultMap = new HashMap<>(); @@ -425,7 +395,12 @@ private Map doAlignment(List genomes, List doAlignment(List genomes, List doAlignment(List genomes, List inputFastqs1, List inputFastqs2, File loupeFile) throws PipelineJobException @@ -600,7 +575,7 @@ public static File runFastqToBam(PipelineStepOutput output, PipelineContext ctx, int bamIdx = 0; while (bamIdx < inputFastqs1.size()) { - File outputBam = new File(ctx.getWorkingDirectory(), FileUtil.makeLegalName(rs.getName()) + ".unmapped." + bamIdx + ".bam"); + File outputBam = FileUtil.appendName(ctx.getWorkingDirectory(), FileUtil.makeLegalName(rs.getName()) + ".unmapped." + bamIdx + ".bam"); List args = new ArrayList<>(); args.add("python3"); @@ -641,7 +616,7 @@ public static File runFastqToBam(PipelineStepOutput output, PipelineContext ctx, File outputBam; if (outputBams.size() > 1) { - outputBam = new File(ctx.getWorkingDirectory(), FileUtil.makeLegalName(rs.getName()) + ".unmapped.bam"); + outputBam = FileUtil.appendName(ctx.getWorkingDirectory(), FileUtil.makeLegalName(rs.getName()) + ".unmapped.bam"); outputBams.forEach(output::addIntermediateFile); SamtoolsRunner st = new SamtoolsRunner(ctx.getLogger()); @@ -777,7 +752,7 @@ private String getVersion(PipelineStepOutput output) throws PipelineJobException runUsingDocker(nimbleArgs, output, null); - File outFile = new File(getPipelineCtx().getWorkingDirectory(), "nimbleVersion.txt"); + File outFile = FileUtil.appendName(getPipelineCtx().getWorkingDirectory(), "nimbleVersion.txt"); if (!outFile.exists()) { throw new PipelineJobException("Unable to find file: " + outFile.getPath()); From c060d953a55e0dd069343f7b60153920fda97342 Mon Sep 17 00:00:00 2001 From: bbimber Date: Tue, 20 Jan 2026 11:02:34 -0800 Subject: [PATCH 08/26] Remove sequenceanalysis indexes (#373) --- .../dbscripts/postgresql/SequenceAnalysis-12.330-12.331.sql | 5 +++++ .../dbscripts/sqlserver/SequenceAnalysis-12.330-12.331.sql | 5 +++++ .../org/labkey/sequenceanalysis/SequenceAnalysisModule.java | 2 +- 3 files changed, 11 insertions(+), 1 deletion(-) create mode 100644 SequenceAnalysis/resources/schemas/dbscripts/postgresql/SequenceAnalysis-12.330-12.331.sql create mode 100644 SequenceAnalysis/resources/schemas/dbscripts/sqlserver/SequenceAnalysis-12.330-12.331.sql diff --git a/SequenceAnalysis/resources/schemas/dbscripts/postgresql/SequenceAnalysis-12.330-12.331.sql b/SequenceAnalysis/resources/schemas/dbscripts/postgresql/SequenceAnalysis-12.330-12.331.sql new file mode 100644 index 000000000..cc6722ede --- /dev/null +++ b/SequenceAnalysis/resources/schemas/dbscripts/postgresql/SequenceAnalysis-12.330-12.331.sql @@ -0,0 +1,5 @@ +-- This is a reversal of SequenceAnalysis-12.329-12.330.sql: +DROP INDEX IF EXISTS sequenceanalysis.IDX_asj_status_container_alignment_id_ref_nt_id; +DROP INDEX IF EXISTS sequenceanalysis.IDX_haplotypes_name_date; +DROP INDEX IF EXISTS sequenceanalysis.IDX_haplotype_sequences_name_haplotype_type; +DROP INDEX IF EXISTS sequenceanalysis.IDX_alignment_summary_analysis_id_rowid_container_total; \ No newline at end of file diff --git a/SequenceAnalysis/resources/schemas/dbscripts/sqlserver/SequenceAnalysis-12.330-12.331.sql 
b/SequenceAnalysis/resources/schemas/dbscripts/sqlserver/SequenceAnalysis-12.330-12.331.sql new file mode 100644 index 000000000..b124ca0dc --- /dev/null +++ b/SequenceAnalysis/resources/schemas/dbscripts/sqlserver/SequenceAnalysis-12.330-12.331.sql @@ -0,0 +1,5 @@ +-- This is a reversal of SequenceAnalysis-12.329-12.330.sql: +DROP INDEX IDX_asj_status_container_alignment_id_ref_nt_id ON sequenceanalysis.alignment_summary_junction; +DROP INDEX IDX_haplotypes_name_date ON sequenceanalysis.haplotypes; +DROP INDEX IDX_haplotype_sequences_name_haplotype_type ON sequenceanalysis.haplotype_sequences; +DROP INDEX IDX_alignment_summary_analysis_id_rowid_container_total ON sequenceanalysis.alignment_summary; \ No newline at end of file diff --git a/SequenceAnalysis/src/org/labkey/sequenceanalysis/SequenceAnalysisModule.java b/SequenceAnalysis/src/org/labkey/sequenceanalysis/SequenceAnalysisModule.java index e6ddc71b3..a4044bcae 100644 --- a/SequenceAnalysis/src/org/labkey/sequenceanalysis/SequenceAnalysisModule.java +++ b/SequenceAnalysis/src/org/labkey/sequenceanalysis/SequenceAnalysisModule.java @@ -209,7 +209,7 @@ public String getName() @Override public Double getSchemaVersion() { - return 12.330; + return 12.331; } @Override From 0ec3f23a071786683c689fc25989e41d8a7375a4 Mon Sep 17 00:00:00 2001 From: bbimber Date: Tue, 20 Jan 2026 11:03:25 -0800 Subject: [PATCH 09/26] Update filename case --- .../src/org/labkey/singlecell/run/CellRangerGexCountStep.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/singlecell/src/org/labkey/singlecell/run/CellRangerGexCountStep.java b/singlecell/src/org/labkey/singlecell/run/CellRangerGexCountStep.java index 8c89f82e5..57758cf40 100644 --- a/singlecell/src/org/labkey/singlecell/run/CellRangerGexCountStep.java +++ b/singlecell/src/org/labkey/singlecell/run/CellRangerGexCountStep.java @@ -625,8 +625,8 @@ public enum Chemistry // See: https://kb.10xgenomics.com/s/article/115004506263-What-is-a-barcode-inclusion-list-formerly-barcode-whitelist // cellranger-x.y.z/lib/python/cellranger/barcodes/ FivePE_V3("Single Cell 5' PE v3", "3M-5pgex-jan-2023.txt.gz"), - FivePE_V2("Single Cell 5' PE v2", "737k-august-2016.txt"), - FivePE_V1("Single Cell 5' PE", "737k-august-2016.txt"); + FivePE_V2("Single Cell 5' PE v2", "737K-august-2016.txt"), + FivePE_V1("Single Cell 5' PE", "737K-august-2016.txt"); // Single Cell 3' v1: 737K-april-2014_rc.txt final String _label; From a847538f3612186a953342193b5a5f924d8b11c2 Mon Sep 17 00:00:00 2001 From: bbimber Date: Mon, 26 Jan 2026 22:12:27 -0800 Subject: [PATCH 10/26] Improve SnpEff check for existing index --- .../run/variant/SNPEffStep.java | 17 ++++++++--------- .../run/variant/SnpEffWrapper.java | 13 +++++++------ 2 files changed, 15 insertions(+), 15 deletions(-) diff --git a/SequenceAnalysis/src/org/labkey/sequenceanalysis/run/variant/SNPEffStep.java b/SequenceAnalysis/src/org/labkey/sequenceanalysis/run/variant/SNPEffStep.java index 16903cb05..571a38379 100644 --- a/SequenceAnalysis/src/org/labkey/sequenceanalysis/run/variant/SNPEffStep.java +++ b/SequenceAnalysis/src/org/labkey/sequenceanalysis/run/variant/SNPEffStep.java @@ -15,6 +15,7 @@ import org.labkey.api.sequenceanalysis.pipeline.VariantProcessingStep; import org.labkey.api.sequenceanalysis.pipeline.VariantProcessingStepOutputImpl; import org.labkey.api.sequenceanalysis.run.AbstractCommandPipelineStep; +import org.labkey.api.util.FileUtil; import org.labkey.api.util.PageFlowUtil; import org.labkey.api.writer.PrintWriters; import 
org.labkey.sequenceanalysis.pipeline.SequenceTaskHelper; @@ -75,18 +76,16 @@ public static File checkOrCreateIndex(SequenceAnalysisJobSupport support, Logger SnpEffWrapper wrapper = new SnpEffWrapper(log); File snpEffIndexDir = wrapper.getExpectedIndexDir(snpEffBaseDir, genome.getGenomeId(), geneFileId); - if (snpEffIndexDir.exists()) - { - log.debug("previously created index found, re-using: " + snpEffIndexDir.getPath()); - return snpEffBaseDir; - } - - File binFile = new File(snpEffIndexDir, "snpEffectPredictor.bin"); + File binFile = FileUtil.appendName(snpEffIndexDir, "snpEffectPredictor.bin"); if (!binFile.exists()) { log.debug("existing index not found, expected: " + binFile.getPath()); wrapper.buildIndex(snpEffBaseDir, genome, geneFile, geneFileId); } + else + { + log.debug("previously created index found, re-using: " + snpEffIndexDir.getPath()); + } return snpEffBaseDir; } @@ -100,7 +99,7 @@ public Output processVariants(File inputVCF, File outputDirectory, ReferenceGeno Integer geneFileId = getProvider().getParameterByName(GENE_PARAM).extractValue(getPipelineCtx().getJob(), getProvider(), getStepIdx(), Integer.class); File snpEffBaseDir = checkOrCreateIndex(getPipelineCtx().getSequenceSupport(), getPipelineCtx().getLogger(), genome, geneFileId); - File outputVcf = new File(outputDirectory, SequenceTaskHelper.getUnzippedBaseName(inputVCF) + ".snpEff.vcf.gz"); + File outputVcf = FileUtil.appendName(outputDirectory, SequenceTaskHelper.getUnzippedBaseName(inputVCF) + ".snpEff.vcf.gz"); if (outputVcf.exists()) { getPipelineCtx().getLogger().debug("deleting pre-existing output file: " + outputVcf.getPath()); @@ -110,7 +109,7 @@ public Output processVariants(File inputVCF, File outputDirectory, ReferenceGeno File intFile = null; if (intervals != null) { - intFile = new File(outputVcf.getParentFile(), "snpEffintervals.bed"); + intFile = FileUtil.appendName(outputVcf.getParentFile(), "snpEffintervals.bed"); try (PrintWriter writer = PrintWriters.getPrintWriter(intFile)) { getPipelineCtx().getLogger().debug("Adding SnpEff intervals: " + intervals.size()); diff --git a/SequenceAnalysis/src/org/labkey/sequenceanalysis/run/variant/SnpEffWrapper.java b/SequenceAnalysis/src/org/labkey/sequenceanalysis/run/variant/SnpEffWrapper.java index 4f82587d4..59364f5e7 100644 --- a/SequenceAnalysis/src/org/labkey/sequenceanalysis/run/variant/SnpEffWrapper.java +++ b/SequenceAnalysis/src/org/labkey/sequenceanalysis/run/variant/SnpEffWrapper.java @@ -94,7 +94,7 @@ public String getGenomeBasename(Integer genomeId, Integer geneFileId) public File getExpectedIndexDir(File snpEffBaseDir, Integer genomeId, Integer geneFileId) { String basename = getGenomeBasename(genomeId, geneFileId); - return new File(snpEffBaseDir, basename); + return FileUtil.appendName(snpEffBaseDir, basename); } public void buildIndex(File snpEffBaseDir, ReferenceGenome genome, File genes, Integer geneFileId) throws PipelineJobException @@ -102,7 +102,8 @@ public void buildIndex(File snpEffBaseDir, ReferenceGenome genome, File genes, I getLogger().info("Building SnpEff index for: "+ genome.getGenomeId() + " / " + geneFileId); File genomeDir = getExpectedIndexDir(snpEffBaseDir, genome.getGenomeId(), geneFileId); - if (genomeDir.exists() && genomeDir.list().length > 0) + File doneFile = FileUtil.appendName(snpEffBaseDir, "build.done"); + if (doneFile.exists()) { getLogger().info("directory already exists, will not re-build"); return; @@ -144,8 +145,8 @@ else if ("gbk".equalsIgnoreCase(ext)) try { - Files.createSymbolicLink(new File(genomeDir, 
"sequences.fa").toPath(), genome.getSourceFastaFile().toPath()); - Files.createSymbolicLink(new File(genomeDir, "genes." + ext).toPath(), genes.toPath()); + Files.createSymbolicLink(FileUtil.appendName(genomeDir, "sequences.fa").toPath(), genome.getSourceFastaFile().toPath()); + Files.createSymbolicLink(FileUtil.appendName(genomeDir, "genes." + ext).toPath(), genes.toPath()); } catch (IOException e) { @@ -183,11 +184,11 @@ private File getJarDir() private File getSnpEffJar() { - return new File(getJarDir(), "snpEff.jar"); + return FileUtil.appendName(getJarDir(), "snpEff.jar"); } private File getSnpEffConfigFile() { - return new File(getJarDir(), "snpEff.config"); + return FileUtil.appendName(getJarDir(), "snpEff.config"); } } From 035009d9c3b6f0997dba699823ef94fe852fd206 Mon Sep 17 00:00:00 2001 From: bbimber Date: Tue, 27 Jan 2026 10:05:07 -0800 Subject: [PATCH 11/26] Support new gene component --- .../resources/chunks/CalculateGeneComponentScores.R | 2 +- .../singlecell/CalculateGeneComponentScores.java | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/singlecell/resources/chunks/CalculateGeneComponentScores.R b/singlecell/resources/chunks/CalculateGeneComponentScores.R index 5471d17f0..b17fdc469 100644 --- a/singlecell/resources/chunks/CalculateGeneComponentScores.R +++ b/singlecell/resources/chunks/CalculateGeneComponentScores.R @@ -10,7 +10,7 @@ for (datasetId in names(seuratObjects)) { for (sc in savedComponent) { logger::log_info(paste0('Processing ', datasetId, ' for ', sc)) - seuratObj <- RIRA::ScoreUsingSavedComponent(seuratObj, componentOrName = sc, fieldName = sc) + seuratObj <- RIRA::ScoreUsingSavedComponent(seuratObj, componentOrName = sc, fieldName = sc, layer = ifelse(useScaledData, yes = 'scale.data', no = 'data')) } saveData(seuratObj, datasetId) diff --git a/singlecell/src/org/labkey/singlecell/pipeline/singlecell/CalculateGeneComponentScores.java b/singlecell/src/org/labkey/singlecell/pipeline/singlecell/CalculateGeneComponentScores.java index 4bc652684..81c72051a 100644 --- a/singlecell/src/org/labkey/singlecell/pipeline/singlecell/CalculateGeneComponentScores.java +++ b/singlecell/src/org/labkey/singlecell/pipeline/singlecell/CalculateGeneComponentScores.java @@ -7,8 +7,8 @@ import org.labkey.api.singlecell.pipeline.SingleCellStep; import org.labkey.api.util.PageFlowUtil; +import java.util.Arrays; import java.util.Collection; -import java.util.Collections; public class CalculateGeneComponentScores extends AbstractRiraStep { @@ -21,16 +21,16 @@ public static class Provider extends AbstractPipelineStepProvider Date: Wed, 28 Jan 2026 09:49:27 -0800 Subject: [PATCH 12/26] Add check for null objects are Pseudobulking --- singlecell/resources/chunks/AvgExpression.R | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/singlecell/resources/chunks/AvgExpression.R b/singlecell/resources/chunks/AvgExpression.R index 5218624f6..5b972a3a4 100644 --- a/singlecell/resources/chunks/AvgExpression.R +++ b/singlecell/resources/chunks/AvgExpression.R @@ -34,6 +34,9 @@ GenerateAveragedData <- function(seuratObj, groupFields, addMetadata) { } a <- CellMembrane::PseudobulkSeurat(seuratObj, groupFields = groupFields, assayToAggregate = assayName, additionalFieldsToAggregate = additionalFieldsToAggregate, nCountRnaStratification = nCountRnaStratification) + if (is.null(a)) { + return(NULL) + } if (addMetadata) { a <- Rdiscvr::QueryAndApplyMetadataUsingCDNA(a) @@ -47,6 +50,9 @@ for (datasetId in names(seuratObjects)) { seuratObj <- 
readSeuratRDS(seuratObjects[[datasetId]]) seuratObj <- GenerateAveragedData(seuratObj, groupFields = groupFields, addMetadata = addMetadata) + if (is.null(seuratObj)) { + next + } saveData(seuratObj, datasetId) # Cleanup From a41120fc773cc9bf3344a774e2b5d84dc16cc399 Mon Sep 17 00:00:00 2001 From: bbimber Date: Wed, 28 Jan 2026 12:06:24 -0800 Subject: [PATCH 13/26] Enhance regex filtering in TrimmingTextArea --- .../web/SequenceAnalysis/field/TrimmingTextArea.js | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/SequenceAnalysis/resources/web/SequenceAnalysis/field/TrimmingTextArea.js b/SequenceAnalysis/resources/web/SequenceAnalysis/field/TrimmingTextArea.js index 5267c7af7..73fa305d6 100644 --- a/SequenceAnalysis/resources/web/SequenceAnalysis/field/TrimmingTextArea.js +++ b/SequenceAnalysis/resources/web/SequenceAnalysis/field/TrimmingTextArea.js @@ -15,6 +15,14 @@ Ext4.define('SequenceAnalysis.field.TrimmingTextArea', { } }); + if (this.stripCharsRe && Ext4.isString(this.stripCharsRe)) { + this.stripCharsRe = this.stripCharsRe.replaceAll('^/', '') + this.stripCharsRe = this.stripCharsRe.split(/(?=\/)\//) + if (this.stripCharsRe.length && this.stripCharsRe[0] === '') { + this.stripCharsRe.shift() + } + this.stripCharsRe = new RegExp(this.stripCharsRe[0], this.stripCharsRe.length > 1 ? this.stripCharsRe[1] : null) + } this.callParent(); }, @@ -53,6 +61,11 @@ Ext4.define('SequenceAnalysis.field.TrimmingTextArea', { if (val){ val = Ext4.String.trim(val); val = val.replace(/(\r\n|\n|\r)/gm,this.delimiter); + + if (val && this.stripCharsRe) { + val = val.replace(this.stripCharsRe, ''); + } + if (this.replaceAllWhitespace) { val = val.replace(/ /g, ''); } From b70e2b8c03b2bca01a605ad108eced4fb309c76d Mon Sep 17 00:00:00 2001 From: bbimber Date: Wed, 28 Jan 2026 14:55:12 -0800 Subject: [PATCH 14/26] Support expectedDefaultAssay in merge --- singlecell/resources/chunks/MergeSeurat.R | 2 +- .../org/labkey/singlecell/pipeline/singlecell/MergeSeurat.java | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/singlecell/resources/chunks/MergeSeurat.R b/singlecell/resources/chunks/MergeSeurat.R index 56fb0291d..f5e53b2ab 100644 --- a/singlecell/resources/chunks/MergeSeurat.R +++ b/singlecell/resources/chunks/MergeSeurat.R @@ -43,7 +43,7 @@ mergeBatchInMemory <- function(datasetIdToFilePath, saveFile) { stop('There were no passing seurat objects!') } - saveRDS(CellMembrane::MergeSeuratObjs(toMerge, projectName = projectName, doGC = doDiet, errorOnBarcodeSuffix = errorOnBarcodeSuffix), file = saveFile) + saveRDS(CellMembrane::MergeSeuratObjs(toMerge, projectName = projectName, doGC = doDiet, errorOnBarcodeSuffix = errorOnBarcodeSuffix), file = saveFile, expectedDefaultAssay = expectedDefaultAssay) filesToDelete <<- c(filesToDelete, saveFile) logger::log_info(paste0('mem used: ', R.utils::hsize(as.numeric(pryr::mem_used())))) diff --git a/singlecell/src/org/labkey/singlecell/pipeline/singlecell/MergeSeurat.java b/singlecell/src/org/labkey/singlecell/pipeline/singlecell/MergeSeurat.java index 993590b79..26fe9869b 100644 --- a/singlecell/src/org/labkey/singlecell/pipeline/singlecell/MergeSeurat.java +++ b/singlecell/src/org/labkey/singlecell/pipeline/singlecell/MergeSeurat.java @@ -36,7 +36,8 @@ public Provider() put("delimiter", ","); put("stripCharsRe", "/(^['\"]+)|(['\"]+$)/g"); }}, "RNA.orig").delimiter(","), - SeuratToolParameter.create("maxAllowableInputFileSizeMb", "Max Allowable Batch Size (MB)", "The largest allowable amount of data (in MB), measured as the size of 
the RDS files, to be allowed in one unit of data to merge in memory.", "ldk-integerfield", null, 200, "maxAllowableInputFileSizeMb", true, false)
+ SeuratToolParameter.create("maxAllowableInputFileSizeMb", "Max Allowable Batch Size (MB)", "The largest allowable amount of data (in MB), measured as the size of the RDS files, to be allowed in one unit of data to merge in memory.", "ldk-integerfield", null, 200, "maxAllowableInputFileSizeMb", true, false),
+ SeuratToolParameter.create("expectedDefaultAssay", "Default Assay", "The merged objects will be expected to have this assay", "textfield", null, "RNA", null, true, false)
), null, null);
}
From 7ab62f3a3b2d9ae61256583179817ca5de19c754 Mon Sep 17 00:00:00 2001
From: bbimber
Date: Wed, 28 Jan 2026 16:03:50 -0800
Subject: [PATCH 15/26] Bugfix to expectedDefaultAssay in merge
---
singlecell/resources/chunks/MergeSeurat.R | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/singlecell/resources/chunks/MergeSeurat.R b/singlecell/resources/chunks/MergeSeurat.R
index f5e53b2ab..4fecab9cf 100644
--- a/singlecell/resources/chunks/MergeSeurat.R
+++ b/singlecell/resources/chunks/MergeSeurat.R
@@ -43,7 +43,7 @@ mergeBatchInMemory <- function(datasetIdToFilePath, saveFile) {
stop('There were no passing seurat objects!')
}
- saveRDS(CellMembrane::MergeSeuratObjs(toMerge, projectName = projectName, doGC = doDiet, errorOnBarcodeSuffix = errorOnBarcodeSuffix), file = saveFile, expectedDefaultAssay = expectedDefaultAssay)
+ saveRDS(CellMembrane::MergeSeuratObjs(toMerge, projectName = projectName, doGC = doDiet, errorOnBarcodeSuffix = errorOnBarcodeSuffix, expectedDefaultAssay = expectedDefaultAssay), file = saveFile)
filesToDelete <<- c(filesToDelete, saveFile)
logger::log_info(paste0('mem used: ', R.utils::hsize(as.numeric(pryr::mem_used()))))
From 93bb5df6d749c25d9dbedfdca5fa291d052ba662 Mon Sep 17 00:00:00 2001
From: bbimber
Date: Sat, 31 Jan 2026 10:48:12 -0800
Subject: [PATCH 16/26] Bugfix when ADT filter specified but no ADT data exist
---
singlecell/resources/chunks/CommonFilters.R | 44 ++++++++++++---------
1 file changed, 26 insertions(+), 18 deletions(-)
diff --git a/singlecell/resources/chunks/CommonFilters.R b/singlecell/resources/chunks/CommonFilters.R
index 94aef6815..ff00ba013 100644
--- a/singlecell/resources/chunks/CommonFilters.R
+++ b/singlecell/resources/chunks/CommonFilters.R
@@ -49,27 +49,34 @@ for (datasetId in names(seuratObjects)) {
}
if (!is.null(saturation.ADT.min) && !is.null(seuratObj)) {
- if (!'Saturation.ADT' %in% names(seuratObj@meta.data)) {
- stop('Missing field: Saturation.ADT')
- }
-
- expr <- Seurat::FetchData(object = seuratObj, vars = 'Saturation.ADT')
- cells <- which(x = expr >= saturation.ADT.min)
- if (length(cells) > 0){
- seuratObj <- seuratObj[, cells]
- print(paste0('After saturation.ADT.min filter: ', length(colnames(x = seuratObj))))
- if (ncol(seuratObj) == 0) {
- seuratObj <- NULL
- next
- }
- } else {
- print(paste0('No cells passing saturation.ADT.min filter'))
- seuratObj <- NULL
- next
- }
+ if (!
'ADT' %in% names(seuratObj@assays)) { + print('No ADT assay, skipping ADT saturation filters') + } else { + if (!'Saturation.ADT' %in% names(seuratObj@meta.data)) { + stop('Missing field: Saturation.ADT') + } + + expr <- Seurat::FetchData(object = seuratObj, vars = 'Saturation.ADT') + cells <- which(x = expr >= saturation.ADT.min) + if (length(cells) > 0){ + seuratObj <- seuratObj[, cells] + print(paste0('After saturation.ADT.min filter: ', length(colnames(x = seuratObj)))) + if (ncol(seuratObj) == 0) { + seuratObj <- NULL + next + } + } else { + print(paste0('No cells passing saturation.ADT.min filter')) + seuratObj <- NULL + next + } + } } if (!is.null(saturation.ADT.max) && !is.null(seuratObj)) { + if (! 'ADT' %in% names(seuratObj@assays)) { + print('No ADT assay, skipping ADT saturation filters') + } else { if (!'Saturation.ADT' %in% names(seuratObj@meta.data)) { stop('Missing field: Saturation.ADT') } @@ -88,6 +95,7 @@ for (datasetId in names(seuratObjects)) { seuratObj <- NULL next } + } } if (dropHashingFail && !is.null(seuratObj)) { From 1d4d7a2dbd993316c3c3e0e8c0b51e66ffed547a Mon Sep 17 00:00:00 2001 From: bbimber Date: Sat, 31 Jan 2026 11:39:59 -0800 Subject: [PATCH 17/26] Bugfix to JBrowse processing with gff/gtf files that are pre-gzipped --- .../org/labkey/jbrowse/model/JsonFile.java | 39 ++++++++++--------- 1 file changed, 20 insertions(+), 19 deletions(-) diff --git a/jbrowse/src/org/labkey/jbrowse/model/JsonFile.java b/jbrowse/src/org/labkey/jbrowse/model/JsonFile.java index 8a6ef1ca0..9b87bc8ce 100644 --- a/jbrowse/src/org/labkey/jbrowse/model/JsonFile.java +++ b/jbrowse/src/org/labkey/jbrowse/model/JsonFile.java @@ -179,9 +179,9 @@ public void setSequenceId(Integer sequenceId) public File getBaseDir() { - File jbrowseDir = new File(JBrowseManager.get().getBaseDir(getContainerObj(), needsProcessing()), "resources"); + File jbrowseDir = FileUtil.appendName(JBrowseManager.get().getBaseDir(getContainerObj(), needsProcessing()), "resources"); - return needsProcessing() ? new File(jbrowseDir, getObjectId()) : null; + return needsProcessing() ? 
FileUtil.appendName(jbrowseDir, getObjectId()) : null; } public String getLabel() @@ -823,28 +823,29 @@ public File prepareResource(User u, Logger log, boolean throwIfNotPrepared, bool throw new PipelineJobException("No ExpData for JsonFile: " + getObjectId()); } + final File processedTrackFile = getLocationOfProcessedTrack(true); + final File processedTrackDir = processedTrackFile.getParentFile(); File targetFile = expData.getFile(); if (needsGzip() && !isGzipped()) { //need to gzip and tabix index: - final File finalLocation = getLocationOfProcessedTrack(true); - if (finalLocation.exists() && !SequencePipelineService.get().hasMinLineCount(finalLocation, 1)) + if (processedTrackFile.exists() && !SequencePipelineService.get().hasMinLineCount(processedTrackFile, 1)) { log.info("File exists but is zero-length, deleting and re-processing:"); forceReprocess = true; } - File idx = new File(finalLocation.getPath() + ".tbi"); - if (finalLocation.exists() && forceReprocess && !targetFile.equals(finalLocation)) + File idx = new File(processedTrackFile.getPath() + ".tbi"); + if (processedTrackFile.exists() && forceReprocess && !targetFile.equals(processedTrackFile)) { - finalLocation.delete(); + processedTrackFile.delete(); if (idx.exists()) { idx.delete(); } } - if (!finalLocation.exists()) + if (!processedTrackFile.exists()) { if (throwIfNotPrepared) { @@ -858,10 +859,10 @@ public File prepareResource(User u, Logger log, boolean throwIfNotPrepared, bool try { - if (!targetFile.getParentFile().equals(finalLocation.getParentFile())) + if (!targetFile.getParentFile().equals(processedTrackFile.getParentFile())) { log.debug("Creating local copy of: " + targetFile.getPath()); - File local = new File(finalLocation.getParentFile(), targetFile.getName()); + File local = FileUtil.appendName(processedTrackFile.getParentFile(), targetFile.getName()); if (local.exists()) { local.delete(); @@ -872,7 +873,7 @@ public File prepareResource(User u, Logger log, boolean throwIfNotPrepared, bool } File bgZipped = SequenceAnalysisService.get().bgzipFile(targetFile, log); - FileUtils.moveFile(bgZipped, finalLocation); + FileUtils.moveFile(bgZipped, processedTrackFile); } catch (IOException e) { @@ -880,7 +881,7 @@ public File prepareResource(User u, Logger log, boolean throwIfNotPrepared, bool } } - targetFile = finalLocation; + targetFile = processedTrackFile; } // Ensure index check runs even if file was already gzipped: @@ -892,7 +893,7 @@ public File prepareResource(User u, Logger log, boolean throwIfNotPrepared, bool if (doIndex()) { - File trixDir = new File(targetFile.getParentFile(), "trix"); + File trixDir = FileUtil.appendName(processedTrackDir, "trix"); if (forceReprocess && trixDir.exists()) { try @@ -923,7 +924,7 @@ public File prepareResource(User u, Logger log, boolean throwIfNotPrepared, bool File exe = JBrowseManager.get().getJbrowseCli(); SimpleScriptWrapper wrapper = new SimpleScriptWrapper(log); - wrapper.setWorkingDir(targetFile.getParentFile()); + wrapper.setWorkingDir(processedTrackDir); wrapper.setThrowNonZeroExits(true); wrapper.execute(Arrays.asList(exe.getPath(), "text-index", "--force", "--quiet", "--attributes", StringUtils.join(attributes, ","), "--prefixSize", "5", "--file", targetFile.getPath())); @@ -1056,7 +1057,7 @@ private boolean doesLuceneIndexExist() } // NOTE: is this the best file to test? 
- luceneDir = new File(luceneDir, "write.lock"); + luceneDir = FileUtil.appendName(luceneDir, "write.lock"); return luceneDir.exists(); } @@ -1162,7 +1163,7 @@ public File getLocationOfProcessedTrack(boolean createDir) trackDir.mkdirs(); } - return new File(trackDir, FileUtil.makeLegalName(getSourceFileName()).replaceAll(" ", "_") + (needsGzip() && !isGzipped() ? ".gz" : "")); + return FileUtil.appendName(trackDir, FileUtil.makeLegalName(getSourceFileName()).replaceAll(" ", "_") + (needsGzip() && !isGzipped() ? ".gz" : "")); } protected String getSourceFileName() @@ -1184,8 +1185,8 @@ public File getExpectedLocationOfIndexFile(String extension, boolean throwIfNotF return null; } - File ret = new File(basedir.getParentFile(), "trix"); - ret = new File(ret, basedir.getName() + extension); + File ret = FileUtil.appendName(basedir.getParentFile(), "trix"); + ret = FileUtil.appendName(ret, basedir.getName() + extension); if (throwIfNotFound && !ret.exists()) { @@ -1434,7 +1435,7 @@ public File getExpectedLocationOfLuceneIndex(boolean throwIfNotFound) return null; } - File ret = new File(basedir.getParentFile(), "lucene"); + File ret = FileUtil.appendName(basedir.getParentFile(), "lucene"); if (throwIfNotFound && !ret.exists()) { throw new IllegalStateException("Expected search index not found: " + ret.getPath()); From 19e211ec204f4327f136a1c770d9b226f0d076ef Mon Sep 17 00:00:00 2001 From: bbimber Date: Sun, 1 Feb 2026 15:57:07 -0800 Subject: [PATCH 18/26] Fix NPE --- jbrowse/src/org/labkey/jbrowse/model/JsonFile.java | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/jbrowse/src/org/labkey/jbrowse/model/JsonFile.java b/jbrowse/src/org/labkey/jbrowse/model/JsonFile.java index 9b87bc8ce..1000fa9a5 100644 --- a/jbrowse/src/org/labkey/jbrowse/model/JsonFile.java +++ b/jbrowse/src/org/labkey/jbrowse/model/JsonFile.java @@ -179,8 +179,13 @@ public void setSequenceId(Integer sequenceId) public File getBaseDir() { - File jbrowseDir = FileUtil.appendName(JBrowseManager.get().getBaseDir(getContainerObj(), needsProcessing()), "resources"); + File baseDir = JBrowseManager.get().getBaseDir(getContainerObj(), needsProcessing()); + if (baseDir == null) + { + return null; + } + File jbrowseDir = FileUtil.appendName(baseDir, "resources"); return needsProcessing() ? FileUtil.appendName(jbrowseDir, getObjectId()) : null; } From 44335b5507c09372383792e5f7027a21974acd9b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 1 Feb 2026 18:31:58 -0800 Subject: [PATCH 19/26] Bump tar in /jbrowse in the npm_and_yarn group across 1 directory (#374) Bumps the npm_and_yarn group with 1 update in the /jbrowse directory: [tar](https://github.com/isaacs/node-tar). Updates `tar` from 7.4.3 to 7.5.4 - [Release notes](https://github.com/isaacs/node-tar/releases) - [Changelog](https://github.com/isaacs/node-tar/blob/main/CHANGELOG.md) - [Commits](https://github.com/isaacs/node-tar/compare/v7.4.3...v7.5.4) --- updated-dependencies: - dependency-name: tar dependency-version: 7.5.4 dependency-type: direct:development dependency-group: npm_and_yarn ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- jbrowse/package-lock.json | 339 ++------------------------------------ jbrowse/package.json | 2 +- 2 files changed, 12 insertions(+), 329 deletions(-) diff --git a/jbrowse/package-lock.json b/jbrowse/package-lock.json index 8ef817b6b..4c54b33b1 100644 --- a/jbrowse/package-lock.json +++ b/jbrowse/package-lock.json @@ -47,7 +47,7 @@ "@types/react": "^18.3.0", "@types/react-dom": "^18.3.0", "rimraf": "^6.0.1", - "tar": "^7.4.3", + "tar": "^7.5.4", "typescript": "^5.1.6", "unzipper": "^0.12.3" } @@ -2242,102 +2242,6 @@ "node": "20 || >=22" } }, - "node_modules/@isaacs/cliui": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", - "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", - "dev": true, - "dependencies": { - "string-width": "^5.1.2", - "string-width-cjs": "npm:string-width@^4.2.0", - "strip-ansi": "^7.0.1", - "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", - "wrap-ansi": "^8.1.0", - "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/@isaacs/cliui/node_modules/ansi-regex": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", - "dev": true, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-regex?sponsor=1" - } - }, - "node_modules/@isaacs/cliui/node_modules/ansi-styles": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", - "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", - "dev": true, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/@isaacs/cliui/node_modules/emoji-regex": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", - "dev": true - }, - "node_modules/@isaacs/cliui/node_modules/string-width": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", - "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", - "dev": true, - "dependencies": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/@isaacs/cliui/node_modules/strip-ansi": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", - "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", - "dev": true, - "dependencies": { - "ansi-regex": "^6.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/strip-ansi?sponsor=1" - } - }, - "node_modules/@isaacs/cliui/node_modules/wrap-ansi": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", - "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", 
- "dev": true, - "dependencies": { - "ansi-styles": "^6.1.0", - "string-width": "^5.0.1", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, "node_modules/@isaacs/fs-minipass": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz", @@ -3848,16 +3752,6 @@ "url": "https://opencollective.com/parcel" } }, - "node_modules/@pkgjs/parseargs": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", - "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", - "dev": true, - "optional": true, - "engines": { - "node": ">=14" - } - }, "node_modules/@pmmmwh/react-refresh-webpack-plugin": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/@pmmmwh/react-refresh-webpack-plugin/-/react-refresh-webpack-plugin-0.6.1.tgz", @@ -5086,15 +4980,6 @@ "dev": true, "license": "MIT" }, - "node_modules/brace-expansion": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", - "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", - "dev": true, - "dependencies": { - "balanced-match": "^1.0.0" - } - }, "node_modules/braces": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", @@ -6431,11 +6316,6 @@ "readable-stream": "^2.0.2" } }, - "node_modules/eastasianwidth": { - "version": "0.2.0", - "dev": true, - "license": "MIT" - }, "node_modules/ee-first": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", @@ -6466,11 +6346,6 @@ "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.1.tgz", "integrity": "sha512-k8TVBiPkPJT9uHLdOKfFpqcfprwBFOAAXXozRubr7R7PfIuKvQlzcI4M0pALeqXN09vdaMbUdUj+pass+uULAg==" }, - "node_modules/emoji-regex": { - "version": "8.0.0", - "dev": true, - "license": "MIT" - }, "node_modules/emojis-list": { "version": "3.0.0", "dev": true, @@ -6901,32 +6776,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/foreground-child": { - "version": "3.3.0", - "dev": true, - "license": "ISC", - "dependencies": { - "cross-spawn": "^7.0.0", - "signal-exit": "^4.0.1" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/foreground-child/node_modules/signal-exit": { - "version": "4.1.0", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/fork-ts-checker-webpack-plugin": { "version": "9.1.0", "resolved": "https://registry.npmjs.org/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-9.1.0.tgz", @@ -7847,14 +7696,6 @@ "node": ">=0.10.0" } }, - "node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "node_modules/is-generator-function": { "version": "1.0.10", "license": "MIT", @@ -8473,21 +8314,6 @@ "resolved": "https://registry.npmjs.org/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz", "integrity": "sha512-JIYlbt6g8i5jKfJ3xz7rF0LXmv2TkDxBLUkiBeZ7bAx4GnnNMr8xFpGnOxn6GhTEHx3SjRrZEoU+j04prX1ktg==" }, - "node_modules/minimatch": { - "version": "9.0.5", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", - "integrity": 
"sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", - "dev": true, - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/minipass": { "version": "7.1.2", "dev": true, @@ -8497,105 +8323,18 @@ } }, "node_modules/minizlib": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.1.tgz", - "integrity": "sha512-umcy022ILvb5/3Djuu8LWeqUa8D68JaBzlttKeMWen48SjabqS3iY5w/vzeMzMUNhLDifyhbOwKDSznB1vvrwg==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.1.0.tgz", + "integrity": "sha512-KZxYo1BUkWD2TVFLr0MQoM8vUUigWD3LlD83a/75BqC+4qE0Hb1Vo5v1FgcfaNXvfXzr+5EhQ6ing/CaBijTlw==", "dev": true, + "license": "MIT", "dependencies": { - "minipass": "^7.0.4", - "rimraf": "^5.0.5" + "minipass": "^7.1.2" }, "engines": { "node": ">= 18" } }, - "node_modules/minizlib/node_modules/glob": { - "version": "10.5.0", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.5.0.tgz", - "integrity": "sha512-DfXN8DfhJ7NH3Oe7cFmu3NCu1wKbkReJ8TorzSAFbSKrlNaQSKfIzqYqVY8zlbs2NLBbWpRiU52GX2PbaBVNkg==", - "dev": true, - "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^3.1.2", - "minimatch": "^9.0.4", - "minipass": "^7.1.2", - "package-json-from-dist": "^1.0.0", - "path-scurry": "^1.11.1" - }, - "bin": { - "glob": "dist/esm/bin.mjs" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/minizlib/node_modules/jackspeak": { - "version": "3.4.3", - "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz", - "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==", - "dev": true, - "dependencies": { - "@isaacs/cliui": "^8.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - }, - "optionalDependencies": { - "@pkgjs/parseargs": "^0.11.0" - } - }, - "node_modules/minizlib/node_modules/lru-cache": { - "version": "10.4.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", - "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", - "dev": true - }, - "node_modules/minizlib/node_modules/path-scurry": { - "version": "1.11.1", - "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", - "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", - "dev": true, - "dependencies": { - "lru-cache": "^10.2.0", - "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" - }, - "engines": { - "node": ">=16 || 14 >=14.18" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/minizlib/node_modules/rimraf": { - "version": "5.0.10", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-5.0.10.tgz", - "integrity": "sha512-l0OE8wL34P4nJH/H2ffoaniAokM2qSmrtXHmlpvYr5AVVX8msAyW0l8NVJFDxlSK4u3Uh/f41cQheDVdnYijwQ==", - "dev": true, - "dependencies": { - "glob": "^10.3.7" - }, - "bin": { - "rimraf": "dist/esm/bin.mjs" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/mkdirp": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz", - "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==", - "dev": true, - "bin": { - "mkdirp": "dist/cjs/src/bin.js" - }, - 
"engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/mobx": { "version": "6.13.1", "license": "MIT", @@ -11011,33 +10750,6 @@ "safe-buffer": "~5.2.0" } }, - "node_modules/string-width": { - "version": "4.2.3", - "dev": true, - "license": "MIT", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/string-width-cjs": { - "name": "string-width", - "version": "4.2.3", - "dev": true, - "license": "MIT", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/strip-ansi": { "version": "6.0.1", "dev": true, @@ -11049,18 +10761,6 @@ "node": ">=8" } }, - "node_modules/strip-ansi-cjs": { - "name": "strip-ansi", - "version": "6.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/strip-indent": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz", @@ -11139,16 +10839,16 @@ } }, "node_modules/tar": { - "version": "7.4.3", - "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz", - "integrity": "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==", + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.4.tgz", + "integrity": "sha512-AN04xbWGrSTDmVwlI4/GTlIIwMFk/XEv7uL8aa57zuvRy6s4hdBed+lVq2fAZ89XDa7Us3ANXcE3Tvqvja1kTA==", "dev": true, + "license": "BlueOak-1.0.0", "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", - "minizlib": "^3.0.1", - "mkdirp": "^3.0.1", + "minizlib": "^3.1.0", "yallist": "^5.0.0" }, "engines": { @@ -12174,23 +11874,6 @@ "integrity": "sha512-CC1bOL87PIWSBhDcTrdeLo6eGT7mCFtrg0uIJtqJUFyK+eJnzl8A1niH56uu7KMa5XFrtiV+AQuHO3n7DsHnLQ==", "dev": true }, - "node_modules/wrap-ansi-cjs": { - "name": "wrap-ansi", - "version": "7.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, "node_modules/ws": { "version": "8.18.3", "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.3.tgz", diff --git a/jbrowse/package.json b/jbrowse/package.json index 402055fef..3e438c666 100644 --- a/jbrowse/package.json +++ b/jbrowse/package.json @@ -53,7 +53,7 @@ "@types/react": "^18.3.0", "@types/react-dom": "^18.3.0", "rimraf": "^6.0.1", - "tar": "^7.4.3", + "tar": "^7.5.4", "typescript": "^5.1.6", "unzipper": "^0.12.3" } From b199f32806cf44a8d4d645cb013847a7f61b74ea Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 1 Feb 2026 18:36:14 -0800 Subject: [PATCH 20/26] Bump the npm_and_yarn group across 1 directory with 2 updates (#376) Bumps the npm_and_yarn group with 2 updates in the /jbrowse directory: [tar](https://github.com/isaacs/node-tar) and [lodash](https://github.com/lodash/lodash). 
Updates `tar` from 7.5.4 to 7.5.7 - [Release notes](https://github.com/isaacs/node-tar/releases) - [Changelog](https://github.com/isaacs/node-tar/blob/main/CHANGELOG.md) - [Commits](https://github.com/isaacs/node-tar/compare/v7.5.4...v7.5.7) Updates `lodash` from 4.17.21 to 4.17.23 - [Release notes](https://github.com/lodash/lodash/releases) - [Commits](https://github.com/lodash/lodash/compare/4.17.21...4.17.23) --- updated-dependencies: - dependency-name: tar dependency-version: 7.5.7 dependency-type: direct:development dependency-group: npm_and_yarn - dependency-name: lodash dependency-version: 4.17.23 dependency-type: indirect dependency-group: npm_and_yarn ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- jbrowse/package-lock.json | 16 ++++++++++------ jbrowse/package.json | 2 +- 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/jbrowse/package-lock.json b/jbrowse/package-lock.json index 4c54b33b1..a608f1109 100644 --- a/jbrowse/package-lock.json +++ b/jbrowse/package-lock.json @@ -47,7 +47,7 @@ "@types/react": "^18.3.0", "@types/react-dom": "^18.3.0", "rimraf": "^6.0.1", - "tar": "^7.5.4", + "tar": "^7.5.7", "typescript": "^5.1.6", "unzipper": "^0.12.3" } @@ -8069,11 +8069,15 @@ } }, "node_modules/lodash": { - "version": "4.17.21", + "version": "4.17.23", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.23.tgz", + "integrity": "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==", "license": "MIT" }, "node_modules/lodash-es": { - "version": "4.17.21", + "version": "4.17.23", + "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.23.tgz", + "integrity": "sha512-kVI48u3PZr38HdYz98UmfPnXl2DXrpdctLrFLCd3kOx1xUkOmpFPx7gCWWM5MPkL/fD8zb+Ph0QzjGFs4+hHWg==", "license": "MIT" }, "node_modules/lodash.debounce": { @@ -10839,9 +10843,9 @@ } }, "node_modules/tar": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.4.tgz", - "integrity": "sha512-AN04xbWGrSTDmVwlI4/GTlIIwMFk/XEv7uL8aa57zuvRy6s4hdBed+lVq2fAZ89XDa7Us3ANXcE3Tvqvja1kTA==", + "version": "7.5.7", + "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.7.tgz", + "integrity": "sha512-fov56fJiRuThVFXD6o6/Q354S7pnWMJIVlDBYijsTNx6jKSE4pvrDTs6lUnmGvNyfJwFQQwWy3owKz1ucIhveQ==", "dev": true, "license": "BlueOak-1.0.0", "dependencies": { diff --git a/jbrowse/package.json b/jbrowse/package.json index 3e438c666..baa3472e9 100644 --- a/jbrowse/package.json +++ b/jbrowse/package.json @@ -53,7 +53,7 @@ "@types/react": "^18.3.0", "@types/react-dom": "^18.3.0", "rimraf": "^6.0.1", - "tar": "^7.5.4", + "tar": "^7.5.7", "typescript": "^5.1.6", "unzipper": "^0.12.3" } From 0d1b5f15cc72a6fa88f0c22d063edf631c5ba303 Mon Sep 17 00:00:00 2001 From: bbimber Date: Mon, 2 Feb 2026 09:17:54 -0800 Subject: [PATCH 21/26] Store cdr3WithProductive with response data --- .../resources/chunks/IdentifyAndStoreActiveClonotypes.R | 2 +- singlecell/resources/chunks/PredictTcellActivation.R | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/singlecell/resources/chunks/IdentifyAndStoreActiveClonotypes.R b/singlecell/resources/chunks/IdentifyAndStoreActiveClonotypes.R index 3efdee1b4..d6e8c2141 100644 --- a/singlecell/resources/chunks/IdentifyAndStoreActiveClonotypes.R +++ b/singlecell/resources/chunks/IdentifyAndStoreActiveClonotypes.R @@ -11,7 +11,7 @@ for (datasetId in names(seuratObjects)) { printName(datasetId) seuratObj <- readSeuratRDS(seuratObjects[[datasetId]]) - if 
(! 'TRB_Segments' %in% names(seuratObj@meta.data)) { + if (! 'TRB_WithProductive' %in% names(seuratObj@meta.data)) { print('Re-running AppendTcr to add segment columns') seuratObj <- Rdiscvr::DownloadAndAppendTcrClonotypes(seuratObj, allowMissing = TRUE) } diff --git a/singlecell/resources/chunks/PredictTcellActivation.R b/singlecell/resources/chunks/PredictTcellActivation.R index 5efc2ab7b..70e660433 100644 --- a/singlecell/resources/chunks/PredictTcellActivation.R +++ b/singlecell/resources/chunks/PredictTcellActivation.R @@ -11,6 +11,11 @@ for (datasetId in names(seuratObjects)) { printName(datasetId) seuratObj <- readSeuratRDS(seuratObjects[[datasetId]]) + if (! 'TRB_WithProductive' %in% names(seuratObj@meta.data)) { + print('Re-running AppendTcr to add segment columns') + seuratObj <- Rdiscvr::DownloadAndAppendTcrClonotypes(seuratObj, allowMissing = TRUE) + } + toDrop <- grep(names(seuratObj@meta.data), pattern = "sPLS", value = TRUE) if (length(toDrop) > 0) { print(paste0('Dropping pre-existing columns: ', paste0(toDrop, collapse = ', '))) From c152ed6a3185e4cee801933ad46bf63636e52cd5 Mon Sep 17 00:00:00 2001 From: bbimber Date: Tue, 3 Feb 2026 13:51:30 -0800 Subject: [PATCH 22/26] Fix NPE (#375) * Bugfix to GTF/GFF and jbrowse --- .../org/labkey/jbrowse/model/JsonFile.java | 73 +++++++++++++++++-- 1 file changed, 66 insertions(+), 7 deletions(-) diff --git a/jbrowse/src/org/labkey/jbrowse/model/JsonFile.java b/jbrowse/src/org/labkey/jbrowse/model/JsonFile.java index 1000fa9a5..eceec2c9a 100644 --- a/jbrowse/src/org/labkey/jbrowse/model/JsonFile.java +++ b/jbrowse/src/org/labkey/jbrowse/model/JsonFile.java @@ -42,6 +42,7 @@ import org.labkey.api.sequenceanalysis.SequenceOutputFile; import org.labkey.api.sequenceanalysis.pipeline.ReferenceGenome; import org.labkey.api.sequenceanalysis.pipeline.SequencePipelineService; +import org.labkey.api.sequenceanalysis.run.PicardWrapper; import org.labkey.api.sequenceanalysis.run.SimpleScriptWrapper; import org.labkey.api.settings.AppProps; import org.labkey.api.util.FileType; @@ -587,8 +588,7 @@ public boolean matchesTrackSelector(List toTest) public String getJsonTrackId() { - final File finalLocation = getLocationOfProcessedTrack(false); - return finalLocation == null ? 
null : finalLocation.getName();
+ return getSourceFileName();
}
private JSONObject getBamOrCramTrack(Logger log, ExpData targetFile, ReferenceGenome rg)
@@ -627,6 +627,11 @@ private JSONObject getBamOrCramTrack(Logger log, ExpData targetFile, ReferenceGe
{{
put("location", new JSONObject()
{{
+ if (!new File(targetFile.getFile() + ".bai").exists())
+ {
+ log.error("Track lacks an index: {}, expected: {}", getObjectId(), targetFile.getFile().getPath() + ".bai");
+ }
+
put("uri", url + ".bai");
}});
put("indexType", "BAI");
@@ -646,6 +651,11 @@ private JSONObject getBamOrCramTrack(Logger log, ExpData targetFile, ReferenceGe
}});
put("craiLocation", new JSONObject()
{{
+ if (!new File(targetFile.getFile() + ".crai").exists())
+ {
+ log.error("Track lacks an index: {}, expected: {}", getObjectId(), targetFile.getFile().getPath() + ".crai");
+ }
+
put("uri", url + ".crai");
}});
put("sequenceAdapter", JBrowseSession.getBgZippedIndexedFastaAdapter(rg));
@@ -771,6 +781,11 @@ private JSONObject getTabixTrack(User u, Logger log, ExpData targetFile, Referen
return null;
}
+ if (!new File(gzipped.getPath() + ".tbi").exists())
+ {
+ log.error("Track lacks an index: {}, expected: {}", getObjectId(), gzipped.getPath() + ".tbi");
+ }
+
ret.put("adapter", new JSONObject(){{
put("type", adapterType);
put(prefix + "GzLocation", new JSONObject(){{
@@ -790,12 +805,12 @@ private JSONObject getTabixTrack(User u, Logger log, ExpData targetFile, Referen
public boolean needsProcessing()
{
- return (needsGzip() && !isGzipped()) || doIndex() || shouldHaveFreeTextSearch();
+ return (needsGzip() && !isGzipped()) || shouldBeReSorted() || doIndex() || shouldHaveFreeTextSearch();
}
public boolean shouldBeCopiedToProcessDir()
{
- return (needsGzip() && !isGzipped());
+ return (needsGzip() && !isGzipped()) || shouldBeReSorted();
}
public boolean isGzipped()
@@ -828,10 +843,16 @@ public File prepareResource(User u, Logger log, boolean throwIfNotPrepared, bool
throw new PipelineJobException("No ExpData for JsonFile: " + getObjectId());
}
- final File processedTrackFile = getLocationOfProcessedTrack(true);
- final File processedTrackDir = processedTrackFile.getParentFile();
+ File processedTrackFile = getLocationOfProcessedTrack(true);
+ final File processedTrackDir = processedTrackFile == null ?
null : processedTrackFile.getParentFile(); + if (processedTrackFile == null) + { + processedTrackFile = expData.getFile(); + log.debug("Track does not require processing or indexing, using original location: " + processedTrackFile.getPath()); + } + File targetFile = expData.getFile(); - if (needsGzip() && !isGzipped()) + if ((needsGzip() && !isGzipped()) || shouldBeReSorted()) { //need to gzip and tabix index: if (processedTrackFile.exists() && !SequencePipelineService.get().hasMinLineCount(processedTrackFile, 1)) @@ -896,8 +917,30 @@ public File prepareResource(User u, Logger log, boolean throwIfNotPrepared, bool createIndex(targetFile, log, idx, throwIfNotPrepared); } + if (TRACK_TYPES.bam.getFileType().isType(targetFile) || TRACK_TYPES.cram.getFileType().isType(targetFile)) + { + File fileIdx = SequenceAnalysisService.get().getExpectedBamOrCramIndex(targetFile); + if (!fileIdx.exists()) + { + if (throwIfNotPrepared) + { + throw new IllegalStateException("This track should have been previously indexed: " + targetFile.getName()); + } + + if (PicardWrapper.getPicardJar(false) != null) + { + SequenceAnalysisService.get().ensureBamOrCramIdx(targetFile, log, false); + } + } + } + if (doIndex()) { + if (processedTrackDir == null) + { + throw new PipelineJobException("processedTrackDir should not be null"); + } + File trixDir = FileUtil.appendName(processedTrackDir, "trix"); if (forceReprocess && trixDir.exists()) { @@ -1154,6 +1197,17 @@ else if (TRACK_TYPES.gff.getFileType().isType(finalLocation) || TRACK_TYPES.gtf. } } + private boolean shouldBeReSorted() + { + String sourceFilename = getSourceFileName(); + if (sourceFilename == null) + { + return false; + } + + return TRACK_TYPES.gff.getFileType().isType(sourceFilename) || TRACK_TYPES.gtf.getFileType().isType(sourceFilename) || TRACK_TYPES.bed.getFileType().isType(sourceFilename); + } + public File getLocationOfProcessedTrack(boolean createDir) { ExpData expData = getExpData(); @@ -1163,6 +1217,11 @@ public File getLocationOfProcessedTrack(boolean createDir) } File trackDir = getBaseDir(); + if (trackDir == null) + { + return null; + } + if (createDir && !trackDir.exists()) { trackDir.mkdirs(); From bad6efb394d288cf49f2a414bbe27f91edc3b110 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 3 Feb 2026 14:05:07 -0800 Subject: [PATCH 23/26] Bump jspdf in /jbrowse in the npm_and_yarn group across 1 directory (#377) Bumps the npm_and_yarn group with 1 update in the /jbrowse directory: [jspdf](https://github.com/parallax/jsPDF). Updates `jspdf` from 4.0.0 to 4.1.0 - [Release notes](https://github.com/parallax/jsPDF/releases) - [Changelog](https://github.com/parallax/jsPDF/blob/master/RELEASE.md) - [Commits](https://github.com/parallax/jsPDF/compare/v4.0.0...v4.1.0) --- updated-dependencies: - dependency-name: jspdf dependency-version: 4.1.0 dependency-type: direct:production dependency-group: npm_and_yarn ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: bbimber --- jbrowse/package-lock.json | 17 +++++++++-------- jbrowse/package.json | 2 +- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/jbrowse/package-lock.json b/jbrowse/package-lock.json index a608f1109..6539df623 100644 --- a/jbrowse/package-lock.json +++ b/jbrowse/package-lock.json @@ -24,7 +24,7 @@ "child_process": "^1.0.2", "fs": "^0.0.1-security", "jquery": "^3.7.1", - "jspdf": "^4.0.0", + "jspdf": "^4.1.0", "jspdf-autotable": "^5.0.7", "node-polyfill-webpack-plugin": "4.1.0", "path-browserify": "^1.0.1", @@ -6257,9 +6257,10 @@ } }, "node_modules/dompurify": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.2.4.tgz", - "integrity": "sha512-ysFSFEDVduQpyhzAob/kkuJjf5zWkZD8/A9ywSp1byueyuCfHamrCBa14/Oc2iiB0e51B+NpxSl5gmzn+Ms/mg==", + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.3.1.tgz", + "integrity": "sha512-qkdCKzLNtrgPFP1Vo+98FRzJnBRGe4ffyCea9IwHB1fyxPOeNTHpLKYGd4Uk9xvNoH0ZoOjwZxNptyMwqrId1Q==", + "license": "(MPL-2.0 OR Apache-2.0)", "optionalDependencies": { "@types/trusted-types": "^2.0.7" } @@ -7968,9 +7969,9 @@ } }, "node_modules/jspdf": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/jspdf/-/jspdf-4.0.0.tgz", - "integrity": "sha512-w12U97Z6edKd2tXDn3LzTLg7C7QLJlx0BPfM3ecjK2BckUl9/81vZ+r5gK4/3KQdhAcEZhENUxRhtgYBj75MqQ==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/jspdf/-/jspdf-4.1.0.tgz", + "integrity": "sha512-xd1d/XRkwqnsq6FP3zH1Q+Ejqn2ULIJeDZ+FTKpaabVpZREjsJKRJwuokTNgdqOU+fl55KgbvgZ1pRTSWCP2kQ==", "license": "MIT", "dependencies": { "@babel/runtime": "^7.28.4", @@ -7980,7 +7981,7 @@ "optionalDependencies": { "canvg": "^3.0.11", "core-js": "^3.6.0", - "dompurify": "^3.2.4", + "dompurify": "^3.3.1", "html2canvas": "^1.0.0-rc.5" } }, diff --git a/jbrowse/package.json b/jbrowse/package.json index baa3472e9..7eb561f69 100644 --- a/jbrowse/package.json +++ b/jbrowse/package.json @@ -30,7 +30,7 @@ "child_process": "^1.0.2", "fs": "^0.0.1-security", "jquery": "^3.7.1", - "jspdf": "^4.0.0", + "jspdf": "^4.1.0", "jspdf-autotable": "^5.0.7", "node-polyfill-webpack-plugin": "4.1.0", "path-browserify": "^1.0.1", From 36019721d796249c94817d16b23e94bf31c78271 Mon Sep 17 00:00:00 2001 From: bbimber Date: Thu, 5 Feb 2026 07:57:48 -0800 Subject: [PATCH 24/26] Bugfix to GLNexus --- .../sequenceanalysis/run/DockerWrapper.java | 5 +++++ .../analysis/GLNexusHandler.java | 18 ++++++++------- .../org/labkey/jbrowse/model/JsonFile.java | 22 +++++++++++++++++++ 3 files changed, 37 insertions(+), 8 deletions(-) diff --git a/SequenceAnalysis/api-src/org/labkey/api/sequenceanalysis/run/DockerWrapper.java b/SequenceAnalysis/api-src/org/labkey/api/sequenceanalysis/run/DockerWrapper.java index 8163b1f95..b8d56b90a 100644 --- a/SequenceAnalysis/api-src/org/labkey/api/sequenceanalysis/run/DockerWrapper.java +++ b/SequenceAnalysis/api-src/org/labkey/api/sequenceanalysis/run/DockerWrapper.java @@ -221,6 +221,11 @@ public int getMaxRetries() return _maxRetries; } + public void setMaxRetries(int maxRetries) + { + _maxRetries = maxRetries; + } + // NOTE: when running on a shared/cluster environment with multiple containers initializing concurrently, conflicts can result in these error codes. 
// As a convenience, build in auto-retry behavior if one of these occurs private final List ALLOWABLE_FAIL_CODES = new UnmodifiableList<>(Arrays.asList(125, 127)); diff --git a/SequenceAnalysis/src/org/labkey/sequenceanalysis/analysis/GLNexusHandler.java b/SequenceAnalysis/src/org/labkey/sequenceanalysis/analysis/GLNexusHandler.java index dea268e15..07dac7881 100644 --- a/SequenceAnalysis/src/org/labkey/sequenceanalysis/analysis/GLNexusHandler.java +++ b/SequenceAnalysis/src/org/labkey/sequenceanalysis/analysis/GLNexusHandler.java @@ -15,7 +15,6 @@ import org.labkey.api.sequenceanalysis.SequenceAnalysisService; import org.labkey.api.sequenceanalysis.SequenceOutputFile; import org.labkey.api.sequenceanalysis.pipeline.AbstractParameterizedOutputHandler; -import org.labkey.api.sequenceanalysis.pipeline.BcftoolsRunner; import org.labkey.api.sequenceanalysis.pipeline.PipelineOutputTracker; import org.labkey.api.sequenceanalysis.pipeline.ReferenceGenome; import org.labkey.api.sequenceanalysis.pipeline.SequenceAnalysisJobSupport; @@ -25,9 +24,9 @@ import org.labkey.api.sequenceanalysis.run.AbstractCommandWrapper; import org.labkey.api.sequenceanalysis.run.DockerWrapper; import org.labkey.api.util.FileType; +import org.labkey.api.util.FileUtil; import org.labkey.api.writer.PrintWriters; import org.labkey.sequenceanalysis.SequenceAnalysisModule; -import org.labkey.sequenceanalysis.run.util.BgzipRunner; import org.labkey.sequenceanalysis.util.SequenceUtil; import java.io.File; @@ -172,7 +171,7 @@ else if (genomeIds.isEmpty()) SAMSequenceDictionary dict = SAMSequenceDictionaryExtractor.extractDictionary(rg.getSequenceDictionary().toPath()); for (SAMSequenceRecord r : dict.getSequences()) { - File contigVcf = new File(ctx.getOutputDir(), basename + "." + r.getSequenceName() + ".vcf.gz"); + File contigVcf = FileUtil.appendName(ctx.getOutputDir(), basename + "." 
+ r.getSequenceName() + ".vcf.gz");
File contigVcfIdx = new File(contigVcf.getPath() + ".tbi");
File doneFile = new File(contigVcf.getPath() + ".done");
ctx.getFileManager().addIntermediateFile(contigVcf);
@@ -241,8 +240,9 @@ public void execute(List inputGvcfs, File outputVcf, PipelineOutputTracker
DockerWrapper wrapper = new DockerWrapper("ghcr.io/dnanexus-rnd/glnexus:" + binVersion, ctx.getLogger(), ctx);
wrapper.setTmpDir(new File(SequencePipelineService.get().getJavaTempDir()));
wrapper.setWorkingDir(ctx.getWorkingDirectory());
+ wrapper.setMaxRetries(0);
- File bed = new File(ctx.getWorkingDirectory(), "contig.bed");
+ File bed = FileUtil.appendName(ctx.getWorkingDirectory(), "contig.bed");
tracker.addIntermediateFile(bed);
try (PrintWriter bedWriter = PrintWriters.getPrintWriter(bed))
{
@@ -277,12 +277,10 @@ public void execute(List inputGvcfs, File outputVcf, PipelineOutputTracker
dockerArgs.add(f.getPath());
});
- File bcftools = BcftoolsRunner.getBcfToolsPath();
- File bgzip = BgzipRunner.getExe();
- dockerArgs.add(" | " + bcftools.getPath() + " view | " + bgzip.getPath() + " -c > " + outputVcf.getPath());
+ dockerArgs.add(" | bcftools view | bgzip -f -c > " + outputVcf.getPath());
// Command will fail if this exists:
- File dbDir = new File (ctx.getWorkingDirectory(), "GLnexus.DB");
+ File dbDir = FileUtil.appendName(ctx.getWorkingDirectory(), "GLnexus.DB");
tracker.addIntermediateFile(dbDir);
if (dbDir.exists())
{
@@ -296,6 +294,10 @@ public void execute(List inputGvcfs, File outputVcf, PipelineOutputTracker
throw new PipelineJobException(e);
}
}
+ else
+ {
+ getLogger().debug("GLnexus.DB does not exist: " + dbDir.getPath());
+ }
wrapper.executeWithDocker(dockerArgs, ctx.getWorkingDirectory(), tracker, inputGvcfs);
diff --git a/jbrowse/src/org/labkey/jbrowse/model/JsonFile.java b/jbrowse/src/org/labkey/jbrowse/model/JsonFile.java
index eceec2c9a..f943ff996 100644
--- a/jbrowse/src/org/labkey/jbrowse/model/JsonFile.java
+++ b/jbrowse/src/org/labkey/jbrowse/model/JsonFile.java
@@ -1,5 +1,8 @@
package org.labkey.jbrowse.model;
+import htsjdk.samtools.BAMIndexer;
+import htsjdk.samtools.SamReader;
+import htsjdk.samtools.SamReaderFactory;
import htsjdk.samtools.util.FileExtensions;
import htsjdk.tribble.bed.BEDCodec;
import htsjdk.tribble.gff.Gff3Codec;
@@ -931,6 +934,25 @@ public File prepareResource(User u, Logger log, boolean throwIfNotPrepared, bool
{
SequenceAnalysisService.get().ensureBamOrCramIdx(targetFile, log, false);
}
+ else if (TRACK_TYPES.bam.getFileType().isType(targetFile))
+ {
+ if (targetFile.length() < 5e6)
+ {
+ log.debug("Creating BAM index: " + targetFile.getPath());
+ try (SamReader samReader = SamReaderFactory.make().open(targetFile.toPath()))
+ {
+ BAMIndexer.createIndex(samReader, fileIdx);
+ }
+ catch (IOException e)
+ {
+ throw new PipelineJobException(e);
+ }
+ }
+ else
+ {
+ log.debug("BAM lacks an index but is too large to auto-create: " + targetFile.length());
+ }
+ }
}
}
From 43f46037642e94669fc9e91a24f0c88cdca53915 Mon Sep 17 00:00:00 2001
From: bbimber
Date: Thu, 5 Feb 2026 09:43:16 -0800
Subject: [PATCH 25/26] Fix 'fileSource required for indexing' bug
---
jbrowse/src/org/labkey/jbrowse/model/JsonFile.java | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/jbrowse/src/org/labkey/jbrowse/model/JsonFile.java b/jbrowse/src/org/labkey/jbrowse/model/JsonFile.java
index f943ff996..756e22cd6 100644
--- a/jbrowse/src/org/labkey/jbrowse/model/JsonFile.java
+++ b/jbrowse/src/org/labkey/jbrowse/model/JsonFile.java
@@ -939,7 +939,7 @@ else if
From 43f46037642e94669fc9e91a24f0c88cdca53915 Mon Sep 17 00:00:00 2001
From: bbimber
Date: Thu, 5 Feb 2026 09:43:16 -0800
Subject: [PATCH 25/26] Fix bug: fileSource is required for BAM indexing

---
 jbrowse/src/org/labkey/jbrowse/model/JsonFile.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/jbrowse/src/org/labkey/jbrowse/model/JsonFile.java b/jbrowse/src/org/labkey/jbrowse/model/JsonFile.java
index f943ff996..756e22cd6 100644
--- a/jbrowse/src/org/labkey/jbrowse/model/JsonFile.java
+++ b/jbrowse/src/org/labkey/jbrowse/model/JsonFile.java
@@ -939,7 +939,7 @@ else if (TRACK_TYPES.bam.getFileType().isType(targetFile))
             if (targetFile.length() < 5e6)
             {
                 log.debug("Creating BAM index: " + targetFile.getPath());
-                try (SamReader samReader = SamReaderFactory.make().open(targetFile.toPath()))
+                try (SamReader samReader = SamReaderFactory.makeDefault().enable(SamReaderFactory.Option.INCLUDE_SOURCE_IN_RECORDS).open(targetFile.toPath()))
                 {
                     BAMIndexer.createIndex(samReader, fileIdx);
                 }
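For context on the one-line fix above: BAMIndexer.createIndex streams every record and needs each record's source file pointer to compute index chunks, and htsjdk only attaches those pointers when the reader is opened with Option.INCLUDE_SOURCE_IN_RECORDS — which neither make() nor makeDefault() enables on its own, hence the explicit enable() in the fix. A standalone sketch of the corrected pattern (not the module's code; the BAM must be coordinate-sorted):

    import htsjdk.samtools.BAMIndexer;
    import htsjdk.samtools.SamReader;
    import htsjdk.samtools.SamReaderFactory;

    import java.io.File;
    import java.io.IOException;

    public class BamIndexSketch
    {
        // Builds a .bai index by reading the whole BAM once, which is why the
        // caller in the previous patch guards on file size before indexing inline.
        public static void createBamIndex(File bam, File index) throws IOException
        {
            SamReaderFactory factory = SamReaderFactory.makeDefault()
                    .enable(SamReaderFactory.Option.INCLUDE_SOURCE_IN_RECORDS);
            try (SamReader reader = factory.open(bam.toPath()))
            {
                BAMIndexer.createIndex(reader, index);
            }
        }
    }
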
From f98c8ea7f2ca904dee127432ecfb639faa0ad18c Mon Sep 17 00:00:00 2001
From: bbimber
Date: Sat, 7 Feb 2026 11:24:15 -0800
Subject: [PATCH 26/26] Migrate NT sequence files from flat dir to hashed structure (#378)

* Migrate NT sequence files from flat dir to hashed structure

---
 .../sequenceanalysis/RefNtSequenceModel.java       | 56 +++++++++-------
 .../SequenceAnalysis-12.331-12.332.sql             |  1 +
 .../SequenceAnalysis-12.331-12.332.sql             |  1 +
 .../SequenceAnalysisMaintenanceTask.java           | 52 ++++++++++-----
 .../SequenceAnalysisModule.java                    |  2 +-
 .../SequenceAnalysisUpgradeCode.java               | 65 +++++++++++++++++++
 6 files changed, 134 insertions(+), 43 deletions(-)
 create mode 100644 SequenceAnalysis/resources/schemas/dbscripts/postgresql/SequenceAnalysis-12.331-12.332.sql
 create mode 100644 SequenceAnalysis/resources/schemas/dbscripts/sqlserver/SequenceAnalysis-12.331-12.332.sql

diff --git a/SequenceAnalysis/api-src/org/labkey/api/sequenceanalysis/RefNtSequenceModel.java b/SequenceAnalysis/api-src/org/labkey/api/sequenceanalysis/RefNtSequenceModel.java
index 4d7afe5c6..71d236c70 100644
--- a/SequenceAnalysis/api-src/org/labkey/api/sequenceanalysis/RefNtSequenceModel.java
+++ b/SequenceAnalysis/api-src/org/labkey/api/sequenceanalysis/RefNtSequenceModel.java
@@ -18,7 +18,6 @@
 import htsjdk.samtools.util.StringUtil;
 import org.apache.commons.io.IOUtils;
 import org.apache.logging.log4j.Logger;
-import org.apache.logging.log4j.LogManager;
 import org.jetbrains.annotations.Nullable;
 import org.labkey.api.data.Container;
 import org.labkey.api.data.ContainerManager;
@@ -32,8 +31,11 @@
 import org.labkey.api.exp.api.ExpData;
 import org.labkey.api.exp.api.ExperimentService;
 import org.labkey.api.files.FileContentService;
+import org.labkey.api.security.Crypt;
 import org.labkey.api.security.User;
+import org.labkey.api.util.FileUtil;
 import org.labkey.api.util.MemTracker;
+import org.labkey.api.util.logging.LogHelper;
 import org.labkey.api.writer.PrintWriters;
 
 import java.io.File;
@@ -55,7 +57,9 @@
  */
 public class RefNtSequenceModel implements Serializable
 {
-    private static final Logger _log = LogManager.getLogger(RefNtSequenceModel.class);
+    private static final Logger _log = LogHelper.getLogger(RefNtSequenceModel.class, "Messages related to Reference NT Sequences");
+
+    public static final String BASE_DIRNAME = ".sequences";
 
     private int _rowid;
     private String _name;
@@ -414,7 +418,7 @@ public byte[] getSequenceBases()
 
     public void createFileForSequence(User u, String sequence, @Nullable File outDir) throws IOException
     {
-        File output = getExpectedSequenceFile(outDir);
+        File output = getExpectedSequenceFile();
         if (output.exists())
         {
             output.delete();
@@ -439,9 +443,9 @@ public void createFileForSequence(User u, String sequence, @Nullable File outDir
         Table.update(u, ti, this, _rowid);
     }
 
-    private File getExpectedSequenceFile(@Nullable File outDir) throws IllegalArgumentException
+    public File getExpectedSequenceFile() throws IllegalArgumentException
     {
-        return new File(getSequenceDir(true, outDir), _rowid + ".txt.gz");
+        return FileUtil.appendName(getHashedDir(true), _rowid + ".txt.gz");
     }
 
     private Container getLabKeyContainer()
@@ -455,20 +459,9 @@ private Container getLabKeyContainer()
         return c;
     }
 
-    private File getSequenceDir(boolean create, @Nullable File outDir) throws IllegalArgumentException
+    private File getBaseSequenceDir() throws IllegalArgumentException
     {
         Container c = getLabKeyContainer();
-        File ret = outDir == null ? getReferenceSequenceDir(c) : outDir;
-        if (create && !ret.exists())
-        {
-            ret.mkdirs();
-        }
-
-        return ret;
-    }
-
-    private File getReferenceSequenceDir(Container c) throws IllegalArgumentException
-    {
         FileContentService fileService = FileContentService.get();
         File root = fileService == null ? null : fileService.getFileRoot(c, FileContentService.ContentType.files);
         if (root == null)
@@ -476,12 +469,7 @@ private File getReferenceSequenceDir(Container c) throws IllegalArgumentExceptio
             throw new IllegalArgumentException("File root not defined for container: " + c.getPath());
         }
 
-        return new File(root, ".sequences");
-    }
-
-    public void writeSequence(Writer writer, int lineLength) throws IOException
-    {
-        writeSequence(writer, lineLength, null, null);
+        return FileUtil.appendName(root, BASE_DIRNAME);
     }
 
     public void writeSequence(Writer writer, int lineLength, Integer start, Integer end) throws IOException
@@ -562,6 +550,26 @@ public File getOffsetsFile()
             return null;
         }
 
-        return new File(d.getFile().getParentFile(), getRowid() + "_offsets.txt");
+        return FileUtil.appendName(d.getFile().getParentFile(), getRowid() + "_offsets.txt");
     }
+
+    private File getHashedDir(boolean create)
+    {
+        File baseDir = getBaseSequenceDir();
+        String digest = Crypt.MD5.digest(String.valueOf(getRowid()));
+
+        baseDir = FileUtil.appendName(baseDir, digest.substring(0,4));
+        baseDir = FileUtil.appendName(baseDir, digest.substring(4,8));
+        baseDir = FileUtil.appendName(baseDir, digest.substring(8,12));
+        baseDir = FileUtil.appendName(baseDir, digest.substring(12,20));
+        baseDir = FileUtil.appendName(baseDir, digest.substring(20,28));
+        baseDir = FileUtil.appendName(baseDir, digest.substring(28,32));
+
+        if (create)
+        {
+            baseDir.mkdirs();
+        }
+
+        return baseDir;
+    }
 }
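The hashed layout above fans files out beneath .sequences/ using the hex MD5 of the rowid, split into segments of 4/4/4/8/8/4 characters, so no single directory accumulates an unbounded number of entries. A self-contained sketch of the same derivation using plain JDK classes (this assumes Crypt.MD5.digest returns the standard 32-character hex digest; the split indices mirror getHashedDir above):

    import java.io.File;
    import java.nio.charset.StandardCharsets;
    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;

    public class HashedDirSketch
    {
        public static File hashedDir(File baseDir, int rowId) throws NoSuchAlgorithmException
        {
            byte[] raw = MessageDigest.getInstance("MD5")
                    .digest(String.valueOf(rowId).getBytes(StandardCharsets.UTF_8));

            StringBuilder hex = new StringBuilder(32);
            for (byte b : raw)
            {
                hex.append(String.format("%02x", b));   // 32 lowercase hex chars total
            }

            // Same 4/4/4/8/8/4 nesting as RefNtSequenceModel.getHashedDir()
            String digest = hex.toString();
            File dir = baseDir;
            int[][] splits = {{0, 4}, {4, 8}, {8, 12}, {12, 20}, {20, 28}, {28, 32}};
            for (int[] s : splits)
            {
                dir = new File(dir, digest.substring(s[0], s[1]));
            }

            return dir;
        }
    }

So a digest beginning abcd1234... (an illustrative value) yields .sequences/abcd/1234/..., and a given rowid always maps back to the same directory.
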
diff --git a/SequenceAnalysis/resources/schemas/dbscripts/postgresql/SequenceAnalysis-12.331-12.332.sql b/SequenceAnalysis/resources/schemas/dbscripts/postgresql/SequenceAnalysis-12.331-12.332.sql
new file mode 100644
index 000000000..2c2517351
--- /dev/null
+++ b/SequenceAnalysis/resources/schemas/dbscripts/postgresql/SequenceAnalysis-12.331-12.332.sql
@@ -0,0 +1 @@
+SELECT core.executeJavaUpgradeCode('migrateSequenceDirs');
\ No newline at end of file
diff --git a/SequenceAnalysis/resources/schemas/dbscripts/sqlserver/SequenceAnalysis-12.331-12.332.sql b/SequenceAnalysis/resources/schemas/dbscripts/sqlserver/SequenceAnalysis-12.331-12.332.sql
new file mode 100644
index 000000000..b24244d15
--- /dev/null
+++ b/SequenceAnalysis/resources/schemas/dbscripts/sqlserver/SequenceAnalysis-12.331-12.332.sql
@@ -0,0 +1 @@
+EXEC core.executeJavaUpgradeCode 'migrateSequenceDirs';
\ No newline at end of file
diff --git a/SequenceAnalysis/src/org/labkey/sequenceanalysis/SequenceAnalysisMaintenanceTask.java b/SequenceAnalysis/src/org/labkey/sequenceanalysis/SequenceAnalysisMaintenanceTask.java
index 6ea1c01a9..a87859e5e 100644
--- a/SequenceAnalysis/src/org/labkey/sequenceanalysis/SequenceAnalysisMaintenanceTask.java
+++ b/SequenceAnalysis/src/org/labkey/sequenceanalysis/SequenceAnalysisMaintenanceTask.java
@@ -46,6 +46,7 @@
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 import java.util.Set;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
@@ -301,10 +302,10 @@ private void processContainer(Container c, Logger log) throws IOException, Pipel
         {
             //first sequences
             log.debug("Inspecting sequences");
-            File sequenceDir = new File(root.getRootPath(), ".sequences");
+            File sequenceDir = FileUtil.appendName(root.getRootPath(), ".sequences");
             TableInfo tableRefNtSequences = SequenceAnalysisSchema.getTable(SequenceAnalysisSchema.TABLE_REF_NT_SEQUENCES);
             TableSelector ntTs = new TableSelector(tableRefNtSequences, new SimpleFilter(FieldKey.fromString("container"), c.getId()), null);
-            final Set<String> expectedSequences = new HashSet<>(10000, 1000);
+            final Set<File> expectedSequences = new HashSet<>(10000, 1000);
             ntTs.forEach(RefNtSequenceModel.class, m -> {
                 if (m.getSequenceFile() == null || m.getSequenceFile() == 0)
                 {
@@ -319,26 +320,23 @@ private void processContainer(Container c, Logger log) throws IOException, Pipel
                     return;
                 }
 
-                if (!d.getFile().exists())
-                {
-                    log.error("expected sequence file does not exist for sequence: " + m.getRowid() + " " + m.getName() + ", expected: " + d.getFile().getPath());
-                    return;
-                }
-
                 if (d.getFile().getAbsolutePath().toLowerCase().startsWith(sequenceDir.getAbsolutePath().toLowerCase()))
                 {
-                    expectedSequences.add(d.getFile().getName());
+                    expectedSequences.add(d.getFile());
                 }
             });
 
             if (sequenceDir.exists())
             {
-                for (File child : sequenceDir.listFiles())
+                inspectSequenceDir(sequenceDir, expectedSequences, log);
+            }
+
+            if (!expectedSequences.isEmpty())
+            {
+                for (File missing : expectedSequences)
                 {
-                    if (!expectedSequences.contains(child.getName()))
-                    {
-                        deleteFile(child, log);
-                    }
+                    log.error("expected sequence file does not exist: " + missing.getPath());
+                    continue;
                 }
             }
 
@@ -446,12 +444,12 @@ private void processContainer(Container c, Logger log) throws IOException, Pipel
                         continue;
                     }
 
-                    deleteFile(new File(child, fileName), log);
+                    deleteFile(FileUtil.appendName(child, fileName), log);
                 }
             }
 
             //check/verify tracks
-            File trackDir = new File(child, "tracks");
+            File trackDir = FileUtil.appendName(child, "tracks");
             if (trackDir.exists())
            {
                 Set<String> expectedTracks = new HashSet<>();
@@ -486,7 +484,7 @@ private void processContainer(Container c, Logger log) throws IOException, Pipel
             }
 
             //check/verify chainFiles
-            File chainDir = new File(child, "chainFiles");
+            File chainDir = FileUtil.appendName(child, "chainFiles");
             if (chainDir.exists())
             {
                 Set<String> expectedChains = new HashSet<>();
@@ -555,7 +553,7 @@ private void processContainer(Container c, Logger log) throws IOException, Pipel
             }
         }
 
-        File sequenceOutputsDir = new File(root.getRootPath(), "sequenceOutputs");
+        File sequenceOutputsDir = FileUtil.appendName(root.getRootPath(), "sequenceOutputs");
         if (sequenceOutputsDir.exists())
         {
             for (File child : sequenceOutputsDir.listFiles())
@@ -576,6 +574,24 @@ private void processContainer(Container c, Logger log) throws IOException, Pipel
             }
         }
     }
 
+    private void inspectSequenceDir(File sequenceDir, Set<File> expectedSequences, Logger log) throws IOException
+    {
+        for (File child : Objects.requireNonNull(sequenceDir.listFiles()))
+        {
+            if (child.isDirectory())
+            {
+                inspectSequenceDir(child, expectedSequences, log);
+            }
+            else
+            {
+                if (!expectedSequences.remove(child))
+                {
+                    deleteFile(child, log);
+                }
+            }
+        }
+    }
+
     private void deleteFile(File f, Logger log) throws IOException
     {
         log.info("deleting sequence file: " + f.getPath());
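The maintenance task now works by set difference rather than directory listing: it seeds a Set<File> with every path the database expects, walks the hashed tree removing each file it finds, deletes anything the set never contained, and reports whatever remains in the set as missing. A generic sketch of that reconcile-and-sweep shape (names are illustrative, not the module's API):

    import java.io.File;
    import java.util.ArrayDeque;
    import java.util.Deque;
    import java.util.HashSet;
    import java.util.Objects;
    import java.util.Set;

    public class SweepSketch
    {
        // Deletes files under rootDir that are not in 'expected'; returns the
        // expected files that were never seen, i.e. those missing on disk.
        public static Set<File> sweep(File rootDir, Set<File> expected)
        {
            Set<File> remaining = new HashSet<>(expected);
            Deque<File> toVisit = new ArrayDeque<>();
            toVisit.add(rootDir);
            while (!toVisit.isEmpty())
            {
                for (File child : Objects.requireNonNull(toVisit.poll().listFiles()))
                {
                    if (child.isDirectory())
                        toVisit.add(child);             // descend into hashed subdirectories
                    else if (!remaining.remove(child))  // present on disk but not expected
                        child.delete();
                }
            }

            return remaining;
        }
    }
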
diff --git a/SequenceAnalysis/src/org/labkey/sequenceanalysis/SequenceAnalysisModule.java b/SequenceAnalysis/src/org/labkey/sequenceanalysis/SequenceAnalysisModule.java
index a4044bcae..60186f5ee 100644
--- a/SequenceAnalysis/src/org/labkey/sequenceanalysis/SequenceAnalysisModule.java
+++ b/SequenceAnalysis/src/org/labkey/sequenceanalysis/SequenceAnalysisModule.java
@@ -209,7 +209,7 @@ public String getName()
     @Override
     public Double getSchemaVersion()
     {
-        return 12.331;
+        return 12.332;
     }
 
     @Override
diff --git a/SequenceAnalysis/src/org/labkey/sequenceanalysis/SequenceAnalysisUpgradeCode.java b/SequenceAnalysis/src/org/labkey/sequenceanalysis/SequenceAnalysisUpgradeCode.java
index 858684d11..40b221c70 100644
--- a/SequenceAnalysis/src/org/labkey/sequenceanalysis/SequenceAnalysisUpgradeCode.java
+++ b/SequenceAnalysis/src/org/labkey/sequenceanalysis/SequenceAnalysisUpgradeCode.java
@@ -229,4 +229,69 @@ public void updateBarcodeRC(final ModuleContext moduleContext)
 
         });
     }
+
+    /** Called at 12.331-12.332 */
+    @SuppressWarnings({"UnusedDeclaration"})
+    @DeferredUpgrade
+    public void migrateSequenceDirs(final ModuleContext moduleContext)
+    {
+        try
+        {
+            TableInfo ti = SequenceAnalysisSchema.getTable(SequenceAnalysisSchema.TABLE_REF_NT_SEQUENCES);
+            TableSelector ts = new TableSelector(ti);
+            List<RefNtSequenceModel> nts = ts.getArrayList(RefNtSequenceModel.class);
+            _log.info(nts.size() + " total sequences to migrate");
+            int processed = 0;
+            for (RefNtSequenceModel nt : nts)
+            {
+                processed++;
+
+                if (processed % 1000 == 0)
+                {
+                    _log.info("{} of {} sequence files migrated", processed, nts.size());
+                }
+
+                ExpData legacyExpData = ExperimentService.get().getExpData(nt.getSequenceFile());
+                if (legacyExpData == null)
+                {
+                    _log.error("Missing ExpData for NT sequence: {}", nt.getSequenceFile());
+                    continue;
+                }
+
+                File legacyFile = legacyExpData.getFile();
+                if (!legacyFile.exists())
+                {
+                    _log.error("Missing file for NT sequence: {}", legacyFile.getPath());
+                    continue;
+                }
+
+                if (!RefNtSequenceModel.BASE_DIRNAME.equals(legacyFile.getParentFile().getName()))
+                {
+                    _log.error("Sequence appears to have already been migrated, this might indicate a retry after a failed move: {}", legacyFile.getPath());
+                    continue;
+                }
+
+                File newLocation = nt.getExpectedSequenceFile();
+                if (!newLocation.getParentFile().exists())
+                {
+                    newLocation.getParentFile().mkdirs();
+                }
+
+                if (newLocation.exists())
+                {
+                    _log.error("Target location for migrated sequence file exists, this might indicate a retry after a failed move: {}", newLocation.getPath());
+                    continue;
+                }
+
+                FileUtils.copyFile(legacyFile, newLocation);
+                legacyExpData.setDataFileURI(newLocation.toURI());
+                legacyExpData.save(moduleContext.getUpgradeUser());
+                legacyFile.delete();
+            }
+        }
+        catch (Exception e)
+        {
+            _log.error("Error upgrading sequenceanalysis module", e);
+        }
+    }
 }
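A closing note on how the one-line upgrade scripts reach the Java method: LabKey's core.executeJavaUpgradeCode looks up the named method reflectively on the object the module returns from getUpgradeCode(), and @DeferredUpgrade postpones execution until after module startup, so schema changes are in place first. The wiring presumably follows the standard pattern sketched below (an assumption, not confirmed from this diff):

    // In SequenceAnalysisModule (assumed standard LabKey wiring):
    @Override
    public UpgradeCode getUpgradeCode()
    {
        return new SequenceAnalysisUpgradeCode();
    }

Worth noting: migrateSequenceDirs is written to tolerate a retry after a partial failure. It skips any file whose parent is no longer the flat .sequences directory, never overwrites an existing target, and only deletes the legacy file after a successful copy and ExpData update.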