Merged
Changes from all commits
60 commits
f0bc6b8
Update view sort
bbimber Sep 3, 2025
f3ffedb
Add wrapper for IdentifyAndStoreActiveClonotypes
bbimber Sep 4, 2025
35b8849
Error checking for CommonFilters
bbimber Sep 4, 2025
fb102b9
Update defaults
bbimber Sep 4, 2025
2e6c233
Set baseUrl in script
bbimber Sep 5, 2025
f005c76
Do not storeStimLevelData for TRA
bbimber Sep 8, 2025
43fe5e2
Do not force seurat object name to match readset
bbimber Sep 9, 2025
feba968
Do not force seurat object name to match readset
bbimber Sep 9, 2025
6469964
Drop CS-Core and Tricycle
bbimber Sep 11, 2025
4427a6a
Expand data cleanup
bbimber Sep 12, 2025
d0053f8
Minor code cleanup
bbimber Sep 12, 2025
ddcfe08
Add subadapter + reflection pattern to ExtendedVariantAdapter in orde…
hextraza Sep 16, 2025
f1b7e52
Correct HTML syntax
bbimber Sep 18, 2025
0c0da7d
Expand study triggers and update cohort fields
bbimber Sep 19, 2025
2f7566a
Expose getter for StudiesTriggerFactory
bbimber Sep 19, 2025
04e2b40
Clean up trigger/customizer layer code
bbimber Sep 19, 2025
5688170
Create fields to coalesce name/label for studies
bbimber Sep 19, 2025
14ed86a
Add null check
bbimber Sep 22, 2025
733f92d
Switch default
bbimber Sep 22, 2025
f4aebaf
Bugfix to StudiesTriggerFactory
bbimber Sep 24, 2025
26db835
Update default
bbimber Sep 30, 2025
608cbe4
Improve SnpEff index check
bbimber Oct 1, 2025
3f372f0
Switch sequence init tasks to use webserver-high-priority
bbimber Oct 4, 2025
b4eea84
Build short delay into github triggers to aid cross-repo commits
bbimber Oct 4, 2025
d78b6d2
Switch ETLs to log row count discrepancies
bbimber Oct 6, 2025
689af3e
Update dependencies
bbimber Oct 7, 2025
35b3ae0
Support sawfish --sample-csv arg
bbimber Oct 15, 2025
1fac863
Option to create readsets from SRA (#355)
bbimber Oct 16, 2025
b084e00
Error check
bbimber Oct 16, 2025
604ee34
Add nimble/bulk step
bbimber Oct 16, 2025
6039efb
Allow nimble step to use cached barcodes
bbimber Oct 16, 2025
8921d0a
Bugfix to NimbleAlignmentStep
bbimber Oct 17, 2025
2253e02
Bugfix to NimbleAlignmentStep
bbimber Oct 17, 2025
600cda3
Bugfix to NimbleAlignmentStep
bbimber Oct 17, 2025
2a5c789
Bugfix to NimbleAlignmentStep
bbimber Oct 17, 2025
f75ffaa
Add CD4_Activation_Axis
bbimber Oct 17, 2025
e50b12c
Better support readsets created directly from SRA
bbimber Oct 17, 2025
4930f5f
Expand BAM header
bbimber Oct 17, 2025
2ef4efb
Bugfix to RestoreSraDataHandler
bbimber Oct 17, 2025
4b3fd43
Bugfix to RestoreSraDataHandler for new SRA datasets
bbimber Oct 17, 2025
9a6abc8
Bugfix to RestoreSraDataHandler for new SRA datasets
bbimber Oct 17, 2025
ede3b1e
Bugfix to RestoreSraDataHandler for new SRA datasets
bbimber Oct 17, 2025
f98d6e4
Bugfix to RestoreSraDataHandler for new SRA datasets
bbimber Oct 17, 2025
79c1b4b
Reduce logging
bbimber Oct 19, 2025
f8029db
Update sawfish install
bbimber Oct 24, 2025
3cc6db1
Better error handling
bbimber Oct 29, 2025
8c35a13
Updates to Save10xBarcodes
bbimber Oct 29, 2025
0166aa4
Expand StudyMetadata cohorts
bbimber Oct 29, 2025
14ffbcf
Throw exception when existing file present
bbimber Oct 29, 2025
504846c
Improve resume for ReadsetInitTask
bbimber Oct 30, 2025
ec8135c
Improve resume for ReadsetInitTask
bbimber Oct 30, 2025
fbe9a13
Bugfix to Save10xBarcodes
bbimber Nov 2, 2025
5d58764
Bugfix to handling of 10x barcodes
bbimber Nov 3, 2025
544c4ea
Switch nimble/CR barcodes to CB alone
bbimber Nov 4, 2025
a58aa88
Omit writing to 10x barcodes
bbimber Nov 4, 2025
2b95f2f
Add another coalesce() term in case name and label are blank
bbimber Nov 5, 2025
476ac90
Bugfix to study import
bbimber Nov 5, 2025
1066b9a
Fix merge conflicts
bbimber Nov 5, 2025
3ffa2d4
Build fixes
bbimber Nov 6, 2025
d98b42f
Build fixes
bbimber Nov 6, 2025
5 changes: 5 additions & 0 deletions .github/workflows/build.yml
@@ -31,6 +31,11 @@ jobs:
echo "DEFAULT_BRANCH=${DEFAULT_BRANCH}" >> $GITHUB_ENV
id: default-branch

# Note: use a slight delay in case there are associated commits across repos
- name: "Sleep for 30 seconds"
run: sleep 30s
shell: bash

- name: "Build DISCVR"
uses: bimberlabinternal/DevOps/githubActions/discvr-build@master
with:
8 changes: 4 additions & 4 deletions SequenceAnalysis/pipeline_code/extra_tools_install.sh
@@ -325,11 +325,11 @@ then
echo "Cleaning up previous installs"
rm -Rf $LKTOOLS_DIR/sawfish*

wget https://github.com/PacificBiosciences/sawfish/releases/download/v2.0.0/sawfish-v2.0.0-x86_64-unknown-linux-gnu.tar.gz
tar -xzf sawfish-v2.0.0-x86_64-unknown-linux-gnu.tar.gz
wget https://github.com/PacificBiosciences/sawfish/releases/download/v2.2.0/sawfish-v2.2.0-x86_64-unknown-linux-gnu.tar.gz
tar -xzf sawfish-v2.2.0-x86_64-unknown-linux-gnu.tar.gz

mv sawfish-v2.0.0-x86_64-unknown-linux-gnu $LKTOOLS_DIR/
ln -s $LKTOOLS_DIR/sawfish-v2.0.0/bin/sawfish $LKTOOLS_DIR/
mv sawfish-v2.2.0-x86_64-unknown-linux-gnu $LKTOOLS_DIR/
ln -s $LKTOOLS_DIR/sawfish-v2.2.0-x86_64-unknown-linux-gnu/bin/sawfish $LKTOOLS_DIR/
else
echo "Already installed"
fi
@@ -1,11 +1,15 @@
/*
* Copyright (c) 2012 LabKey Corporation
*
* Licensed under the Apache License, Version 2.0: http://www.apache.org/licenses/LICENSE-2.0
*/
var LABKEY = require("labkey");

var triggerHelper = new org.labkey.sequenceanalysis.query.SequenceTriggerHelper(LABKEY.Security.currentUser.id, LABKEY.Security.currentContainer.id);

function beforeDelete(row, errors){
if (!this.extraContext.deleteFromServer){
errors._form = 'You cannot directly delete readsets. To delete these records, use the delete button above the readset grid.';
}
}

function afterInsert(row, errors) {
if (row.sraAccessions) {
triggerHelper.createReaddataForSra(row.rowid, row.sraAccessions);
}
}
@@ -1,6 +1,6 @@
<customView xmlns="http://labkey.org/data/xml/queryCustomView" canOverride="true">
<sorts>
<sort column="name" descending="true"/>
<sort column="instrument_run_id/name" descending="false"/>
</sorts>
<filters>
<filter column="totalFiles" operator="eq" value="0"/>
@@ -2436,7 +2436,7 @@ Ext4.define('SequenceAnalysis.panel.SequenceImportPanel', {
},{
xtype: 'textfield',
fieldLabel: 'Delimiter',
value: '_',
value: '[_-]',
itemId: 'delimiter'
}],
buttons: [{
@@ -2455,7 +2455,7 @@ Ext4.define('SequenceAnalysis.panel.SequenceImportPanel', {
if (prefix) {
fg = fg.replace(new RegExp('^' + prefix), '');
}
fg = fg.split(delim);
fg = fg.split(RegExp(delim));
var id = fg[0];
if (Ext4.isNumeric(id)) {
r.set('readset', id);
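For context, the SequenceImportPanel change above widens the default delimiter from a literal underscore to the character class [_-], and the split now applies it as a regular expression (RegExp(delim)). A minimal standalone sketch of the same parsing behavior, written in Java (whose String.split() is also regex-based); the class name and sample file group names are hypothetical:

// Sketch only: mirrors the widened-delimiter parsing from the diff above.
public class DelimiterSketch
{
    public static void main(String[] args)
    {
        String delim = "[_-]"; // the new default from the diff above

        // Underscore- and hyphen-delimited file group names now both yield
        // the leading readset id:
        for (String fg : new String[]{"12345_S1_L001", "12345-S1-L001"})
        {
            String id = fg.split(delim)[0];
            System.out.println(fg + " -> readset id " + id); // prints 12345 for both
        }
    }
}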
@@ -44,6 +44,7 @@ public static class Factory extends AbstractSequenceTaskFactory<Factory>
public Factory()
{
super(AlignmentInitTask.class);
setLocation("webserver-high-priority");
setJoin(true);
}

@@ -165,6 +165,11 @@ public RecordedActionSet run() throws PipelineJobException
libraryMembers = new TableSelector(libraryMembersTable, new SimpleFilter(FieldKey.fromString("library_id"), getPipelineJob().getLibraryId()), new Sort("ref_nt_id/name")).getArrayList(ReferenceLibraryMember.class);
}

if (libraryMembers == null)
{
throw new PipelineJobException("There are no sequences in the library: " + getPipelineJob().getLibraryId());
}

getJob().getLogger().info("there are " + libraryMembers.size() + " sequences to process");

//make sure sequence names are unique
@@ -430,6 +430,10 @@ else if (TaskFileManager.InputFileTreatment.compress == inputFileTreatment)
moveInputToAnalysisDir(compressed, job, actions, unalteredInputs, outputFiles);
}
}
else
{
job.getLogger().debug("Input file does not exist, may have already been moved: " + input.getPath());
}
}
}
else
@@ -450,23 +454,40 @@ private static void moveInputToAnalysisDir(File input, SequenceJob job, Collecti
File outputDir = job.getAnalysisDirectory();
File output = new File(outputDir, input.getName());
job.getLogger().debug("Destination: " + output.getPath());
boolean alreadyMoved = false;
if (output.exists())
{
job.getLogger().debug("output already exists");
if (unalteredInputs != null && unalteredInputs.contains(output))
{
job.getLogger().debug("\tThis input was unaltered during normalization and a copy already exists in the analysis folder so the original will be discarded");
input.delete();
TaskFileManagerImpl.swapFilesInRecordedActions(job.getLogger(), input, output, actions, job, null);
return;
alreadyMoved = true;
}
else
{
output = new File(outputDir, FileUtil.getBaseName(input.getName()) + ".orig.gz");
job.getLogger().debug("\tA file with the expected output name already exists, so the original will be renamed: " + output.getPath());
if (input.length() == output.length() && input.lastModified() == output.lastModified())
{
job.getLogger().info("Output exists, but has the same size/modified timestamp. Deleting original");
input.delete();
alreadyMoved = true;
}
else if (input.exists() && input.length() > output.length() && input.lastModified() == output.lastModified())
{
job.getLogger().info("Output exists with same timestamp, but with smaller file size. This probably indicates a truncated/failed copy. Deleting this file.");
output.delete();
}
else
{
throw new PipelineJobException("A file with the expected output name already exists: " + output.getPath());
}
}
}

FileUtils.moveFile(input, output);
if (!alreadyMoved)
{
FileUtils.moveFile(input, output);
}
if (!output.exists())
{
throw new PipelineJobException("Unable to move file: " + input.getPath());
@@ -488,7 +509,7 @@ private static void moveInputToAnalysisDir(File input, SequenceJob job, Collecti

TaskFileManagerImpl.swapFilesInRecordedActions(job.getLogger(), input, output, actions, job, null);
}
catch (IOException e)
catch (Exception e)
{
throw new PipelineJobException(e);
}
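The resume handling added above reduces to a three-way decision whenever the destination file already exists. A simplified standalone sketch of that decision (not the actual TaskFileManagerImpl code; the class and method names are hypothetical):

import java.io.File;
import java.io.IOException;

// Sketch: when the destination exists, size plus modification time
// distinguish a completed move from a truncated copy left by a failed run.
public class ResumeSketch
{
    /** Returns true if the move can be skipped because the earlier copy completed. */
    static boolean resolveExistingOutput(File input, File output) throws IOException
    {
        if (input.length() == output.length() && input.lastModified() == output.lastModified())
        {
            // Same size and timestamp: the earlier copy finished, so the
            // original is redundant and can be discarded.
            input.delete();
            return true;
        }

        if (input.length() > output.length() && input.lastModified() == output.lastModified())
        {
            // Same timestamp but smaller destination: almost certainly a
            // truncated copy; remove it so the move can run again.
            output.delete();
            return false;
        }

        // Anything else is ambiguous; fail loudly rather than risk overwriting data.
        throw new IOException("A file with the expected output name already exists: " + output.getPath());
    }
}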
@@ -35,7 +35,7 @@ public static class Factory extends AbstractTaskFactory<AbstractTaskFactorySetti
public Factory()
{
super(SequenceOutputHandlerInitTask.class);
setLocation("webserver");
setLocation("webserver-high-priority");
}

@Override
@@ -35,7 +35,7 @@ public static class Factory extends AbstractTaskFactory<AbstractTaskFactorySetti
public Factory()
{
super(SequenceReadsetHandlerInitTask.class);
setLocation("webserver");
setLocation("webserver-high-priority");
}

@Override
@@ -14,9 +14,11 @@
import org.junit.Assert;
import org.junit.Test;
import org.labkey.api.collections.IntHashMap;
import org.labkey.api.assay.AssayFileWriter;
import org.labkey.api.data.Container;
import org.labkey.api.data.ContainerManager;
import org.labkey.api.data.SimpleFilter;
import org.labkey.api.data.Table;
import org.labkey.api.data.TableInfo;
import org.labkey.api.data.TableSelector;
import org.labkey.api.exp.api.DataType;
@@ -29,12 +31,21 @@
import org.labkey.api.security.User;
import org.labkey.api.security.UserManager;
import org.labkey.api.sequenceanalysis.RefNtSequenceModel;
import org.labkey.api.util.FileUtil;
import org.labkey.api.util.Path;
import org.labkey.sequenceanalysis.ReadDataImpl;
import org.labkey.sequenceanalysis.SequenceAnalysisSchema;
import org.labkey.sequenceanalysis.SequenceAnalysisServiceImpl;
import org.labkey.sequenceanalysis.SequenceReadsetImpl;
import org.labkey.sequenceanalysis.pipeline.ReadsetImportJob;
import org.labkey.vfs.FileLike;

import java.io.File;
import java.io.IOException;
import java.io.StringWriter;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

@@ -254,4 +265,65 @@ public long createExpData(String relPath) {

return d.getRowId();
}

public void createReaddataForSra(int readsetId, String sraAccessions)
{
SequenceReadsetImpl rs = SequenceAnalysisServiceImpl.get().getReadset(Long.valueOf(readsetId), _user);
if (rs == null)
{
throw new IllegalArgumentException("Unable to find readset: " + readsetId);
}

TableInfo rd = SequenceAnalysisSchema.getTable(SequenceAnalysisSchema.TABLE_READ_DATA);

String[] tokens = StringUtils.split(sraAccessions, ",");
for (String token : tokens)
{
if (rs.getReadData() != null && !rs.getReadData().isEmpty())
{
throw new IllegalArgumentException("Did not expect readset to have existing readdata: " + rs.getReadsetId());
}

// Create new:
ReadDataImpl rd1 = new ReadDataImpl();
rd1.setReadset(Long.valueOf(readsetId));
rd1.setContainer(rs.getContainer());
rd1.setCreated(new Date());
rd1.setModified(new Date());
rd1.setCreatedBy(_user.getUserId());
rd1.setModifiedBy(_user.getUserId());
rd1.setSra_accession(token);
rd1.setArchived(true);

// NOTE: this is a fragile assumption. We might need to eventually query SRA to figure out whether data is paired:
Container c = ContainerManager.getForId(rs.getContainer());
PipeRoot pr = PipelineService.get().findPipelineRoot(c);
if (pr == null)
{
throw new IllegalStateException("Unable to find pipeline root for: " + c.getPath());
}

String folderName = "SequenceImport_RS" + rs.getRowId() + "_" + FileUtil.getTimestamp();
FileLike sequenceImport = FileUtil.appendPath(pr.getRootFileLike(), Path.parse(ReadsetImportJob.NAME));
FileLike outDir = FileUtil.findUniqueFileName(folderName, sequenceImport);

FileLike expectedFile1 = FileUtil.appendPath(outDir, Path.parse(token + "_1.fastq.gz"));
ExpData exp1 = ExperimentService.get().createData(c, new DataType("Data"));
exp1.setDataFileURI(expectedFile1.toURI());
exp1.setContainer(c);
exp1.setName(expectedFile1.getName());
exp1.save(_user);
rd1.setFileId1(exp1.getRowId());

FileLike expectedFile2 = FileUtil.appendPath(outDir, Path.parse(token + "_2.fastq.gz"));
ExpData exp2 = ExperimentService.get().createData(c, new DataType("Data"));
exp2.setDataFileURI(expectedFile2.toURI());
exp2.setContainer(c);
exp2.setName(expectedFile2.getName());
exp2.save(_user);
rd1.setFileId2(exp2.getRowId());

Table.insert(_user, rd, rd1);
}
}
}
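To make the helper's behavior concrete: each comma-separated accession becomes one readdata row whose two ExpData records point at not-yet-downloaded _1/_2 fastq placeholders, with the row marked archived until the data is restored (the paired-end assumption is flagged as fragile in the code itself). A sketch of the naming convention only; the accessions are invented:

// Illustrative only: mirrors the accession parsing and expected file names
// from createReaddataForSra(). Sample accessions are hypothetical.
public class SraNamingSketch
{
    public static void main(String[] args)
    {
        String sraAccessions = "SRR100001,SRR100002";
        for (String token : sraAccessions.split(","))
        {
            // One readdata row per accession, expecting paired-end files:
            System.out.println(token + "_1.fastq.gz");
            System.out.println(token + "_2.fastq.gz");
        }
    }
}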