Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion SequenceAnalysis/pipeline_code/extra_tools_install.sh
Original file line number Diff line number Diff line change
Expand Up @@ -358,7 +358,7 @@ echo "%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%"
echo "Install hifiasm"
echo ""
cd $LKSRC_DIR
if [[ ! -e ${LKTOOLS_DIR}/primer3_core || ! -z $FORCE_REINSTALL ]];
if [[ ! -e ${LKTOOLS_DIR}/hifiasm || ! -z $FORCE_REINSTALL ]];
then
echo "Cleaning up previous installs"
rm -Rf $LKTOOLS_DIR/hifiasm*
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ public static class VariantProcessingTest extends SequenceIntegrationTests.Abstr
private static final String PROJECT_NAME = "VariantProcessingTestProject";

@BeforeClass
public static void initialSetUp() throws Exception
public static void initialSetUp()
{
doInitialSetUp(PROJECT_NAME);
}
Expand Down Expand Up @@ -91,12 +91,12 @@ public void testVariantProcessing() throws Exception

//create VCF, import as outputfile
String basename = "TestFile_" + FileUtil.getTimestamp();
File vcf = new File(_pipelineRoot, basename + ".vcf.gz");
File vcf = FileUtil.appendName(getPipelineRoot(_project), basename + ".vcf.gz");
Integer outputFileId = createTestVcf(genomeId, vcf);

//make job params
String jobName = "TestVariantProcessing";
JSONObject config = substituteParams(new File(_sampleData, VARIANT_JOB), jobName);
JSONObject config = substituteParams(FileUtil.appendName(_sampleData, VARIANT_JOB), jobName);
Set<Integer> outputFileIds = Collections.singleton(outputFileId);

TableInfo ti = QueryService.get().getUserSchema(TestContext.get().getUser(), _project, SequenceAnalysisSchema.SCHEMA_NAME).getTable(SequenceAnalysisSchema.TABLE_OUTPUTFILES, null);
Expand Down Expand Up @@ -129,7 +129,7 @@ public void testVariantProcessing() throws Exception
}
}

protected Set<PipelineJob> createOutputHandlerJob(String jobName, JSONObject config, Class handlerClass, Set<Integer> outputFileIDs) throws Exception
protected Set<PipelineJob> createOutputHandlerJob(String jobName, JSONObject config, Class<?> handlerClass, Set<Integer> outputFileIDs) throws Exception
{
Map<String, Object> headers = new HashMap<>();
headers.put("Content-Type", "application/json");
Expand Down Expand Up @@ -200,7 +200,7 @@ private int createTestVcf(int genomeId, File vcf)
Integer dataId = new TableSelector(SequenceAnalysisSchema.getTable(SequenceAnalysisSchema.TABLE_REF_LIBRARIES), PageFlowUtil.set("fasta_file"), new SimpleFilter(FieldKey.fromString("rowid"), genomeId), null).getObject(Integer.class);
ExpData data = ExperimentService.get().getExpData(dataId);

File dictFile = new File(data.getFile().getParent(), FileUtil.getBaseName(data.getFile().getName()) + ".dict");
File dictFile = FileUtil.appendName(data.getFile().getParentFile(), FileUtil.getBaseName(data.getFile().getName()) + ".dict");
if (dictFile.exists())
{
SAMSequenceDictionary dict = SAMSequenceDictionaryExtractor.extractDictionary(dictFile.toPath());
Expand All @@ -222,7 +222,7 @@ private int createTestVcf(int genomeId, File vcf)
writer.add(vcb.make());
}

ExpData d = createExpData(vcf);
ExpData d = createExpData(vcf, _project);
Map<String, Object> params = new CaseInsensitiveHashMap<>();
params.put("name", "TestVcf");
params.put("description", "Description");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1924,7 +1924,7 @@ else if (o.has("relPath") || o.has("fileName"))

if (f == null || !f.exists())
{
throw new PipelineValidationException("Unknown file: " + o.getString("relPath") + " / " + o.getString("fileName"));
throw new PipelineValidationException("Unknown file: " + o.optString("relPath") + " / " + o.optString("fileName"));
}

ret.add(f);
Expand Down

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
import org.labkey.api.pipeline.TaskId;
import org.labkey.api.pipeline.WorkDirectory;
import org.labkey.api.reader.Readers;
import org.labkey.api.util.FileUtil;
import org.labkey.api.writer.PrintWriters;
import org.labkey.sequenceanalysis.pipeline.AlignmentInitTask;
import org.labkey.sequenceanalysis.pipeline.PrepareAlignerIndexesTask;
Expand All @@ -25,6 +26,7 @@
import java.io.IOException;
import java.io.PrintWriter;
import java.util.List;
import java.util.Objects;

import static org.labkey.api.sequenceanalysis.pipeline.SequencePipelineService.SEQUENCE_TOOLS_PARAM;

Expand All @@ -40,35 +42,35 @@ public void setUp() throws Exception
}

@BeforeClass
public static void initialSetUp() throws Exception
public static void initialSetUp()
{
doInitialSetUp(PROJECT_NAME);
}

private File setupConfigDir(File outDir) throws IOException
{
File baseDir = new File(outDir, "config");
File baseDir = FileUtil.appendName(outDir, "config");
if (baseDir.exists())
{
FileUtils.deleteDirectory(baseDir);
}

baseDir.mkdirs();
FileUtil.mkdirs(baseDir);

if (_sampleData == null)
{
throw new IOException("_sampleData was null");
}

File source = new File(_sampleData, "remotePipeline");
File source = FileUtil.appendName(_sampleData, "remotePipeline");
if (!source.exists())
{
throw new IOException("Unable to find file: " + source.getPath());
}

FileUtils.copyFile(new File(source, "sequenceanalysisConfig.xml"), new File(baseDir, "sequenceanalysisConfig.xml"));
FileUtils.copyFile(FileUtil.appendName(source, "sequenceanalysisConfig.xml"), FileUtil.appendName(baseDir, "sequenceanalysisConfig.xml"));

try (PrintWriter writer = PrintWriters.getPrintWriter(new File(baseDir, "pipelineConfig.xml")); BufferedReader reader = Readers.getReader(new File(source, "pipelineConfig.xml")))
try (PrintWriter writer = PrintWriters.getPrintWriter(FileUtil.appendName(baseDir, "pipelineConfig.xml")); BufferedReader reader = Readers.getReader(FileUtil.appendName(source, "pipelineConfig.xml")))
{
String line;
while ((line = reader.readLine()) != null)
Expand All @@ -83,12 +85,10 @@ private File setupConfigDir(File outDir) throws IOException

path = path.replaceAll("\\\\", "/");
line = line.replaceAll("@@SEQUENCEANALYSIS_TOOLS@@", path);
_log.info("Writing to pipelineConfig.xml: " + line);
}
else if (line.contains("@@WORK_DIR@@"))
{
line = line.replaceAll("@@WORK_DIR@@", outDir.getPath().replaceAll("\\\\", "/"));
_log.info("Writing to pipelineConfig.xml: " + line);
}

writer.println(line);
Expand All @@ -113,13 +113,13 @@ protected String getProjectName()
@Test
public void BasicRemoteJob() throws Exception
{
File outDir = new File(_pipelineRoot, "clusterBootstrap");
File outDir = FileUtil.appendName(getPipelineRoot(_project), "clusterBootstrap");
if (outDir.exists())
{
FileUtils.deleteDirectory(outDir);
}

outDir.mkdirs();
FileUtil.mkdirs(outDir);

executeJobRemote(outDir, null);

Expand All @@ -143,19 +143,19 @@ public void RunBwaRemote() throws Exception
return;

String jobName = "TestBWAMem_" + System.currentTimeMillis();
JSONObject config = substituteParams(new File(_sampleData, ALIGNMENT_JOB), jobName);
JSONObject config = substituteParams(FileUtil.appendName(_sampleData, ALIGNMENT_JOB), jobName);
config.put("alignment", "BWA-Mem");
appendSamplesForAlignment(config, _readsets);

SequenceAlignmentJob job = SequenceAlignmentJob.createForReadsets(_project, _context.getUser(), "RemoteJob1", "Test of remote pipeline", config, config.getJSONArray("readsetIds"), false).get(0);
File outDir = new File(_pipelineRoot, "remoteBwa");
File outDir = FileUtil.appendName(getPipelineRoot(_project), "remoteBwa");
if (outDir.exists())
{
FileUtils.deleteDirectory(outDir);
}

outDir.mkdirs();
job.getLogFile().getParentFile().mkdirs();
FileUtil.mkdirs(outDir);
FileUtil.mkdirs(job.getLogFile().getParentFile());

_readsets.forEach(rs -> job.getSequenceSupport().cacheReadset(rs));

Expand All @@ -171,7 +171,7 @@ public void RunBwaRemote() throws Exception
//Now move to remote tasks
job.setActiveTaskId(new TaskId(PrepareAlignerIndexesTask.class));

File jobFile = new File(outDir, "bwaRemote.job.json.txt");
File jobFile = FileUtil.appendName(outDir, "bwaRemote.job.json.txt");
job.writeToFile(jobFile);

executeJobRemote(outDir, jobFile);
Expand All @@ -191,7 +191,7 @@ public void RunBwaRemote() throws Exception
writeJobLogToLog(job);

_log.info("Files in job folder: " + job.getLogFile().getParentFile().getPath());
for (File f : job.getLogFile().getParentFile().listFiles())
for (File f : Objects.requireNonNull(job.getLogFile().getParentFile().listFiles()))
{
_log.info(f.getName());
}
Expand All @@ -215,14 +215,14 @@ protected void executeJobRemote(File workDir, @Nullable File jobJson) throws IOE
ProcessBuilder pb = new ProcessBuilder(args);
pb.directory(workDir);

_log.info("Executing job in '" + pb.directory().getAbsolutePath() + "': " + String.join(" ", pb.command()));
_log.info("Executing job in '{}': {}", pb.directory().getAbsolutePath(), String.join(" ", pb.command()));

Process proc;
try
{
pb.redirectErrorStream(true);
proc = pb.start();
File logFile = new File(workDir, "clusterBootstrap.txt");
File logFile = FileUtil.appendName(workDir, "clusterBootstrap.txt");
try (BufferedReader procReader = Readers.getReader(proc.getInputStream());PrintWriter writer = PrintWriters.getPrintWriter(logFile))
{
String line;
Expand Down
2 changes: 1 addition & 1 deletion singlecell/resources/chunks/CalculateUCellScores.R
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ for (datasetId in names(seuratObjects)) {
seuratObj <- readSeuratRDS(seuratObjects[[datasetId]])

message(paste0('Loading dataset ', datasetId, ', with total cells: ', ncol(seuratObj)))
seuratObj <- RIRA::CalculateUCellScores(seuratObj, storeRanks = storeRanks, assayName = assayName, forceRecalculate = forceRecalculate, ncores = nCores)
seuratObj <- RIRA::CalculateUCellScores(seuratObj, storeRanks = storeRanks, assayName = assayName, forceRecalculate = forceRecalculate, ncores = nCores, dropAllExistingUcells = dropAllExistingUcells)

saveData(seuratObj, datasetId)

Expand Down
34 changes: 34 additions & 0 deletions singlecell/resources/chunks/PerformTcrClustering.R
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
# For each saved Seurat object: compute TCR distances, cluster clones,
# report a per-assay summary, and persist the updated object.
for (dsId in names(seuratObjects)) {
  printName(dsId)
  so <- readSeuratRDS(seuratObjects[[dsId]])

  # Pairwise clone distances across all four TCR chains.
  so <- tcrClustR::CalculateTcrDistances(
    inputData = so,
    chains = c('TRA', 'TRB', 'TRG', 'TRD'),
    organism = organism,
    minimumCloneSize = 2,
    calculateChainPairs = TRUE
  )

  # Hierarchical (DIANA) clustering of clones using the distances above.
  so <- tcrClustR::RunTcrClustering(
    seuratObj_TCR = so,
    dianaHeight = 20,
    clusterSizeThreshold = 1
  )

  print(paste0('Summary of distances: '))
  if ('TCR_Distances' %in% names(so@misc)) {
    distanceList <- so@misc$TCR_Distances
    for (assay in names(distanceList)) {
      print(paste0('Assay: ', assay, ', total clones: ', nrow(distanceList[[assay]])))
    }
  } else {
    warning('No TCR_Distances were found, this could indicate a problem with processing')
  }

  saveData(so, dsId)

  # Cleanup: release the object before processing the next dataset
  rm(so)
  gc()
}
3 changes: 2 additions & 1 deletion singlecell/src/org/labkey/singlecell/SingleCellModule.java
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,6 @@
import org.labkey.api.singlecell.CellHashingService;
import org.labkey.api.singlecell.pipeline.SingleCellRawDataStep;
import org.labkey.api.singlecell.pipeline.SingleCellStep;
import org.labkey.api.util.PageFlowUtil;
import org.labkey.api.view.WebPartFactory;
import org.labkey.singlecell.analysis.AbstractSingleCellHandler;
import org.labkey.singlecell.analysis.CellRangerRawDataHandler;
Expand Down Expand Up @@ -76,6 +75,7 @@
import org.labkey.singlecell.pipeline.singlecell.NormalizeAndScale;
import org.labkey.singlecell.pipeline.singlecell.PerformDefaultNimbleAppend;
import org.labkey.singlecell.pipeline.singlecell.PerformMhcDimRedux;
import org.labkey.singlecell.pipeline.singlecell.PerformTcrClustering;
import org.labkey.singlecell.pipeline.singlecell.PhenotypePlots;
import org.labkey.singlecell.pipeline.singlecell.PlotAssayFeatures;
import org.labkey.singlecell.pipeline.singlecell.PlotAverageCiteSeqCounts;
Expand Down Expand Up @@ -305,6 +305,7 @@ public static void registerPipelineSteps()
SequencePipelineService.get().registerPipelineStep(new CalculateTcrRepertoireStats.Provider());
SequencePipelineService.get().registerPipelineStep(new PredictTcellActivation.Provider());
SequencePipelineService.get().registerPipelineStep(new IdentifyAndStoreActiveClonotypes.Provider());
SequencePipelineService.get().registerPipelineStep(new PerformTcrClustering.Provider());

SequenceAnalysisService.get().registerReadsetListener(new SingleCellReadsetListener());
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -963,6 +963,7 @@ public static String getOutputDescription(JSONObject jsonParams, Logger log, Fil
int lowOrNegative = 0;
int totalDoublet = 0;
double totalSaturation = 0.0;
Set<String> subjectIds = new HashSet<>();

int hashingIdx = -1;
int saturationIdx = -1;
Expand All @@ -972,6 +973,7 @@ public static String getOutputDescription(JSONObject jsonParams, Logger log, Fil
int trbIdx = -1;
int trdIdx = -1;
int trgIdx = -1;
int subjectIdIdx = -1;

int totalTNK = 0;
int cellsWithTRA = 0;
Expand All @@ -998,6 +1000,7 @@ public static String getOutputDescription(JSONObject jsonParams, Logger log, Fil
trdIdx = Arrays.asList(line).indexOf("TRD");
trgIdx = Arrays.asList(line).indexOf("TRG");
riraIdx = Arrays.asList(line).indexOf("RIRA_Immune_v2.cellclass");
subjectIdIdx = Arrays.asList(line).indexOf("SubjectIdId");
}
else
{
Expand Down Expand Up @@ -1085,6 +1088,15 @@ else if ("NotUsed".equals(val))
}
}
}

if (subjectIdIdx > 0)
{
String subjectId = StringUtils.trimToNull(line[subjectIdIdx]);
if (subjectId != null && !"NA".equals(subjectId))
{
subjectIds.add(subjectId);
}
}
}
}

Expand Down Expand Up @@ -1126,6 +1138,11 @@ else if (riraIdx == -1 || traIdx == -1)
{
descriptions.add("TCR information not present");
}

if (!subjectIds.isEmpty())
{
descriptions.add("Distinct SubjectIds: " + subjectIds.size());
}
}
catch (IOException e)
{
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
package org.labkey.singlecell.pipeline.singlecell;

import org.labkey.api.sequenceanalysis.pipeline.PipelineContext;
import org.labkey.api.sequenceanalysis.pipeline.PipelineStepProvider;
import org.labkey.api.singlecell.pipeline.AbstractSingleCellPipelineStep;
import org.labkey.api.util.PageFlowUtil;

import java.util.Collection;

/**
 * Base class for single-cell pipeline steps that execute the tcrClustR R package
 * inside its Docker container. Centralizes the container image name, the required
 * R library, and the container home directory for all tcrClustR-based steps.
 */
abstract public class AbstractTcrClustRStep extends AbstractSingleCellPipelineStep
{
    // Fix: declared final — this is a shared constant and must not be reassignable by callers.
    public static final String CONTAINER_NAME = "ghcr.io/bimberlabinternal/tcrclustr:latest";

    public AbstractTcrClustRStep(PipelineStepProvider<?> provider, PipelineContext ctx)
    {
        super(provider, ctx);
    }

    /**
     * @return the single R library (tcrClustR) these steps require at runtime
     */
    @Override
    public Collection<String> getRLibraries()
    {
        return PageFlowUtil.set("tcrClustR");
    }

    /**
     * @return the Docker image used to run tcrClustR
     */
    @Override
    public String getDockerContainerName()
    {
        return CONTAINER_NAME;
    }

    // NOTE: ExperimentHub and similar packages default to saving data to the user's home dir. Set a directory, to avoid issues when not running the container as root
    @Override
    public String getDockerHomeDir()
    {
        return "/dockerHomeDir";
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -23,13 +23,16 @@ public Provider()
{
super("CalculateUCellScores", "Calculate UCell Scores", "Seurat", "This will generate UCell scores for a set of pre-defined gene modules", Arrays.asList(
SeuratToolParameter.create("storeRanks", "Store Ranks", "Passed directly to UCell::AddModuleScore_UCell.", "checkbox", new JSONObject(){{
put("checked", true);
put("checked", false);
}}, true),
SeuratToolParameter.create("assayName", "Assay Name", "Passed directly to UCell::AddModuleScore_UCell.", "textfield", new JSONObject(){{

}}, "RNA"),
SeuratToolParameter.create("forceRecalculate", "Force Recalculate", "If checked, the UCell score will always be re-calculated.", "checkbox", new JSONObject(){{

}}, false),
SeuratToolParameter.create("dropAllExistingUcells", "Drop Existing UCells?", "If checked, this will drop all columns ending in _UCell. This implies forceRecalculate.", "checkbox", new JSONObject(){{

}}, false)
), null, null);
}
Expand Down
Loading