Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
20 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
59 changes: 59 additions & 0 deletions conf/test_baysor_multi_patches.config
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Nextflow config file for running minimal tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Defines input files and everything required to run a fast and simple pipeline test.

Use as follows:
nextflow run nf-core/sopa -profile test_baysor_multi_patches,<docker/singularity> --outdir <OUTDIR>

----------------------------------------------------------------------------------------
*/

// Cap per-process resources so the test fits on small CI runners.
process {
resourceLimits = [
cpus: 4,
memory: '15.GB',
time: '1.h',
]
}

params {
config_profile_name = 'Test profile'
config_profile_description = 'Minimal test dataset to check pipeline function'

// Input data
input = "${baseDir}/tests/samplesheet.csv"

// Reader
technology = "toy_dataset"

// Patches options (small width/overlap presumably chosen so the toy dataset
// is split into several patches — TODO confirm against the toy dataset extent)
patch_width_microns = 150
patch_overlap_microns = 10

// Baysor segmentation
use_baysor = true
baysor_scale = 3
min_molecules_per_cell = 10
min_molecules_per_gene = 0
min_molecules_per_segment = 3
confidence_nn_id = 6
prior_segmentation_confidence = 0

// Filtering
min_area_microns2 = 10
min_transcripts = 5

// Aggregation
aggregate_channels = true

// Annotation
use_tangram = true
sc_reference_path = "https://github.com/gustaveroussy/sopa/raw/refs/heads/main/tests/toy_tangram_ref.h5ad"
tangram_cell_type_key = "ct"

// Explorer
ram_threshold_gb = 4
pixel_size = 0.1
}
51 changes: 51 additions & 0 deletions conf/test_cellpose_multi_patches.config
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Nextflow config file for running minimal tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Defines input files and everything required to run a fast and simple pipeline test.

Use as follows:
nextflow run nf-core/sopa -profile test_cellpose_multi_patches,<docker/singularity> --outdir <OUTDIR>

----------------------------------------------------------------------------------------
*/

// Cap per-process resources so the test fits on small CI runners.
process {
resourceLimits = [
cpus: 4,
memory: '15.GB',
time: '1.h',
]
}

params {
config_profile_name = 'Test profile with Cellpose'
config_profile_description = 'Minimal test dataset to check pipeline function'

// Input data
input = "${baseDir}/tests/samplesheet.csv"

// Reader
technology = "toy_dataset"

// Patches options (pixel units here, unlike the transcript-based profiles which
// use microns; presumably sized to yield multiple patches — TODO confirm)
patch_width_pixel = 1500
patch_overlap_pixel = 50

// Cellpose segmentation
use_cellpose = true
cellpose_diameter = 35
cellpose_channels = 'DAPI'
flow_threshold = 2
cellprob_threshold = -6

// Filtering
min_area_pixels2 = 2500

// Aggregation
aggregate_channels = true

// Explorer
ram_threshold_gb = 4
pixel_size = 0.1
}
53 changes: 53 additions & 0 deletions conf/test_comseg_multi_patches.config
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Nextflow config file for running minimal tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Defines input files and everything required to run a fast and simple pipeline test.

Use as follows:
nextflow run nf-core/sopa -profile test_comseg_multi_patches,<docker/singularity> --outdir <OUTDIR>

----------------------------------------------------------------------------------------
*/

// Cap per-process resources so the test fits on small CI runners.
process {
resourceLimits = [
cpus: 4,
memory: '15.GB',
time: '1.h',
]
}

params {
config_profile_name = 'Test profile with Comseg'
config_profile_description = 'Minimal test dataset to check pipeline function'

// Input data
input = "${baseDir}/tests/samplesheet.csv"

// Reader
technology = "toy_dataset"

// Patches options (small width/overlap presumably chosen so the toy dataset
// is split into several patches — TODO confirm against the toy dataset extent)
patch_width_microns = 150
patch_overlap_microns = 10
prior_shapes_key = "cells"

// Comseg segmentation
use_comseg = true
mean_cell_diameter = 15
max_cell_radius = 25
alpha = 0.5
min_rna_per_cell = 5

// Filtering
min_area_microns2 = 10
min_transcripts = 5

// Aggregation
aggregate_channels = true

// Explorer
ram_threshold_gb = 4
pixel_size = 0.1
}
33 changes: 13 additions & 20 deletions conf/test_full.config
Original file line number Diff line number Diff line change
Expand Up @@ -11,34 +11,27 @@
*/

params {
config_profile_name = 'Full test profile'
config_profile_description = 'Full test dataset to check pipeline function'
config_profile_name = 'Test VisiumHD profile'
config_profile_description = 'Test pipeline functionality on Visium HD file'

// Input data
input = "${baseDir}/tests/samplesheet.csv"
// Input
input = "https://raw.githubusercontent.com/nf-core/test-datasets/spatialvi/testdata/human-lung-cancer-post-xenium_hd_ffpe/samplesheet_spaceranger_hd.csv"
spaceranger_probeset = "https://raw.githubusercontent.com/nf-core/test-datasets/spatialvi/testdata/human-lung-cancer-post-xenium_hd_ffpe/Visium_HD_Human_Lung_Cancer_HD_Only_Experiment2_probe_set.csv"
spaceranger_reference = "https://raw.githubusercontent.com/nf-core/test-datasets/spatialvi/testdata/GRCh38.tar.gz"

// Reader
technology = "toy_dataset"
technology = "visium_hd"

// Patches
patch_width_microns = -1
prior_shapes_key = "auto"

// Proseg segmentation
// Proseg
use_proseg = true
visium_hd_prior_shapes_key = "auto"

// Filtering
min_transcripts = 10

// Aggregation
aggregate_channels = true
min_transcripts = 5

// Annotation
use_fluorescence_annotation = true
marker_cell_dict = '{"CK": "Tumoral cell", "CD3": "T cell", "CD20": "B cell"}'

// Scanpy preprocessing
use_scanpy_preprocessing = true

// Explorer
ram_threshold_gb = 4
pixel_size = 0.1
ram_threshold_gb = 1
}
5 changes: 4 additions & 1 deletion modules/local/patch_segmentation_baysor/main.nf
Original file line number Diff line number Diff line change
Expand Up @@ -10,12 +10,15 @@ process PATCH_SEGMENTATION_BAYSOR {
tuple val(meta), path(sdata_path), val(cli_arguments), val(index), val(n_patches)

output:
tuple val(meta), path(sdata_path), path("${sdata_path}/.sopa_cache/transcript_patches/${index}/segmentation_counts.loom"), val(n_patches)
tuple val(meta), path(sdata_path), path("${index}-segmentation_counts.loom"), path("${index}-segmentation_polygons_2d.json"), val(n_patches)

script:
"""
export JULIA_NUM_THREADS=${task.cpus} # parallelize within each patch for Baysor >= v0.7

sopa segmentation baysor ${sdata_path} --patch-index ${index} ${cli_arguments}

mv ${sdata_path}/.sopa_cache/transcript_patches/${index}/segmentation_counts.loom ${index}-segmentation_counts.loom
mv ${sdata_path}/.sopa_cache/transcript_patches/${index}/segmentation_polygons_2d.json ${index}-segmentation_polygons_2d.json
"""
}
4 changes: 3 additions & 1 deletion modules/local/patch_segmentation_cellpose/main.nf
Original file line number Diff line number Diff line change
Expand Up @@ -10,13 +10,15 @@ process PATCH_SEGMENTATION_CELLPOSE {
tuple val(meta), path(sdata_path), val(cli_arguments), val(index), val(n_patches)

output:
tuple val(meta), path(sdata_path), path("${sdata_path}/.sopa_cache/cellpose_boundaries/${index}.parquet"), val(n_patches)
tuple val(meta), path(sdata_path), path("${index}.parquet"), val(n_patches)

script:
"""
mkdir ./cellpose_cache
export CELLPOSE_LOCAL_MODELS_PATH=./cellpose_cache

sopa segmentation cellpose ${sdata_path} --patch-index ${index} ${cli_arguments}

mv ${sdata_path}/.sopa_cache/cellpose_boundaries/${index}.parquet ${index}.parquet
"""
}
5 changes: 4 additions & 1 deletion modules/local/patch_segmentation_comseg/main.nf
Original file line number Diff line number Diff line change
Expand Up @@ -10,10 +10,13 @@ process PATCH_SEGMENTATION_COMSEG {
tuple val(meta), path(sdata_path), val(cli_arguments), val(index), val(n_patches)

output:
tuple val(meta), path(sdata_path), path("${sdata_path}/.sopa_cache/transcript_patches/${index}/segmentation_counts.h5ad"), path("${sdata_path}/.sopa_cache/transcript_patches/${index}/segmentation_polygons.json"), val(n_patches)
tuple val(meta), path(sdata_path), path("${index}-segmentation_counts.h5ad"), path("${index}-segmentation_polygons.json"), val(n_patches)

script:
"""
sopa segmentation comseg ${sdata_path} --patch-index ${index} ${cli_arguments}

mv ${sdata_path}/.sopa_cache/transcript_patches/${index}/segmentation_counts.h5ad ${index}-segmentation_counts.h5ad
mv ${sdata_path}/.sopa_cache/transcript_patches/${index}/segmentation_polygons.json ${index}-segmentation_polygons.json
"""
}
4 changes: 3 additions & 1 deletion modules/local/patch_segmentation_stardist/main.nf
Original file line number Diff line number Diff line change
Expand Up @@ -10,10 +10,12 @@ process PATCH_SEGMENTATION_STARDIST {
tuple val(meta), path(sdata_path), val(cli_arguments), val(index), val(n_patches)

output:
tuple val(meta), path(sdata_path), path("${sdata_path}/.sopa_cache/stardist_boundaries/${index}.parquet"), val(n_patches)
tuple val(meta), path(sdata_path), path("${index}.parquet"), val(n_patches)

script:
"""
sopa segmentation stardist ${sdata_path} --patch-index ${index} ${cli_arguments}

mv ${sdata_path}/.sopa_cache/stardist_boundaries/${index}.parquet ${index}.parquet
"""
}
22 changes: 21 additions & 1 deletion modules/local/resolve_baysor/main.nf
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ process RESOLVE_BAYSOR {
: 'docker.io/quentinblampey/sopa:2.1.11-baysor'}"

input:
tuple val(meta), path(sdata_path)
tuple val(meta), path(sdata_path), path(counts), path(polygons)
val cli_arguments

output:
Expand All @@ -17,6 +17,26 @@ process RESOLVE_BAYSOR {

script:
"""
for f in $counts; do
index=\${f%%-*}

mkdir -p ${sdata_path}/.sopa_cache/transcript_patches/\$index

if [ ! -f "${sdata_path}/.sopa_cache/transcript_patches/\$index/segmentation_counts.loom" ]; then
mv \$f ${sdata_path}/.sopa_cache/transcript_patches/\$index/segmentation_counts.loom
fi
done

for f in $polygons; do
index=\${f%%-*}

mkdir -p ${sdata_path}/.sopa_cache/transcript_patches/\$index

if [ ! -f "${sdata_path}/.sopa_cache/transcript_patches/\$index/segmentation_polygons_2d.json" ]; then
mv \$f ${sdata_path}/.sopa_cache/transcript_patches/\$index/segmentation_polygons_2d.json
fi
done

sopa resolve baysor ${sdata_path} ${cli_arguments}

rm -r ${sdata_path}/.sopa_cache/transcript_patches || true # cleanup large baysor files
Expand Down
7 changes: 6 additions & 1 deletion modules/local/resolve_cellpose/main.nf
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ process RESOLVE_CELLPOSE {
: 'docker.io/quentinblampey/sopa:2.1.11-cellpose'}"

input:
tuple val(meta), path(sdata_path)
tuple val(meta), path(sdata_path), path(parquets)

output:
tuple val(meta), path(sdata_path)
Expand All @@ -16,6 +16,11 @@ process RESOLVE_CELLPOSE {

script:
"""
mkdir -p ${sdata_path}/.sopa_cache/cellpose_boundaries
for f in ${parquets}; do
mv "\$f" "${sdata_path}/.sopa_cache/cellpose_boundaries/"
done

sopa resolve cellpose ${sdata_path}

cat <<-END_VERSIONS > versions.yml
Expand Down
22 changes: 21 additions & 1 deletion modules/local/resolve_comseg/main.nf
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ process RESOLVE_COMSEG {
: 'docker.io/quentinblampey/sopa:2.1.11-comseg'}"

input:
tuple val(meta), path(sdata_path)
tuple val(meta), path(sdata_path), path(counts), path(polygons)
val cli_arguments

output:
Expand All @@ -17,6 +17,26 @@ process RESOLVE_COMSEG {

script:
"""
for f in $counts; do
index=\${f%%-*}

mkdir -p ${sdata_path}/.sopa_cache/transcript_patches/\$index

if [ ! -f "${sdata_path}/.sopa_cache/transcript_patches/\$index/segmentation_counts.h5ad" ]; then
mv \$f ${sdata_path}/.sopa_cache/transcript_patches/\$index/segmentation_counts.h5ad
fi
done

for f in $polygons; do
index=\${f%%-*}

mkdir -p ${sdata_path}/.sopa_cache/transcript_patches/\$index

if [ ! -f "${sdata_path}/.sopa_cache/transcript_patches/\$index/segmentation_polygons.json" ]; then
mv \$f ${sdata_path}/.sopa_cache/transcript_patches/\$index/segmentation_polygons.json
fi
done

sopa resolve comseg ${sdata_path} ${cli_arguments}

rm -r ${sdata_path}/.sopa_cache/transcript_patches || true # cleanup large comseg files
Expand Down
7 changes: 6 additions & 1 deletion modules/local/resolve_stardist/main.nf
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ process RESOLVE_STARDIST {
: 'docker.io/quentinblampey/sopa:2.1.11-stardist'}"

input:
tuple val(meta), path(sdata_path)
tuple val(meta), path(sdata_path), path(parquets)

output:
tuple val(meta), path(sdata_path)
Expand All @@ -16,6 +16,11 @@ process RESOLVE_STARDIST {

script:
"""
mkdir -p ${sdata_path}/.sopa_cache/stardist_boundaries
for f in ${parquets}; do
mv "\$f" "${sdata_path}/.sopa_cache/stardist_boundaries/"
done

sopa resolve stardist ${sdata_path}

cat <<-END_VERSIONS > versions.yml
Expand Down
Loading
Loading