Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,12 @@

All notable changes to this project will be documented in this file.

## v0.1.2

- fix #34
- fix #33
- Update to pull test data from a new s3 bucket

## v0.1.1

Added more informative error messages if an FDR distribution cannot be made or there is not enough coverage.
Expand Down
82 changes: 42 additions & 40 deletions pixi.toml
Original file line number Diff line number Diff line change
Expand Up @@ -4,56 +4,58 @@ channels = ["conda-forge", "bioconda"]
description = "Add a short description here"
name = "FIRE"
platforms = ["osx-64", "linux-64"]
version = "0.1.1"
version = "0.1.2"

[tasks]
fmt = "ruff format . && taplo format pixi.toml && snakefmt workflow/"
test-data = { cmd = [
"cd",
"$INIT_CWD",
"&&",
"mkdir",
"-p",
"fire-test-data",
"&&",
"aws",
"s3",
"--no-sign-request",
"sync",
"s3://stergachis-public1/FIRE/test-data",
"fire-test-data/",
"cd",
"$INIT_CWD",
"&&",
"mkdir",
"-p",
"fire-test-data",
"&&",
"aws",
"s3",
"--no-sign-request",
"sync",
"--endpoint-url",
"https://s3.kopah.orci.washington.edu",
"s3://stergachis/public/FIRE/test-data",
"fire-test-data/",
] }
test = { cmd = [
"cd",
"$INIT_CWD/fire-test-data",
"&&",
"snakemake",
"-s",
"$PIXI_PROJECT_ROOT/workflow/Snakefile",
"--configfile",
"test.yaml",
"-k",
], depends_on = [
"test-data",
"cd",
"$INIT_CWD/fire-test-data",
"&&",
"snakemake",
"-s",
"$PIXI_PROJECT_ROOT/workflow/Snakefile",
"--configfile",
"test.yaml",
"-k",
], depends-on = [
"test-data",
], clean-env = true }

fire = { cmd = [
"cd",
"$INIT_CWD",
"&&",
"snakemake",
"-s",
"$PIXI_PROJECT_ROOT/workflow/Snakefile",
"cd",
"$INIT_CWD",
"&&",
"snakemake",
"-s",
"$PIXI_PROJECT_ROOT/workflow/Snakefile",
] }
slurm = { cmd = [
"cd",
"$INIT_CWD",
"&&",
"snakemake",
"-s",
"$PIXI_PROJECT_ROOT/workflow/Snakefile",
"--profile",
"$PIXI_PROJECT_ROOT/profiles/slurm-executor",
"cd",
"$INIT_CWD",
"&&",
"snakemake",
"-s",
"$PIXI_PROJECT_ROOT/workflow/Snakefile",
"--profile",
"$PIXI_PROJECT_ROOT/profiles/slurm-executor",
] }

[dependencies]
Expand All @@ -62,7 +64,7 @@ snakemake = "==8.21"
snakemake-executor-plugin-slurm = ">=0.11.2"
snakefmt = "*"
ruff = "*"
awscli = "2.22"
awscli = "2.22.*"
taplo = "*"

[pypi-dependencies]
7 changes: 7 additions & 0 deletions workflow/rules/apply-model.smk
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,13 @@ rule fire:
--output-fmt-option embed_ref=1 \
--input-fmt-option required_fields=0x1bff \
--write-index -o {output.cram}

# check if the cram file has zero reads
reads_in_header=$(samtools view {output.cram} | head | wc -l || true)
if [ $reads_in_header -eq 0 ]; then
printf "\nNo reads passed filters, exiting...\n\nPlease review https://fiberseq.github.io/quick-start.html to make sure the input BAM has been correctly processed.\n\n"
exit 1
fi
"""


Expand Down
8 changes: 5 additions & 3 deletions workflow/rules/coverages.smk
Original file line number Diff line number Diff line change
Expand Up @@ -151,6 +151,7 @@ rule unreliable_coverage_regions:
output:
bed="results/{sm}/additional-outputs-{v}/coverage/unreliable-coverage-regions.bed.gz",
bed_tbi="results/{sm}/additional-outputs-{v}/coverage/unreliable-coverage-regions.bed.gz.tbi",
tmp=temp("temp/{sm}/additional-outputs-{v}/unreliable-coverage-regions.bed"),
bb="results/{sm}/trackHub-{v}/bb/unreliable-coverage-regions.bb",
threads: 4
params:
Expand All @@ -171,10 +172,11 @@ rule unreliable_coverage_regions:
> {output.bed}

# bigbed
bgzip -cd {output.bed} -@ {threads} \
| bigtools bedtobigbed \
# for some reason bigtools gives a 'too many open files' error when reading from stdin
bedtools merge -i {output.bed} > {output.tmp}
bigtools bedtobigbed \
-s start -a {params.bed3_as} \
- {input.fai} {output.bb}
{output.tmp} {input.fai} {output.bb}

# index
tabix -f -p bed {output.bed}
Expand Down