Update working dir to everwatch-workflow #7

Merged · 2 commits · May 8, 2024
Changes from all commits
9 changes: 5 additions & 4 deletions .gitignore
@@ -1,10 +1,11 @@
-*tokens.py
-.snakemake/**
-logs/**
-lightning_logs
*.DS_Store*
*.idea*
*.pytest_cache*
*__pycache__*
*_cache*
+*tokens.py
+.snakemake/**
+.vscode/launch.json
+App/Zooniverse/*
+lightning_logs
+logs/**
4 changes: 2 additions & 2 deletions README.md
@@ -69,5 +69,5 @@ The output shapefiles for (4) contain the predicted nest polygon, site, date and

## Logs

-The logs are located in `/blue/ewhite/everglades/EvergladesTools/logs`
-Checkout the current cronjob in `/blue/ewhite/everglades/EvergladesTools/everglades_workflow.sh`
+The logs are located in `/blue/ewhite/everglades/everwatch-workflow/logs`
+Checkout the current cronjob in `/blue/ewhite/everglades/everwatch-workflow/everglades_workflow.sh`
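For a quick check after the rename, the relocated logs and the cron entry that launches the workflow can be inspected from a login node. This is a minimal sketch, assuming the job is scheduled from the submitting user's crontab and that the log file names match the `#SBATCH --output`/`--error` settings in `everglades_workflow.sh` further down:

```bash
# Tail the relocated workflow logs
tail -n 50 /blue/ewhite/everglades/everwatch-workflow/logs/everglades_workflow.out
tail -n 50 /blue/ewhite/everglades/everwatch-workflow/logs/everglades_workflow.err

# Confirm the cron entry that calls everglades_workflow.sh
# (assumes a per-user crontab; adjust if the site schedules it elsewhere)
crontab -l | grep everglades_workflow.sh
```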
2 changes: 1 addition & 1 deletion SLURM/mapbox_upload.sbatch
@@ -12,5 +12,5 @@

conda activate DeepTreeAttention

-cd /home/b.weinstein/EvergladesTools/Zooniverse
+cd /home/b.weinstein/everwatch-workflow
python upload_mapbox.py
26 changes: 13 additions & 13 deletions Snakefile
@@ -2,7 +2,7 @@ import glob
import os
import tools

-configfile: "/blue/ewhite/everglades/EvergladesTools/snakemake_config.yml"
+configfile: "/blue/ewhite/everglades/everwatch-workflow/snakemake_config.yml"

# Check if the test environment variable exists
test_env_name = "TEST_ENV"
@@ -32,8 +32,8 @@ def flights_in_year_site(wildcards):

rule all:
input:
f"{working_dir}/EvergladesTools/App/Zooniverse/data/PredictedBirds.zip",
f"{working_dir}/EvergladesTools/App/Zooniverse/data/nest_detections_processed.zip",
f"{working_dir}/everwatch-workflow/App/Zooniverse/data/PredictedBirds.zip",
f"{working_dir}/everwatch-workflow/App/Zooniverse/data/nest_detections_processed.zip",
expand(f"{working_dir}/predictions/{{year}}/{{site}}/{{flight}}_projected.shp",
zip, site=SITES, year=YEARS, flight=FLIGHTS),
expand(f"{working_dir}/processed_nests/{{year}}/{{site}}/{{site}}_{{year}}_processed_nests.shp",
@@ -49,7 +49,7 @@ rule project_mosaics:
projected=f"{working_dir}/projected_mosaics/{{year}}/{{site}}/{{flight}}_projected.tif",
webmercator=f"{working_dir}/projected_mosaics/webmercator/{{year}}/{{site}}/{{flight}}_projected.tif"
conda:
"EvergladesTools"
"everwatch"
shell:
"python project_orthos.py {input.orthomosaic}"

@@ -59,7 +59,7 @@ rule predict_birds:
output:
f"{working_dir}/predictions/{{year}}/{{site}}/{{flight}}_projected.shp"
conda:
"EvergladesTools"
"everwatch"
resources:
gpu=1
shell:
@@ -71,7 +71,7 @@ rule combine_birds_site_year:
output:
f"{working_dir}/predictions/{{year}}/{{site}}/{{site}}_{{year}}_combined.shp"
conda:
"EvergladesTools"
"everwatch"
shell:
"python combine_birds_site_year.py {input}"

@@ -80,9 +80,9 @@ rule combine_predicted_birds:
expand(f"{working_dir}/predictions/{{year}}/{{site}}/{{site}}_{{year}}_combined.shp",
zip, site=SITES_SY, year=YEARS_SY)
output:
f"{working_dir}/EvergladesTools/App/Zooniverse/data/PredictedBirds.zip"
f"{working_dir}/everwatch-workflow/App/Zooniverse/data/PredictedBirds.zip"
conda:
"EvergladesTools"
"everwatch"
shell:
"python combine_bird_predictions.py {input}"

@@ -92,7 +92,7 @@ rule detect_nests:
output:
f"{working_dir}/detected_nests/{{year}}/{{site}}/{{site}}_{{year}}_detected_nests.shp"
conda:
"EvergladesTools"
"everwatch"
shell:
"python nest_detection.py {input}"

@@ -102,7 +102,7 @@ rule process_nests:
output:
f"{working_dir}/processed_nests/{{year}}/{{site}}/{{site}}_{{year}}_processed_nests.shp"
conda:
"EvergladesTools"
"everwatch"
shell:
"python process_nests.py {input}"

@@ -111,9 +111,9 @@ rule combine_nests:
expand(f"{working_dir}/processed_nests/{{year}}/{{site}}/{{site}}_{{year}}_processed_nests.shp",
zip, site=SITES_SY, year=YEARS_SY)
output:
f"{working_dir}/EvergladesTools/App/Zooniverse/data/nest_detections_processed.zip"
f"{working_dir}/everwatch-workflow/App/Zooniverse/data/nest_detections_processed.zip"
conda:
"EvergladesTools"
"everwatch"
shell:
"python combine_nests.py {input}"

@@ -133,6 +133,6 @@ rule upload_mapbox:
output:
touch(f"{working_dir}/mapbox/last_uploaded/{{year}}/{{site}}/{{flight}}.mbtiles")
conda:
"EvergladesTools"
"everwatch"
shell:
"python upload_mapbox.py {input}"
15 changes: 14 additions & 1 deletion combine_bird_predictions.py
@@ -20,16 +20,29 @@ def combine(paths):
working_dir = tools.get_working_dir()
predictions_path = f"{working_dir}/predictions/"
output_path = f"{working_dir}/EvergladesTools/App/Zooniverse/data"
+output_zip = os.path.join(output_path, "PredictedBirds.zip")

predictions = sys.argv[1:]
# write output to zooniverse app
df = combine(predictions)
df.to_file(os.path.join(output_path, "PredictedBirds.shp"))

# Zip the shapefile for storage efficiency
-with ZipFile(os.path.join(output_path, "PredictedBirds.zip"), 'w', ZIP_DEFLATED) as zip:
+with ZipFile(output_zip, 'w', ZIP_DEFLATED) as zip:
for ext in ['cpg', 'dbf', 'prj', 'shp', 'shx']:
focal_file = os.path.join(output_path, f"PredictedBirds.{ext}")
file_name = os.path.basename(focal_file)
zip.write(focal_file, arcname=file_name)
os.remove(focal_file)

+# Copy PredictedBirds.zip to everglades-forecast-web repo
+dest_path = "/blue/ewhite/everglades/everglades-forecast-web/data"
+if not os.path.exists(dest_path):
+os.makedirs(dest_path)
+dest_file = os.path.join(dest_path, "PredictedBirds.zip")
+
+if os.path.exists(output_zip):
+shutil.copy(output_zip, dest_file)
+print(f"{output_zip} copied to {dest_file}.")
+else:
+print(f"{output_zip} file does not exist.")
2 changes: 1 addition & 1 deletion combine_nests.py
@@ -55,7 +55,7 @@ def combine(paths):
if __name__ == "__main__":
working_dir = tools.get_working_dir()
nests_path = f"{working_dir}/processed_nests/"
output_path = f"{working_dir}/EvergladesTools/App/Zooniverse/data/"
output_path = f"{working_dir}/everwatch-workflow/App/Zooniverse/data/"

nest_files = sys.argv[1:]
# write output to zooniverse app
2 changes: 1 addition & 1 deletion environment.yml
@@ -1,4 +1,4 @@
-name: EvergladesTools
+name: everwatch
channels:
- defaults
- conda-forge
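Because the conda environment is renamed from `EvergladesTools` to `everwatch`, existing installs need the environment rebuilt (or a new one created alongside the old) so that the `conda activate everwatch` and `source activate everwatch` calls in the updated scripts resolve. A minimal sketch, assuming `environment.yml` sits at the root of the checked-out repository:

```bash
# Rebuild the renamed conda environment from the updated environment.yml
cd /blue/ewhite/everglades/everwatch-workflow
ml conda                              # load the conda module, as on the cluster scripts
conda env create -f environment.yml   # env name "everwatch" comes from the name: field
conda activate everwatch

# Optionally drop the old environment once the new one is verified
# conda env remove -n EvergladesTools
```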
12 changes: 6 additions & 6 deletions everglades_dryrun_workflow.sh
@@ -1,25 +1,25 @@
#!/bin/bash
-#SBATCH --job-name=everglades_workflow
-#SBATCH [email protected]
+#SBATCH --job-name=everwatch_workflow_dryrun
+#SBATCH [email protected]
#SBATCH --mail-type=FAIL
#SBATCH --gpus=a100:1
#SBATCH --cpus-per-task=3
#SBATCH --mem=200gb
#SBATCH --time=01:30:00
#SBATCH --partition=gpu
-#SBATCH --output=/blue/ewhite/everglades/EvergladesTools/logs/everglades_dryrun_workflow.out
-#SBATCH --error=/blue/ewhite/everglades/EvergladesTools/logs/everglades_dryrun_workflow.err
+#SBATCH --output=/blue/ewhite/everglades/everwatch-workflow/logs/everglades_dryrun_workflow.out
+#SBATCH --error=/blue/ewhite/everglades/everwatch-workflow/logs/everglades_dryrun_workflow.err

echo "INFO: [$(date "+%Y-%m-%d %H:%M:%S")] Starting everglades workflow on $(hostname) in $(pwd)"

echo "INFO [$(date "+%Y-%m-%d %H:%M:%S")] Loading required modules"
source /etc/profile.d/modules.sh

ml conda
-conda activate EvergladesTools
+conda activate everwatch
export TEST_ENV=True

-cd /blue/ewhite/everglades/EvergladesTools/Zooniverse
+cd /blue/ewhite/everglades/everwatch-workflow/

snakemake --unlock
echo "INFO [$(date "+%Y-%m-%d %H:%M:%S")] Starting Snakemake pipeline"
8 changes: 4 additions & 4 deletions everglades_workflow.sh
@@ -7,18 +7,18 @@
#SBATCH --mem=600gb
#SBATCH --time=80:00:00
#SBATCH --partition=gpu
-#SBATCH --output=/blue/ewhite/everglades/EvergladesTools/logs/everglades_workflow.out
-#SBATCH --error=/blue/ewhite/everglades/EvergladesTools/logs/everglades_workflow.err
+#SBATCH --output=/blue/ewhite/everglades/everwatch-workflow/logs/everglades_workflow.out
+#SBATCH --error=/blue/ewhite/everglades/everwatch-workflow/logs/everglades_workflow.err

echo "INFO: [$(date "+%Y-%m-%d %H:%M:%S")] Starting everglades workflow on $(hostname) in $(pwd)"

echo "INFO [$(date "+%Y-%m-%d %H:%M:%S")] Loading required modules"
source /etc/profile.d/modules.sh

ml conda
-conda activate EvergladesTools
+conda activate everwatch

-cd /blue/ewhite/everglades/EvergladesTools/Zooniverse
+cd /blue/ewhite/everglades/everwatch-workflow/

snakemake --unlock
echo "INFO [$(date "+%Y-%m-%d %H:%M:%S")] Starting Snakemake pipeline"
8 changes: 4 additions & 4 deletions nest_detection.sh
@@ -8,11 +8,11 @@ sbatch <<EOT
#SBATCH --cpus-per-task=8
#SBATCH --mem=62GB
#SBATCH --time=72:00:00 #Time limit hrs:min:sec
-#SBATCH --output=/blue/ewhite/everglades/EvergladesTools/Zooniverse/logs/nest_detector_%j.out # Standard output and error log
-#SBATCH --error=/blue/ewhite/everglades/EvergladesTools/Zooniverse/logs/nest_detector_%j.err
+#SBATCH --output=/blue/ewhite/everglades/everwatch-workflow/logs/nest_detector_%j.out # Standard output and error log
+#SBATCH --error=/blue/ewhite/everglades/everwatch-workflow/logs/nest_detector_%j.err
#SBATCH --partition=hpg-default
ulimit -c 0
-cd /blue/ewhite/everglades/EvergladesTools/Zooniverse
-source activate EvergladesTools
+cd /blue/ewhite/everglades/everwatch-workflow
+source activate everwatch
python nest_detection.py
EOT
8 changes: 4 additions & 4 deletions predict.sh
@@ -8,12 +8,12 @@ sbatch <<EOT
#SBATCH --cpus-per-task=8
#SBATCH --mem=62GB
#SBATCH --time=96:00:00 #Time limit hrs:min:sec
-#SBATCH --output=/blue/ewhite/everglades/EvergladesTools/Zooniverse/logs/bird_detector_%j.out # Standard output and error log
-#SBATCH --error=/blue/ewhite/everglades/EvergladesTools/Zooniverse/logs/bird_detector_%j.err
+#SBATCH --output=/blue/ewhite/everglades/everwatch-workflow/logs/bird_detector_%j.out # Standard output and error log
+#SBATCH --error=/blue/ewhite/everglades/everwatch-workflow/logs/bird_detector_%j.err
#SBATCH --partition=gpu
#SBATCH --gpus=1
ulimit -c 0
-cd /blue/ewhite/everglades/EvergladesTools/Zooniverse
-source activate EvergladesTools
+cd /blue/ewhite/everglades/everwatch-workflow
+source activate everwatch
python predict.py
EOT