diff --git a/.github/workflows/conda-build.yml b/.github/workflows/conda-build.yml
index 1509d1eb..70896595 100644
--- a/.github/workflows/conda-build.yml
+++ b/.github/workflows/conda-build.yml
@@ -9,21 +9,20 @@ on:
     branches: [master]
 
 jobs:
-  build-linux:
-    runs-on: ubuntu-latest
-
+  build:
+    strategy:
+      fail-fast: false
+      matrix:
+        os: [macos-latest, windows-latest, ubuntu-latest]
+    runs-on: ${{ matrix.os }}
     steps:
       - uses: actions/checkout@v2
-      - name: Set pytorch-3dunet version
-        run: echo "RELEASE_VERSION=$(date +%Y%m%d%H%M%S)" >> $GITHUB_ENV
-      - name: Print pytorch-3dunet version
-        run: echo $RELEASE_VERSION
       - uses: conda-incubator/setup-miniconda@v2
         with:
          auto-activate-base: true
          activate-environment: ""
-          channels: local,conda-forge,defaults
-          channel-priority: false
+          channel-priority: flexible
+          miniforge-variant: Miniforge3
       - shell: bash -l {0}
         run: conda info --envs
       - name: Build pytorch-3dunet
@@ -33,7 +32,7 @@ jobs:
           conda build -c pytorch -c nvidia -c conda-forge conda-recipe
       - name: Create pytorch3dunet env
         run: |
-          conda create -n pytorch3dunet -c pytorch -c nvidia -c conda-forge pytorch-3dunet pytest
+          conda create -n pytorch3dunet -c local -c pytorch -c nvidia -c conda-forge pytorch-3dunet pytest
       - name: Run pytest
         shell: bash -l {0}
         run: |
@@ -41,7 +40,7 @@ jobs:
           pytest
           conda deactivate
       - name: Deploy on conda
-        if: ${{ startsWith( github.ref, 'refs/tags/') && success() }}
+        if: ${{ matrix.os == 'ubuntu-latest' && startsWith( github.ref, 'refs/tags/') && success() }}
         env:
           ANACONDA_SECRET: ${{ secrets.ANACONDA_TOKEN }}
         shell: bash -l {0}
diff --git a/conda-recipe/meta.yaml b/conda-recipe/meta.yaml
index 43605e71..3c903671 100644
--- a/conda-recipe/meta.yaml
+++ b/conda-recipe/meta.yaml
@@ -1,6 +1,8 @@
+{% set setup_py_data = load_setup_py_data() %}
+
 package:
   name: pytorch-3dunet
-  version: {{ RELEASE_VERSION }}
+  version: {{ setup_py_data['version'] }}
 
 source:
   path: ..
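For context, load_setup_py_data() is conda-build's Jinja helper that parses the project's setup.py when the recipe is rendered, so the recipe version now comes from the package metadata itself instead of the timestamp previously exported as RELEASE_VERSION. A minimal sketch of the setup.py fields it reads follows; the version string is a placeholder, not the project's actual version.

# Sketch of the setup.py fields that load_setup_py_data() exposes to meta.yaml.
# The version value is a placeholder; pytorch-3dunet's real setup.py may differ.
from setuptools import setup, find_packages

setup(
    name="pytorch-3dunet",
    version="0.0.0",  # rendered in meta.yaml as {{ setup_py_data['version'] }}
    packages=find_packages(exclude=["tests"]),
)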
diff --git a/resources/3DUnet_multiclass/train_config.yaml b/resources/3DUnet_multiclass/train_config.yaml
index 86a3efae..4d4ca0bc 100644
--- a/resources/3DUnet_multiclass/train_config.yaml
+++ b/resources/3DUnet_multiclass/train_config.yaml
@@ -129,7 +129,7 @@ loaders:
       # convert target volume to binary mask
       - name: ToTensor
         expand_dims: false
-        dtype: long
+        dtype: int64
 
   # configuration of the val loader
   val:
@@ -158,4 +158,4 @@ loaders:
     label:
       - name: ToTensor
         expand_dims: false
-        dtype: long
+        dtype: int64
diff --git a/tests/resources/config_train.yml b/tests/resources/config_train.yml
index 7cd3a044..cf6f031d 100644
--- a/tests/resources/config_train.yml
+++ b/tests/resources/config_train.yml
@@ -54,8 +54,8 @@ loaders:
       - name: ToTensor
         # do not expand dims for cross-entropy loss
         expand_dims: false
-        # cross-entropy loss requires target to be of type 'long'
-        dtype: 'long'
+        # cross-entropy loss requires target to be of type 'int64'
+        dtype: 'int64'
     weight:
       - name: ToTensor
         expand_dims: false
@@ -76,7 +76,7 @@ loaders:
     label:
       - name: ToTensor
         expand_dims: false
-        dtype: 'long'
+        dtype: 'int64'
     weight:
       - name: ToTensor
         expand_dims: false
\ No newline at end of file
diff --git a/tests/resources/config_train_2d.yml b/tests/resources/config_train_2d.yml
index 80a41378..2ef38557 100644
--- a/tests/resources/config_train_2d.yml
+++ b/tests/resources/config_train_2d.yml
@@ -76,8 +76,8 @@ loaders:
       - name: ToTensor
         # do not expand dims for cross-entropy loss
         expand_dims: false
-        # cross-entropy loss requires target to be of type 'long'
-        dtype: 'long'
+        # cross-entropy loss requires target to be of type 'int64'
+        dtype: 'int64'
     weight:
       - name: ToTensor
         expand_dims: false
@@ -99,7 +99,7 @@ loaders:
     label:
      - name: ToTensor
        expand_dims: false
-       dtype: 'long'
+       dtype: 'int64'
     weight:
       - name: ToTensor
         expand_dims: false
\ No newline at end of file
diff --git a/tests/test_dataset.py b/tests/test_dataset.py
index e0107771..46e5d519 100644
--- a/tests/test_dataset.py
+++ b/tests/test_dataset.py
@@ -1,4 +1,5 @@
 import os
+from pathlib import Path
 from tempfile import NamedTemporaryFile
 
 import h5py
@@ -75,20 +76,19 @@ def test_lazy_hdf5_dataset(self, transformer_config):
         assert np.all(visit_raw)
         assert np.all(visit_label)
 
-    def test_augmentation(self, transformer_config):
+    def test_augmentation(self, transformer_config, tmpdir):
         raw = np.random.rand(32, 96, 96)
         # assign raw to label's channels for ease of comparison
         label = np.stack([raw for _ in range(3)])
         # create temporary h5 file
-        tmp_file = NamedTemporaryFile()
-        tmp_path = tmp_file.name
-        with h5py.File(tmp_path, 'w') as f:
+        tmp_file = tmpdir / "test.h5"
+        with h5py.File(tmp_file, 'w') as f:
             f.create_dataset('raw', data=raw)
             f.create_dataset('label', data=label)
 
         # set phase='train' in order to execute the train transformers
         phase = 'train'
-        dataset = StandardHDF5Dataset(tmp_path, phase=phase,
+        dataset = StandardHDF5Dataset(tmp_file, phase=phase,
                                       slice_builder_config=_slice_builder_conf((16, 64, 64), (8, 32, 32)),
                                       transformer_config=transformer_config[phase]['transformer'])
 
@@ -120,19 +120,19 @@ def test_traverse_file_paths(self, tmpdir):
 
         assert expected_files == actual_files
 
-    def test_halo(self):
+    def test_halo(self, tmpdir: Path):
         halo_shape = (1, 2, 3)
 
         # create temporary h5 file
         raw = np.random.rand(32, 96, 96)
-        tmp_file = NamedTemporaryFile()
-        tmp_path = tmp_file.name
-        with h5py.File(tmp_path, 'w') as f:
+        tmp_file = tmpdir / "test.h5"
+
+        with h5py.File(tmp_file, 'w') as f:
             f.create_dataset('raw', data=raw)
 
         # halo only implemented with test phase
         phase = 'test'
-        dataset = StandardHDF5Dataset(tmp_path, phase=phase,
+        dataset = StandardHDF5Dataset(tmp_file, phase=phase,
                                       slice_builder_config=_slice_builder_conf((16, 64, 64), (8, 32, 32), halo_shape),
                                       transformer_config=_transformer_test_conf())
         data_loader = DataLoader(dataset, batch_size=1, num_workers=4, shuffle=True,
diff --git a/tests/test_transforms.py b/tests/test_transforms.py
index cc182705..3d977ca0 100644
--- a/tests/test_transforms.py
+++ b/tests/test_transforms.py
@@ -5,7 +5,7 @@
 
 
 class TestTransforms:
-    config = {'dtype': 'long'}
+    config = {'dtype': 'int64'}
 
     def test_random_label_to_boundary(self):
         size = 20
@@ -62,7 +62,7 @@ def test_relabel(self):
     def test_BaseTransformer(self):
         config = {
             'raw': [{'name': 'Standardize'}, {'name': 'ToTensor', 'expand_dims': True}],
-            'label': [{'name': 'ToTensor', 'expand_dims': False, 'dtype': 'long'}],
+            'label': [{'name': 'ToTensor', 'expand_dims': False, 'dtype': 'int64'}],
             'weight': [{'name': 'ToTensor', 'expand_dims': False}]
         }
         base_config = {'mean': 0, 'std': 1}
@@ -73,7 +73,7 @@ def test_BaseTransformer(self):
         assert raw_transforms[1].expand_dims
         label_transforms = transformer.label_transform().transforms
         assert not label_transforms[0].expand_dims
-        assert label_transforms[0].dtype == 'long'
+        assert label_transforms[0].dtype == 'int64'
 
         weight_transforms = transformer.weight_transform().transforms
         assert not weight_transforms[0].expand_dims
@@ -89,7 +89,7 @@ def test_StandardTransformer(self):
             'label': [
                 {'name': 'RandomFlip'},
                 {'name': 'RandomRotate90'},
-                {'name': 'ToTensor', 'expand_dims': False, 'dtype': 'long'}
+                {'name': 'ToTensor', 'expand_dims': False, 'dtype': 'int64'}
             ]
         }
         base_config = {'mean': 0, 'std': 1}
@@ -116,7 +116,7 @@ def test_AnisotropicRotationTransformer(self):
                 {'name': 'RandomFlip'},
                 {'name': 'RandomRotate90'},
                 {'name': 'RandomRotate', 'angle_spectrum': 17, 'axes': [[2, 1]]},
-                {'name': 'ToTensor', 'expand_dims': False, 'dtype': 'long'}
+                {'name': 'ToTensor', 'expand_dims': False, 'dtype': 'int64'}
             ]
         }
         base_config = {'mean': 0, 'std': 1}
@@ -145,7 +145,7 @@ def test_LabelToBoundaryTransformer(self):
                 {'name': 'RandomRotate90'},
                 {'name': 'RandomRotate', 'angle_spectrum': 17, 'axes': [[2, 1]], 'mode': 'reflect'},
                 {'name': 'LabelToAffinities', 'offsets': [2, 4, 6, 8]},
-                {'name': 'ToTensor', 'expand_dims': False, 'dtype': 'long'}
+                {'name': 'ToTensor', 'expand_dims': False, 'dtype': 'int64'}
             ]
         }
         base_config = {'mean': 0, 'std': 1}
@@ -179,7 +179,7 @@ def test_RandomLabelToBoundaryTransformer(self):
                 {'name': 'RandomRotate90'},
                 {'name': 'RandomRotate', 'angle_spectrum': 17, 'axes': [[2, 1]], 'mode': 'reflect'},
                 {'name': 'RandomLabelToAffinities', 'max_offset': 4},
-                {'name': 'ToTensor', 'expand_dims': False, 'dtype': 'long'}
+                {'name': 'ToTensor', 'expand_dims': False, 'dtype': 'int64'}
             ]
         }
         base_config = {'mean': 0, 'std': 1}
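A short sketch (not part of the patch) of what the long -> int64 rename in the YAML configs amounts to on the PyTorch side: torch.long is simply an alias for torch.int64, and nn.CrossEntropyLoss expects class-index targets with exactly that dtype, so the explicit NumPy-style name describes the same tensors. The shapes and values below are arbitrary examples.

# 'long' and 'int64' name the same PyTorch dtype; cross-entropy targets must use it.
import torch
import torch.nn as nn

assert torch.long == torch.int64  # 'long' is just an alias for the explicit int64 name

logits = torch.randn(4, 3)                              # (batch, num_classes)
target = torch.tensor([0, 2, 1, 2], dtype=torch.int64)  # class indices
loss = nn.CrossEntropyLoss()(logits, target)
print(loss.item())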