Skip to content

Commit

Permalink
Merge pull request #209 from MICA-MNI/development
Browse files Browse the repository at this point in the history
Version 0.2.3
  • Loading branch information
ReinderVosDeWael authored Aug 2, 2021
2 parents b414527 + 7cc9eac commit bedf96f
Show file tree
Hide file tree
Showing 26 changed files with 592 additions and 132 deletions.
4 changes: 4 additions & 0 deletions .github/labeler.yml
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,14 @@ Documentation:
- docs/**/*
- ./*.rst
- ./*.md
- LICENSE
- .readthedocs.yml

Python:
- brainstat/*
- brainstat/**/*
- setup.py
- requirements.txt

MATLAB:
- brainstat_matlab/*
Expand Down
7 changes: 2 additions & 5 deletions .github/workflows/python_unittests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@ on:
push:
branches:
- master
- development
pull_request:

jobs:
Expand All @@ -13,7 +12,7 @@ jobs:
strategy:
fail-fast: false
matrix:
python-version: [3.6, 3.7, 3.8, 3.9]
python-version: [3.7, 3.8, 3.9]
os: [ubuntu-latest, windows-latest, macos-latest]

runs-on: ${{ matrix.os }}
Expand All @@ -29,9 +28,7 @@ jobs:
run: |
[[ -z $(git remote show origin | grep "Fetch URL:" | grep git@github.com:MICA-MNI/BrainStat.git) ]] && git config remote.upstream.fetch refs/heads/*:refs/remotes/upstream/* || git config remote.origin.fetch refs/heads/*:refs/origin/upstream/*
git fetch origin test-data-2.0
python -m pip install --upgrade pip
pip install pytest mypy gitpython
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
python -m pip install -e .[dev]
- name: Test with pytest
shell: bash
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/tagged_release.yml
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ jobs:
- name: Modify MATLAB version and path
run: |
current_release=$(cat setup.py | grep "version=" | grep -Eo "[0-9]\.[0-9]\.[0-9]")
current_release=$(cat brainstat/__init__.py | grep "__version__ =" | grep -Eo "[0-9]\.[0-9]\.[0-9]")
brainstat_dir=$(pwd)
sed -i "s:<param\.version>.*</param\.version>:<param\.version>${current_release}</param\.version>:" .github/matlab_toolbox/BrainStat.prj
sed -i "s:BRAINSTAT_DIR/:${brainstat_dir}/:g" .github/matlab_toolbox/BrainStat.prj
Expand Down
2 changes: 1 addition & 1 deletion CONTRIBUTING.rst
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
Contributing to BrainStat
====================
=========================

.. start-marker-cont
Expand Down
377 changes: 374 additions & 3 deletions LICENSE

Large diffs are not rendered by default.

11 changes: 11 additions & 0 deletions brainstat/__init__.py
Original file line number Diff line number Diff line change
@@ -1 +1,12 @@
"""Neuroimaging statistics toolbox."""
import sys
import warnings

# Package version; release tooling greps this file for "__version__ =".
__version__ = "0.2.3"

# Compare the full (major, minor) pair rather than only the minor version:
# checking sys.version_info[1] == 6 alone would also match e.g. Python 2.6
# or a future 4.6, not just the 3.6 release being deprecated here.
if sys.version_info[:2] == (3, 6):
    # Force the warning to be shown every time; DeprecationWarnings are
    # ignored by default outside of __main__.
    warnings.simplefilter("always", DeprecationWarning)
    warnings.warn(
        "Support for Python3.6 has been dropped. Future versions may not install on Python3.6.",
        DeprecationWarning,
    )
6 changes: 3 additions & 3 deletions brainstat/context/genetics.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,14 @@
"""Genetic decoding using abagen."""
from typing import List, Optional, Tuple, Union
from typing import List, Optional, Sequence, Union

import numpy as np
import pandas as pd
from abagen import check_atlas, get_expression_data


def surface_genetic_expression(
labels: Union[List[str], np.ndarray],
surfaces: Union[List[str], Tuple[str, ...]] = None,
labels: Union[Sequence[str], np.ndarray],
surfaces: Optional[Union[str, Sequence[str]]] = None,
space: Optional[str] = None,
*,
atlas_info: str = None,
Expand Down
66 changes: 39 additions & 27 deletions brainstat/context/meta_analysis.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,38 +3,44 @@
import os
import tempfile
from pathlib import Path
from typing import Optional, Sequence, Tuple, Union

import nimare
import numpy as np
import pandas as pd
from brainspace.vtk_interface.wrappers.data_object import BSPolyData
from neurosynth.base.dataset import Dataset, download
from nibabel.nifti1 import NiftiImage
from nilearn.datasets import load_mni152_brain_mask
from nilearn.input_data import NiftiMasker

from .utils import multi_surface_to_volume


def surface_decode_nimare(
pial,
white,
stat_labels,
mask_labels,
interpolation="linear",
data_dir=None,
feature_group=None,
features=None,
):
pial: Union[str, BSPolyData, Sequence[Union[str, BSPolyData]]],
white: Union[str, BSPolyData, Sequence[Union[str, BSPolyData]]],
stat_labels: Union[str, np.ndarray, Sequence[Union[str, np.ndarray]]],
mask_labels: Union[str, np.ndarray, Sequence[Union[str, np.ndarray]]],
interpolation: str = "linear",
data_dir: str = None,
feature_group: str = None,
features: Sequence[str] = None,
) -> pd.DataFrame:
"""Meta-analytic decoding of surface maps using NeuroSynth or Brainmap.
Parameters
----------
pial : str, BSPolyData, list
pial : str, BSPolyData, sequence of str or BSPolyData
Path of a pial surface file, BSPolyData of a pial surface or a list
containing multiple of the aforementioned.
white : str, BSPolyData, list
white : str, BSPolyData, sequence of str or BSPolyData
Path of a white matter surface file, BSPolyData of a pial surface or a
list containing multiple of the aforementioned.
stat_labels : str, numpy.ndarray, list
stat_labels : str, numpy.ndarray, sequence of str or numpy.ndarray
Path to a label file for the surfaces, numpy array containing the
labels, or a list containing multiple of the aforementioned.
mask_labels : str, numpy.ndarray, list
mask_labels : str, numpy.ndarray, sequence of str of or numpy.ndarray
Path to a mask file for the surfaces, numpy array containing the
mask, or a list containing multiple of the aforementioned. If None
all vertices are included in the mask. Defaults to None.
Expand Down Expand Up @@ -66,20 +72,20 @@ def surface_decode_nimare(
mask_image = tempfile.NamedTemporaryFile(suffix=".nii.gz")

multi_surface_to_volume(
pial,
white,
mni152,
stat_labels,
stat_image.name,
pial=pial,
white=white,
volume_template=mni152,
output_file=stat_image.name,
labels=stat_labels,
interpolation=interpolation,
)
multi_surface_to_volume(
pial,
white,
mni152,
mask_labels,
mask_image.name,
interpolation="nearest",
pial=pial,
white=white,
volume_template=mni152,
output_file=mask_image.name,
labels=mask_labels,
interpolation=interpolation,
)

dataset = fetch_nimare_dataset(data_dir, mask=mask_image.name, keep_neurosynth=True)
Expand All @@ -96,7 +102,11 @@ def surface_decode_nimare(
return decoder.transform(stat_image.name)


def fetch_nimare_dataset(data_dir, mask=None, keep_neurosynth=True):
def fetch_nimare_dataset(
data_dir: str,
mask: Optional[Union[str, NiftiImage, NiftiMasker]] = None,
keep_neurosynth: bool = True,
) -> str:
"""Downloads the nimare dataset and fetches its path.
Parameters
Expand Down Expand Up @@ -126,7 +136,7 @@ def fetch_nimare_dataset(data_dir, mask=None, keep_neurosynth=True):
D = tempfile.TemporaryDirectory()
ns_dir = D.name

ns_data_file, ns_feature_file = fetch_neurosynth_dataset(ns_dir, return_pkl=False)
ns_data_file, ns_feature_file = fetch_neurosynth_dataset(ns_dir, return_pkl=False) # type: ignore

ns_dict = nimare.io.convert_neurosynth_to_dict(
ns_data_file, annotations_file=ns_feature_file
Expand All @@ -138,7 +148,9 @@ def fetch_nimare_dataset(data_dir, mask=None, keep_neurosynth=True):
return dset


def fetch_neurosynth_dataset(data_dir, return_pkl=True):
def fetch_neurosynth_dataset(
data_dir: str, return_pkl: bool = True
) -> Union[Tuple[str, str], str]:
"""Downloads the Neurosynth dataset
Parameters
Expand Down
20 changes: 10 additions & 10 deletions brainstat/context/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
import os
import shutil
import tempfile
from typing import List, Tuple, Union
from typing import List, Sequence, Tuple, Union

import nibabel as nib
import numpy as np
Expand All @@ -26,7 +26,7 @@ def multi_surface_to_volume(
white: valid_surfaces,
volume_template: Union[str, nib.nifti1.Nifti1Image],
output_file: str,
labels: Union[str, np.ndarray, List[Union[np.ndarray, str]]],
labels: Union[str, np.ndarray, Sequence[Union[np.ndarray, str]]],
interpolation: str = "nearest",
) -> None:
"""Interpolates multiple surfaces to the volume.
Expand All @@ -39,14 +39,14 @@ def multi_surface_to_volume(
white : str, BSPolyData, list, tuple
Path of a white matter surface file, BSPolyData of a pial surface or a
list containing multiple of the aforementioned.
labels : str, numpy.ndarray, list, tuple
Path to a label file for the surfaces, numpy array containing the
labels, or a list containing multiple of the aforementioned.
output_file: str
Path to the output file, must end in .nii or .nii.gz.
volume_template : str, nibabel.nifti1.Nifti1Image
Path to a nifti file to use as a template for the surface to volume
procedure, or a loaded NIfTI image.
output_file: str
Path to the output file, must end in .nii or .nii.gz.
labels : str, numpy.ndarray, list, tuple
Path to a label file for the surfaces, numpy array containing the
labels, or a list containing multiple of the aforementioned.
interpolation : str
Either 'nearest' for nearest neighbor interpolation, or 'linear'
for trilinear interpolation, defaults to 'nearest'.
Expand All @@ -60,14 +60,14 @@ def multi_surface_to_volume(

# Deal with variety of ways to provide input.
if type(pial) is not type(white):
ValueError("Pial and white must be of the same type.")
raise ValueError("Pial and white must be of the same type.")

pial_list = _input_to_list(pial)
white_list = _input_to_list(white)
labels_list = _input_to_list(labels)

if len(pial_list) is not len(white):
ValueError("The same number of pial and white surfces must be provided.")
raise ValueError("The same number of pial and white surfces must be provided.")

for i in range(len(pial_list)):
if not isinstance(pial_list[i], BSPolyData):
Expand Down Expand Up @@ -154,7 +154,7 @@ def load_mesh_labels(label_file: str, as_int: bool = True) -> np.ndarray:
elif label_file.endswith(".csv"):
labels = np.loadtxt(label_file)
else:
ValueError("Unrecognized label file type.")
raise ValueError("Unrecognized label file type.")

if as_int:
labels = np.round(labels).astype(int)
Expand Down
8 changes: 4 additions & 4 deletions brainstat/datasets/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,9 +12,9 @@


def fetch_parcellation(
template: str,
atlas: str,
n_regions: int,
template: str = "fsaverage5",
join: bool = True,
seven_networks: bool = True,
data_dir: Optional[str] = None,
Expand All @@ -23,15 +23,15 @@ def fetch_parcellation(
Parameters
----------
template : str,
The surface template. Valid values are "fsaverage", "fsaverage5",
"fsaverage6", "fslr32k", by default "fsaverage5".
atlas : str
Name of the atlas. Valid names are "schaefer", "cammoun".
n_regions : int
Number of regions of the requested atlas. Valid values for the "schaefer " atlas are
100, 200, 300, 400, 500, 600, 800, 1000. Valid values for the cammoun atlas are 33,
60, 125, 250, 500.
template : str, optional
The surface template. Valid values are "fsaverage", "fsaverage5",
"fsaverage6", "fslr32k", by default "fsaverage5".
join : bool, optional
If true, returns parcellation as a single array, if false, returns an
array per hemisphere, by default True.
Expand Down
Loading

0 comments on commit bedf96f

Please sign in to comment.