Commit
Merge pull request #231 from MICA-MNI/development
Version 0.2.7
ReinderVosDeWael authored Sep 30, 2021
2 parents be5b896 + 2f5936a commit 111ad1c
Showing 44 changed files with 2,955 additions and 215 deletions.
30 changes: 30 additions & 0 deletions LICENSE
@@ -402,3 +402,33 @@ whether future versions of the GNU Lesser General Public License shall
apply, that proxy's public statement of acceptance of any version is
permanent authorization for you to choose that version for the
Library.

-------------------
|spider_plot_class|
-------------------

Copyright (c) 2020-2021, Moses
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the distribution

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.

2 changes: 1 addition & 1 deletion brainstat/__init__.py
@@ -2,7 +2,7 @@
import sys
import warnings

__version__ = "0.2.6"
__version__ = "0.2.7"


if sys.version_info[1] == 6:
8 changes: 7 additions & 1 deletion brainstat/_utils.py
@@ -21,7 +21,7 @@
"BRAINSTAT_DATA_DIR": BRAINSTAT_DATA_DIR,
"ABIDE_DATA_DIR": BRAINSTAT_DATA_DIR / "abide_data",
"BIGBRAIN_DATA_DIR": BRAINSTAT_DATA_DIR / "bigbrain_data",
"MICS_DATA_DIR": BRAINSTAT_DATA_DIR / "mics_data",
"GRADIENT_DATA_DIR": BRAINSTAT_DATA_DIR / "gradient_data",
"NEUROSYNTH_DATA_DIR": BRAINSTAT_DATA_DIR / "neurosynth_data",
"PARCELLATION_DATA_DIR": BRAINSTAT_DATA_DIR / "parcellation_data",
"SURFACE_DATA_DIR": BRAINSTAT_DATA_DIR / "surface_data",
@@ -42,6 +42,11 @@ def generate_data_fetcher_json() -> None:
"url": "https://box.bic.mni.mcgill.ca/s/6zKHcg9xXu5inPR/download",
},
},
"gradients": {
"margulies2016": {
"url": "https://box.bic.mni.mcgill.ca/s/LWFaQlOxUWmRlc0/download",
}
},
"neurosynth_precomputed": {
"url": "https://box.bic.mni.mcgill.ca/s/GvislmLffbCIZoI/download",
"n_files": 3228,
@@ -67,6 +72,7 @@ def generate_data_fetcher_json() -> None:
),
},
},
"yeo": {"url": "https://box.bic.mni.mcgill.ca/s/vcSXEk1wx0jN86N/download"},
},
"masks": {
"civet41k": {
29 changes: 18 additions & 11 deletions brainstat/context/genetics.py
@@ -1,15 +1,17 @@
"""Genetic decoding using abagen."""
import tempfile
from pathlib import Path
from typing import List, Optional, Sequence, Union
from typing import Optional, Sequence, Union

import nibabel as nib
import numpy as np
import pandas as pd
import nibabel as nib
from abagen import check_atlas, get_expression_data
from brainspace.mesh.mesh_io import read_surface, write_surface
from sklearn.model_selection import ParameterGrid

from brainstat._utils import data_directories
from brainstat._utils import data_directories, logger
from brainstat.datasets.base import (
_fetch_template_surface_files,
_valid_parcellations,
@@ -88,20 +90,25 @@ def surface_genetic_expression(
elif surfaces is None:
surfaces = []

temp_surfaces: List[Path] = []
for i, surface in enumerate(surfaces):
surfaces_gii = []
for surface in surfaces:
if not isinstance(surface, str) and not isinstance(surface, Path):
temp_surfaces.append(tempfile.NamedTemporaryFile(suffix=".gii"))
write_surface(surface, temp_surfaces[i].name, otype="gii")

if temp_surfaces:
surfaces = [x.name for x in temp_surfaces]
# Rather roundabout deletion of the temporary file for Windows compatibility.
try:
with tempfile.NamedTemporaryFile(suffix=".gii", delete=False) as f:
name = f.name
write_surface(surface, name, otype="gii")
surfaces_gii.append(nib.load(name))
finally:
Path(name).unlink()
else:
surfaces_gii.append(nib.load(surface))

# Use abagen to grab expression data.
print(
logger.info(
"If you use BrainStat's genetics functionality, please cite abagen (https://abagen.readthedocs.io/en/stable/citing.html)."
)
atlas = check_atlas(labels, geometry=surfaces, space=space)
atlas = check_atlas(labels, geometry=surfaces_gii, space=space)
expression = get_expression_data(
atlas,
atlas_info=atlas_info,
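The temporary-file handling introduced here, and again in meta_analysis.py below, works around the fact that on Windows a NamedTemporaryFile cannot be reopened by name while the original handle is still open. A minimal sketch of that pattern, with Path.write_bytes/read_bytes as stand-ins for write_surface and nib.load:

import tempfile
from pathlib import Path

# Create the file with delete=False so it survives closing the handle; the
# handle itself closes as soon as the "with" block exits.
with tempfile.NamedTemporaryFile(suffix=".gii", delete=False) as f:
    name = f.name

try:
    Path(name).write_bytes(b"payload")  # stand-in for write_surface(surface, name, otype="gii")
    data = Path(name).read_bytes()      # stand-in for nib.load(name)
finally:
    Path(name).unlink()                 # explicit cleanup, since delete=False was used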
48 changes: 29 additions & 19 deletions brainstat/context/meta_analysis.py
@@ -1,9 +1,9 @@
""" Meta-analytic decoding based on NiMARE """
import re
import tempfile
import urllib
import zipfile
from pathlib import Path
from tempfile import NamedTemporaryFile
from typing import Generator, Optional, Sequence, Union

import nibabel as nib
@@ -68,19 +68,24 @@ def surface_decoder(

mni152 = load_mni152_brain_mask()

stat_image = tempfile.NamedTemporaryFile(suffix=".nii.gz")
multi_surface_to_volume(
pial=pial,
white=white,
volume_template=mni152,
output_file=stat_image.name,
labels=stat_labels,
interpolation=interpolation,
)
with NamedTemporaryFile(suffix=".nii.gz", delete=False) as f:
name = f.name
try:
multi_surface_to_volume(
pial=pial,
white=white,
volume_template=mni152,
output_file=name,
labels=stat_labels,
interpolation=interpolation,
)

stat_volume = nib.load(stat_image.name)
mask = (stat_volume.get_fdata() != 0) & (mni152.get_fdata() != 0)
stat_vector = stat_volume.get_fdata()[mask]
stat_volume = nib.load(name)

mask = (stat_volume.get_fdata() != 0) & (mni152.get_fdata() != 0)
stat_vector = stat_volume.get_fdata()[mask]
finally:
Path(name).unlink()

feature_names = []
correlations = np.zeros(len(feature_files))
@@ -143,11 +148,16 @@ def _fetch_precomputed_neurosynth(data_dir: Path) -> Generator[Path, None, None]
logger.info("Downloading Neurosynth data files.")
response = urllib.request.urlopen(url)

zip_file = tempfile.NamedTemporaryFile(prefix=str(data_dir), suffix=".zip")
with open(zip_file.name, "wb") as fw:
fw.write(response.read())

with zipfile.ZipFile(zip_file.name, "r") as fr:
fr.extractall(data_dir)
# Open, close, and reopen file to deal with Windows permission issues.
with NamedTemporaryFile(prefix=str(data_dir), suffix=".zip", delete=False) as f:
name = f.name
try:
with open(name, "wb") as fw:
fw.write(response.read())

with zipfile.ZipFile(name, "r") as fr:
fr.extractall(data_dir)
finally:
(Path(name)).unlink()

return data_dir.glob("Neurosynth_TFIDF__*z_desc-consistency.nii.gz")
138 changes: 138 additions & 0 deletions brainstat/context/resting.py
@@ -0,0 +1,138 @@
from pathlib import Path
from typing import Callable, Optional, Union

import numpy as np
from brainspace.utils.parcellation import reduce_by_labels

from brainstat._typing import ArrayLike
from brainstat.datasets import fetch_gradients, fetch_parcellation


def yeo_networks_associations(
data: ArrayLike,
template: str = "fsaverage5",
seven_networks: bool = True,
data_dir: Optional[Union[str, Path]] = None,
reduction_operation: Union[str, Callable] = np.nanmean,
) -> np.ndarray:
"""Computes association
Parameters
----------
data : ArrayLike
Data to be summarized in the Yeo networks in a sample-by-feature format.
template : str, optional
Surface template. Valid values are "fsaverage5", "fsaverage", and
"fslr32k", by default "fsaverage5".
seven_networks : bool, optional
If true, uses the 7 network parcellation, otherwise uses the 17 network
parcellation, by default True.
data_dir : str, Path, optional
Data directory to store the Yeo network files, by default $HOME_DIR/brainstat_data/parcellation_data.
reduction_operation : str, callable, optional
How to summarize data. If str, options are: {‘min’, ‘max’, ‘sum’,
‘mean’, ‘median’, ‘mode’, ‘average’}. If callable, it should receive a
1D array of values, array of weights (or None) and return a scalar
value. Default is ‘mean’.
Returns
-------
np.ndarray
Summary statistic in the yeo networks.
"""
n_regions = 7 if seven_networks else 17
yeo_networks = fetch_parcellation(
template=template,
atlas="yeo",
n_regions=n_regions,
join=True,
data_dir=data_dir,
)

if np.array(data).ndim == 1:
data_2d = np.array(data)[:, None]
else:
data_2d = np.array(data)

n_features = data_2d.shape[1]

yeo_mean = np.zeros((n_regions + 1, n_features))
for i in range(n_features):
yeo_mean[:, i] = reduce_by_labels(
data_2d[:, i], yeo_networks, red_op=reduction_operation
)
return yeo_mean[1:, :]


def gradients_corr(
data: ArrayLike,
name: str = "margulies2016",
template: str = "fsaverage5",
data_dir: Optional[Union[str, Path]] = None,
overwrite: bool = False,
) -> np.ndarray:
"""Comptues the correlation of the input data with the Margulies gradients.
Parameters
----------
data : ArrayLike
The data to be compared to the Margulies gradients. Data must be in the
shape of vertices-by-features.
name : str, optional
Name of the gradients. Valid values are "margulies2016", defaults to
"margulies2016".
template : str, optional
Name of the template surface. Valid values are "fsaverage5",
"fsaverage7", "fslr32k", defaults to "fsaverage5".
data_dir : str, Path, optional
Path to the directory to store the Margulies gradient data files, by
default $HOME_DIR/brainstat_data/functional_data.
overwrite : bool, optional
If true, overwrites existing files, by default False.
Returns
-------
np.ndarray
Correlations between the input data and the Margulies gradients.
"""
gradients = fetch_gradients(
name=name, template=template, data_dir=data_dir, overwrite=overwrite
)
return _columnwise_correlate(data, gradients)


def _columnwise_correlate(x: ArrayLike, y: Optional[ArrayLike] = None) -> np.ndarray:
"""Implements MATLAB's corr function for Pearson correlations of each column
in x with each column in y. If y is not provided, computes correlation with
x onto itself.
Parameters
----------
x : ArrayLike
2D data matrix.
y : ArrayLike, optional
2D data matrix.
Implements the function R = (1/n-1) @ X_s' @ Y_s.
- n is the number of samples.
- X_s and Y_s are standardized versions of X and Y i.e. X_s = C @ X @ D.
- C is the centering matrix.
- D is a scaling matrix i.e. a diagonal matrix with std(x, axis=0) on the
diagonal.
Returns
-------
np.ndarray
Pearson correlation matrix.
"""

n_samples, n_features = np.shape(x)
centering = np.identity(n_samples) - 1 / n_samples
scaling = lambda v: np.identity(n_features) * np.std(v, axis=0, ddof=1)
centered_scaled = lambda v: centering @ v @ np.linalg.inv(scaling(v))

if y is None:
X_s = centered_scaled(x)
return 1 / (n_samples - 1) * X_s.T @ X_s
else:
return 1 / (n_samples - 1) * centered_scaled(x).T @ centered_scaled(y)
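A small check sketch for _columnwise_correlate, assuming the helper is importable from brainstat.context.resting as defined above; with a single argument it should reproduce NumPy's column-wise Pearson correlation matrix, and with two arguments entry (i, j) should be the correlation of column i of x with column j of y:

import numpy as np

from brainstat.context.resting import _columnwise_correlate

rng = np.random.default_rng(0)
x = rng.standard_normal((20, 4))  # 20 samples, 4 features
y = rng.standard_normal((20, 3))

# Self-correlation should match np.corrcoef on the columns of x.
assert np.allclose(_columnwise_correlate(x), np.corrcoef(x, rowvar=False))

# Cross-correlation of the first columns of x and y.
assert np.allclose(
    _columnwise_correlate(x, y)[0, 0], np.corrcoef(x[:, 0], y[:, 0])[0, 1]
)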
8 changes: 8 additions & 0 deletions brainstat/data_urls.json
@@ -15,6 +15,11 @@
"url": "https://box.bic.mni.mcgill.ca/s/6zKHcg9xXu5inPR/download"
}
},
"gradients": {
"margulies2016": {
"url": "https://box.bic.mni.mcgill.ca/s/LWFaQlOxUWmRlc0/download"
}
},
"masks": {
"civet164k": {
"url": "https://box.bic.mni.mcgill.ca/s/rei5HtTDvexlEPA/download"
@@ -47,6 +52,9 @@
"https://box.bic.mni.mcgill.ca/s/Y0Fmd2tIF69Mqpt/download"
]
}
},
"yeo": {
"url": "https://box.bic.mni.mcgill.ca/s/vcSXEk1wx0jN86N/download"
}
}
}
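A quick way to inspect the new "gradients" entry, assuming data_urls.json is shipped alongside the brainstat package as the file path above suggests (the path construction below is an assumption):

import json
from pathlib import Path

import brainstat

# Read the packaged URL registry and print the new Margulies gradient URL.
urls = json.loads((Path(brainstat.__file__).parent / "data_urls.json").read_text())
print(urls["gradients"]["margulies2016"]["url"])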
8 changes: 7 additions & 1 deletion brainstat/datasets/__init__.py
@@ -1,2 +1,8 @@
"""Data included with BrainStat."""
from .base import fetch_mask, fetch_parcellation, fetch_template_surface
from .base import (
fetch_gradients,
fetch_mask,
fetch_parcellation,
fetch_template_surface,
fetch_yeo_networks_metadata,
)
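A usage sketch of the new functions exposed in this release, assuming an fsaverage5-sized input (20,484 vertices across both hemispheres) and random data purely for illustration:

import numpy as np

from brainstat.context.resting import gradients_corr, yeo_networks_associations
from brainstat.datasets import fetch_gradients

data = np.random.rand(20484, 2)  # two illustrative vertex-by-feature maps on fsaverage5

gradients = fetch_gradients(name="margulies2016", template="fsaverage5")
grad_corr = gradients_corr(data, name="margulies2016", template="fsaverage5")
yeo_means = yeo_networks_associations(data, template="fsaverage5", seven_networks=True)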