RecursionError when executing sensitivities_from_surrogate() #103

Open
FariborzDaneshvar-NOAA opened this issue Jul 10, 2023 · 0 comments

FariborzDaneshvar-NOAA (Collaborator) commented Jul 10, 2023

Here is the full error message from executing `sensitivities_from_surrogate()` for an ensemble of 5 perturbed tracks of Hurricane Florence (2018):

```
---------------------------------------------------------------------------
RecursionError                            Traceback (most recent call last)
Cell In[11], line 2
      1 if make_sensitivities_plot:
----> 2     sensitivities = sensitivities_from_surrogate(
      3         surrogate_model=surrogate_model,
      4         distribution=distribution,
      5         variables=perturbations['variable'],
      6         nodes=subset,
      7         element_table=elements if point_spacing is None else None,
      8         filename=sensitivities_filename,
      9     )
     10     plot_sensitivities(
     11         sensitivities=sensitivities,
     12         storm=storm_name,
     13         output_filename=output_directory / 'sensitivities.png' if save_plots else None,
     14     )
     16 if make_validation_plot:

File ~/miniconda3/envs/perturbation/lib/python3.10/site-packages/ensembleperturbation/uncertainty_quantification/surrogate.py:239, in sensitivities_from_surrogate(surrogate_model, distribution, variables, nodes, element_table, filename)
    234 if filename is None or not filename.exists():
    235     LOGGER.info(f'extracting sensitivities from surrogate model and distribution')
    237     sensitivities = [
    238         chaospy.Sens_m(surrogate_model, distribution),
--> 239         chaospy.Sens_t(surrogate_model, distribution),
    240     ]
    242     sensitivities = numpy.stack(sensitivities)
    244     sensitivities = xarray.DataArray(
    245         sensitivities,
    246         coords={
   (...)
    254         dims=('order', 'variable', 'node'),
    255     ).T

File ~/miniconda3/envs/perturbation/lib/python3.10/site-packages/chaospy/descriptives/sensitivity/total.py:43, in Sens_t(poly, dist, **kws)
     41 valids = variance != 0
     42 if not numpy.all(valids):
---> 43     out[:, valids] = Sens_t(poly[valids], dist, **kws)
     44     return out
     46 out[:] = variance

File ~/miniconda3/envs/perturbation/lib/python3.10/site-packages/chaospy/descriptives/sensitivity/total.py:43, in Sens_t(poly, dist, **kws)
     41 valids = variance != 0
     42 if not numpy.all(valids):
---> 43     out[:, valids] = Sens_t(poly[valids], dist, **kws)
     44     return out
     46 out[:] = variance

    [... skipping similar frames: Sens_t at line 43 (2956 times)]

File ~/miniconda3/envs/perturbation/lib/python3.10/site-packages/chaospy/descriptives/sensitivity/total.py:43, in Sens_t(poly, dist, **kws)
     41 valids = variance != 0
     42 if not numpy.all(valids):
---> 43     out[:, valids] = Sens_t(poly[valids], dist, **kws)
     44     return out
     46 out[:] = variance

File ~/miniconda3/envs/perturbation/lib/python3.10/site-packages/chaospy/descriptives/sensitivity/total.py:36, in Sens_t(poly, dist, **kws)
     10 """
     11 Variance-based decomposition
     12 AKA Sobol' indices
   (...)
     33            [0.        , 0.        , 1.        , 0.57142857]])
     34 """
     35 dim = len(dist)
---> 36 poly = numpoly.set_dimensions(poly, dim)
     38 out = numpy.zeros((dim,) + poly.shape, dtype=float)
     39 variance = Var(poly, dist, **kws)

File ~/miniconda3/envs/perturbation/lib/python3.10/site-packages/numpoly/poly_function/set_dimensions.py:67, in set_dimensions(poly, dimensions)
     64 else:
     65     return poly
---> 67 return numpoly.polynomial_from_attributes(
     68     exponents=exponents,
     69     coefficients=coefficients,
     70     names=names,
     71     dtype=poly.dtype,
     72     allocation=poly.allocation,
     73     retain_names=True,
     74 )

File ~/miniconda3/envs/perturbation/lib/python3.10/site-packages/numpoly/construct/from_attributes.py:72, in polynomial_from_attributes(exponents, coefficients, names, dtype, allocation, retain_coefficients, retain_names)
     13 def polynomial_from_attributes(
     14     exponents: numpy.typing.ArrayLike,
     15     coefficients: Sequence[numpy.typing.ArrayLike],
   (...)
     20     retain_names: Optional[bool] = None,
     21 ) -> ndpoly:
     22     """
     23     Construct polynomial from polynomial attributes.
     24 
   (...)
     70 
     71     """
---> 72     exponents, coefficients, names = clean.postprocess_attributes(
     73         exponents=exponents,
     74         coefficients=coefficients,
     75         names=names,
     76         retain_coefficients=retain_coefficients,
     77         retain_names=retain_names,
     78     )
     79     if coefficients:
     80         dtype = coefficients[0].dtype if dtype is None else dtype

File ~/miniconda3/envs/perturbation/lib/python3.10/site-packages/numpoly/construct/clean.py:139, in postprocess_attributes(exponents, coefficients, names, retain_coefficients, retain_names)
    136 if not retain_names:
    137     exponents, names = remove_redundant_names(exponents, names)
--> 139 exponents_, count = numpy.unique(exponents, return_counts=True, axis=0)
    140 if numpy.any(count > 1):
    141     raise PolynomialConstructionError(
    142         f"Duplicate exponent keys found: {exponents_[count > 1][0]}"
    143     )

File <__array_function__ internals>:200, in unique(*args, **kwargs)

File ~/miniconda3/envs/perturbation/lib/python3.10/site-packages/numpy/lib/arraysetops.py:319, in unique(ar, return_index, return_inverse, return_counts, axis, equal_nan)
    315     return uniq
    317 output = _unique1d(consolidated, return_index,
    318                    return_inverse, return_counts, equal_nan=equal_nan)
--> 319 output = (reshape_uniq(output[0]),) + output[1:]
    320 return _unpack_tuple(output)

File ~/miniconda3/envs/perturbation/lib/python3.10/site-packages/numpy/lib/arraysetops.py:314, in unique.<locals>.reshape_uniq(uniq)
    312 uniq = uniq.view(orig_dtype)
    313 uniq = uniq.reshape(n, *orig_shape[1:])
--> 314 uniq = np.moveaxis(uniq, 0, axis)
    315 return uniq

File <__array_function__ internals>:200, in moveaxis(*args, **kwargs)

File ~/miniconda3/envs/perturbation/lib/python3.10/site-packages/numpy/core/numeric.py:1467, in moveaxis(a, source, destination)
   1464     a = asarray(a)
   1465     transpose = a.transpose
-> 1467 source = normalize_axis_tuple(source, a.ndim, 'source')
   1468 destination = normalize_axis_tuple(destination, a.ndim, 'destination')
   1469 if len(source) != len(destination):

File ~/miniconda3/envs/perturbation/lib/python3.10/site-packages/numpy/core/numeric.py:1398, in normalize_axis_tuple(axis, ndim, argname, allow_duplicate)
   1396         pass
   1397 # Going via an iterator directly is slower than via list comprehension.
-> 1398 axis = tuple([normalize_axis_index(ax, ndim, argname) for ax in axis])
   1399 if not allow_duplicate and len(set(axis)) != len(axis):
   1400     if argname:

File ~/miniconda3/envs/perturbation/lib/python3.10/site-packages/numpy/core/numeric.py:1398, in <listcomp>(.0)
   1396         pass
   1397 # Going via an iterator directly is slower than via list comprehension.
-> 1398 axis = tuple([normalize_axis_index(ax, ndim, argname) for ax in axis])
   1399 if not allow_duplicate and len(set(axis)) != len(axis):
   1400     if argname:

RecursionError: maximum recursion depth exceeded while calling a Python object
```
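For context, the repeated frame at `total.py:43` is chaospy's `Sens_t` filtering out zero-variance outputs and recursing on the remainder (`out[:, valids] = Sens_t(poly[valids], dist, **kws)`); the 2956 skipped frames suggest new zero-variance entries keep appearing in the filtered subset until the recursion limit is hit. Below is a minimal sketch of two possible stopgaps, assuming `surrogate_model` and `distribution` are the same objects passed to `sensitivities_from_surrogate()` above; this is untested against this dataset, not a confirmed fix.

```python
import sys

import chaospy
import numpy

# Possible stopgap 1: raise the interpreter's recursion limit before the
# call (IPython's default is 3000, and the traceback shows ~2956 repeated
# Sens_t frames, so the limit is being hit just short of completion).
# This may only defer the problem if the recursion does not terminate.
sys.setrecursionlimit(10_000)

# Possible stopgap 2: mask out zero-variance nodes up front so the
# recursive branch in chaospy's Sens_t is never taken. Boolean indexing
# of the polynomial mirrors what total.py does internally (poly[valids]).
variance = chaospy.Var(surrogate_model, distribution)
nonzero = variance != 0

sensitivities = numpy.stack(
    [
        chaospy.Sens_m(surrogate_model[nonzero], distribution),
        chaospy.Sens_t(surrogate_model[nonzero], distribution),
    ]
)
```

If the pre-filtered call works, the masked-out nodes would need their sensitivities re-inserted (e.g. as zeros or NaN) before building the `xarray.DataArray`, since its `node` dimension expects the full subset.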