diff --git a/pymc/logprob/abstract.py b/pymc/logprob/abstract.py
index 5c7f28e661..62bf4999a6 100644
--- a/pymc/logprob/abstract.py
+++ b/pymc/logprob/abstract.py
@@ -242,7 +242,8 @@ def make_node(self, rv, value):
         return Apply(self, [rv, value], [rv.type(name=rv.name)])
 
     def perform(self, node, inputs, out):
-        raise NotImplementedError("ValuedVar should not be present in the final graph!")
+        warnings.warn("ValuedVar should not be present in the final graph!")
+        out[0][0] = inputs[0]
 
     def infer_shape(self, fgraph, node, input_shapes):
         return [input_shapes[0]]
diff --git a/pymc/logprob/transform_value.py b/pymc/logprob/transform_value.py
index 4a28d5cd4a..2e1b96d343 100644
--- a/pymc/logprob/transform_value.py
+++ b/pymc/logprob/transform_value.py
@@ -11,6 +11,7 @@
 #   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 #   See the License for the specific language governing permissions and
 #   limitations under the License.
+import warnings
 
 from collections.abc import Sequence
 
@@ -40,7 +41,8 @@ def make_node(self, tran_value: TensorVariable, value: TensorVariable):
         return Apply(self, [tran_value, value], [tran_value.type()])
 
     def perform(self, node, inputs, outputs):
-        raise NotImplementedError("These `Op`s should be removed from graphs used for computation.")
+        warnings.warn("TransformedValue should not be present in the final graph!")
+        outputs[0][0] = inputs[0]
 
     def infer_shape(self, fgraph, node, input_shapes):
         return [input_shapes[0]]
diff --git a/tests/logprob/test_basic.py b/tests/logprob/test_basic.py
index 64cbf63b3e..3117d8f1aa 100644
--- a/tests/logprob/test_basic.py
+++ b/tests/logprob/test_basic.py
@@ -436,3 +436,26 @@ def test_ir_rewrite_does_not_disconnect_valued_rvs():
         logp_b.eval({a_value: np.pi, b_value: np.e}),
         stats.norm.logpdf(np.e, np.pi * 8, 1),
     )
+
+
+def test_ir_ops_can_be_evaluated_with_warning():
+    _eval_values = [None, None]
+
+    def my_logp(value, lam):
+        nonlocal _eval_values
+        _eval_values[0] = value.eval()
+        _eval_values[1] = lam.eval({"lam_log__": -1.5})
+        return value * lam
+
+    with pm.Model() as m:
+        lam = pm.Exponential("lam")
+        pm.CustomDist("y", lam, logp=my_logp, observed=[0, 1, 2])
+
+    with pytest.warns(
+        UserWarning, match="TransformedValue should not be present in the final graph"
+    ):
+        with pytest.warns(UserWarning, match="ValuedVar should not be present in the final graph"):
+            m.logp()
+
+    assert _eval_values[0].sum() == 3
+    assert _eval_values[1] == np.exp(-1.5)
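
A minimal sketch of the debugging workflow this change enables, mirroring `test_ir_ops_can_be_evaluated_with_warning` above: with `perform` now warning instead of raising, symbolic inputs to a `CustomDist` logp can be evaluated eagerly while the logp graph is being built. The `debug_logp` helper and the model here are illustrative, not part of the diff; `lam_log__` is PyMC's default name for the log-transformed value of `lam`.

    import pymc as pm

    def debug_logp(value, lam):
        # Illustrative only: .eval() on graphs still containing the ValuedVar /
        # TransformedValue IR ops previously raised NotImplementedError; with
        # this diff it emits a UserWarning and passes the input through, so the
        # intermediate values can be inspected eagerly.
        print("observed:", value.eval())
        print("lam at lam_log__=-1.5:", lam.eval({"lam_log__": -1.5}))
        return value * lam  # placeholder logp, as in the test above

    with pm.Model() as model:
        lam = pm.Exponential("lam")
        pm.CustomDist("y", lam, logp=debug_logp, observed=[0, 1, 2])
        model.logp()  # emits both UserWarnings while the logp graph is built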