From 499273172dc64b68dd41d06ace935bd6ee970fe4 Mon Sep 17 00:00:00 2001 From: Stefan Webb Date: Thu, 3 Mar 2022 11:49:18 -0800 Subject: [PATCH] Fixed bug in `bijectors.ops.spline.Spline` and unit test for log(detJ) (#100) Summary: ### Motivation From other work, I discovered that training Neural Spline Flows was not working as expected, being unable to learn simple toy distributions... This PR fixes this, as well as the reason that the unit tests were not picking it up. ### Changes proposed I changed the sign of the `log(det(J))` in the `inverse` method of `bijectors.ops.spline.Spline`, and ensured the unit tests are not using cached values of `log(det(J))` (via `BijectiveTensor`) when comparing `log(det(J))` from the forward method to that of the inverse one. Pull Request resolved: https://github.com/facebookincubator/flowtorch/pull/100 Test Plan: Run `pytest tests/` and try the Neural Spline Flow example in the theory tutorials Reviewed By: ToddSmall Differential Revision: D34616134 Pulled By: stefanwebb fbshipit-source-id: d29a94fd80b2e32a920da194a3c75c05c382e461 --- flowtorch/bijectors/ops/spline.py | 5 +---- tests/test_bijector.py | 8 ++++++-- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/flowtorch/bijectors/ops/spline.py b/flowtorch/bijectors/ops/spline.py index 687d1bac..9145cad3 100644 --- a/flowtorch/bijectors/ops/spline.py +++ b/flowtorch/bijectors/ops/spline.py @@ -53,10 +53,7 @@ def _inverse( self, y: torch.Tensor, params: Optional[Sequence[torch.Tensor]] ) -> Tuple[torch.Tensor, torch.Tensor]: x_new, log_detJ = self._op(y, params, inverse=True) - - # TODO: Should I invert the sign of log_detJ? 
- # TODO: A unit test that compares log_detJ from _forward and _inverse - return x_new, _sum_rightmost(log_detJ, self.domain.event_dim) + return x_new, _sum_rightmost(-log_detJ, self.domain.event_dim) def _log_abs_det_jacobian( self, diff --git a/tests/test_bijector.py b/tests/test_bijector.py index adb4b68f..6f10b76f 100644 --- a/tests/test_bijector.py +++ b/tests/test_bijector.py @@ -108,12 +108,16 @@ def test_inverse(flow, epsilon=1e-5): x_true = torch.distributions.transform_to(bij.domain)(x_true) y = bij.forward(x_true) + J_1 = y.log_detJ + y = y.detach_from_flow() + x_calculated = bij.inverse(y) + J_2 = x_calculated.log_detJ + x_calculated = x_calculated.detach_from_flow() + assert (x_true - x_calculated).abs().max().item() < epsilon # Test that Jacobian after inverse op is same as after forward - J_1 = bij.log_abs_det_jacobian(x_true, y) - J_2 = bij.log_abs_det_jacobian(x_calculated, y) assert (J_1 - J_2).abs().max().item() < epsilon