I want to run MCMC with 203 parameters and want to apply different bijectors to the first, second, third and the rest of the parameters separately. How do I do this without running into errors? I am currently using a Blockwise
-Bijector with block_sizes=[1, 1, 1, 200]
and want to use it as given in the following minimal example, but cannot run without errors:
import tensorflow as tf
import tensorflow_probability as tfp
tfd = tfp.distributions
tfb = tfp.bijectors
# dummy log probability
def log_prob_fn(state):
return tfd.Normal(state, 1.).log_prob(tf.ones_like(state))
# create kernel
kernel = tfp.mcmc.NoUTurnSampler(
target_log_prob_fn=log_prob_fn,
step_size=1.0,
)
# this is the bijector that does not work in MCMC
bijector = tfb.Blockwise([
tfb.Softplus(),
tfb.Sigmoid(),
tfb.Softplus(),
tfb.Softplus()],
block_sizes=[1, 1, 1, 200],
maybe_changes_size=False)
# dummy state
init_state = tfd.Normal(tf.ones(203), 1.).sample()
transformed_kernel = tfp.mcmc.TransformedTransitionKernel(
inner_kernel=kernel,
bijector=bijector
)
adaptive_kernel = tfp.mcmc.DualAveragingStepSizeAdaptation(
transformed_kernel,
num_adaptation_steps=1600,
target_accept_prob=.60)
# Run the MCMC chains
samples = tfp.mcmc.sample_chain(
num_results=10000,
num_burnin_steps=2000,
current_state=init_state,
kernel=adaptive_kernel,
trace_fn=None
)
I cannot use it in tfp.mcmc.sample_chain()
without getting the following warnings and error.
WARNING: Nested component "jointmap" in composition "blockwise_of_softplus_and_sigmoid_and_softplus_and_softplus" operates on inputs with increased degrees of freedom. This may result in an incorrect log_det_jacobian.
WARNING: Nested component "concat" in composition "blockwise_of_softplus_and_sigmoid_and_softplus_and_softplus" operates on inputs with increased degrees of freedom. This may result in an incorrect log_det_jacobian.
Traceback (most recent call last):
File "C:\Users\tobia\miniconda3\envs\idm\lib\site-packages\tensorflow_probability\python\mcmc\sample.py", line 330, in sample_chain
previous_kernel_results = kernel.bootstrap_results(current_state)
File "C:\Users\tobia\miniconda3\envs\idm\lib\site-packages\tensorflow_probability\python\mcmc\dual_averaging_step_size_adaptation.py", line 538, in bootstrap_results
inner_results = self.inner_kernel.bootstrap_results(init_state)
File "C:\Users\tobia\miniconda3\envs\idm\lib\site-packages\tensorflow_probability\python\mcmc\transformed_kernel.py", line 492, in bootstrap_results
inner_results=self._inner_kernel.bootstrap_results(
File "C:\Users\tobia\miniconda3\envs\idm\lib\site-packages\tensorflow_probability\python\mcmc\nuts.py", line 468, in bootstrap_results
] = leapfrog_impl.process_args(self.target_log_prob_fn, dummy_momentum,
File "C:\Users\tobia\miniconda3\envs\idm\lib\site-packages\tensorflow_probability\python\mcmc\internal\leapfrog_integrator.py", line 378, in process_args
[target, target_grad_parts] = mcmc_util.maybe_call_fn_and_grads(
File "C:\Users\tobia\miniconda3\envs\idm\lib\site-packages\tensorflow_probability\python\mcmc\internal\util.py", line 297, in maybe_call_fn_and_grads
result, grads = _value_and_gradients(fn, fn_arg_list, result, grads)
File "C:\Users\tobia\miniconda3\envs\idm\lib\site-packages\tensorflow_probability\python\mcmc\internal\util.py", line 268, in _value_and_gradients
result = fn(*fn_arg_list)
File "C:\Users\tobia\miniconda3\envs\idm\lib\site-packages\tensorflow_probability\python\mcmc\transformed_kernel.py", line 126, in transformed_log_prob_fn
return tlp + ldj_fn(state_parts, event_ndims)
File "C:\Users\tobia\miniconda3\envs\idm\lib\site-packages\tensorflow_probability\python\mcmc\transformed_kernel.py", line 58, in fn
return sum([
File "C:\Users\tobia\miniconda3\envs\idm\lib\site-packages\tensorflow_probability\python\mcmc\transformed_kernel.py", line 59, in <listcomp>
getattr(b, attr)(sp, event_ndims=e)
File "C:\Users\tobia\miniconda3\envs\idm\lib\site-packages\tensorflow_probability\python\bijectors\bijector.py", line 1640, in forward_log_det_jacobian
return self._call_forward_log_det_jacobian(x, event_ndims, name, **kwargs)
File "C:\Users\tobia\miniconda3\envs\idm\lib\site-packages\tensorflow_probability\python\bijectors\composition.py", line 501, in _call_forward_log_det_jacobian
return self._forward_log_det_jacobian(x, event_ndims, **kwargs)
File "C:\Users\tobia\miniconda3\envs\idm\lib\site-packages\tensorflow_probability\python\bijectors\composition.py", line 513, in _forward_log_det_jacobian
bm.bijector.forward_log_det_jacobian(bm.x, bm.x_event_ndims,
File "C:\Users\tobia\miniconda3\envs\idm\lib\site-packages\tensorflow_probability\python\bijectors\bijector.py", line 1640, in forward_log_det_jacobian
return self._call_forward_log_det_jacobian(x, event_ndims, name, **kwargs)
File "C:\Users\tobia\miniconda3\envs\idm\lib\site-packages\tensorflow_probability\python\bijectors\bijector.py", line 1557, in _call_forward_log_det_jacobian
reduce_shape, assertions = ldj_reduction_shape(
File "C:\Users\tobia\miniconda3\envs\idm\lib\site-packages\tensorflow_probability\python\bijectors\bijector.py", line 2219, in ldj_reduction_shape
raise ValueError('`event_ndims must be at least {}. Saw: {}.'
ValueError: `event_ndims must be at least 1. Saw: 0.
python-BaseException
Process finished with exit code 1
I checked that I can do forward and inverse passes with this bijector by passing the init_state for testing. This works as expected. Only in MCMC it fails. I also tried other bijectors like JointMap
or Composition
but it didn't work.