Commit 74be58c: add for cgs
dustinvtran committed Mar 1, 2018 · 1 parent cf0de43
Showing 2 changed files with 113 additions and 0 deletions.
59 changes: 59 additions & 0 deletions examples/eager.py
@@ -0,0 +1,59 @@
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import edward as ed
import numpy as np
import tensorflow as tf

from edward.models import Gamma, Normal

import tensorflow.contrib.eager as tfe
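# Eager execution must be enabled once, at program startup, before any ops run.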
tfe.enable_eager_execution()

def model():
  """Gamma likelihood whose concentration is a softplus-transformed Normal."""
  z = Normal(loc=0., scale=1., name='z')
  x = Gamma(tf.nn.softplus(z), 1., sample_shape=1000, name='x')
  return x

def variational():
  qz = Normal(loc=tf.get_variable("loc", shape=[]),
              scale=tf.nn.softplus(tf.get_variable("scale", shape=[])),
              name='qz')
  return qz

# Wrap in a template so the tf.get_variable calls inside variational() create
# their variables once and reuse them on subsequent calls.
variational = tf.make_template("variational", variational)

x_data = np.random.gamma(5.2, 1.2, size=1000).astype(np.float32)

optimizer = tf.train.AdamOptimizer(1e-2)

# Graph-mode equivalent of the eager training loop below, kept for reference:
# loss, surrogate_loss = ed.klqp(
#     model,
#     variational,
#     align_latent=lambda name: {'z': 'qz'}.get(name),
#     align_data=lambda name: {'x': 'x'}.get(name),
#     x=x_data)
# grads_and_vars = optimizer.compute_gradients(surrogate_loss)
# train_op = optimizer.apply_gradients(grads_and_vars)
#
# sess = tf.Session()
# sess.run(tf.global_variables_initializer())
# for _ in range(2000):
#   sess.run(train_op)

# ed.klqp returns (loss, surrogate_loss); index [1] selects the surrogate
# loss, which is the quantity to differentiate.
loss_fn = lambda *args: ed.klqp(
    model,
    variational,
    lambda name: {'z': 'qz'}.get(name),
    lambda name: {'x': 'x'}.get(name),
    *args)[1]

# Returns a function computing the loss and its gradients with respect to all
# trainable variables used while computing it.
value_and_gradients_fn = tfe.implicit_value_and_gradients(loss_fn)

for _ in range(100):
  loss, gradients_and_variables = value_and_gradients_fn(x_data)
  optimizer.apply_gradients(gradients_and_variables)

qz = variational()
print("Posterior mean: {}".format(qz.loc))
print("Posterior variance: {}".format(qz.scale))
54 changes: 54 additions & 0 deletions examples/normal_normal_eager.py
@@ -0,0 +1,54 @@
"""Normal-normal model using Hamiltonian Monte Carlo."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import edward as ed
import numpy as np
import tensorflow as tf

from edward.models import Normal


def model():
  """Normal-Normal with known variance."""
  loc = Normal(loc=0.0, scale=1.0, name="loc")
  x = Normal(loc=loc, scale=1.0, sample_shape=50, name="x")
  return x


def variational():
  qloc = Normal(loc=tf.get_variable("loc", []),
                scale=tf.nn.softplus(tf.get_variable("scale", [])),
                name="qloc")
  return qloc


variational = tf.make_template("variational", variational)

tf.set_random_seed(42)
x_data = np.array([0.0] * 50, dtype=np.float32)  # float32 to match the model's dtype

# Analytic solution: N(loc=0.0, scale=sqrt(1/51) ≈ 0.140).
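# Derivation (standard Normal-Normal conjugacy): with prior loc ~ N(0, 1) and
# 50 observations x_i = 0.0 under x_i ~ N(loc, 1), the posterior precision is
# 1 + 50 = 51 and the posterior mean is sum(x_i) / 51 = 0, giving
# N(0, sqrt(1/51)).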
loss, surrogate_loss = ed.klqp(
    model,
    variational,
    align_latent=lambda name: 'qloc' if name == 'loc' else None,
    align_data=lambda name: 'x_data' if name == 'x' else None,
    x_data=x_data)

optimizer = tf.train.AdamOptimizer(1e-2)
grads_and_vars = optimizer.compute_gradients(surrogate_loss)
train_op = optimizer.apply_gradients(grads_and_vars)

qloc = variational()
sess = tf.Session()

sess.run(tf.global_variables_initializer())
for t in range(1, 5001):
  loss_val, _ = sess.run([loss, train_op])
  if t % 50 == 0:
    mean, stddev = sess.run([qloc.mean(), qloc.stddev()])
    print({"Loss": loss_val,
           "Posterior mean": mean,
           "Posterior stddev": stddev})
