
Commit

Merge pull request #153 from masa-su/develop/v0.3.0
Develop/v0.3.0
masa-su authored Nov 6, 2020
2 parents d7f126c + 5c6d84f commit ec2faf6
Showing 54 changed files with 12,034 additions and 532 deletions.
6 changes: 3 additions & 3 deletions README.md
@@ -86,7 +86,7 @@ and
>>> # inference model (encoder) q(z|x)
>>> class Inference(Normal):
... def __init__(self):
... super(Inference, self).__init__(cond_var=["x"], var=["z"], name="q") # var: variables of this distribution, cond_var: conditional variables.
... super(Inference, self).__init__(var=["z"], cond_var=["x"], name="q") # var: variables of this distribution, cond_var: conditional variables.
... self.fc1 = nn.Linear(784, 512)
... self.fc21 = nn.Linear(512, 64)
... self.fc22 = nn.Linear(512, 64)
@@ -99,7 +99,7 @@ and
>>> # generative model (decoder) p(x|z)
>>> class Generator(Bernoulli):
... def __init__(self):
... super(Generator, self).__init__(cond_var=["z"], var=["x"], name="p")
... super(Generator, self).__init__(var=["x"], cond_var=["z"], name="p")
... self.fc1 = nn.Linear(64, 512)
... self.fc2 = nn.Linear(512, 128)
...
@@ -196,7 +196,7 @@ Next, we set the objective (loss) function of the model with defined distributions
```python
>>> from pixyz.losses import KullbackLeibler, LogProb, Expectation as E
>>> reconst = -E(q, LogProb(p)) # the reconstruction loss (it can also be written as `-p.log_prob().expectation()`)
>>> kl = KullbackLeibler(q, prior) # Kullback-Leibler divergence
>>> kl = KullbackLeibler(q,prior) # Kullback-Leibler divergence
>>> loss_cls = (kl + reconst).mean()
```

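The README hunks above only swap the `var`/`cond_var` argument order and tweak the loss snippet, so the surrounding pieces are easy to lose track of. Below is a minimal end-to-end sketch of how these definitions are typically wired into a trainable model with `pixyz.models.Model`. It is not part of this commit: the `forward` bodies, the decoder's output layer, the optimizer settings, and the dummy batch are assumptions filled in around the README excerpt.

```python
import torch
from torch import nn, optim
from torch.nn import functional as F

from pixyz.distributions import Normal, Bernoulli
from pixyz.losses import KullbackLeibler, LogProb, Expectation as E
from pixyz.models import Model

z_dim = 64  # latent size used in the README excerpt

# inference model (encoder) q(z|x); the forward body is an assumption
class Inference(Normal):
    def __init__(self):
        super().__init__(var=["z"], cond_var=["x"], name="q")
        self.fc1 = nn.Linear(784, 512)
        self.fc21 = nn.Linear(512, z_dim)   # -> loc
        self.fc22 = nn.Linear(512, z_dim)   # -> scale

    def forward(self, x):
        h = F.relu(self.fc1(x))
        return {"loc": self.fc21(h), "scale": F.softplus(self.fc22(h))}

# generative model (decoder) p(x|z); the output layer size is hypothetical
class Generator(Bernoulli):
    def __init__(self):
        super().__init__(var=["x"], cond_var=["z"], name="p")
        self.fc1 = nn.Linear(z_dim, 512)
        self.fc2 = nn.Linear(512, 784)

    def forward(self, z):
        h = F.relu(self.fc1(z))
        return {"probs": torch.sigmoid(self.fc2(h))}

p, q = Generator(), Inference()
prior = Normal(loc=torch.tensor(0.), scale=torch.tensor(1.),
               var=["z"], features_shape=[z_dim], name="p_{prior}")

reconst = -E(q, LogProb(p))      # reconstruction term, as in the hunk above
kl = KullbackLeibler(q, prior)   # regularization term
loss_cls = (kl + reconst).mean()

model = Model(loss=loss_cls, distributions=[p, q],
              optimizer=optim.Adam, optimizer_params={"lr": 1e-3})
x = torch.rand(32, 784)              # dummy batch standing in for MNIST images
train_loss = model.train({"x": x})   # one gradient step on the loss above
```

Putting `var` before `cond_var`, as this commit does throughout the examples, matches the left-to-right reading of q(z|x) and p(x|z).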
4 changes: 2 additions & 2 deletions docs/source/distributions.rst
@@ -141,10 +141,10 @@ Deterministic
:members:
:undoc-members:

DataDistribution
EmpiricalDistribution
~~~~~~~~~~~~~~~~~~~~~~~~~~

.. autoclass:: DataDistribution
.. autoclass:: EmpiricalDistribution
:members:
:undoc-members:

8 changes: 8 additions & 0 deletions docs/source/losses.rst
@@ -144,6 +144,14 @@ ValueLoss
:members:
:undoc-members:

ConstantVar
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

.. currentmodule:: pixyz.losses.losses
.. autoclass:: ConstantVar
:members:
:undoc-members:


Operators
----------------------------
4 changes: 2 additions & 2 deletions examples/cvae.ipynb
@@ -79,7 +79,7 @@
"# inference model q(z|x,y)\n",
"class Inference(Normal):\n",
" def __init__(self):\n",
" super(Inference, self).__init__(cond_var=[\"x\",\"y\"], var=[\"z\"], name=\"q\")\n",
" super(Inference, self).__init__(var=[\"z\"], cond_var=[\"x\",\"y\"], name=\"q\")\n",
"\n",
" self.fc1 = nn.Linear(x_dim+y_dim, 512)\n",
" self.fc2 = nn.Linear(512, 512)\n",
@@ -95,7 +95,7 @@
"# generative model p(x|z,y) \n",
"class Generator(Bernoulli):\n",
" def __init__(self):\n",
" super(Generator, self).__init__(cond_var=[\"z\",\"y\"], var=[\"x\"], name=\"p\")\n",
" super(Generator, self).__init__(var=[\"x\"], cond_var=[\"z\",\"y\"], name=\"p\")\n",
"\n",
" self.fc1 = nn.Linear(z_dim+y_dim, 512)\n",
" self.fc2 = nn.Linear(512, 512)\n",
106 changes: 103 additions & 3 deletions examples/distributions.ipynb

Large diffs are not rendered by default.

4 changes: 2 additions & 2 deletions examples/gan.ipynb
@@ -76,7 +76,7 @@
"# generator model p(x|z) \n",
"class Generator(Deterministic):\n",
" def __init__(self):\n",
" super(Generator, self).__init__(cond_var=[\"z\"], var=[\"x\"], name=\"p\")\n",
" super(Generator, self).__init__(var=[\"x\"], cond_var=[\"z\"], name=\"p\")\n",
"\n",
" def block(in_feat, out_feat, normalize=True):\n",
" layers = [nn.Linear(in_feat, out_feat)]\n",
@@ -174,7 +174,7 @@
"# discriminator model p(t|x)\n",
"class Discriminator(Deterministic):\n",
" def __init__(self):\n",
" super(Discriminator, self).__init__(cond_var=[\"x\"], var=[\"t\"], name=\"d\")\n",
" super(Discriminator, self).__init__(var=[\"t\"], cond_var=[\"x\"], name=\"d\")\n",
"\n",
" self.model = nn.Sequential(\n",
" nn.Linear(x_dim, 512),\n",
8 changes: 4 additions & 4 deletions examples/hierarchical_variational_inference.ipynb
@@ -78,7 +78,7 @@
"# inference models\n",
"class Q1(Normal):\n",
" def __init__(self):\n",
" super(Q1, self).__init__(cond_var=[\"x\"], var=[\"a\"], name=\"q\")\n",
" super(Q1, self).__init__(var=[\"a\"], cond_var=[\"x\"], name=\"q\")\n",
"\n",
" self.fc1 = nn.Linear(x_dim, 512)\n",
" self.fc2 = nn.Linear(512, 512)\n",
@@ -93,7 +93,7 @@
"\n",
"class Q2(Normal):\n",
" def __init__(self):\n",
" super(Q2, self).__init__(cond_var=[\"x\"], var=[\"z\"], name=\"q\")\n",
" super(Q2, self).__init__(var=[\"z\"], cond_var=[\"x\"], name=\"q\")\n",
"\n",
" self.fc1 = nn.Linear(x_dim, 512)\n",
" self.fc2 = nn.Linear(512, 512)\n",
@@ -115,7 +115,7 @@
"# generative models\n",
"class P2(Normal):\n",
" def __init__(self):\n",
" super(P2, self).__init__(cond_var=[\"z\"], var=[\"a\"], name=\"p\")\n",
" super(P2, self).__init__(var=[\"a\"], cond_var=[\"z\"], name=\"p\")\n",
"\n",
" self.fc1 = nn.Linear(z_dim, 512)\n",
" self.fc2 = nn.Linear(512, 512)\n",
@@ -130,7 +130,7 @@
" \n",
"class P3(Bernoulli):\n",
" def __init__(self):\n",
" super(P3, self).__init__(cond_var=[\"a\"], var=[\"x\"], name=\"p\")\n",
" super(P3, self).__init__(var=[\"x\"], cond_var=[\"a\"], name=\"p\")\n",
"\n",
" self.fc1 = nn.Linear(a_dim, 512)\n",
" self.fc2 = nn.Linear(512, 512)\n",
10 changes: 5 additions & 5 deletions examples/jmvae.ipynb
@@ -80,7 +80,7 @@
"# inference model q(z|x,y)\n",
"class Inference(Normal):\n",
" def __init__(self):\n",
" super(Inference, self).__init__(cond_var=[\"x\",\"y\"], var=[\"z\"], name=\"q\")\n",
" super(Inference, self).__init__(var=[\"z\"], cond_var=[\"x\",\"y\"], name=\"q\")\n",
"\n",
" self.fc1 = nn.Linear(x_dim+y_dim, 512)\n",
" self.fc2 = nn.Linear(512, 512)\n",
@@ -96,7 +96,7 @@
"# inference model q(z|x)\n",
"class InferenceX(Normal):\n",
" def __init__(self):\n",
" super(InferenceX, self).__init__(cond_var=[\"x\"], var=[\"z\"], name=\"q\")\n",
" super(InferenceX, self).__init__(var=[\"z\"], cond_var=[\"x\"], name=\"q\")\n",
"\n",
" self.fc1 = nn.Linear(x_dim, 512)\n",
" self.fc2 = nn.Linear(512, 512)\n",
@@ -112,7 +112,7 @@
"# inference model q(z|y)\n",
"class InferenceY(Normal):\n",
" def __init__(self):\n",
" super(InferenceY, self).__init__(cond_var=[\"y\"], var=[\"z\"], name=\"q\")\n",
" super(InferenceY, self).__init__(var=[\"z\"], cond_var=[\"y\"], name=\"q\")\n",
"\n",
" self.fc1 = nn.Linear(y_dim, 512)\n",
" self.fc2 = nn.Linear(512, 512)\n",
@@ -128,7 +128,7 @@
"# generative model p(x|z) \n",
"class GeneratorX(Bernoulli):\n",
" def __init__(self):\n",
" super(GeneratorX, self).__init__(cond_var=[\"z\"], var=[\"x\"], name=\"p\")\n",
" super(GeneratorX, self).__init__(var=[\"x\"], cond_var=[\"z\"], name=\"p\")\n",
"\n",
" self.fc1 = nn.Linear(z_dim, 512)\n",
" self.fc2 = nn.Linear(512, 512)\n",
@@ -143,7 +143,7 @@
"# generative model p(y|z) \n",
"class GeneratorY(Categorical):\n",
" def __init__(self):\n",
" super(GeneratorY, self).__init__(cond_var=[\"z\"], var=[\"y\"], name=\"p\")\n",
" super(GeneratorY, self).__init__(var=[\"y\"], cond_var=[\"z\"], name=\"p\")\n",
"\n",
" self.fc1 = nn.Linear(z_dim, 512)\n",
" self.fc2 = nn.Linear(512, 512)\n",
8 changes: 4 additions & 4 deletions examples/jmvae_poe.ipynb
@@ -82,7 +82,7 @@
"# inference model q(z|x)\n",
"class InferenceX(Normal):\n",
" def __init__(self):\n",
" super(InferenceX, self).__init__(cond_var=[\"x\"], var=[\"z\"], name=\"q\")\n",
" super(InferenceX, self).__init__(var=[\"z\"], cond_var=[\"x\"], name=\"q\")\n",
"\n",
" self.fc1 = nn.Linear(x_dim, 512)\n",
" self.fc2 = nn.Linear(512, 512)\n",
@@ -98,7 +98,7 @@
"# inference model q(z|y)\n",
"class InferenceY(Normal):\n",
" def __init__(self):\n",
" super(InferenceY, self).__init__(cond_var=[\"y\"], var=[\"z\"], name=\"q\")\n",
" super(InferenceY, self).__init__(var=[\"z\"], cond_var=[\"y\"], name=\"q\")\n",
"\n",
" self.fc1 = nn.Linear(y_dim, 512)\n",
" self.fc2 = nn.Linear(512, 512)\n",
@@ -114,7 +114,7 @@
"# generative model p(x|z) \n",
"class GeneratorX(Bernoulli):\n",
" def __init__(self):\n",
" super(GeneratorX, self).__init__(cond_var=[\"z\"], var=[\"x\"], name=\"p\")\n",
" super(GeneratorX, self).__init__(var=[\"x\"], cond_var=[\"z\"], name=\"p\")\n",
"\n",
" self.fc1 = nn.Linear(z_dim, 512)\n",
" self.fc2 = nn.Linear(512, 512)\n",
@@ -129,7 +129,7 @@
"# generative model p(y|z) \n",
"class GeneratorY(Categorical):\n",
" def __init__(self):\n",
" super(GeneratorY, self).__init__(cond_var=[\"z\"], var=[\"y\"], name=\"p\")\n",
" super(GeneratorY, self).__init__(var=[\"y\"], cond_var=[\"z\"], name=\"p\")\n",
"\n",
" self.fc1 = nn.Linear(z_dim, 512)\n",
" self.fc2 = nn.Linear(512, 512)\n",
6 changes: 3 additions & 3 deletions examples/m2.ipynb
@@ -109,7 +109,7 @@
"# inference model q(z|x,y)\n",
"class Inference(Normal):\n",
" def __init__(self):\n",
" super().__init__(cond_var=[\"x\",\"y\"], var=[\"z\"], name=\"q\")\n",
" super().__init__(var=[\"z\"], cond_var=[\"x\",\"y\"], name=\"q\")\n",
"\n",
" self.fc1 = nn.Linear(x_dim+y_dim, 512)\n",
" self.fc21 = nn.Linear(512, z_dim)\n",
@@ -123,7 +123,7 @@
"# generative model p(x|z,y) \n",
"class Generator(Bernoulli):\n",
" def __init__(self):\n",
" super().__init__(cond_var=[\"z\",\"y\"], var=[\"x\"], name=\"p\")\n",
" super().__init__(var=[\"x\"], cond_var=[\"z\",\"y\"], name=\"p\")\n",
"\n",
" self.fc1 = nn.Linear(z_dim+y_dim, 512)\n",
" self.fc2 = nn.Linear(512, x_dim)\n",
@@ -136,7 +136,7 @@
"# classifier p(y|x)\n",
"class Classifier(RelaxedCategorical):\n",
" def __init__(self):\n",
" super(Classifier, self).__init__(cond_var=[\"x\"], var=[\"y\"], name=\"p\")\n",
" super(Classifier, self).__init__(var=[\"y\"], cond_var=[\"x\"], name=\"p\")\n",
" self.fc1 = nn.Linear(x_dim, 512)\n",
" self.fc2 = nn.Linear(512, y_dim)\n",
"\n",
2 changes: 1 addition & 1 deletion examples/maximum_likelihood.ipynb
@@ -77,7 +77,7 @@
"# classifier p(y|x)\n",
"class Classifier(Categorical):\n",
" def __init__(self):\n",
" super(Classifier, self).__init__(cond_var=[\"x\"], var=[\"y\"])\n",
" super(Classifier, self).__init__(var=[\"y\"], cond_var=[\"x\"])\n",
" self.fc1 = nn.Linear(x_dim, 512)\n",
" self.fc2 = nn.Linear(512, 512)\n",
" self.fc3 = nn.Linear(512, y_dim)\n",
10 changes: 5 additions & 5 deletions examples/mmd_vae.ipynb
@@ -59,7 +59,7 @@
"metadata": {},
"outputs": [],
"source": [
"from pixyz.distributions import Normal, Bernoulli, DataDistribution\n",
"from pixyz.distributions import Normal, Bernoulli, EmpiricalDistribution\n",
"from pixyz.losses import CrossEntropy, MMD\n",
"from pixyz.models import Model\n",
"from pixyz.utils import print_latex"
@@ -78,7 +78,7 @@
"# inference model q(z|x)\n",
"class Inference(Normal):\n",
" def __init__(self):\n",
" super(Inference, self).__init__(cond_var=[\"x\"], var=[\"z\"], name=\"q\")\n",
" super(Inference, self).__init__(var=[\"z\"], cond_var=[\"x\"], name=\"q\")\n",
"\n",
" self.fc1 = nn.Linear(x_dim, 512)\n",
" self.fc2 = nn.Linear(512, 512)\n",
@@ -94,7 +94,7 @@
"# generative model p(x|z) \n",
"class Generator(Bernoulli):\n",
" def __init__(self):\n",
" super(Generator, self).__init__(cond_var=[\"z\"], var=[\"x\"], name=\"p\")\n",
" super(Generator, self).__init__(var=[\"x\"], cond_var=[\"z\"], name=\"p\")\n",
"\n",
" self.fc1 = nn.Linear(z_dim, 512)\n",
" self.fc2 = nn.Linear(512, 512)\n",
@@ -112,7 +112,7 @@
"prior = Normal(loc=torch.tensor(0.), scale=torch.tensor(1.),\n",
" var=[\"z\"], features_shape=[z_dim], name=\"p_{prior}\").to(device)\n",
"\n",
"p_data = DataDistribution([\"x\"]).to(device)\n",
"p_data = EmpiricalDistribution([\"x\"]).to(device)\n",
"q_mg = (q*p_data).marginalize_var(\"x\")\n",
"q_mg.name = \"q\""
]
@@ -169,7 +169,7 @@
"Distribution:\n",
" q(z) = \\int q(z|x)p_{data}(x)dx\n",
"Network architecture:\n",
" DataDistribution(\n",
" EmpiricalDistribution(\n",
" name=p_{data}, distribution_name=Data distribution,\n",
" var=['x'], cond_var=[], input_var=['x'], features_shape=torch.Size([])\n",
" )\n",
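The mmd_vae hunks above also rename `DataDistribution` to `EmpiricalDistribution` and build the aggregated posterior q(z) = ∫ q(z|x) p_{data}(x) dx by marginalizing the data variable out of a product of distributions. A condensed, self-contained sketch of that wiring follows; it is not taken from the commit: the network sizes and the `forward` body are assumptions, and the last line assumes `MMD` takes two distributions over the same variable, analogous to `KullbackLeibler(q, prior)` in the README example.

```python
import torch
from torch import nn
from torch.nn import functional as F

from pixyz.distributions import Normal, EmpiricalDistribution  # formerly DataDistribution
from pixyz.losses import MMD

x_dim, z_dim = 784, 64  # hypothetical sizes

class Inference(Normal):  # q(z|x), same pattern as the notebook cells above
    def __init__(self):
        super().__init__(var=["z"], cond_var=["x"], name="q")
        self.fc1 = nn.Linear(x_dim, 512)
        self.fc_loc = nn.Linear(512, z_dim)
        self.fc_scale = nn.Linear(512, z_dim)

    def forward(self, x):
        h = F.relu(self.fc1(x))
        return {"loc": self.fc_loc(h), "scale": F.softplus(self.fc_scale(h))}

q = Inference()
prior = Normal(loc=torch.tensor(0.), scale=torch.tensor(1.),
               var=["z"], features_shape=[z_dim], name="p_{prior}")

# aggregated posterior q(z) = ∫ q(z|x) p_{data}(x) dx, exactly as in the hunk above
p_data = EmpiricalDistribution(["x"])
q_mg = (q * p_data).marginalize_var("x")
q_mg.name = "q"

# assumed usage: the MMD penalty compares the aggregated posterior with the prior
mmd = MMD(q_mg, prior)
```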
8 changes: 4 additions & 4 deletions examples/mvae.ipynb
@@ -128,7 +128,7 @@
"# inference model q(z|x) for image modality\n",
"class InferenceX(Normal):\n",
" def __init__(self):\n",
" super(InferenceX, self).__init__(cond_var=[\"x\"], var=[\"z\"], name=\"q\")\n",
" super(InferenceX, self).__init__(var=[\"z\"], cond_var=[\"x\"], name=\"q\")\n",
"\n",
" self.fc1 = nn.Linear(x_dim, 512)\n",
" self.fc2 = nn.Linear(512, 512)\n",
@@ -144,7 +144,7 @@
"# inference model q(z|y) for label modality\n",
"class InferenceY(Normal):\n",
" def __init__(self):\n",
" super(InferenceY, self).__init__(cond_var=[\"y\"], var=[\"z\"], name=\"q\")\n",
" super(InferenceY, self).__init__(var=[\"z\"], cond_var=[\"y\"], name=\"q\")\n",
"\n",
" self.fc1 = nn.Linear(y_dim, 512)\n",
" self.fc2 = nn.Linear(512, 512)\n",
@@ -160,7 +160,7 @@
"# generative model p(x|z) \n",
"class GeneratorX(Bernoulli):\n",
" def __init__(self):\n",
" super(GeneratorX, self).__init__(cond_var=[\"z\"], var=[\"x\"], name=\"p\")\n",
" super(GeneratorX, self).__init__(var=[\"x\"], cond_var=[\"z\"], name=\"p\")\n",
"\n",
" self.fc1 = nn.Linear(z_dim, 512)\n",
" self.fc2 = nn.Linear(512, 512)\n",
@@ -175,7 +175,7 @@
"# generative model p(y|z) \n",
"class GeneratorY(Categorical):\n",
" def __init__(self):\n",
" super(GeneratorY, self).__init__(cond_var=[\"z\"], var=[\"y\"], name=\"p\")\n",
" super(GeneratorY, self).__init__(var=[\"y\"], cond_var=[\"z\"], name=\"p\")\n",
"\n",
" self.fc1 = nn.Linear(z_dim, 512)\n",
" self.fc2 = nn.Linear(512, 512)\n",
4 changes: 2 additions & 2 deletions examples/vae.ipynb
@@ -94,7 +94,7 @@
" z ~ N(loc, scale)\n",
" \"\"\"\n",
" def __init__(self):\n",
" super(Inference, self).__init__(cond_var=[\"x\"], var=[\"z\"], name=\"q\")\n",
" super(Inference, self).__init__(var=[\"z\"], cond_var=[\"x\"], name=\"q\")\n",
"\n",
" self.fc1 = nn.Linear(x_dim, 512)\n",
" self.fc2 = nn.Linear(512, 512)\n",
@@ -117,7 +117,7 @@
" parameterizes the bernoulli(for MNIST) observation likelihood p(x | z)\n",
" \"\"\"\n",
" def __init__(self):\n",
" super(Generator, self).__init__(cond_var=[\"z\"], var=[\"x\"], name=\"p\")\n",
" super(Generator, self).__init__(var=[\"x\"], cond_var=[\"z\"], name=\"p\")\n",
"\n",
" self.fc1 = nn.Linear(z_dim, 512)\n",
" self.fc2 = nn.Linear(512, 512)\n",
4 changes: 2 additions & 2 deletions examples/vae_with_vae_class.ipynb
@@ -94,7 +94,7 @@
" z ~ N(loc, scale)\n",
" \"\"\"\n",
" def __init__(self):\n",
" super(Inference, self).__init__(cond_var=[\"x\"], var=[\"z\"], name=\"q\")\n",
" super(Inference, self).__init__(var=[\"z\"], cond_var=[\"x\"], name=\"q\")\n",
"\n",
" self.fc1 = nn.Linear(x_dim, 512)\n",
" self.fc2 = nn.Linear(512, 512)\n",
@@ -117,7 +117,7 @@
" parameterizes the bernoulli(for MNIST) observation likelihood p(x | z)\n",
" \"\"\"\n",
" def __init__(self):\n",
" super(Generator, self).__init__(cond_var=[\"z\"], var=[\"x\"], name=\"p\")\n",
" super(Generator, self).__init__(var=[\"x\"], cond_var=[\"z\"], name=\"p\")\n",
"\n",
" self.fc1 = nn.Linear(z_dim, 512)\n",
" self.fc2 = nn.Linear(512, 512)\n",
4 changes: 2 additions & 2 deletions examples/vi.ipynb
@@ -77,7 +77,7 @@
"# inference model q(z|x)\n",
"class Inference(Normal):\n",
" def __init__(self):\n",
" super(Inference, self).__init__(cond_var=[\"x\"], var=[\"z\"], name=\"q\")\n",
" super(Inference, self).__init__(var=[\"z\"], cond_var=[\"x\"], name=\"q\")\n",
"\n",
" self.fc1 = nn.Linear(x_dim, 512)\n",
" self.fc2 = nn.Linear(512, 512)\n",
@@ -93,7 +93,7 @@
"# generative model p(x|z) \n",
"class Generator(Bernoulli):\n",
" def __init__(self):\n",
" super(Generator, self).__init__(cond_var=[\"z\"], var=[\"x\"], name=\"p\")\n",
" super(Generator, self).__init__(var=[\"x\"], cond_var=[\"z\"], name=\"p\")\n",
"\n",
" self.fc1 = nn.Linear(z_dim, 512)\n",
" self.fc2 = nn.Linear(512, 512)\n",
2 changes: 1 addition & 1 deletion pixyz/__init__.py
@@ -1,2 +1,2 @@
name = "pixyz"
__version__ = "0.2.1"
__version__ = "0.3.0"
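Since this file only carries the version bump, a quick post-upgrade sanity check is all it calls for; the `pip` command in the comment below assumes the 0.3.0 release is published to PyPI.

```python
# After upgrading (e.g. `pip install --upgrade pixyz`, assuming 0.3.0 is on PyPI),
# confirm the installed version matches the bump in this commit.
import pixyz

assert pixyz.__version__ == "0.3.0", pixyz.__version__
```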
