removed hard-coded instances of self.latent_dim = 100

Jonas Sourlier 2018-06-15 16:56:48 +02:00
Parent e59ca08597
Commit c52cdcbed8
1 changed file with 4 additions and 4 deletions
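
For context: the four hunks below presuppose that the GAN constructor already defines the latent dimension in one place. That definition is not part of this diff, so the following is a hedged Python sketch of the assumed pattern; the sample_noise helper is purely illustrative and does not exist in the file being edited.

import numpy as np

class GAN():
    def __init__(self):
        # Assumed to be defined once in __init__ (not shown in this diff);
        # the commit replaces the remaining hard-coded 100s with this attribute.
        self.latent_dim = 100

    def sample_noise(self, batch_size):
        # Hypothetical helper illustrating the pattern the diff enforces:
        # every noise draw reads self.latent_dim instead of a literal 100.
        return np.random.normal(0, 1, (batch_size, self.latent_dim))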


@@ -34,7 +34,7 @@ class GAN():
         self.generator = self.build_generator()
 
         # The generator takes noise as input and generates imgs
-        z = Input(shape=(100,))
+        z = Input(shape=(self.latent_dim,))
         img = self.generator(z)
 
         # For the combined model we will only train the generator
@@ -112,7 +112,7 @@ class GAN():
             idx = np.random.randint(0, X_train.shape[0], batch_size)
             imgs = X_train[idx]
 
-            noise = np.random.normal(0, 1, (batch_size, 100))
+            noise = np.random.normal(0, 1, (batch_size, self.latent_dim))
 
             # Generate a batch of new images
             gen_imgs = self.generator.predict(noise)
@@ -126,7 +126,7 @@ class GAN():
             #  Train Generator
             # ---------------------
 
-            noise = np.random.normal(0, 1, (batch_size, 100))
+            noise = np.random.normal(0, 1, (batch_size, self.latent_dim))
 
             # Train the generator (to have the discriminator label samples as valid)
             g_loss = self.combined.train_on_batch(noise, valid)
@@ -140,7 +140,7 @@ class GAN():
 
     def sample_images(self, epoch):
         r, c = 5, 5
-        noise = np.random.normal(0, 1, (r * c, 100))
+        noise = np.random.normal(0, 1, (r * c, self.latent_dim))
         gen_imgs = self.generator.predict(noise)
 
         # Rescale images 0 - 1
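
With the literals gone, downstream code can also read the attribute instead of repeating the value. A hedged usage sketch follows, assuming a zero-argument constructor for the GAN class shown above; the MNIST-style output shape in the comment is an assumption, not something this diff shows.

import numpy as np

gan = GAN()                                            # builds the generator in __init__ (see the first hunk)
noise = np.random.normal(0, 1, (16, gan.latent_dim))   # 16 latent vectors, no hard-coded 100
gen_imgs = gan.generator.predict(noise)                # e.g. shape (16, 28, 28, 1) if trained on MNIST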