KOMAL BADI / Building Autoencoders in Keras · Commits

Commit a9cae3d7
Authored May 14, 2019 by William Stonewall Monroe
Added some comments
Parent: 5fdb6c78
Changes: 1 file

DenoisingWithAutoencoders.ipynb @ a9cae3d7
%% Cell type:markdown id: tags:
# First, let's get our data!
%% Cell type:code id: tags:
``` python
from keras.datasets import mnist
import numpy as np

# Load MNIST and scale pixel values to [0, 1]
(x_train, _), (x_test, _) = mnist.load_data()
x_train = x_train.astype('float32') / 255.
x_test = x_test.astype('float32') / 255.
x_train = np.reshape(x_train, (len(x_train), 28, 28, 1))  # adapt this if using `channels_first` image data format
x_test = np.reshape(x_test, (len(x_test), 28, 28, 1))  # adapt this if using `channels_first` image data format

# Add Gaussian noise, then clip back into the valid [0, 1] range
noise_factor = 0.5
x_train_noisy = x_train + noise_factor * np.random.normal(loc=0.0, scale=1.0, size=x_train.shape)
x_test_noisy = x_test + noise_factor * np.random.normal(loc=0.0, scale=1.0, size=x_test.shape)
x_train_noisy = np.clip(x_train_noisy, 0., 1.)
x_test_noisy = np.clip(x_test_noisy, 0., 1.)
```
%%%% Output: stream
Using TensorFlow backend.
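%% Cell type:markdown id: tags:
A quick sanity check (a sketch added here, not part of the original notebook): after the cell above, the arrays should have shapes `(60000, 28, 28, 1)` and `(10000, 28, 28, 1)`, and the noisy copies should still sit inside `[0, 1]` thanks to `np.clip`. If you want the noise to be reproducible, call `np.random.seed(...)` before running the cell above.
%% Cell type:code id: tags:
``` python
# Added sketch: confirm shapes and value ranges after preprocessing.
print(x_train.shape, x_test.shape)               # expected: (60000, 28, 28, 1) (10000, 28, 28, 1)
print(x_train_noisy.min(), x_train_noisy.max())  # expected: within [0.0, 1.0] after np.clip
print(x_test_noisy.min(), x_test_noisy.max())
```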
%% Cell type:markdown id: tags:
# Let's look at some of our data
%% Cell type:code id: tags:
``` python
# use Matplotlib (don't ask)
import matplotlib.pyplot as plt
##wsm

n = 10
plt.figure(figsize=(20, 2))
for i in range(1, n):  ##wsm changed from for i in range(n):
    ax = plt.subplot(1, n, i)
    plt.imshow(x_test_noisy[i].reshape(28, 28))
    plt.gray()
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)
plt.show()
```
%%%% Output: display_data
[figure: a row of noisy MNIST test digits]
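%% Cell type:markdown id: tags:
For reference, here is a small added sketch (not in the original notebook) that plots the clean originals of the same digits, using the same plotting pattern as the cell above.
%% Cell type:code id: tags:
``` python
# Added sketch: show the clean test digits that correspond to the noisy ones above.
plt.figure(figsize=(20, 2))
for i in range(1, n):
    ax = plt.subplot(1, n, i)
    plt.imshow(x_test[i].reshape(28, 28))
    plt.gray()
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)
plt.show()
```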
%% Cell type:markdown id: tags:
# Let's create our machine learning architecture
Here we're creating a convolutional autoencoder: two Conv2D/MaxPooling2D blocks encode each 28×28 image down to a (7, 7, 32) representation, and a mirrored stack of Conv2D/UpSampling2D layers decodes it back to a 28×28 image.
%% Cell type:code id: tags:
``` python
from keras.layers import Input, Dense, Conv2D, MaxPooling2D, UpSampling2D
from keras.models import Model
from keras import backend as K

input_img = Input(shape=(28, 28, 1))  # adapt this if using `channels_first` image data format

# Encoder
x = Conv2D(32, (3, 3), activation='relu', padding='same')(input_img)
x = MaxPooling2D((2, 2), padding='same')(x)
x = Conv2D(32, (3, 3), activation='relu', padding='same')(x)
encoded = MaxPooling2D((2, 2), padding='same')(x)

# at this point the representation is (7, 7, 32)

# Decoder
x = Conv2D(32, (3, 3), activation='relu', padding='same')(encoded)
x = UpSampling2D((2, 2))(x)
x = Conv2D(32, (3, 3), activation='relu', padding='same')(x)
x = UpSampling2D((2, 2))(x)
decoded = Conv2D(1, (3, 3), activation='sigmoid', padding='same')(x)

autoencoder = Model(input_img, decoded)
autoencoder.compile(optimizer='adadelta', loss='binary_crossentropy')
```
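%% Cell type:markdown id: tags:
Before training, it can help to check the layer shapes. `Model.summary()` is standard Keras; the separate `encoder` model below is a hypothetical helper (not defined in the original notebook) that shares the same layers and lets you pull out the compressed `(7, 7, 32)` representation directly.
%% Cell type:code id: tags:
``` python
# Added sketch: inspect output shapes and parameter counts.
autoencoder.summary()

# Hypothetical helper model: maps an input image to its encoded representation.
encoder = Model(input_img, encoded)
print(encoder.predict(x_test_noisy[:1]).shape)  # expected: (1, 7, 7, 32)
```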
%% Cell type:markdown id: tags:
# Let's train our model...
%% Cell type:code id: tags:
``` python
from keras.callbacks import TensorBoard

autoencoder.fit(x_train_noisy, x_train,
                epochs=100,
                batch_size=128,
                shuffle=True,
                validation_data=(x_test_noisy, x_test),
                callbacks=[TensorBoard(log_dir='/data/scratch/wsmonroe',
                                       histogram_freq=0,
                                       write_graph=False)])
```
%%%% Output: stream
Train on 60000 samples, validate on 10000 samples
Epoch 1/100
60000/60000 [==============================] - 3s 45us/step - loss: 0.0966 - val_loss: 0.0957
Epoch 2/100
60000/60000 [==============================] - 3s 44us/step - loss: 0.0966 - val_loss: 0.0957
Epoch 3/100
60000/60000 [==============================] - 3s 45us/step - loss: 0.0965 - val_loss: 0.0959
Epoch 4/100
60000/60000 [==============================] - 3s 45us/step - loss: 0.0963 - val_loss: 0.0959
Epoch 5/100
60000/60000 [==============================] - 3s 45us/step - loss: 0.0962 - val_loss: 0.0955
Epoch 6/100
60000/60000 [==============================] - 3s 46us/step - loss: 0.0962 - val_loss: 0.0953
Epoch 7/100
60000/60000 [==============================] - 3s 45us/step - loss: 0.0962 - val_loss: 0.0953
Epoch 8/100
60000/60000 [==============================] - 3s 46us/step - loss: 0.0960 - val_loss: 0.0952
Epoch 9/100
60000/60000 [==============================] - 3s 45us/step - loss: 0.0959 - val_loss: 0.0958
Epoch 10/100
60000/60000 [==============================] - 3s 45us/step - loss: 0.0959 - val_loss: 0.0957
Epoch 11/100
60000/60000 [==============================] - 3s 46us/step - loss: 0.0958 - val_loss: 0.0961
Epoch 12/100
60000/60000 [==============================] - 3s 45us/step - loss: 0.0958 - val_loss: 0.0954
Epoch 13/100
60000/60000 [==============================] - 3s 46us/step - loss: 0.0957 - val_loss: 0.0952
Epoch 14/100
60000/60000 [==============================] - 3s 46us/step - loss: 0.0956 - val_loss: 0.0951
Epoch 15/100
60000/60000 [==============================] - 3s 47us/step - loss: 0.0956 - val_loss: 0.0948
Epoch 16/100
60000/60000 [==============================] - 3s 48us/step - loss: 0.0956 - val_loss: 0.0949
Epoch 17/100
60000/60000 [==============================] - 3s 47us/step - loss: 0.0954 - val_loss: 0.0949
Epoch 18/100
60000/60000 [==============================] - 3s 47us/step - loss: 0.0954 - val_loss: 0.0947
Epoch 19/100
60000/60000 [==============================] - 3s 47us/step - loss: 0.0953 - val_loss: 0.0947
Epoch 20/100
60000/60000 [==============================] - 3s 48us/step - loss: 0.0953 - val_loss: 0.0949
Epoch 21/100
60000/60000 [==============================] - 3s 47us/step - loss: 0.0953 - val_loss: 0.0959
Epoch 22/100
60000/60000 [==============================] - 3s 47us/step - loss: 0.0952 - val_loss: 0.0952
Epoch 23/100
60000/60000 [==============================] - 3s 46us/step - loss: 0.0952 - val_loss: 0.0958
Epoch 24/100
60000/60000 [==============================] - 3s 45us/step - loss: 0.0951 - val_loss: 0.0946
Epoch 25/100
60000/60000 [==============================] - 3s 46us/step - loss: 0.0952 - val_loss: 0.0947
Epoch 26/100
60000/60000 [==============================] - 3s 46us/step - loss: 0.0951 - val_loss: 0.0944
Epoch 27/100
60000/60000 [==============================] - 3s 46us/step - loss: 0.0950 - val_loss: 0.0945
Epoch 28/100
60000/60000 [==============================] - 3s 47us/step - loss: 0.0950 - val_loss: 0.0947
Epoch 29/100
60000/60000 [==============================] - 3s 47us/step - loss: 0.0949 - val_loss: 0.0947
Epoch 30/100
60000/60000 [==============================] - 3s 47us/step - loss: 0.0949 - val_loss: 0.0949
Epoch 31/100
60000/60000 [==============================] - 3s 48us/step - loss: 0.0948 - val_loss: 0.0954
Epoch 32/100
60000/60000 [==============================] - 3s 48us/step - loss: 0.0948 - val_loss: 0.0942
Epoch 33/100
60000/60000 [==============================] - 3s 48us/step - loss: 0.0948 - val_loss: 0.0954
Epoch 34/100
60000/60000 [==============================] - 3s 48us/step - loss: 0.0948 - val_loss: 0.0942
Epoch 35/100
60000/60000 [==============================] - 3s 47us/step - loss: 0.0948 - val_loss: 0.0950
Epoch 36/100
60000/60000 [==============================] - 3s 47us/step - loss: 0.0947 - val_loss: 0.0942
Epoch 37/100
60000/60000 [==============================] - 3s 47us/step - loss: 0.0947 - val_loss: 0.0945
Epoch 38/100
60000/60000 [==============================] - 3s 47us/step - loss: 0.0946 - val_loss: 0.0950
Epoch 39/100
60000/60000 [==============================] - 3s 48us/step - loss: 0.0946 - val_loss: 0.0944
Epoch 40/100
60000/60000 [==============================] - 3s 48us/step - loss: 0.0946 - val_loss: 0.0943
Epoch 41/100
60000/60000 [==============================] - 3s 46us/step - loss: 0.0945 - val_loss: 0.0947
Epoch 42/100
60000/60000 [==============================] - 3s 48us/step - loss: 0.0945 - val_loss: 0.0947
Epoch 43/100
60000/60000 [==============================] - 3s 47us/step - loss: 0.0945 - val_loss: 0.0952
Epoch 44/100
60000/60000 [==============================] - 3s 46us/step - loss: 0.0944 - val_loss: 0.0949
Epoch 45/100
60000/60000 [==============================] - 3s 45us/step - loss: 0.0944 - val_loss: 0.0940
Epoch 46/100
60000/60000 [==============================] - 3s 47us/step - loss: 0.0944 - val_loss: 0.0940
Epoch 47/100
60000/60000 [==============================] - 3s 47us/step - loss: 0.0944 - val_loss: 0.0946
Epoch 48/100
60000/60000 [==============================] - 3s 48us/step - loss: 0.0944 - val_loss: 0.0945
Epoch 49/100
60000/60000 [==============================] - 3s 46us/step - loss: 0.0943 - val_loss: 0.0945
Epoch 50/100
60000/60000 [==============================] - 3s 47us/step - loss: 0.0943 - val_loss: 0.0940
Epoch 51/100
60000/60000 [==============================] - 3s 45us/step - loss: 0.0943 - val_loss: 0.0950
Epoch 52/100
60000/60000 [==============================] - 3s 45us/step - loss: 0.0943 - val_loss: 0.0939
Epoch 53/100
60000/60000 [==============================] - 3s 46us/step - loss: 0.0942 - val_loss: 0.0950
Epoch 54/100
60000/60000 [==============================] - 3s 46us/step - loss: 0.0942 - val_loss: 0.0939
Epoch 55/100
60000/60000 [==============================] - 3s 47us/step - loss: 0.0942 - val_loss: 0.0940
Epoch 56/100
60000/60000 [==============================] - 3s 47us/step - loss: 0.0942 - val_loss: 0.0939
Epoch 57/100
60000/60000 [==============================] - 3s 48us/step - loss: 0.0942 - val_loss: 0.0941
Epoch 58/100
60000/60000 [==============================] - 3s 49us/step - loss: 0.0941 - val_loss: 0.0943
Epoch 59/100
60000/60000 [==============================] - 3s 47us/step - loss: 0.0941 - val_loss: 0.0948
Epoch 60/100
60000/60000 [==============================] - 3s 48us/step - loss: 0.0941 - val_loss: 0.0937
Epoch 61/100
60000/60000 [==============================] - 3s 47us/step - loss: 0.0940 - val_loss: 0.0937
Epoch 62/100
60000/60000 [==============================] - 3s 47us/step - loss: 0.0941 - val_loss: 0.0946
Epoch 63/100
60000/60000 [==============================] - 3s 47us/step - loss: 0.0940 - val_loss: 0.0944
Epoch 64/100
60000/60000 [==============================] - 3s 47us/step - loss: 0.0940 - val_loss: 0.0938
Epoch 65/100
60000/60000 [==============================] - 3s 47us/step - loss: 0.0940 - val_loss: 0.0938
Epoch 66/100
60000/60000 [==============================] - 3s 47us/step - loss: 0.0940 - val_loss: 0.0937
Epoch 67/100
60000/60000 [==============================] - 3s 47us/step - loss: 0.0939 - val_loss: 0.0937
Epoch 68/100
60000/60000 [==============================] - 3s 48us/step - loss: 0.0939 - val_loss: 0.0937
Epoch 69/100
60000/60000 [==============================] - 3s 47us/step - loss: 0.0939 - val_loss: 0.0944
Epoch 70/100
60000/60000 [==============================] - 3s 46us/step - loss: 0.0939 - val_loss: 0.0938
Epoch 71/100
60000/60000 [==============================] - 3s 46us/step - loss: 0.0940 - val_loss: 0.0940
Epoch 72/100
60000/60000 [==============================] - 3s 46us/step - loss: 0.0939 - val_loss: 0.0937
Epoch 73/100
60000/60000 [==============================] - 3s 47us/step - loss: 0.0939 - val_loss: 0.0941
Epoch 74/100
60000/60000 [==============================] - 3s 48us/step - loss: 0.0939 - val_loss: 0.0936
Epoch 75/100
60000/60000 [==============================] - 3s 48us/step - loss: 0.0938 - val_loss: 0.0936
Epoch 76/100
60000/60000 [==============================] - 3s 48us/step - loss: 0.0938 - val_loss: 0.0937
Epoch 77/100
%%%% Output: stream
60000/60000 [==============================] - 3s 50us/step - loss: 0.0939 - val_loss: 0.0936
Epoch 78/100
60000/60000 [==============================] - 3s 46us/step - loss: 0.0938 - val_loss: 0.0937
Epoch 79/100
60000/60000 [==============================] - 3s 45us/step - loss: 0.0938 - val_loss: 0.0935
Epoch 80/100
60000/60000 [==============================] - 3s 45us/step - loss: 0.0938 - val_loss: 0.0936
Epoch 81/100
60000/60000 [==============================] - 3s 46us/step - loss: 0.0937 - val_loss: 0.0935
Epoch 82/100
60000/60000 [==============================] - 3s 47us/step - loss: 0.0937 - val_loss: 0.0936
Epoch 83/100
60000/60000 [==============================] - 3s 48us/step - loss: 0.0937 - val_loss: 0.0935
Epoch 84/100
60000/60000 [==============================] - 3s 48us/step - loss: 0.0937 - val_loss: 0.0935
Epoch 85/100
60000/60000 [==============================] - 3s 46us/step - loss: 0.0937 - val_loss: 0.0936
Epoch 86/100
60000/60000 [==============================] - 3s 45us/step - loss: 0.0937 - val_loss: 0.0935
Epoch 87/100
60000/60000 [==============================] - 3s 45us/step - loss: 0.0936 - val_loss: 0.0944
Epoch 88/100
60000/60000 [==============================] - 3s 45us/step - loss: 0.0937 - val_loss: 0.0942
Epoch 89/100
60000/60000 [==============================] - 3s 45us/step - loss: 0.0936 - val_loss: 0.0934
Epoch 90/100
60000/60000 [==============================] - 3s 46us/step - loss: 0.0936 - val_loss: 0.0936
Epoch 91/100
60000/60000 [==============================] - 3s 47us/step - loss: 0.0936 - val_loss: 0.0935
Epoch 92/100
60000/60000 [==============================] - 3s 48us/step - loss: 0.0936 - val_loss: 0.0936
Epoch 93/100
60000/60000 [==============================] - 3s 48us/step - loss: 0.0936 - val_loss: 0.0940
Epoch 94/100
60000/60000 [==============================] - 3s 48us/step - loss: 0.0936 - val_loss: 0.0940
Epoch 95/100
60000/60000 [==============================] - 3s 46us/step - loss: 0.0935 - val_loss: 0.0934
Epoch 96/100
60000/60000 [==============================] - 3s 46us/step - loss: 0.0936 - val_loss: 0.0934
Epoch 97/100
60000/60000 [==============================] - 3s 46us/step - loss: 0.0936 - val_loss: 0.0944
Epoch 98/100
60000/60000 [==============================] - 3s 46us/step - loss: 0.0936 - val_loss: 0.0933
Epoch 99/100
60000/60000 [==============================] - 3s 46us/step - loss: 0.0935 - val_loss: 0.0936
Epoch 100/100
60000/60000 [==============================] - 3s 47us/step - loss: 0.0935 - val_loss: 0.0934
%%%% Output: execute_result
<keras.callbacks.History at 0x2aab68566470>
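%% Cell type:markdown id: tags:
The loss barely moves over these 100 epochs, so a fixed epoch count may waste time. One option, sketched below rather than taken from the original notebook, is to add an `EarlyStopping` callback alongside `TensorBoard` and then save the trained model; the filename is just an example.
%% Cell type:code id: tags:
``` python
# Added sketch: stop training when val_loss stops improving, and persist the trained model.
from keras.callbacks import EarlyStopping

early_stop = EarlyStopping(monitor='val_loss', patience=5)
# e.g. pass callbacks=[early_stop, TensorBoard(log_dir='/data/scratch/wsmonroe')] to autoencoder.fit(...)

autoencoder.save('denoising_autoencoder.h5')  # example filename
```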
%% Cell type:markdown id: tags:
# How'd we do?
%% Cell type:code id: tags:
``` python
decoded_imgs = autoencoder.predict(x_test_noisy)

n = 10
plt.figure(figsize=(20, 4))
for i in range(1, n):
    # display original
    ax = plt.subplot(2, n, i)
    plt.imshow(x_test_noisy[i].reshape(28, 28))
    plt.gray()
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)

    # display reconstruction
    ax = plt.subplot(2, n, i + n)
    plt.imshow(decoded_imgs[i].reshape(28, 28))
    plt.gray()
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)
plt.show()
```
%%%% Output: display_data
[figure: noisy test digits (top row) and their reconstructions (bottom row)]
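%% Cell type:markdown id: tags:
Beyond eyeballing the images, a rough numeric check (an added sketch, not part of the original notebook) is to compare the per-pixel mean squared error of the reconstructions against the clean test images with the error of the noisy inputs themselves; the reconstruction error should typically come out lower if the denoiser learned anything useful.
%% Cell type:code id: tags:
``` python
# Added sketch: compare reconstruction error against the noisy-input baseline.
mse_reconstructed = np.mean((decoded_imgs - x_test) ** 2)
mse_noisy = np.mean((x_test_noisy - x_test) ** 2)
print('MSE, reconstructed vs clean:', mse_reconstructed)
print('MSE, noisy input vs clean:  ', mse_noisy)
```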
%% Cell type:code id: tags:
``` python
```
%% Cell type:code id: tags:
``` python
```