This tutorial introduces autoencoders with three examples: the basics, image denoising, and anomaly detection.
An autoencoder is a special type of neural network that is trained to copy its input to its output. For example, given an image of a handwritten digit, an autoencoder first encodes the image into a lower-dimensional latent representation, then decodes the latent representation back to an image. An autoencoder learns to compress the data while minimizing the reconstruction error.
To learn more about autoencoders, please consider reading chapter 14 from Deep Learning by Ian Goodfellow, Yoshua Bengio, and Aaron Courville.
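Conceptually, the whole pipeline is encode, then decode, then measure what was lost. The toy sketch below (our illustration, not code from this tutorial) runs that pipeline with an untrained random linear map, just to make the three steps concrete:
import numpy as np

rng = np.random.default_rng(0)
x = rng.random((4, 784))                # four flattened 28x28 "images"
W = rng.normal(size=(784, 64)) * 0.01   # random (untrained) encoder weights

def encode(x):
  return x @ W      # 784 -> 64: compress to a latent code

def decode(z):
  return z @ W.T    # 64 -> 784: reconstruct from the latent code

x_hat = decode(encode(x))
print(np.mean((x - x_hat) ** 2))  # the reconstruction error a real autoencoder is trained to minimize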
Import TensorFlow and other libraries
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import tensorflow as tf
from sklearn.metrics import accuracy_score, precision_score, recall_score
from sklearn.model_selection import train_test_split
from tensorflow.keras import layers, losses
from tensorflow.keras.datasets import fashion_mnist
from tensorflow.keras.models import Model
Load the dataset
To start, you will train the basic autoencoder using the Fashion MNIST dataset. Each image in this dataset is 28x28 pixels.
(x_train, _), (x_test, _) = fashion_mnist.load_data()
x_train = x_train.astype('float32') / 255.
x_test = x_test.astype('float32') / 255.
print(x_train.shape)
print(x_test.shape)
Downloading data from https://storage.googleapis.com/tensorflow/tf-keras-datasets/train-labels-idx1-ubyte.gz
Downloading data from https://storage.googleapis.com/tensorflow/tf-keras-datasets/train-images-idx3-ubyte.gz
Downloading data from https://storage.googleapis.com/tensorflow/tf-keras-datasets/t10k-labels-idx1-ubyte.gz
Downloading data from https://storage.googleapis.com/tensorflow/tf-keras-datasets/t10k-images-idx3-ubyte.gz
(60000, 28, 28)
(10000, 28, 28)
First example: Basic autoencoder
Define an autoencoder with two Dense layers: an encoder, which compresses the images into a 64-dimensional latent vector, and a decoder, which reconstructs the original image from the latent space.
To define your model, use the Keras Model Subclassing API.
latent_dim = 64
class Autoencoder(Model):
  def __init__(self, latent_dim):
    super(Autoencoder, self).__init__()
    self.latent_dim = latent_dim
    self.encoder = tf.keras.Sequential([
      layers.Flatten(),
      layers.Dense(latent_dim, activation='relu'),
    ])
    self.decoder = tf.keras.Sequential([
      layers.Dense(784, activation='sigmoid'),
      layers.Reshape((28, 28))
    ])

  def call(self, x):
    encoded = self.encoder(x)
    decoded = self.decoder(encoded)
    return decoded
autoencoder = Autoencoder(latent_dim)
autoencoder.compile(optimizer='adam', loss=losses.MeanSquaredError())
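As an aside, the same architecture can also be expressed with the Keras functional API; a minimal equivalent sketch (the name functional_autoencoder is ours, not from the tutorial):
inputs = tf.keras.Input(shape=(28, 28))
flat = layers.Flatten()(inputs)
latent = layers.Dense(latent_dim, activation='relu')(flat)  # encoder half
dense = layers.Dense(784, activation='sigmoid')(latent)     # decoder half
outputs = layers.Reshape((28, 28))(dense)
functional_autoencoder = Model(inputs, outputs)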
Train the model using x_train as both the input and the target. The encoder will learn to compress the dataset from 784 dimensions to the latent space, and the decoder will learn to reconstruct the original images.
autoencoder.fit(x_train, x_train,
                epochs=10,
                shuffle=True,
                validation_data=(x_test, x_test))
Epoch 1/10
1875/1875 [==============================] - 4s 2ms/step - loss: 0.0243 - val_loss: 0.0140
Epoch 2/10
1875/1875 [==============================] - 3s 2ms/step - loss: 0.0116 - val_loss: 0.0106
Epoch 3/10
1875/1875 [==============================] - 3s 2ms/step - loss: 0.0100 - val_loss: 0.0098
Epoch 4/10
1875/1875 [==============================] - 3s 2ms/step - loss: 0.0094 - val_loss: 0.0094
Epoch 5/10
1875/1875 [==============================] - 3s 2ms/step - loss: 0.0092 - val_loss: 0.0092
Epoch 6/10
1875/1875 [==============================] - 3s 2ms/step - loss: 0.0090 - val_loss: 0.0091
Epoch 7/10
1875/1875 [==============================] - 3s 2ms/step - loss: 0.0090 - val_loss: 0.0090
Epoch 8/10
1875/1875 [==============================] - 3s 2ms/step - loss: 0.0089 - val_loss: 0.0090
Epoch 9/10
1875/1875 [==============================] - 3s 2ms/step - loss: 0.0088 - val_loss: 0.0089
Epoch 10/10
1875/1875 [==============================] - 3s 2ms/step - loss: 0.0088 - val_loss: 0.0089
<keras.callbacks.History at 0x7ff1d35df550>
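For a single summary number on unseen data, Model.evaluate returns the compiled loss over the test set (a quick check we added; it should be close to the final val_loss above):
print(autoencoder.evaluate(x_test, x_test, verbose=0))  # mean squared reconstruction error on the test set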
āĻāĻāύ āϝā§āĻšā§āϤ⧠āĻŽāĻĄā§āϞāĻāĻŋ āĻĒā§āϰāĻļāĻŋāĻā§āώāĻŋāϤ āĻšāϝāĻŧā§āĻā§, āĻāϏā§āύ āĻĒāϰā§āĻā§āώāĻž āϏā§āĻ āĻĨā§āĻā§ āĻāĻŦāĻŋāĻā§āϞāĻŋāĻā§ āĻāύāĻā§āĻĄāĻŋāĻ āĻāĻŦāĻ āĻĄāĻŋāĻā§āĻĄāĻŋāĻ āĻāϰ⧠āĻĒāϰā§āĻā§āώāĻž āĻāϰāĻŋ⧎
encoded_imgs = autoencoder.encoder(x_test).numpy()
decoded_imgs = autoencoder.decoder(encoded_imgs).numpy()
n = 10
plt.figure(figsize=(20, 4))
for i in range(n):
  # display original
  ax = plt.subplot(2, n, i + 1)
  plt.imshow(x_test[i])
  plt.title("original")
  plt.gray()
  ax.get_xaxis().set_visible(False)
  ax.get_yaxis().set_visible(False)

  # display reconstruction
  ax = plt.subplot(2, n, i + 1 + n)
  plt.imshow(decoded_imgs[i])
  plt.title("reconstructed")
  plt.gray()
  ax.get_xaxis().set_visible(False)
  ax.get_yaxis().set_visible(False)
plt.show()
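Beyond eyeballing the plots, you can quantify how well each test image is reconstructed; a small sketch (our addition) computing the per-image mean squared error:
per_image_mse = np.mean((decoded_imgs - x_test) ** 2, axis=(1, 2))  # one error value per test image
print(per_image_mse.mean(), per_image_mse.max())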
Second example: Image denoising
An autoencoder can also be trained to remove noise from images. In the following section, you will create a noisy version of the Fashion MNIST dataset by applying random noise to each image. You will then train an autoencoder using the noisy image as input and the original image as the target.
Let's reimport the dataset to omit the modifications made earlier.
(x_train, _), (x_test, _) = fashion_mnist.load_data()
x_train = x_train.astype('float32') / 255.
x_test = x_test.astype('float32') / 255.
x_train = x_train[..., tf.newaxis]
x_test = x_test[..., tf.newaxis]
print(x_train.shape)
(60000, 28, 28, 1)
Adding random noise to the images
noise_factor = 0.2
x_train_noisy = x_train + noise_factor * tf.random.normal(shape=x_train.shape)
x_test_noisy = x_test + noise_factor * tf.random.normal(shape=x_test.shape)
x_train_noisy = tf.clip_by_value(x_train_noisy, clip_value_min=0., clip_value_max=1.)
x_test_noisy = tf.clip_by_value(x_test_noisy, clip_value_min=0., clip_value_max=1.)
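As a quick sanity check (our addition), you can confirm that the clipped noisy pixels still lie in [0, 1]:
print(tf.reduce_min(x_train_noisy).numpy(), tf.reduce_max(x_train_noisy).numpy())  # expect values within [0.0, 1.0]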
Plot the noisy images.
n = 10
plt.figure(figsize=(20, 2))
for i in range(n):
  ax = plt.subplot(1, n, i + 1)
  plt.title("original + noise")
  plt.imshow(tf.squeeze(x_test_noisy[i]))
  plt.gray()
plt.show()
Define a convolutional autoencoder
In this example, you will train a convolutional autoencoder using Conv2D layers in the encoder and Conv2DTranspose layers in the decoder.
class Denoise(Model):
  def __init__(self):
    super(Denoise, self).__init__()
    self.encoder = tf.keras.Sequential([
      layers.Input(shape=(28, 28, 1)),
      layers.Conv2D(16, (3, 3), activation='relu', padding='same', strides=2),
      layers.Conv2D(8, (3, 3), activation='relu', padding='same', strides=2)])

    self.decoder = tf.keras.Sequential([
      layers.Conv2DTranspose(8, kernel_size=3, strides=2, activation='relu', padding='same'),
      layers.Conv2DTranspose(16, kernel_size=3, strides=2, activation='relu', padding='same'),
      layers.Conv2D(1, kernel_size=(3, 3), activation='sigmoid', padding='same')])

  def call(self, x):
    encoded = self.encoder(x)
    decoded = self.decoder(encoded)
    return decoded
autoencoder = Denoise()
autoencoder.compile(optimizer='adam', loss=losses.MeanSquaredError())
autoencoder.fit(x_train_noisy, x_train,
                epochs=10,
                shuffle=True,
                validation_data=(x_test_noisy, x_test))
Epoch 1/10
1875/1875 [==============================] - 8s 3ms/step - loss: 0.0169 - val_loss: 0.0107
Epoch 2/10
1875/1875 [==============================] - 6s 3ms/step - loss: 0.0095 - val_loss: 0.0086
Epoch 3/10
1875/1875 [==============================] - 6s 3ms/step - loss: 0.0082 - val_loss: 0.0080
Epoch 4/10
1875/1875 [==============================] - 6s 3ms/step - loss: 0.0078 - val_loss: 0.0077
Epoch 5/10
1875/1875 [==============================] - 6s 3ms/step - loss: 0.0076 - val_loss: 0.0075
Epoch 6/10
1875/1875 [==============================] - 6s 3ms/step - loss: 0.0074 - val_loss: 0.0074
Epoch 7/10
1875/1875 [==============================] - 6s 3ms/step - loss: 0.0073 - val_loss: 0.0073
Epoch 8/10
1875/1875 [==============================] - 6s 3ms/step - loss: 0.0072 - val_loss: 0.0072
Epoch 9/10
1875/1875 [==============================] - 6s 3ms/step - loss: 0.0071 - val_loss: 0.0071
Epoch 10/10
1875/1875 [==============================] - 6s 3ms/step - loss: 0.0070 - val_loss: 0.0071
<keras.callbacks.History at 0x7ff1c45a31d0>
Let's take a look at a summary of the encoder. Notice how the images are downsampled from 28x28 to 7x7.
autoencoder.encoder.summary()
Model: "sequential_2" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= conv2d (Conv2D) (None, 14, 14, 16) 160 conv2d_1 (Conv2D) (None, 7, 7, 8) 1160 ================================================================= Total params: 1,320 Trainable params: 1,320 Non-trainable params: 0 _________________________________________________________________
The decoder upsamples the images back from 7x7 to 28x28.
autoencoder.decoder.summary()
Model: "sequential_3" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= conv2d_transpose (Conv2DTra (None, 14, 14, 8) 584 nspose) conv2d_transpose_1 (Conv2DT (None, 28, 28, 16) 1168 ranspose) conv2d_2 (Conv2D) (None, 28, 28, 1) 145 ================================================================= Total params: 1,897 Trainable params: 1,897 Non-trainable params: 0 _________________________________________________________________
Plot both the noisy images and the denoised images produced by the autoencoder.
encoded_imgs = autoencoder.encoder(x_test_noisy).numpy()  # feed the noisy test images so the model actually denoises them
decoded_imgs = autoencoder.decoder(encoded_imgs).numpy()
n = 10
plt.figure(figsize=(20, 4))
for i in range(n):
  # display original + noise
  ax = plt.subplot(2, n, i + 1)
  plt.title("original + noise")
  plt.imshow(tf.squeeze(x_test_noisy[i]))
  plt.gray()
  ax.get_xaxis().set_visible(False)
  ax.get_yaxis().set_visible(False)

  # display reconstruction
  bx = plt.subplot(2, n, i + n + 1)
  plt.title("reconstructed")
  plt.imshow(tf.squeeze(decoded_imgs[i]))
  plt.gray()
  bx.get_xaxis().set_visible(False)
  bx.get_yaxis().set_visible(False)
plt.show()
Third example: Anomaly detection
Overview
In this example, you will train an autoencoder to detect anomalies on the ECG5000 dataset. This dataset contains 5,000 electrocardiograms, each with 140 data points. You will use a simplified version of the dataset, where each example has been labeled either 0 (corresponding to an abnormal rhythm) or 1 (corresponding to a normal rhythm). You are interested in identifying the abnormal rhythms.
How will you detect anomalies using an autoencoder? Recall that an autoencoder is trained to minimize reconstruction error. You will train an autoencoder on the normal rhythms only, then use it to reconstruct all of the data. Our hypothesis is that the abnormal rhythms will have higher reconstruction error. You will then classify a rhythm as an anomaly if the reconstruction error surpasses a fixed threshold.
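In pseudocode, the decision rule this section builds up to looks like the sketch below (illustrative names we chose; note that the predict helper defined later in this tutorial returns the opposite convention, True for normal):
def is_anomaly(model, x, threshold):
  # Reconstruct the input and measure the mean absolute error per example;
  # examples the model reconstructs poorly are flagged as anomalies.
  reconstruction = model(x)
  error = tf.reduce_mean(tf.abs(reconstruction - x), axis=-1)
  return error > threshold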
Load ECG data
āĻāĻĒāύāĻŋ āϝ⧠āĻĄā§āĻāĻžāϏā§āĻāĻāĻŋ āĻŦā§āϝāĻŦāĻšāĻžāϰ āĻāϰāĻŦā§āύ āϤāĻž timeseriesclassification.com āĻĨā§āĻā§ āĻāĻāĻāĻŋāϰ āĻāĻĒāϰ āĻāĻŋāϤā§āϤāĻŋ āĻāϰā§āĨ¤
# Download the dataset
dataframe = pd.read_csv('http://storage.googleapis.com/download.tensorflow.org/data/ecg.csv', header=None)
raw_data = dataframe.values
dataframe.head()
# The last element contains the labels
labels = raw_data[:, -1]
# The other data points are the electrocardiogram data
data = raw_data[:, 0:-1]
train_data, test_data, train_labels, test_labels = train_test_split(
data, labels, test_size=0.2, random_state=21
)
Normalize the data to [0,1].
min_val = tf.reduce_min(train_data)
max_val = tf.reduce_max(train_data)
train_data = (train_data - min_val) / (max_val - min_val)
test_data = (test_data - min_val) / (max_val - min_val)
train_data = tf.cast(train_data, tf.float32)
test_data = tf.cast(test_data, tf.float32)
You will train the autoencoder using only the normal rhythms, which are labeled in this dataset as 1. Separate the normal rhythms from the abnormal rhythms.
train_labels = train_labels.astype(bool)
test_labels = test_labels.astype(bool)
normal_train_data = train_data[train_labels]
normal_test_data = test_data[test_labels]
anomalous_train_data = train_data[~train_labels]
anomalous_test_data = test_data[~test_labels]
Plot a normal ECG.
plt.grid()
plt.plot(np.arange(140), normal_train_data[0])
plt.title("A Normal ECG")
plt.show()
Plot an anomalous ECG.
plt.grid()
plt.plot(np.arange(140), anomalous_train_data[0])
plt.title("An Anomalous ECG")
plt.show()
Build the model
class AnomalyDetector(Model):
  def __init__(self):
    super(AnomalyDetector, self).__init__()
    self.encoder = tf.keras.Sequential([
      layers.Dense(32, activation="relu"),
      layers.Dense(16, activation="relu"),
      layers.Dense(8, activation="relu")])

    self.decoder = tf.keras.Sequential([
      layers.Dense(16, activation="relu"),
      layers.Dense(32, activation="relu"),
      layers.Dense(140, activation="sigmoid")])

  def call(self, x):
    encoded = self.encoder(x)
    decoded = self.decoder(encoded)
    return decoded
autoencoder = AnomalyDetector()
autoencoder.compile(optimizer='adam', loss='mae')
Notice that the autoencoder is trained using only the normal ECGs, but is evaluated using the full test set.
history = autoencoder.fit(normal_train_data, normal_train_data,
                          epochs=20,
                          batch_size=512,
                          validation_data=(test_data, test_data),
                          shuffle=True)
Epoch 1/20
5/5 [==============================] - 1s 33ms/step - loss: 0.0576 - val_loss: 0.0531
Epoch 2/20
5/5 [==============================] - 0s 8ms/step - loss: 0.0552 - val_loss: 0.0514
Epoch 3/20
5/5 [==============================] - 0s 8ms/step - loss: 0.0519 - val_loss: 0.0499
Epoch 4/20
5/5 [==============================] - 0s 8ms/step - loss: 0.0483 - val_loss: 0.0475
Epoch 5/20
5/5 [==============================] - 0s 8ms/step - loss: 0.0445 - val_loss: 0.0451
Epoch 6/20
5/5 [==============================] - 0s 8ms/step - loss: 0.0409 - val_loss: 0.0432
Epoch 7/20
5/5 [==============================] - 0s 8ms/step - loss: 0.0377 - val_loss: 0.0415
Epoch 8/20
5/5 [==============================] - 0s 8ms/step - loss: 0.0348 - val_loss: 0.0401
Epoch 9/20
5/5 [==============================] - 0s 8ms/step - loss: 0.0319 - val_loss: 0.0388
Epoch 10/20
5/5 [==============================] - 0s 8ms/step - loss: 0.0293 - val_loss: 0.0378
Epoch 11/20
5/5 [==============================] - 0s 8ms/step - loss: 0.0273 - val_loss: 0.0369
Epoch 12/20
5/5 [==============================] - 0s 8ms/step - loss: 0.0259 - val_loss: 0.0361
Epoch 13/20
5/5 [==============================] - 0s 8ms/step - loss: 0.0249 - val_loss: 0.0354
Epoch 14/20
5/5 [==============================] - 0s 8ms/step - loss: 0.0239 - val_loss: 0.0346
Epoch 15/20
5/5 [==============================] - 0s 8ms/step - loss: 0.0230 - val_loss: 0.0340
Epoch 16/20
5/5 [==============================] - 0s 8ms/step - loss: 0.0222 - val_loss: 0.0335
Epoch 17/20
5/5 [==============================] - 0s 8ms/step - loss: 0.0215 - val_loss: 0.0331
Epoch 18/20
5/5 [==============================] - 0s 9ms/step - loss: 0.0211 - val_loss: 0.0331
Epoch 19/20
5/5 [==============================] - 0s 8ms/step - loss: 0.0208 - val_loss: 0.0329
Epoch 20/20
5/5 [==============================] - 0s 8ms/step - loss: 0.0206 - val_loss: 0.0327
plt.plot(history.history["loss"], label="Training Loss")
plt.plot(history.history["val_loss"], label="Validation Loss")
plt.legend()
<matplotlib.legend.Legend at 0x7ff1d339b790>
You will soon classify an ECG as anomalous if the reconstruction error is greater than one standard deviation from the normal training examples. First, let's plot a normal ECG from the test set, the reconstruction after it's encoded and decoded by the autoencoder, and the reconstruction error.
encoded_data = autoencoder.encoder(normal_test_data).numpy()
decoded_data = autoencoder.decoder(encoded_data).numpy()
plt.plot(normal_test_data[0], 'b')
plt.plot(decoded_data[0], 'r')
plt.fill_between(np.arange(140), decoded_data[0], normal_test_data[0], color='lightcoral')
plt.legend(labels=["Input", "Reconstruction", "Error"])
plt.show()
Create a similar plot, this time for an anomalous test example.
encoded_data = autoencoder.encoder(anomalous_test_data).numpy()
decoded_data = autoencoder.decoder(encoded_data).numpy()
plt.plot(anomalous_test_data[0], 'b')
plt.plot(decoded_data[0], 'r')
plt.fill_between(np.arange(140), decoded_data[0], anomalous_test_data[0], color='lightcoral')
plt.legend(labels=["Input", "Reconstruction", "Error"])
plt.show()
Detect anomalies
Detect anomalies by calculating whether the reconstruction loss is greater than a fixed threshold. In this tutorial, you will calculate the mean absolute error for normal examples from the training set, then classify future examples as anomalous if the reconstruction error is higher than one standard deviation from the training set.
Plot the reconstruction error on normal ECGs from the training set.
reconstructions = autoencoder.predict(normal_train_data)
train_loss = tf.keras.losses.mae(reconstructions, normal_train_data)
plt.hist(train_loss[None,:], bins=50)
plt.xlabel("Train loss")
plt.ylabel("No of examples")
plt.show()
Choose a threshold value that is one standard deviation above the mean.
threshold = np.mean(train_loss) + np.std(train_loss)
print("Threshold: ", threshold)
Threshold: 0.03241627
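Mean plus one standard deviation is only one reasonable rule; a high percentile of the training losses is another common choice (our suggestion, not part of the tutorial):
alt_threshold = np.percentile(train_loss.numpy(), 95)  # flag the worst ~5% of normal reconstructions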
If you examine the reconstruction error for the anomalous examples in the test set, you'll notice most have greater reconstruction error than the threshold. By varying the threshold, you can adjust the precision and recall of your classifier.
reconstructions = autoencoder.predict(anomalous_test_data)
test_loss = tf.keras.losses.mae(reconstructions, anomalous_test_data)
plt.hist(test_loss[None, :], bins=50)
plt.xlabel("Test loss")
plt.ylabel("No of examples")
plt.show()
Classify an ECG as an anomaly if the reconstruction error is greater than the threshold.
def predict(model, data, threshold):
  reconstructions = model(data)
  loss = tf.keras.losses.mae(reconstructions, data)
  return tf.math.less(loss, threshold)

def print_stats(predictions, labels):
  print("Accuracy = {}".format(accuracy_score(labels, predictions)))
  print("Precision = {}".format(precision_score(labels, predictions)))
  print("Recall = {}".format(recall_score(labels, predictions)))
preds = predict(autoencoder, test_data, threshold)
print_stats(preds, test_labels)
Accuracy = 0.944 Precision = 0.9921875 Recall = 0.9071428571428571
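Since the threshold trades precision against recall, it can help to sweep a few candidate values; a short sketch (our addition) reusing the predict helper from above:
for t in np.linspace(0.5 * threshold, 1.5 * threshold, 5):
  preds = predict(autoencoder, test_data, t)
  print("t = {:.4f}  precision = {:.3f}  recall = {:.3f}".format(
      t, precision_score(test_labels, preds), recall_score(test_labels, preds)))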
āĻĒāϰāĻŦāϰā§āϤ⧠āĻĒāĻĻāĻā§āώā§āĻĒ
To learn more about anomaly detection with autoencoders, check out this excellent interactive example built with TensorFlow.js by Victor Dibia. For a real-world use case, you can learn how Airbus detects anomalies in ISS telemetry data using TensorFlow. To learn more about the basics, consider reading this blog post by François Chollet. For more details, check out chapter 14 from Deep Learning by Ian Goodfellow, Yoshua Bengio, and Aaron Courville.