library(keras)
### iris data
dat = iris
x_train = dat[, 1:4] %>% as.matrix()
# Species is a factor; shift its integer codes to start at 0 before one-hot encoding
y_train <- (as.integer(dat[, 5]) - 1) %>% to_categorical()
model_iris <- keras_model_sequential() %>%
layer_dense(units=20, activation = "relu", input_shape = ncol(x_train)) %>%
layer_dense(units = 10, activation = "relu") %>%
layer_dense(units = 3, activation = "softmax")
summary(model_iris)
model_iris %>% compile(
optimizer = "rmsprop",
loss = "categorical_crossentropy",
metrics = c("accuracy")
)
model_iris %>% fit(
x_train,
y_train,
epochs = 100,
batch_size = 5
)
model_iris %>% evaluate(x_train,y_train)
### with validation
history <- model_iris %>% fit(x_train,
y_train,
validation_split = 0.20,
epochs=300,
batch_size = 5,
shuffle = TRUE)
plot(history)
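# min-max scale each column to [0, 1] (the iris features are on different scales)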
normalizer <- function(x) {
min_x = apply(x,2,min)
max_x = apply(x,2,max)
num <- t(x) - min_x
denom <- max_x - min_x
normalized = t(num/denom)
return (normalized)
}
x_trainN = normalizer(x_train)
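apply(x_trainN, 2, range) # sanity check: every column should now span [0, 1]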
model_iris <- keras_model_sequential() %>%
layer_dense(units=20, activation = "relu", input_shape = ncol(x_train)) %>%
layer_dense(units = 10, activation = "relu") %>%
layer_dense(units = 3, activation = "softmax")
model_iris %>% compile(
optimizer = "rmsprop",
loss = "categorical_crossentropy",
metrics = c("accuracy")
)
history_N <- model_iris %>% fit(x_trainN,
y_train,
validation_split = 0.20,
epochs=150,
batch_size = 5,
shuffle = TRUE)
plot(history_N)
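# sketch: map predicted probabilities back to species names
# (assumes the normalized run above; predict() returns a 150 x 3 probability matrix)
preds <- model_iris %>% predict(x_trainN)
head(levels(iris$Species)[apply(preds, 1, which.max)])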
### IMDB
imdb <- dataset_imdb(num_words = 10000)
c(c(train_data, train_labels), c(test_data, test_labels)) %<-% imdb
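# multi-hot encode each review as a 10,000-dim 0/1 vector marking which words appear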
vectorize_sequences <- function(sequences, dimension = 10000) {
results <- matrix(0, nrow = length(sequences), ncol = dimension)
for (i in seq_along(sequences))
results[i, sequences[[i]]] <- 1
results
}
x_trainTEMP <- vectorize_sequences(train_data)
x_test <- vectorize_sequences(test_data)
y_trainTEMP <- as.numeric(train_labels)
y_test <- as.numeric(test_labels)
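# hold out the first 1,000 reviews for validation; train on the remainder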
val_idx = 1:1000
x_val = x_trainTEMP[val_idx, ]
y_val = y_trainTEMP[val_idx]
x_train = x_trainTEMP[-val_idx, ]
y_train = y_trainTEMP[-val_idx]
original_model <- keras_model_sequential() %>%
layer_dense(units = 16, activation = "relu", input_shape = c(10000)) %>%
layer_dense(units = 16, activation = "relu") %>%
layer_dense(units = 1, activation = "sigmoid")
original_model %>% compile(
optimizer = "rmsprop",
loss = "binary_crossentropy",
metrics = c("accuracy")
)
history_orig <- original_model %>% fit(x_train,
y_train,
epochs=20,
batch_size = 512,
validation_data = list(x_val, y_val))
plot(history_orig)
original_model %>% evaluate(x_test, y_test)
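### smaller-capacity model (4 units per hidden layer)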
smaller_model <- keras_model_sequential() %>%
layer_dense(units = 4, activation = "relu", input_shape = c(10000)) %>%
layer_dense(units = 4, activation = "relu") %>%
layer_dense(units = 1, activation = "sigmoid")
smaller_model %>% compile(
optimizer = "rmsprop",
loss = "binary_crossentropy",
metrics = c("accuracy")
)
history_small <- smaller_model %>% fit(x_train,
y_train,
epochs=20,
batch_size = 512,
validation_data = list(x_val, y_val))
plot(history_small)
smaller_model %>% evaluate(x_test, y_test)
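### bigger-capacity model (512 units per hidden layer)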
bigger_model <- keras_model_sequential() %>%
layer_dense(units = 512, activation = "relu", input_shape = c(10000)) %>%
layer_dense(units = 512, activation = "relu") %>%
layer_dense(units = 1, activation = "sigmoid")
bigger_model %>% compile(
optimizer = "rmsprop",
loss = "binary_crossentropy",
metrics = c("accuracy")
)
history_big <- bigger_model %>% fit(x_train,
y_train,
epochs=20,
batch_size = 512,
validation_data = list(x_val, y_val))
plot(history_big)
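# sketch: overlay the three validation-loss curves from the runs above
vl_orig <- history_orig$metrics$val_loss
vl_small <- history_small$metrics$val_loss
vl_big <- history_big$metrics$val_loss
plot(vl_orig, type = "l", ylim = range(vl_orig, vl_small, vl_big),
xlab = "epoch", ylab = "validation loss")
lines(vl_small, col = "blue")
lines(vl_big, col = "red")
legend("topleft", c("original", "smaller", "bigger"),
col = c("black", "blue", "red"), lty = 1)
### L2 weight regularization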
model_L2 <- keras_model_sequential() %>%
layer_dense(units = 16, kernel_regularizer = regularizer_l2(0.0001),
activation = "relu", input_shape = c(10000)) %>%
layer_dense(units = 16, kernel_regularizer = regularizer_l2(0.0001),
activation = "relu") %>%
layer_dense(units = 1, activation = "sigmoid")
model_L2 %>% compile(
optimizer = "rmsprop",
loss = "binary_crossentropy",
metrics = c("accuracy")
)
history_L2 <- model_L2 %>% fit(x_train,
y_train,
epochs=20,
batch_size = 512,
validation_data = list(x_val, y_val))
plot(history_L2)
model_L2 %>% evaluate(x_test, y_test)
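### dropout regularization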
model_DO <- keras_model_sequential() %>%
layer_dense(units = 16,activation = "relu", input_shape = c(10000)) %>%
layer_dropout(rate = 0.5) %>%
layer_dense(units = 16,activation = "relu") %>%
layer_dropout(rate = 0.5) %>%
layer_dense(units = 1, activation = "sigmoid")
model_DO %>% compile(
optimizer = "rmsprop",
loss = "binary_crossentropy",
metrics = c("accuracy")
)
history_DO <- model_DO %>% fit(x_train,
y_train,
epochs=20,
batch_size = 512,
validation_data = list(x_val, y_val))
plot(history_DO)
model_DO %>% evaluate(x_test, y_test)
### MNIST
mnist <- dataset_mnist()
c(c(tr_img, tr_lab),c(te_img, te_lab)) %<-% mnist
tr_img <- array_reshape(tr_img, c(60000,28,28,1))
tr_img = tr_img / 255
te_img <- array_reshape(te_img, c(10000,28,28,1))
te_img = te_img / 255
tr_lab = to_categorical(tr_lab)
te_lab = to_categorical(te_lab)
model <- keras_model_sequential() %>%
layer_conv_2d(filters = 32, kernel_size = c(3,3),
activation = "relu", input_shape = c(28,28,1)) %>%
layer_max_pooling_2d(pool_size = c(2,2)) %>%
layer_conv_2d(filters = 64, kernel_size = c(3,3), activation = "relu") %>%
layer_max_pooling_2d(pool_size = c(2,2)) %>%
layer_flatten() %>%
layer_dense(units = 64, activation = "relu") %>%
layer_dense(units = 10, activation = "softmax")
model %>% compile(
optimizer = "rmsprop",
loss = "categorical_crossentropy",
metrics = c("accuracy")
)
history <- model %>% fit(
tr_img, tr_lab,
epochs = 5,
validation_split = 0.20,
batch_size = 64
)
model %>% evaluate(te_img, te_lab)
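# sketch: predicted digits for the first five test images
preds <- model %>% predict(te_img[1:5, , , , drop = FALSE])
apply(preds, 1, which.max) - 1
### dogs & cats data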
original_dataset_dir <- "~/Downloads/dogs-vs-cats/train"
base_dir <- "~/Downloads/cats_and_dogs_small/"
dir.create(base_dir)
train_dir <- file.path(base_dir, "train")
dir.create(train_dir)
validation_dir <- file.path(base_dir, "validation")
dir.create(validation_dir)
test_dir <- file.path(base_dir, "test")
dir.create(test_dir)
train_cats_dir <- file.path(train_dir, "cats")
dir.create(train_cats_dir)
train_dogs_dir <- file.path(train_dir, "dogs")
dir.create(train_dogs_dir)
validation_cats_dir <- file.path(validation_dir, "cats")
dir.create(validation_cats_dir)
validation_dogs_dir <- file.path(validation_dir, "dogs")
dir.create(validation_dogs_dir)
test_cats_dir <- file.path(test_dir, "cats")
dir.create(test_cats_dir)
test_dogs_dir <- file.path(test_dir, "dogs")
dir.create(test_dogs_dir)
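# copy 2,000 cat and 2,000 dog images into 1,000/500/500 train/validation/test splits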
fnames <- paste0("cat.", 1:1000, ".jpg")
file.copy(file.path(original_dataset_dir, fnames),
file.path(train_cats_dir))
fnames <- paste0("cat.", 1001:1500, ".jpg")
file.copy(file.path(original_dataset_dir, fnames),
file.path(validation_cats_dir))
fnames <- paste0("cat.", 1501:2000, ".jpg")
file.copy(file.path(original_dataset_dir, fnames),
file.path(test_cats_dir))
fnames <- paste0("dog.", 1:1000, ".jpg")
file.copy(file.path(original_dataset_dir, fnames),
file.path(train_dogs_dir))
fnames <- paste0("dog.", 1001:1500, ".jpg")
file.copy(file.path(original_dataset_dir, fnames),
file.path(validation_dogs_dir))
fnames <- paste0("dog.", 1501:2000, ".jpg")
file.copy(file.path(original_dataset_dir, fnames),
file.path(test_dogs_dir))
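### small convnet trained from scratch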
model <- keras_model_sequential() %>%
layer_conv_2d(filters = 32, kernel_size = c(3, 3), activation = "relu",
input_shape = c(150, 150, 3)) %>%
layer_max_pooling_2d(pool_size = c(2, 2)) %>%
layer_conv_2d(filters = 64, kernel_size = c(3, 3), activation = "relu") %>%
layer_max_pooling_2d(pool_size = c(2, 2)) %>%
layer_conv_2d(filters = 128, kernel_size = c(3, 3), activation = "relu") %>%
layer_max_pooling_2d(pool_size = c(2, 2)) %>%
layer_conv_2d(filters = 128, kernel_size = c(3, 3), activation = "relu") %>%
layer_max_pooling_2d(pool_size = c(2, 2)) %>%
layer_flatten() %>%
layer_dense(units = 512, activation = "relu") %>%
layer_dense(units = 1, activation = "sigmoid")
summary(model)
model %>% compile(
loss = "binary_crossentropy",
optimizer = optimizer_rmsprop(learning_rate = 1e-4),
metrics = c("acc")
)
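# generators stream batches from disk, rescaling pixel values from [0, 255] to [0, 1]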
train_datagen <- image_data_generator(rescale = 1/255)
validation_datagen <- image_data_generator(rescale = 1/255)
train_generator <- flow_images_from_directory(
train_dir,
train_datagen,
target_size = c(150, 150),
batch_size = 20,
class_mode = "binary"
)
validation_generator <- flow_images_from_directory(
validation_dir,
validation_datagen,
target_size = c(150, 150),
batch_size = 20,
class_mode = "binary"
)
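# note: newer keras releases accept generators in fit()/evaluate() directly;
# fit_generator()/evaluate_generator() still work but are deprecated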
history <- model %>% fit_generator(
train_generator,
steps_per_epoch = 100,
epochs = 10,
validation_data = validation_generator,
validation_steps = 50
)
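### same convnet with dropout, trained on augmented images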
model <- keras_model_sequential() %>%
layer_conv_2d(filters = 32, kernel_size = c(3, 3), activation = "relu",
input_shape = c(150, 150, 3)) %>%
layer_max_pooling_2d(pool_size = c(2, 2)) %>%
layer_conv_2d(filters = 64, kernel_size = c(3, 3), activation = "relu") %>%
layer_max_pooling_2d(pool_size = c(2, 2)) %>%
layer_conv_2d(filters = 128, kernel_size = c(3, 3), activation = "relu") %>%
layer_max_pooling_2d(pool_size = c(2, 2)) %>%
layer_conv_2d(filters = 128, kernel_size = c(3, 3), activation = "relu") %>%
layer_max_pooling_2d(pool_size = c(2, 2)) %>%
layer_flatten() %>%
layer_dropout(rate = 0.5) %>%
layer_dense(units = 512, activation = "relu") %>%
layer_dense(units = 1, activation = "sigmoid")
model %>% compile(
loss = "binary_crossentropy",
optimizer = optimizer_rmsprop(learning_rate = 1e-4),
metrics = c("acc")
)
datagen <- image_data_generator(
rescale = 1/255,
rotation_range = 40,
width_shift_range = 0.2,
height_shift_range = 0.2,
shear_range = 0.2,
zoom_range = 0.2,
horizontal_flip = TRUE
)
test_datagen <- image_data_generator(rescale = 1/255)
train_generator <- flow_images_from_directory(
train_dir,
datagen,
target_size = c(150, 150),
batch_size = 32,
class_mode = "binary"
)
validation_generator <- flow_images_from_directory(
validation_dir,
test_datagen,
target_size = c(150, 150),
batch_size = 32,
class_mode = "binary"
)
history <- model %>% fit_generator(
train_generator,
steps_per_epoch = 100,
epochs = 10,
validation_data = validation_generator,
validation_steps = 50
)
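# augmentation settings without rescaling; presumably for previewing augmented
# images (this generator is not used by the training runs below)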
datagen <- image_data_generator(
rotation_range = 40,
width_shift_range = 0.2,
height_shift_range = 0.2,
shear_range = 0.2,
zoom_range = 0.2,
horizontal_flip = FALSE,
fill_mode = "nearest"
)
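### feature extraction with a pretrained VGG16 convolutional base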
conv_base <- application_vgg16(
weights = "imagenet",
include_top = FALSE,
input_shape = c(150, 150, 3)
)
summary(conv_base)
model <- keras_model_sequential() %>%
conv_base %>%
layer_flatten() %>%
layer_dense(units = 256, activation = "relu") %>%
layer_dense(units = 1, activation = "sigmoid")
# freeze the convolutional base so only the new classifier layers are trained
freeze_weights(conv_base)
train_datagen = image_data_generator(
rescale = 1/255,
rotation_range = 40,
width_shift_range = 0.2,
height_shift_range = 0.2,
shear_range = 0.2,
zoom_range = 0.2,
horizontal_flip = TRUE,
fill_mode = "nearest"
)
test_datagen <- image_data_generator(rescale = 1/255)
train_generator <- flow_images_from_directory(
train_dir,
train_datagen,
target_size = c(150, 150),
batch_size = 20,
class_mode = "binary"
)
validation_generator <- flow_images_from_directory(
validation_dir,
test_datagen,
target_size = c(150, 150),
batch_size = 20,
class_mode = "binary"
)
model %>% compile(
loss = "binary_crossentropy",
optimizer = optimizer_rmsprop(learning_rate = 2e-5),
metrics = c("accuracy")
)
history <- model %>% fit_generator(
train_generator,
steps_per_epoch = 100,
epochs = 3,
validation_data = validation_generator,
validation_steps = 50
)
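### fine-tuning: unfreeze the top convolutional block and retrain at a low rate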
unfreeze_weights(conv_base, from = "block5_conv1")
model %>% compile(
loss = "binary_crossentropy",
optimizer = optimizer_rmsprop(learning_rate = 1e-5),
metrics = c("accuracy")
)
history <- model %>% fit_generator(
train_generator,
steps_per_epoch = 100,
epochs = 10,
validation_data = validation_generator,
validation_steps = 50
)
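### final evaluation on the held-out test set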
test_generator <- flow_images_from_directory(
test_dir,
test_datagen,
target_size = c(150, 150),
batch_size = 20,
class_mode = "binary"
)
model %>% evaluate_generator(test_generator, steps = 50)
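# sketch: score one held-out image by hand
# (flow_images_from_directory assigns classes alphabetically, so cats = 0, dogs = 1)
img <- image_load(file.path(test_cats_dir, "cat.1501.jpg"), target_size = c(150, 150))
x <- image_to_array(img) / 255
x <- array_reshape(x, c(1, 150, 150, 3))
model %>% predict(x) # sigmoid output ~ probability the image is a dog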