Disclaimer

The code in this course log consists mainly of examples I present in lectures, seminars, and practical sessions for undergraduate and master's students, and as a rule it is written to be as intuitive and easy to follow as possible. Some examples may be quite inefficient, and some were written rather hastily. Also, since I normally use MATLAB, some odd habits may show through. Please keep this in mind when using or referring to these examples.
Materials for the graduation thesis: [2015 PDF] [Word template] [LaTeX template] [cover page] [how to write a report] [citations and references]
A Brief Guide to R (a guide to R commands, with examples) is available here.

2019 Fundamentals of Data Analysis A: DAA02

x<-matrix(1:8, nrow=2)
x<-matrix(1:8, nrow=2,byrow=T)
data01<-data.frame(score = c(2,4,3,4),     
                   dose = c(rep(10,2),rep(100,2)),  
                   condition = rep(c('exp','control'),2))

dat01<-read.csv("http://www.matsuka.info/data_folder/temp_data01.txt",  
                header=T)
dat02<-read.csv("http://www.matsuka.info/data_folder/temp_data02.txt",
                header=T, row.names=1)
dat03<-read.table("http://www.matsuka.info/data_folder/temp_data03.txt",
                  header=T, row.names=4)
dat<-read.csv("http://www.matsuka.info/data_folder/datWA01.txt",   
              header=T);
mean(dat$shoesize[dat$gender == "M"])
mean(dat$shoesize[dat$gender == "F"])
mean(dat$shoesize[dat$h > 180])
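For reference, the same kind of group means can be obtained in a single call with tapply(); a small convenience example using the dat loaded above:

tapply(dat$shoesize, dat$gender, mean)   # mean shoe size by gender
tapply(dat$h, dat$gender, mean)          # mean height by gender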

v1 = seq(-3,3,0.1)
v2 = v1^2
plot(x = v1, y = v2)
plot(v1, v2, col = 'red')

plot(v1, v2, main = "THIS IS THE TITLE", cex.lab = 1.5,
     xlab = "Label for X-axis",ylab = "Label for Y-axis")

plot(v1, v2, col = "blue", type = "o", lty = 2, pch = 19, 
     cex.lab = 1.5, lwd = 3, main = "Y=X*X", xlab = "X", 
     ylab="X*X", xlim=c(-3.5,3.5), ylim=c(-0.5, 10))

dat<- read.csv("http://www.matsuka.info/data_folder/datWA01.txt")
hist(dat$h)
hist(dat$h, breaks = 20, main = "Histogram of Height", 
     xlab = "Height", col = 'blue', xlim = c(140, 190))

dens<-density(dat$h); 
hist(dat$h, main = "Histogram of Height", xlab = "Height",  
     xlim = c(140,190), probability = T)
lines(dens, lwd = 2, col = 'red', lty = 2)

plot(v1, v2, col = "blue", type = "l", 
     pch = 19, cex.lab = 1.5, lwd = 3, 
     xlab = "X", ylab="f(X)", 
     xlim=c(-3.5,3.5), ylim=c(-0.5, 10))
lines(v1, v1^3, col='red',lwd = 3)
legend("bottomright", c("x^2","x^3"), col=c('blue','red'), lwd=2)

boxplot(dat$h ~ dat$gender,
        main="Distribution of Height by Gender", 
        ylab="Gender", xlab="Height", col=c('blue','cyan'),
        ylim=c(140,190), horizontal=T)

interaction.plot(dat$gender,
                 dat$affil,
                 dat$h, 
                 pch=c(20,20), 
                 col=c("skyblue","orange"), 
                 xlab="gender", ylab="height", 
                 lwd=3,type='b',cex=2,
                 trace.label="Affiliation")

hist(dat[dat$gender=='F',]$h, 
    main="Dist. of Height for Female Participants", 
    xlab="Height", xlim=c(140,190), probability=T)
dens.F = density(dat[dat$gender=='F',]$h)
lines(dens.F, col='blue',lwd=2) 

hist(dat[dat$gender=='M',]$h, 
     main="Dist. of Height for Male Participants", 
     xlab="Height", xlim=c(140,190), 
     probability=T, ylim=c(0,0.08))
dens.M = density(dat[dat$gender=='M',]$h)
lines(dens.M, col='green', lwd=2)

plot(dat$shoesize, dat$h, 
     main="Relationship b/w shoesize and height",
     xlab = 'shoesize', ylab = 'height', 
     pch=19, col="red")
txt = paste("r =",round(cor(dat$shoesize,dat$h), 4))
text(22, 175, txt, cex = 1.5)

abline(h = mean(dat$h), col='blue');
abline(v = mean(dat$shoesize), col='green')

plot(dat[dat$gender=='F',]$shoesize, dat[dat$gender=='F',]$h,  
     main="Relationship b/w shoesize and height", xlab='shoesize', ylab='height', 
     cex.lab=1.5, pch=19, col='blue', xlim=c(20,29), ylim=c(140,190))
lines(dat[dat$gender=='M',]$shoesize,dat[dat$gender=='M',]$h, 
      type = 'p', pch = 15, col = 'green')
legend("topleft", c('Female','Male'), pch =c(19,15), 
       col = c('blue','green'), cex = 1.5)

2019 Fundamentals of Data Analysis A: DAA01

dat<-data.frame(score=c(78,70,66,76,78,76,88, 76, 76,72,60,72,70,72,84,70),
                cond=c(rep('low',8), rep('high',8)))
boxplot(score~cond, col = c("skyblue",'skyblue4'),data=dat)
summary(aov(score ~ cond, data = dat))
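With only two conditions, the one-way ANOVA above is equivalent to a two-sample t test assuming equal variances (F = t^2); a quick cross-check:

t.test(score ~ cond, data = dat, var.equal = TRUE)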


dat <- read.csv("http://www.matsuka.info/data_folder/hwsk8-17-6.csv")
plot(ani~otouto, data=dat,pch=20,cex=3,xlab ="score of Otouto", ylab = "score of Ani")
dat.lm <- lm(ani~otouto, data=dat)
abline(dat.lm, col = 'red',lwd = 2.5)

dat<-read.csv("http://www.matsuka.info/data_folder/datWA01.txt")
dat.glm <- glm(gender~shoesize,family="binomial",data=dat)
plot(as.numeric(gender)-1~shoesize,data=dat,pch=20,cex=3,ylab="P(Male)")
cf = coef(dat.glm)
temp.x = seq(20,30,0.1)
y = 1/(1+exp(-1*(cf[1]+temp.x*cf[2])))
lines(temp.x,y,col='cyan',lwd=2)
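The hand-computed logistic curve above can be cross-checked with predict(); in the sketch below the dashed orange curve should coincide with the cyan one:

y.hat = predict(dat.glm, newdata = data.frame(shoesize = temp.x), type = "response")
lines(temp.x, y.hat, col = 'orange', lwd = 2, lty = 2)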

dat <- read.table("http://www.matsuka.info/data_folder/tdkPCA01.txt")
dat.pca <- princomp(dat)
biplot(dat.pca)
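To see how much variance each principal component accounts for, summary() and a scree plot can be added, for example:

summary(dat.pca)                    # standard deviations and proportion of variance
screeplot(dat.pca, type = "lines")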

dat<-read.csv("http://matsuka.info/data_folder/tdkClust.csv", header=TRUE, row.names=1)
dat.cluster=hclust(dist(dat),method="average")
plot(dat.cluster,cex=1.5)
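If a fixed number of clusters is wanted, the tree can be cut with cutree(); a sketch assuming three clusters (the number here is arbitrary):

clust.id = cutree(dat.cluster, k = 3)             # cluster membership for each row
rect.hclust(dat.cluster, k = 3, border = "red")   # mark the clusters on the dendrogram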

data01<-data.frame(score = c(2,4,3,4),
                   dose = c(rep(10,2),rep(100,2)),
                   condition = rep(c('exp','control'),2))

dat01<-read.csv("http://www.matsuka.info/data_folder/temp_data01.txt",
                header=T)
dat02<-read.csv("http://www.matsuka.info/data_folder/temp_data02.txt",
                header=T, row.names=1)
dat03<-read.table("http://www.matsuka.info/data_folder/temp_data03.txt",
                  header=T, row.names=4)
dat<-read.csv("http://www.matsuka.info/data_folder/datWA01.txt",
              header=T);

Cognitive Information Analysis Seminar B

init.RNN <- function(n.uniq,size.hidden){
  W.h = matrix(rnorm(size.hidden*size.hidden), nrow = size.hidden)*0.01
  W.x = matrix(rnorm(n.uniq*size.hidden), nrow = n.uniq)*0.01
  b.h = matrix(rnorm(size.hidden), nrow = 1)*0.01
  W.o = matrix(rnorm(n.uniq*size.hidden),nrow = size.hidden)*0.01
  b.o = matrix(rnorm(n.uniq), nrow = 1)*0.01
  return(list(W.h = W.h, W.x= W.x, b.h = b.h, W.o = W.o, b.o = b.o))
}

affine.forwd <- function(x, W, b){
  return(x%*%W + matrix(1, nrow = nrow(x), ncol = 1)%*%b)
}

affine.bckwd <- function(x, W, b, dout){
  dx = dout%*%t(W)
  dW = t(x)%*%dout
  db = colSums(dout)
  return(list(dx = dx, dW = dW, db = db))
}


softmax.forwd <- function(x, target){
  max.x = apply(x,1,max)
  C = ncol(x)
  x = x - max.x%*%matrix(1,nrow=1,ncol=C)
  y = exp(x)/rowSums(exp(x))
  delta = 1e-7;
  R = nrow(as.matrix(y))
  return(-sum(target*log(y + delta))/R)
}

softmax.pred <- function(x, target){
  max.x = apply(x,1,max)
  C = ncol(x)
  x = x - max.x%*%matrix(1,nrow=1,ncol=C)
  y = exp(x)/rowSums(exp(x))
  return(y)
}

softmax.bckwd <- function(x, target,  dout = 1){
  max.x = apply(x, 1, max)
  R = nrow(x)
  C = ncol(x)
  x = x - max.x%*%matrix(1,nrow=1,ncol=C)
  y = exp(x)/rowSums(exp(x))
  return((y-target)/R)
}
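A quick finite-difference check that softmax.bckwd() returns the gradient of the loss computed by softmax.forwd(); the small random input and one-hot targets below are made up for illustration only:

x.chk = matrix(rnorm(3*4), nrow = 3)
t.chk = diag(4)[sample(1:4, 3, replace = TRUE), ]   # one-hot targets
grad.an = softmax.bckwd(x.chk, t.chk)               # analytic gradient
grad.num = matrix(0, 3, 4); eps = 1e-5
for (i in 1:3){
  for (j in 1:4){
    xp = x.chk; xp[i, j] = xp[i, j] + eps
    xm = x.chk; xm[i, j] = xm[i, j] - eps
    grad.num[i, j] = (softmax.forwd(xp, t.chk) - softmax.forwd(xm, t.chk))/(2*eps)
  }
}
max(abs(grad.an - grad.num))   # should be very small (around 1e-6 or less)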

RNN.forward <- function(h.prev, x, network){
  b.size = nrow(x)
  h = h.prev%*%network$W.h + x%*%network$W.x
  hb = h+matrix(1,nrow=b.size,ncol=1)%*%network$b.h
  h.next = tanh(hb)
  return(h.next = h.next)
}

RNN.backward <- function(dh.next, network, x, h.next, h.prev){
  dt = dh.next * (1- h.next^2)
  db = colSums(dt)
  dW.h = t(h.prev)%*%dt
  dh.prev = dt%*%t(network$W.h)
  dW.x = t(x)%*%dt
  dx = dt%*%t(network$W.x)
  return(list(db = db, dW.h = dW.h, dh.prev = dh.prev, dW.x = dW.x, dx=dx))
}

txt = "You say goodbye and I say hello.  you say goodbay and I say hello"

txt2corpus <- function(txt){
  txt = tolower(txt)
  txt = gsub('[.]', ' . sos',txt)
  words = unlist(strsplit(c('sos',txt), " "))
  uniq.words = unique(words)
  n.uniq = length(uniq.words)
  n.words = length(words)
  corpus = rep(0,n.words)
  corpus = match(words,uniq.words)
  return(corpus)
}

corp2contxt1SRNN = function(corpus){
  len.corp = length(corpus)
  # creating target matrix
  idxT = cbind(1:(len.corp-1), corpus[2:len.corp])
  targ1S = matrix(0,nrow=len.corp-1,ncol=length(unique(corpus)))
  targ1S[idxT]=1
  # creating context matrices
  idxC = cbind(1:(len.corp-1),corpus[1:(len.corp-1)])
  contxt = matrix(0,nrow=len.corp-1,ncol=length(unique(corpus)))
  contxt[idxC]=1
  return(list(y=targ1S,x=contxt))
}

corp = txt2corpus(txt)
dat = corp2contxt1SRNN(corp)

size.hidden = 7
network <- init.RNN(8,size.hidden)

n.rep = 100000; lambda = 0.01; batch.size = 3; time = 3
corp.len = nrow(dat$y)   # number of input-target pairs in the corpus
h.prev = array(0, c(batch.size, size.hidden, time))
h.next = array(0, c(batch.size, size.hidden, (time+1)))
loss = rep(0, n.rep)
for (i.rep in 1:n.rep){
  for (i.t in 1:time){
    idx = seq(i.t, corp.len, time)
    h.next[, , i.t] = RNN.forward(h.prev[, , i.t], dat$x[idx,], network)
    if (i.t < time){
      h.prev[, , (i.t+1)] = h.next[, , i.t]
    }
  }
  dWHs = matrix(0,nrow=nrow(network$W.h),ncol=ncol(network$W.h))
  dWXs = matrix(0,nrow=nrow(network$W.x),ncol=ncol(network$W.x))
  dBHs = matrix(0,nrow=nrow(network$b.h),ncol=ncol(network$b.h))
  dWOs = matrix(0,nrow=nrow(network$W.o),ncol=ncol(network$W.o))
  dBOs = matrix(0,nrow=nrow(network$b.o),ncol=ncol(network$b.o))
  d.prev = matrix(0,nrow=batch.size,ncol=size.hidden)
  L = 0
  for (i.t in time:1){
    idx = seq(i.t, corp.len, time)
    O = affine.forwd(h.next[,,i.t], network$W.o, network$b.o)
    L = L + softmax.forwd(O, dat$y[idx,])
    ds = softmax.bckwd(O, dat$y[idx,], 1)
    dW.o = affine.bckwd(h.next[,,i.t], network$W.o, network$b.o, ds)
    dWOs = dWOs + dW.o$dW
    dBOs = dBOs + dW.o$db
    RNN.d = RNN.backward(dW.o$dx+d.prev, network, dat$x[idx,],h.next[,,(i.t+1)],h.prev[,,i.t])
    dWHs = dWHs + RNN.d$dW.h
    dWXs = dWXs + RNN.d$dW.x
    dBHs = dBHs + RNN.d$db
    d.prev = RNN.d$dh.prev
  }
  loss[i.rep] = L
  network$W.o = network$W.o - lambda*dWOs
  network$b.o = network$b.o - lambda*dBOs
  network$W.h = network$W.h - lambda*dWHs
  network$W.x = network$W.x - lambda*dWXs
  network$b.h = network$b.h - lambda*dBHs
}
plot(loss,type='l')
par(mfrow=c(9,1))
for (i.t in 1:time){
  idx = seq(i.t, corp.len, time)
  O = affine.forwd(h.next[,,i.t], network$W.o, network$b.o)
  print(softmax.pred(O, dat$y[idx,]))
  for (i in 1:3){
    barplot(softmax.pred(O, dat$y[idx,])[i,])
  }
}

Cognitive Information Analysis Seminar: RNN

txt = "You say goodbye and I say hello.  you say goodbay and I say hello"

txt2corpus <- function(txt){
  txt = tolower(txt)
  txt = gsub('[.]', ' . sos',txt)
  words = unlist(strsplit(c('sos',txt), " "))
  uniq.words = unique(words)
  n.uniq = length(uniq.words)
  n.words = length(words)
  corpus = rep(0,n.words)
  corpus = match(words,uniq.words)
  return(corpus)
}

corp = txt2corpus(txt)

corp2contxt1SRNN = function(corpus){
  len.corp = length(corpus)
  # creating target matrix
  idxT = cbind(1:(len.corp-1), corpus[2:len.corp])
  targ1S = matrix(0,nrow=len.corp-1,ncol=length(unique(corpus)))
  targ1S[idxT]=1
  # creating context matrices
  idxC = cbind(1:(len.corp-1),corpus[1:(len.corp-1)])
  contxt = matrix(0,nrow=len.corp-1,ncol=length(unique(corpus)))
  contxt[idxC]=1
  return(list(y=targ1S,x=contxt))
}

init.RNN <- function(n.uniq,size.hidden){
  W.h = matrix(rnorm(size.hidden*size.hidden),nrow=size.hidden)*0.01
  W.x = matrix(rnorm(n.uniq*size.hidden),nrow=n.uniq)*0.01
  b = matrix(rnorm(size.hidden),nrow=1)*0.01
  return(list(W.h = W.h, W.x= W.x, b = b))
}
MatMult.forwd <- function(x, W){
  return(x%*%W)
}

MatMult.bckwd <- function(x, W, dout){
  dx = dout%*%t(W)
  dW = t(x)%*%dout
  return(list(dx = dx, dW = dW))
}

affine.forwd <- function(x, W, b){
  return(x%*%W + matrix(1, nrow = nrow(x), ncol = 1)%*%b)
}

affine.bckwd <- function(x, W, b, dout){
  dx = dout%*%t(W)
  dW = t(x)%*%dout
  db = colSums(dout)
  return(list(dx = dx, dW = dW, db = db))
}


softmax.forwd <- function(x, target){
  max.x = apply(x,1,max)
  C = ncol(x)
  x = x - max.x%*%matrix(1,nrow=1,ncol=C)
  y = exp(x)/rowSums(exp(x))
  delta = 1e-7;
  R = nrow(as.matrix(y))
  return(-sum(target*log(y + delta))/R)
}

softmax.pred <- function(x, target){
  max.x = apply(x,1,max)
  C = ncol(x)
  x = x - max.x%*%matrix(1,nrow=1,ncol=C)
  y = exp(x)/rowSums(exp(x))
  return(y)
}

softmax.bckwd <- function(x, target,  dout = 1){
  max.x = apply(x, 1, max)
  R = nrow(x)
  C = ncol(x)
  x = x - max.x%*%matrix(1,nrow=1,ncol=C)
  y = exp(x)/rowSums(exp(x))
  return((y-target)/R)
}


RNN.forward <- function(h.prev, x, network){
  b.size = nrow(x)
  h = h.prev%*%network$W.h + x%*%network$W.x
  hb = h+matrix(1,nrow=b.size,ncol=1)%*%network$b
  h.next = tanh(hb)
  return(h.next = h.next)
}

RNN.backward <- function(dh.next, network, x, h.next, h.prev){
  dt = dh.next * (1- h.next^2)
  db = colSums(dt)
  dW.h = t(h.prev)%*%dt
  dh.prev = dt%*%t(network$W.h)
  dW.x = t(x)%*%dt
  dx = dt%*%t(network$W.x)
  return(list(db = db, dW.h = dW.h, dh.prev = dh.prev, dW.x = dW.x, dx=dx))
}


txt = "You say goodbye and I say hello."

batch.size = 3
time = 3

size.hidden = 7
corp = txt2corpus(txt)
dat<-corp2contxt1SRNN(corp)
network <- init.RNN(ncol(dat$y), size.hidden)
corp.len = nrow(dat$y)
h.prev =array(0, c(batch.size, size.hidden, time))
h.next = array(0, c(batch.size, size.hidden, (time+1)))
W.out = matrix(rnorm(size.hidden * ncol(dat$y)), nrow = size.hidden)
b.out = matrix(rnorm(ncol(dat$y)), nrow = 1)
n.rep = 100000;lambda = 0.25;

loss = rep(0,n.rep)
for (i.rep in 1:n.rep){
  for (i.t in 1:time){
    idx = seq(i.t, corp.len, time)
    h.next[, , i.t] = RNN.forward(h.prev[, , i.t], dat$x[idx,], network)
    if (i.t < time){
      h.prev[, , (i.t+1)] = h.next[, , i.t]
    }
  }
  dWHs = matrix(0,nrow=nrow(network$W.h),ncol=ncol(network$W.h))
  dWXs = matrix(0,nrow=nrow(network$W.x),ncol=ncol(network$W.x))
  dBs = matrix(0,nrow=nrow(network$b),ncol=ncol(network$b))
  dWOs = matrix(0,nrow=nrow(W.out),ncol=ncol(W.out))
  dBOs = matrix(0,nrow=nrow(b.out),ncol=ncol(b.out))
  d.prev = matrix(0,nrow=batch.size,ncol=size.hidden)
  L = 0
  for (i.t in time:1){
    idx = seq(i.t, corp.len, time)
    O = affine.forwd(h.next[,,i.t], W.out, b.out)
    L = L + softmax.forwd(O, dat$y[idx,])
    ds = softmax.bckwd(O, dat$y[idx,], 1)
    dW.o = affine.bckwd(h.next[,,i.t], W.out, b.out, ds)
    dWOs = dWOs + dW.o$dW
    dBOs = dBOs + dW.o$db
    RNN.d = RNN.backward(dW.o$dx+d.prev, network, dat$x[idx,],h.next[,,(i.t+1)],h.prev[,,i.t])
    #RNN.d = RNN.backward(dW.o$dx, network, dat$x[idx,],h.next[,,i.t],h.prev[,,i.t])
    dWHs = dWHs + RNN.d$dW.h
    dWXs = dWXs + RNN.d$dW.x
    dBs = dBs + RNN.d$db
    d.prev = RNN.d$dh.prev
  }
  loss[i.rep] = L
  W.out = W.out - lambda*dWOs
  b.out = b.out - lambda*dBOs
  network$W.h = network$W.h - lambda*dWHs
  network$W.x = network$W.x - lambda*dWXs
  network$b = network$b - lambda*dBs
}
plot(loss, type='l')
for (i.t in 1:time){
  idx = seq(i.t, corp.len, time)
  O = affine.forwd(h.next[,,i.t], W.out, b.out)
  print(softmax.pred(O, dat$y[idx,]))
}

Cognitive Information Analysis Seminar: DL-2 Ch. 5

txt = "You say goodbye and I say hello."

txt2corpus <- function(txt){
  txt = tolower(txt)
  txt = gsub('[.]', ' . sos',txt)
  words = unlist(strsplit(c('sos',txt), " "))
  uniq.words = unique(words)
  n.uniq = length(uniq.words)
  n.words = length(words)
  corpus = rep(0,n.words)
  corpus = match(words,uniq.words)
  return(corpus)
}

corp = txt2corpus(txt)

corp2contxt1SRNN = function(corpus){
  len.corp = length(corpus)
  # creating target matrix
  idxT = cbind(1:(len.corp-1), corpus[2:len.corp])
  targ1S = matrix(0,nrow=len.corp-1,ncol=length(unique(corpus)))
  targ1S[idxT]=1
  # creating context matrices
  idxC = cbind(1:(len.corp-1),corpus[1:(len.corp-1)])
  contxt = matrix(0,nrow=len.corp-1,ncol=length(unique(corpus)))
  contxt[idxC]=1
  return(list(y=targ1S,x=contxt))
}
dat<-corp2contxt1SRNN(corp)

init.RNN <- function(n.uniq,size.hidden){
  W.h = matrix(rnorm(size.hidden*size.hidden),nrow=size.hidden)*0.01
  W.x = matrix(rnorm(n.uniq*size.hidden),nrow=n.uniq)*0.01
  b = matrix(rnorm(size.hidden),nrow=1)*0.01
  return(list(W.h = W.h, W.x= W.x, b = b))
}

RNN.forward <- function(h.prev, x, network){
  b.size = nrow(x)
  h = h.prev%*%network$W.h + x%*%network$W.x
  hb = h+matrix(1,nrow=b.size,ncol=1)%*%network$b
  h.next = tanh(hb)
  return(h.next = h.next)
}

RNN.backward <- function(dh.next, network, x, h.next, h.prev){
  dt = dh.next * (1- h.next^2)
  db = colSums(dt)
  dW.h = t(h.prev)%*%dt
  dh.prev = dt%*%t(network$W.h)
  dW.x = t(x)%*%dt
  dx = dt%*%t(network$W.x)
  return(list(db = db, dW.h = dW.h, dh.prev = dh.prev, dW.x = dW.x, dx=dx))
}

affine.forwd <- function(x, W, b){
  return(x%*%W + matrix(1, nrow = nrow(x), ncol = 1)%*%b)
}

affine.bckwd <- function(x, W, b, dout){
  dx = dout%*%t(W)
  dW = t(x)%*%dout
  db = colSums(dout)
  return(list(dx = dx, dW = dW, db = db))
}

softmax.forwd <- function(x, target){
  max.x = apply(x,1,max)
  C = ncol(x)
  x = x - max.x%*%matrix(1,nrow=1,ncol=C)
  y = exp(x)/rowSums(exp(x))
  delta = 1e-7;
  R = nrow(as.matrix(y))
  return(-sum(target*log(y + delta))/R)
}

softmax.bckwd <- function(x, target,  dout = 1){
  max.x = apply(x, 1, max)
  R = nrow(x)
  C = ncol(x)
  x = x - max.x%*%matrix(1,nrow=1,ncol=C)
  y = exp(x)/rowSums(exp(x))
  return((y-target)/R)
}

# checking modules
batch.size = 3; time = 3

corp.len = nrow(dat$y)
size.hidden = 5
network <- init.RNN(ncol(dat$y), size.hidden)   # initialize the RNN weights before the check
h.prev = array(0, c(batch.size, size.hidden, time))
h.next = array(0, c(batch.size, size.hidden, time))

for (i.t in 1:time){
  idx = seq(i.t, corp.len, batch.size)
  h.next[, , i.t] = RNN.forward(h.prev[, , i.t], dat$x[idx,], network)
  if (i.t < time){
    h.prev[, , (i.t+1)] = h.next[, , i.t]
  }
}

W.out = matrix(rnorm(size.hidden * 8), nrow = size.hidden)
b.out = matrix(rnorm(8), nrow = 1)
O = affine.forwd(h.next[,,3], W.out, b.out)
L = softmax.forwd(O, dat$y[c(3,6,9),])
ds = softmax.bckwd(O, dat$y[c(3,6,9),], 1)
dW.o = affine.bckwd(h.next[,,3], W.out, b.out, ds)

Cognitive Information Analysis Seminar: DL-V2 Ch. 4

# p136
embed.forwd <- function(W, idx){
  return(W[idx,])
}

h1 <- embed.forwd(W,idx1)
h2 <- embed.forwd(W,idx2)
h = (h1 + h2)/2

embed.dot.forwd <- function(W, h, idx){
  return(O=rowSums(W[idx,]*h))
}

s = embed.dot.forwd(network$W.out,h,batch.y)

sigmoid.forwd <- function(O){
  return(1/(1+exp(-O)))
}

sigmoidWL.forwd <- function(O,target){
  y = 1/(1+exp(-O)) 
  loss=-sum(target*log(y)+(1-target)*log(1 - y))
  return(loss)
}

sigmoidWL.backwd <- function(O,target,dout=1){
  y = 1/(1+exp(-O)) 
  dW = (y - target)*dout
  return(dW)
}

embed.dot.backwd <- function(W,h,idx,dout){
  dh = dout * W[idx,]
  dW = dout * h
  return(list(dh = dh, dW = dW))
}

embed.backwd <- function(W, idx, dout){
  dW = matrix(0,nrow(W),ncol(W))
  for (i.idx in 1:length(idx)){
    dW[idx[i.idx], ] = dW[idx[i.idx], ] + dout[i.idx, ]
  }
  return(dW)
}
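embed.forwd() is just a row lookup, equivalent to multiplying a one-hot matrix by W; a small sanity check with made-up W.chk and idx.chk:

W.chk = matrix(rnorm(7*3), nrow = 7)
idx.chk = c(2, 5)
onehot = matrix(0, nrow = 2, ncol = 7)
onehot[cbind(1:2, idx.chk)] = 1
all.equal(embed.forwd(W.chk, idx.chk), onehot %*% W.chk)   # should be TRUE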

# 158

txt2corpus <- function(txt){
  txt = tolower(txt)
  txt = gsub('[.]', ' .',txt)
  words = unlist(strsplit(txt, " "))
  uniq.words = unique(words)
  n.uniq = length(uniq.words)
  n.words = length(words)
  corpus = rep(0,n.words)
  corpus = match(words,uniq.words)
  return(corpus)
}

corp2contxt <- function(corpus){
  len.corp = length(corpus)
  target = corpus[2:(len.corp-1)]
  col1 = corpus[1:(len.corp-2)]
  col2 = corpus[3:len.corp]
  context = cbind(col1,col2)
  return(list(context=context,target = target))
}

init.W2V <- function(n.uniq,size.hidden){
  W.in = matrix(rnorm(n.uniq*size.hidden),nrow=n.uniq)*0.01
  W.out = matrix(rnorm(n.uniq*size.hidden),nrow=n.uniq)*0.01
  return(list(W.in = W.in, W.out= W.out))
}


### running example
network<-init.W2V(8,5)
n.batch = 3;
txt = "You say goodbye and I say hello.."
corp = txt2corpus(txt)
corp = c(8,8,corp)
dat<-corp2contxt(corp)
n.data =  length(dat$target)

n.iter = 3000;
lambda=0.1;
loss = rep(0, n.iter)
for (i.iter in 1:n.iter){
  samp.idx = sample(1:n.data,n.batch)
  batch.c1 = dat$context[samp.idx,1]
  batch.c2 = dat$context[samp.idx,2]
  batch.y = dat$target[samp.idx]
  h1 <- embed.forwd(network$W.in,batch.c1)
  h2 <- embed.forwd(network$W.in,batch.c2)
  h = 0.5 * (h1 + h2)
  # positive only
  s = embed.dot.forwd(network$W.out,h,batch.y)
  z = sigmoidWL.forwd(s,1)
  loss[i.iter] = z
  ds = sigmoidWL.backwd(s, 1, 1)
  dE = embed.dot.backwd(network$W.out,h, batch.y, ds)
  dh = dE$dh*0.5
  dIn1 = embed.backwd(network$W.in,dat$context[samp.idx,1], dh)
  dIn2 = embed.backwd(network$W.in,dat$context[samp.idx,2], dh)
  network$W.out[batch.y,] = network$W.out[batch.y,] - lambda*dE$dW
  network$W.in = network$W.in - lambda*dIn1
  network$W.in = network$W.in - lambda*dIn2
}
plot(loss, type='l')

samp.idx = c(2:6,8,9,1)
batch.c1 = dat$context[samp.idx,1]
batch.c2 = dat$context[samp.idx,2]
batch.y = dat$target[samp.idx]
h1 <- embed.forwd(network$W.in,batch.c1)
h2 <- embed.forwd(network$W.in,batch.c2)
h = 0.5 * (h1 + h2)
s = MatMult.forwd(h,t(network$W.out))
z = sigmoid.forwd(s)
res=cbind(z,batch.y)
par(mfrow=c(8,1))
for (i in 1:8){
  col.spec = rep("black",8)
  col.spec[i]="orange"
  barplot(res[i, 1:8],col=col.spec)
}
# with negative sampling
# p158

unigram.sampler <- function(corpus, target, power, sample.size){
  neg.corpus <- corpus[-which(corpus == target)]
  uniq.word = sort(unique(neg.corpus))   # sorted so the order matches table(neg.corpus) below
  n.word = length(neg.corpus)
  prob = (as.vector(table(neg.corpus))/n.word)^power
  prob = prob/sum(prob)
  sample.idx = sample(uniq.word, sample.size, prob = prob)
  return(sort(sample.idx))
}

get.neg.sample <- function(corpus, target, power, sample.size){
  sample.id = matrix(0, nrow = length(target), ncol = sample.size)
  for (i.targ in 1:length(target)){
    sample.id[i.targ,] = unigram.sampler(corpus, target[i.targ], power, sample.size)
  }
  return(sample.id)
}

init.W2V <- function(n.uniq,size.hidden){
  W.in = matrix(rnorm(n.uniq*size.hidden),nrow=n.uniq)*0.01
  W.out = matrix(rnorm(n.uniq*size.hidden),nrow=n.uniq)*0.01
  return(list(W.in = W.in, W.out= W.out))
}

network<-init.W2V(8,5)
n.batch = 3;
txt = "You say goodbye and I say hello.."
corp = txt2corpus(txt)
corp = c(8,8,corp)
dat<-corp2contxt(corp)
n.data =  length(dat$target)

n.iter = 3000;
lambda=0.1;
loss = rep(0, n.iter)
sample.size = 2
for (i.iter in 1:n.iter){
  samp.idx = sample(1:n.data,n.batch)
  batch.c1 = dat$context[samp.idx,1]
  batch.c2 = dat$context[samp.idx,2]
  batch.y = dat$target[samp.idx]
  h1 <- embed.forwd(network$W.in,batch.c1)
  h2 <- embed.forwd(network$W.in,batch.c2)
  h = 0.5 * (h1 + h2)
  # positive
  s = embed.dot.forwd(network$W.out,h,batch.y)
  z = sigmoidWL.forwd(s,1)
  loss[i.iter] = z
  ds = sigmoidWL.backwd(s, 1, 1)
  dE = embed.dot.backwd(network$W.out,h, batch.y, ds)
  dh = dE$dh*0.5
  dIn1 = embed.backwd(network$W.in,dat$context[samp.idx,1], dh)
  dIn2 = embed.backwd(network$W.in,dat$context[samp.idx,2], dh)
  network$W.out[batch.y,] = network$W.out[batch.y,] - lambda*dE$dW
  network$W.in = network$W.in - lambda*dIn1
  network$W.in = network$W.in - lambda*dIn2
  # negative
  neg.set <- get.neg.sample(corp,batch.y, 0.75, sample.size)
  for (i.neg in 1:sample.size){
      s = embed.dot.forwd(network$W.out,h,neg.set[,i.neg])
      z = sigmoidWL.forwd(s,0)
      loss[i.iter] = loss[i.iter] + z
      ds = sigmoidWL.backwd(s, 0, dout=1)
      dE = embed.dot.backwd(network$W.out,h,neg.set[,i.neg], ds)
      dh = dE$dh*0.5
      dIn1 = embed.backwd(network$W.in, dat$context[samp.idx,1],dh)
      dIn2 = embed.backwd(network$W.in, dat$context[samp.idx,2],dh)
      network$W.out[neg.set[,i.neg],] = network$W.out[neg.set[,i.neg],] - lambda/sample.size*dE$dW
      network$W.in = network$W.in - lambda*dIn1
      network$W.in = network$W.in - lambda*dIn2
    }
}
plot(loss, type='l')

samp.idx = c(2:6,8,9,1)
batch.c1 = dat$context[samp.idx,1]
batch.c2 = dat$context[samp.idx,2]
batch.y = dat$target[samp.idx]
h1 <- embed.forwd(network$W.in,batch.c1)
h2 <- embed.forwd(network$W.in,batch.c2)
h = 0.5 * (h1 + h2)
s = MatMult.forwd(h,t(network$W.out))
z = sigmoid.forwd(s)
res=cbind(z,batch.y)
par(mfrow=c(8,1))
for (i in 1:8){
  col.spec = rep("black",8)
  col.spec[i]="orange"
  barplot(res[i, 1:8],col=col.spec)
}

par(mfrow=c(4,1))
for (i in 1:4){
  col.spec = rep("black",8)
  col.spec[i]="orange"
  barplot(res[i, 1:8],col=col.spec)
}

Basic Self-Study B02

dec2bin<-function(num, digits=8) {
  bin=c()
  if (num==0){
    bin=0
  } else {
    while(num!=0){
      rem=num%%2
      num=num%/%2
      bin=c(rem,bin)
    }
  }
  if (length(bin) < digits){
    res=matrix(0,nrow=1,ncol=digits)
    res[(digits-length(bin)+1):digits]=bin
  } else {res=bin}
  return(res)
}
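A quick check of dec2bin() with a couple of inputs:

dec2bin(10)              # 0 0 0 0 1 0 1 0
dec2bin(10, digits = 4)  # 1 0 1 0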

Fundamentals of Data Analysis B: Log-linear Analysis

freq<-c(33,37,7,23)
pref<-factor(c('soba','udon','soba','udon'))
region<-factor(c('east','east','west','west'))
dat<-data.frame(pref,region,freq)
dat.table=table(pref,region)
dat.table[cbind(pref,region)]<-freq

dat.loglinCE_A<-loglin(dat.table, list(1), fit=T,param=T)
dat.loglinCE_B<-loglin(dat.table,list(2), fit=T,param=T)
dat.loglinIND<-loglin(dat.table,list(1,2), fit=T,param=T)
dat.loglinSAT<-loglin(dat.table,list(c(1,2)), fit=T,param=T)
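One way to compare these fits is to refer the likelihood-ratio statistics returned by loglin() to a chi-squared distribution; a sketch:

# goodness of fit of the independence model (vs. the saturated model)
1 - pchisq(dat.loglinIND$lrt, dat.loglinIND$df)
# does adding the region margin improve on the pref-only model?
1 - pchisq(dat.loglinCE_A$lrt - dat.loglinIND$lrt, dat.loglinCE_A$df - dat.loglinIND$df)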


freq<-c(9,5,2,4,16,10,26,28)
gender<-factor(c(rep('female',4),rep('male',4)))
affil<-factor(rep(c('L','L','E','E'),2))
circle<-factor(rep(c('tennis','astro'),4))
dat<-data.frame(gender,affil,circle,freq)

dat.table<-table(gender,affil,circle)
dat.table[cbind(gender,affil,circle)]<-freq

dat.loglin2<-loglin(dat.table,list(1), fit=T,param=T)
dat.loglin3<-loglin(dat.table,list(1,3), fit=T,param=T)
dat.loglin4<-loglin(dat.table,list(1,2,3), fit=T,param=T)
dat.loglin5<-loglin(dat.table,list(c(1,3),2), fit=T,param=T)
dat.loglin6<-loglin(dat.table,list(c(1,3),c(1,2)), fit=T,param=T)
dat.loglin7<-loglin(dat.table,list(c(1,3),c(1,2),c(2,3)), fit=T,param=T)
dat.loglin8<-loglin(dat.table,list(c(1,2,3)), fit=T,param=T)

source('http://peach.l.chiba-u.ac.jp/course_folder/cuUtil02.R')

Fundamentals of Data Analysis B: Test Theory

dat<-read.table("http://peach.l.chiba-u.ac.jp/course_folder/survey_data01.txt")

install.packages("psych")
library("psych")
ca<-alpha(dat)

dat<-read.csv("http://peach.l.chiba-u.ac.jp/course_folder/survey2.csv")
image(cor(dat)[10:1,1:10])

ca1_5 = alpha(dat[,1:5])
ca1_5

ca6_10 = alpha(dat[,6:10])
ca6_10

F1<-factanal(dat[,1:5],1)
F2<-factanal(dat[,6:10],1)

library(sem)
# cfa() reads the model specification interactively: type the two F1/F2 lines below
# at the prompt and end the input with a blank line
fa.model=cfa(reference.indicator=FALSE)
F1: q1,q2,q3,q4,q5
F2: q6,q7,q8,q9,q10

# update() is also interactive: enter the "delete" line below to drop the factor correlation
fa.model<-update(fa.model)
delete, F1<->F2

fa.result<-sem(fa.model, cov(dat), 300)
summary(fa.result)

install.packages("ltm") 
library(ltm)
dat<-read.table("http://peach.l.chiba-u.ac.jp/course_folder/survey_data01.txt")
dat = dat-1
descript(dat)
irt1P<-rasch(dat)
plot(irt1P)
GoF.rasch(irt1P)
person.fit(irt1P)
item.fit(irt1P)
theta = factor.scores(irt1P)
cor(rowSums(theta[[1]][,1:10]),theta[[1]]$z1)

irt2P<-ltm(dat~z1) 
plot(irt2P)
person.fit(irt2P)
item.fit(irt2P)
theta2P = factor.scores(irt2P)
cor(rowSums(theta2P[[1]][,1:10]),theta2P[[1]]$z1)
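To formally compare the one- and two-parameter models, ltm provides a likelihood-ratio test through anova(); for example:

anova(irt1P, irt2P)   # LRT of the Rasch model against the 2PL model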

Cognitive Information Analysis Seminar B: DL2 Ch. 3

# p99
ctxt = c(1,rep(0,6))
W = matrix(rnorm(7*3),nrow=7)
h = ctxt%*%W

# p 100
MatMult.forwd <- function(x, W){
  return(x%*%W)
}
res <- MatMult.forwd(ctxt,W)

# p105
c1 = c(1,rep(0,6))
c2 = rep(0,7); c2[3]=1
W.in = matrix(rnorm(7*3),nrow=7)
W.out = matrix(rnorm(7*3),nrow=3)
h1 = MatMult.forwd(c1,W.in)
h2 = MatMult.forwd(c2,W.in)
h = 0.5*(h1+h2)
s = MatMult.forwd(h,W.out)
print(s)

# p107
softmax1.pred <- function(x){
  max.x = max(x)
  x = x - max.x
  y = exp(x)/sum(exp(x))
  return(y)
}

softmaxCE1.forwd <- function(x, target){
  max.x = max(x)
  x = x - max.x
  y = exp(x)/sum(exp(x))
  delta = 1e-7;
  return(-sum(target*log(y + delta)))
}

targ=rep(0,7); targ[2]=1
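A quick check of the two functions above, using the score vector s computed in the p105 example and the target vector targ:

softmax1.pred(s)            # predicted probabilities over the 7 words
softmaxCE1.forwd(s, targ)   # cross-entropy loss against targ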

# p112
txt = "You say goodbye and I say hello."
txt = tolower(txt)
txt = gsub('[.]', ' .',txt)
words = unlist(strsplit(txt, " "))
uniq.words = unique(words)
n.uniq = length(uniq.words)
n.words = length(words)
corpus = rep(0,n.words)
corpus = match(words,uniq.words)

txt = "You say goodbye and I say hello.";
txt2corpus <- function(txt){
  txt = tolower(txt)
  txt = gsub('[.]', ' .',txt)
  words = unlist(strsplit(txt, " "))
  uniq.words = unique(words)
  n.uniq = length(uniq.words)
  n.words = length(words)
  corpus = rep(0,n.words)
  corpus = match(words,uniq.words)
  return(corpus)
}
corp = txt2corpus(txt)

len.corp = length(corp)
target = corp[2:(len.corp-1)]
col1 = corp[1:(len.corp-2)]
col2 = corp[3:len.corp]
context = cbind(col1,col2)

corp2contxt <- function(corpus){
  len.corp = length(corpus)
  target = corpus[2:(len.corp-1)]
  col1 = corpus[1:(len.corp-2)]
  col2 = corpus[3:len.corp]
  context = cbind(col1,col2)
  return(list(context=context,target = target))
}

# p114
len.corp = length(corpus)
idxT = cbind(1:(len.corp-2), corpus[2:(len.corp-1)])
targ1S = matrix(0,nrow=len.corp-2,ncol=length(unique(corpus)))
targ1S[idxT]=1

idxC1 = cbind(1:(len.corp-2),corpus[1:(len.corp-2)])
idxC2 = cbind(1:(len.corp-2),corpus[3:len.corp])

contxt1 = matrix(0,nrow=len.corp-2,ncol=length(unique(corpus)))
contxt2 = matrix(0,nrow=len.corp-2,ncol=length(unique(corpus)))
contxt1[idxC1]=1
contxt2[idxC2]=1

corp2contxt1S = function(corpus){
  len.corp = length(corpus)
  # creating target matrix
  idxT = cbind(1:(len.corp-2), corpus[2:(len.corp-1)])
  targ1S = matrix(0,nrow=len.corp-2,ncol=length(unique(corpus)))
  targ1S[idxT]=1
  
  # creating context matrices
  idxC1 = cbind(1:(len.corp-2),corpus[1:(len.corp-2)])
  idxC2 = cbind(1:(len.corp-2),corpus[3:len.corp])
  contxt1 = matrix(0,nrow=len.corp-2,ncol=length(unique(corpus)))
  contxt2 = matrix(0,nrow=len.corp-2,ncol=length(unique(corpus)))
  contxt1[idxC1]=1
  contxt2[idxC2]=1
  return(list(target=targ1S,contxt1=contxt1,contxt2=contxt2))
}

# p116~
init.W2V <- function(n.uniq,size.hidden){
  W.in = matrix(rnorm(n.uniq*size.hidden),nrow=n.uniq)*0.01
  W.out = matrix(rnorm(n.uniq*size.hidden),nrow=size.hidden)*0.01
  return(list(W.in = W.in, W.out= W.out))
}

softmax.forwd <- function(x, target){
  max.x = apply(x,1,max)
  C = ncol(x)
  x = x - max.x%*%matrix(1,nrow=1,ncol=C)
  y = exp(x)/rowSums(exp(x))
  delta = 1e-7;
  R = nrow(as.matrix(y))
  return(-sum(target*log(y + delta))/R)
}

softmax.bckwd <- function(x, target,  dout = 1){
  max.x = apply(x, 1, max)
  R = nrow(x)
  C = ncol(x)
  x = x - max.x%*%matrix(1,nrow=1,ncol=C)
  y = exp(x)/rowSums(exp(x))
  return((y-target)/R)
}

MatMult.bckwd <- function(x, W, dout){
  dx = dout%*%t(W)
  dW = t(x)%*%dout
  return(list(dx = dx, dW = dW))
}

softmax.pred <- function(x, target){
  max.x = apply(x,1,max)
  C = ncol(x)
  x = x - max.x%*%matrix(1,nrow=1,ncol=C)
  y = exp(x)/rowSums(exp(x))
  return(y)
}

dat <- corp2contxt1S(corp)   # one-hot targets and contexts for the corpus above
network<-init.W2V(7,5)
n.batch = 3;
n.data = nrow(dat$target)
n.iter = 1000;
lambda=0.1;
loss = rep(0, n.iter)
for (i.iter in 1:n.iter){
  samp.idx = sample(1:n.data,n.batch)
  batch.c1 = dat$contxt1[samp.idx,]
  batch.c2 = dat$contxt2[samp.idx,]
  batch.y = dat$target[samp.idx,]
  h1 <- MatMult.forwd(batch.c1, network$W.in)
  h2 <- MatMult.forwd(batch.c2, network$W.in)
  h = 0.5 * (h1 + h2)
  s = MatMult.forwd(h,network$W.out)
  z = softmax.forwd(s,batch.y)
  loss[i.iter] = z
  ds = softmax.bckwd(s, batch.y, 1)
  da = MatMult.bckwd(h,network$W.out,ds)
  da$dx = da$dx*0.5   # h = 0.5*(h1+h2), so the gradient to each context branch is halved
  dIn1 = MatMult.bckwd(batch.c1,network$W.in,da$dx)
  dIn2 = MatMult.bckwd(batch.c2,network$W.in,da$dx)
  network$W.out = network$W.out - lambda*da$dW
  network$W.in = network$W.in - lambda*dIn1$dW
  network$W.in = network$W.in - lambda*dIn2$dW
}
plot(loss, type='l')
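As a rough check of the trained CBOW model, similar to the prediction checks in the word2vec examples above, all contexts can be pushed through the learned weights; a sketch assuming the objects defined in this section:

h.all = 0.5*(MatMult.forwd(dat$contxt1, network$W.in) + MatMult.forwd(dat$contxt2, network$W.in))
s.all = MatMult.forwd(h.all, network$W.out)
round(softmax.pred(s.all, dat$target), 3)   # each row should peak at the true target word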