# データ解析基礎論A. Week10

```r
# Week 10: log-linear analysis of contingency tables.
freq   <- c(33, 37, 7, 23)
pref   <- factor(c('soba', 'udon', 'soba', 'udon'))
region <- factor(c('east', 'east', 'west', 'west'))
dat <- data.frame(pref, region, freq)
# Build the 2x2 pref-by-region table and fill in the observed counts.
dat.table <- table(pref, region)
dat.table[cbind(pref, region)] <- freq

# LRT statistic of the equiprobability model: expected count = 100/4 = 25 per cell.
ME.lrt <- 2 * sum(freq * log(freq / 25))
# Conditional-equiprobability models (each single margin) and the independence model.
dat.loglinCE_A <- loglin(dat.table, list(1), fit = T, param = T)
dat.loglinCE_B <- loglin(dat.table, list(2), fit = T, param = T)
dat.loglinIND  <- loglin(dat.table, list(1, 2), fit = T, param = T)
# Saturated model — was referenced below but never fitted in the original.
dat.loglinSAT  <- loglin(dat.table, list(c(1, 2)), fit = T, param = T)

# Nested model comparisons: LRT differences are chi-square with df = 1.
ME.lrt - dat.loglinCE_A$lrt
1 - pchisq(ME.lrt - dat.loglinCE_A$lrt, 1)

dat.loglinCE_A$lrt - dat.loglinIND$lrt
1 - pchisq(dat.loglinCE_A$lrt - dat.loglinIND$lrt, 1)

dat.loglinIND$lrt - dat.loglinSAT$lrt
1 - pchisq(dat.loglinIND$lrt - dat.loglinSAT$lrt, 1)

# 2x2x2 example: gender x affiliation x circle.
freq   <- c(9, 5, 2, 4, 16, 10, 26, 28)
gender <- factor(c(rep('female', 4), rep('male', 4)))
affil  <- factor(rep(c('L', 'L', 'E', 'E'), 2))
circle <- factor(rep(c('tennis', 'astro'), 4))
dat <- data.frame(gender, affil, circle, freq)   # fixed typo: was `gener`

dat.table <- table(gender, affil, circle)
dat.table[cbind(gender, affil, circle)] <- freq

# Hierarchy of log-linear models, from one main effect up to saturated.
dat.loglin2 <- loglin(dat.table, list(1), fit = T, param = T)
dat.loglin3 <- loglin(dat.table, list(1, 3), fit = T, param = T)
dat.loglin4 <- loglin(dat.table, list(1, 2, 3), fit = T, param = T)
dat.loglin5 <- loglin(dat.table, list(c(1, 3), 2), fit = T, param = T)
dat.loglin6 <- loglin(dat.table, list(c(1, 3), c(1, 2)), fit = T, param = T)
dat.loglin7 <- loglin(dat.table, list(c(1, 3), c(1, 2), c(2, 3)), fit = T, param = T)
dat.loglin8 <- loglin(dat.table, list(c(1, 2, 3)), fit = T, param = T)
```

# 認知情報解析　DL　ch5までのまとめ

```r
# ReLU
# ReLU forward pass: elementwise max(x, 0).
relu.forwd <- function(x) {
  pmax(x, 0)
}
# ReLU backward pass: zero the upstream gradient wherever the forward input
# was non-positive (those units did not fire).
relu.bckwd <- function(z, dout) {
  dead <- which(z <= 0)
  dout[dead] <- 0
  dout
}
# Sigmoid forward pass: logistic function 1 / (1 + exp(-x)), elementwise.
sigmoid.forwd <- function(x) {
  1 / (1 + exp(-x))
}
# Sigmoid backward pass: dL/dx = dout * (1 - z) * z, where z = sigmoid(x)
# from the forward pass.
sigmoid.bckwd <- function(z, dout) {
  dout * (1 - z) * z
}
# Affine (fully-connected) forward pass: x %*% W plus the bias row b,
# broadcast to every row of the batch via an outer product with a ones column.
affine.forwd <- function(x, W, b) {
  ones <- matrix(1, nrow = nrow(x), ncol = 1)
  x %*% W + ones %*% b
}
# Affine backward pass: gradients w.r.t. the layer input, weights, and bias.
affine.bckwd <- function(x, W, dout) {
  list(
    dx = dout %*% t(W),   # propagate to the previous layer
    dW = t(x) %*% dout,   # weight gradient
    db = colSums(dout)    # bias gradient: sum over the batch dimension
  )
}
# Softmax + cross-entropy forward pass (rows = observations).
# Each row's max is subtracted before exponentiating for numerical stability;
# returns both the softmax matrix and the mean cross-entropy loss.
softmax.forwd <- function(x, target) {
  C <- ncol(x)
  row.max <- apply(x, 1, max)
  shifted <- x - row.max %*% matrix(1, nrow = 1, ncol = C)
  expd <- exp(shifted)
  y <- expd / rowSums(expd)
  delta <- 1e-7                       # guards against log(0)
  R <- nrow(as.matrix(y))
  list(smx = y, ce = -sum(target * log(y + delta)) / R)
}
# Softmax-with-cross-entropy backward pass: (y - t), averaged over the batch.
softmax.bckwd <- function(smx, target) {
  batch.size <- nrow(smx)
  (smx - target) / batch.size
}

# Dispatch the forward activation by name: "sigmoid", "relu", or "softmax"
# (the softmax variant also needs the targets to compute the CE loss).
# Unknown names fall through and return NULL, as in the if-chain version.
activation <- function(A, target, actFun) {
  switch(actFun,
         sigmoid = sigmoid.forwd(A),
         relu    = relu.forwd(A),
         softmax = softmax.forwd(A, target))
}

# Dispatch the backward activation by name: "sigmoid", "relu", or "softmax".
# Softmax ignores z/dout because its gradient is computed directly from
# the softmax output and the targets.
Dact <- function(z, smx, target, dout, actFun) {
  switch(actFun,
         sigmoid = sigmoid.bckwd(z, dout),
         relu    = relu.bckwd(z, dout),
         softmax = softmax.bckwd(smx, target))
}

# Initialize network parameters: for each of the (length(n.neurons) - 1)
# layers, a weight matrix (n_in x n_out) and a bias row, drawn from N(0, 0.1).
init.network <- function(n.neurons, actFun) {
  n.layer <- length(n.neurons)
  W <- vector("list", n.layer - 1)
  b <- vector("list", n.layer - 1)
  for (i.layer in seq_len(n.layer - 1)) {
    n.in  <- n.neurons[i.layer]
    n.out <- n.neurons[i.layer + 1]
    W[[i.layer]] <- matrix(rnorm(n.in * n.out, sd = 0.1), nrow = n.in)
    b[[i.layer]] <- matrix(rnorm(n.out, sd = 0.1), nrow = 1)
  }
  list(W = W, b = b, actFun = actFun)
}

# Train a fully-connected network with minibatch SGD.
#   HP[1] = batch size, HP[2] = n iterations, HP[3] = learning rate.
# Returns the per-iteration loss and the trained network.
cu.nnet = function(train.x, train.y, net, HP = c(10, 1000, 0.05)) {
  n.layer <- length(net$W)
  loss <- rep(0, HP[2])
  A <- list(); z <- list(); dact <- list(); Daff <- list()
  n.obs <- nrow(train.x)
  for (i.iter in 1:HP[2]) {
    # BUG FIX: x.batch / t.batch were never defined — sample a minibatch
    # of HP[1] rows each iteration.
    batch.idx <- sample(n.obs, HP[1])
    x.batch <- train.x[batch.idx, , drop = FALSE]
    t.batch <- train.y[batch.idx, , drop = FALSE]
    # forward pass
    for (i.layer in 1:n.layer) {
      x <- if (i.layer == 1) x.batch else z[[i.layer - 1]]
      A[[i.layer]] <- affine.forwd(x, net$W[[i.layer]], net$b[[i.layer]])
      z[[i.layer]] <- activation(A[[i.layer]], t.batch, net$actFun[i.layer])
    }
    loss[i.iter] <- z[[n.layer]]$ce
    smx <- z[[n.layer]]$smx
    # backward pass
    for (i.layerR in n.layer:1) {
      dout <- if (i.layerR == n.layer) 1 else Daff[[i.layerR + 1]]$dx
      dact[[i.layerR]] <- Dact(z[[i.layerR]], smx, t.batch, dout, net$actFun[i.layerR])
      # BUG FIX: the affine layer's forward input was z[[i-1]] (post-activation),
      # not A[[i-1]] (pre-activation); use the same input for the gradient.
      x <- if (i.layerR == 1) x.batch else z[[i.layerR - 1]]
      # BUG FIX: was network$W (a global) — must use the net being trained.
      Daff[[i.layerR]] <- affine.bckwd(x, net$W[[i.layerR]], dact[[i.layerR]])
    }
    # vanilla SGD parameter update
    for (i.layer in 1:n.layer) {
      net$W[[i.layer]] <- net$W[[i.layer]] - HP[3] * Daff[[i.layer]]$dW
      net$b[[i.layer]] <- net$b[[i.layer]] - HP[3] * Daff[[i.layer]]$db
    }
  }
  return(list(loss = loss, net = net))
}

# iris demo: one-hot encode the species label, train a 4-15-3 network.
train.x = as.matrix(iris[,1:4])
train.y.temp = as.numeric(iris[,5])
train.y = matrix(0, nrow = nrow(train.x), ncol = 3)
train.y[which(train.y.temp==1), 1] = 1
train.y[which(train.y.temp==2), 2] = 1
train.y[which(train.y.temp==3), 3] = 1
actF = c("relu","softmax")
network = init.network(c(4,15,3), actF)
res = cu.nnet(train.x, train.y, network, HP=c(15,2000,0.1))
plot(res$loss, type='l')

# MNIST demo
# NOTE(review): `train` / `test` are assumed to be data frames already loaded
# elsewhere (label in column 1, 784 pixel columns) — the loading code is not
# shown in this file; confirm against the lecture materials.
train <- data.matrix(train)
test <- data.matrix(test)
train.x <- as.matrix(train[,-1]/255)   # scale pixel values to [0, 1]
train.y.temp <- train[,1]
train.y = matrix(0, nrow = nrow(train.x), ncol = 10)
for (i in 1:nrow(train.x)){
  train.y[i, (train.y.temp[i]+1)] = 1  # digits 0-9 -> columns 1-10
}
actF = c("relu","relu","softmax")
network = init.network(c(784,100,50,10), actF)
res = cu.nnet(train.x, train.y, network, HP=c(100,2000,0.1))
plot(res$loss, type='l')

########################################################

# Initialize a network together with optimizer state.
#   n.neurons : vector of layer sizes
#   actFun    : activation name per layer
#   Opt       : optimizer name ("momentum", "adagrad", "adam", ...)
#   sdv       : sd of the random init; if omitted (3-argument call), use
#               Xavier init for sigmoid layers and He init otherwise.
# BUG FIXES vs the original: mismatched braces made the function unparseable;
# hW/hb were assigned but never created as lists; aVb was missing from the
# return value.
# NOTE(review): the adagrad/adam branch guards below are reconstructed — the
# original's stray braces suggest deleted branches; confirm against the
# lecture source.
init.network <- function(n.neurons, actFun, Opt, sdv) {
  n.layer <- length(n.neurons)
  W <- list(); b <- list()
  mW <- list(); mb <- list()     # momentum buffers
  hW <- list(); hb <- list()     # AdaGrad accumulators
  aMW <- list(); aMb <- list()   # Adam first-moment estimates
  aVW <- list(); aVb <- list()   # Adam second-moment estimates
  for (i.layer in 1:(n.layer - 1)) {
    if (nargs() == 3) {
      if (actFun[i.layer] == "sigmoid") {
        sdv <- 1 / sqrt(n.neurons[i.layer])   # Xavier
      } else {
        sdv <- sqrt(2 / n.neurons[i.layer])   # He — assumes ReLU
      }
    }
    W[[i.layer]] <- matrix(rnorm(n.neurons[i.layer] * n.neurons[(i.layer + 1)], sd = sdv),
                           nrow = n.neurons[i.layer])
    b[[i.layer]] <- matrix(rnorm(n.neurons[(i.layer + 1)], sd = sdv), nrow = 1)
    if (Opt == "momentum") {
      mW[[i.layer]] <- matrix(0, nrow = n.neurons[i.layer], ncol = n.neurons[(i.layer + 1)])
      mb[[i.layer]] <- matrix(0, nrow = 1, ncol = n.neurons[(i.layer + 1)])
    }
    if (Opt == "adagrad") {
      hW[[i.layer]] <- matrix(0, nrow = n.neurons[i.layer], ncol = n.neurons[(i.layer + 1)])
      hb[[i.layer]] <- matrix(0, nrow = 1, ncol = n.neurons[(i.layer + 1)])
    }
    if (Opt == "adam") {
      aMW[[i.layer]] <- matrix(0, nrow = n.neurons[i.layer], ncol = n.neurons[(i.layer + 1)])
      aMb[[i.layer]] <- matrix(0, nrow = 1, ncol = n.neurons[(i.layer + 1)])
      aVW[[i.layer]] <- matrix(0, nrow = n.neurons[i.layer], ncol = n.neurons[(i.layer + 1)])
      aVb[[i.layer]] <- matrix(0, nrow = 1, ncol = n.neurons[(i.layer + 1)])
    }
  }
  return(list(W = W, b = b, actFun = actFun, optimizer = Opt,
              mW = mW, mb = mb,
              hW = hW, hb = hb,
              aMW = aMW, aMb = aMb, aVW = aVW, aVb = aVb))
}

# Apply one optimizer update step, dispatched on net$optimizer.
# BUG FIX: the original had two stray closing braces after the function body,
# which broke parsing of everything that followed.
# NOTE(review): only "momentum" is handled; other optimizer values fall
# through and return NULL — the stray braces suggest adagrad/adam branches
# were deleted; confirm whether they should be restored.
OPT <- function(net, Daff, HP) {
  if (net$optimizer == "momentum") {
    return(Opt.mom(net, Daff, HP))
  }
}

# Momentum update with weight decay, applied to every layer.
#   HP[3] = learning rate, HP[4] = weight decay, HP[5] = momentum
Opt.mom <- function(net, Daff, HP) {
  n.layer <- length(net$W)
  for (i.layer in 1:n.layer) {
    # BUG FIX: in the original the continuation line started with "-", so R
    # parsed it as a SEPARATE expression — the gradient and decay terms were
    # computed and silently discarded. The operator must end the line.
    net$mW[[i.layer]] <- HP[5] * net$mW[[i.layer]] -
      HP[3] * Daff[[i.layer]]$dW - HP[4] * net$W[[i.layer]]
    net$mb[[i.layer]] <- HP[5] * net$mb[[i.layer]] -
      HP[3] * Daff[[i.layer]]$db - HP[4] * net$b[[i.layer]]
    net$W[[i.layer]] <- net$W[[i.layer]] + net$mW[[i.layer]]
    net$b[[i.layer]] <- net$b[[i.layer]] + net$mb[[i.layer]]
  }
  return(net)   # was return(net=net): return() takes no named argument
}

# Train a network with minibatch SGD and a pluggable optimizer (see OPT()).
#   HP[1] = batch size, HP[2] = n iterations, HP[3] = learning rate,
#   HP[4] = weight decay, HP[5] = momentum
#   HP[6], HP[7]: presumably Adam betas — unused by the momentum path; confirm.
cu.nnet = function(train.x, train.y, net, HP = c(10,1000,0.05,0.01,0.1,0.999,0.9)) {
  n.layer <- length(net$W)
  loss <- rep(0, HP[2])
  A <- list(); z <- list(); dact <- list(); Daff <- list()
  n.obs <- nrow(train.x)
  for (i.iter in 1:HP[2]) {
    # BUG FIX: x.batch / t.batch were never defined — sample a minibatch
    # of HP[1] rows each iteration.
    batch.idx <- sample(n.obs, HP[1])
    x.batch <- train.x[batch.idx, , drop = FALSE]
    t.batch <- train.y[batch.idx, , drop = FALSE]
    # forward pass
    for (i.layer in 1:n.layer) {
      x <- if (i.layer == 1) x.batch else z[[i.layer - 1]]
      A[[i.layer]] <- affine.forwd(x, net$W[[i.layer]], net$b[[i.layer]])
      z[[i.layer]] <- activation(A[[i.layer]], t.batch, net$actFun[i.layer])
    }
    loss[i.iter] <- z[[n.layer]]$ce
    smx <- z[[n.layer]]$smx
    # backward pass
    for (i.layerR in n.layer:1) {
      dout <- if (i.layerR == n.layer) 1 else Daff[[i.layerR + 1]]$dx
      dact[[i.layerR]] <- Dact(z[[i.layerR]], smx, t.batch, dout, net$actFun[i.layerR])
      # BUG FIX: the affine layer's forward input was z[[i-1]] (post-activation),
      # not A[[i-1]] (pre-activation); use the same input for the gradient.
      x <- if (i.layerR == 1) x.batch else z[[i.layerR - 1]]
      Daff[[i.layerR]] <- affine.bckwd(x, net$W[[i.layerR]], dact[[i.layerR]])
    }
    net <- OPT(net, Daff, HP)   # optimizer parameter update
  }
  return(list(loss = loss, net = net))
}

# iris demo with the momentum optimizer: one-hot labels, 4-15-3 network.
train.x = as.matrix(iris[,1:4])
train.y.temp = as.numeric(iris[,5])
train.y = matrix(0, nrow = nrow(train.x), ncol = 3)
train.y[which(train.y.temp==1), 1] = 1
train.y[which(train.y.temp==2), 2] = 1
train.y[which(train.y.temp==3), 3] = 1
actF = c("relu","softmax")
network = init.network(c(4,15,3), actF, "momentum")
res = cu.nnet(train.x, train.y, network, HP=c(15,1000,0.01,0.0001,0.9,0.999,0.9))
hist(res$net$W[[1]])        # distribution of trained first-layer weights
plot(res$loss, type='l')    # training loss curve
```

# データ解析基礎論A week09

```r
dat <- read.csv("http://www.matsuka.info/data_folder/datWA01.txt")
# Plot gender (coded 1 = female, 2 = male) against shoe size.
plot(dat$shoesize, dat$gender, pch=20, cex=3, xlab="Shoe size",
     ylab="gender", col='blue', ylim = c(0.5, 2.5), yaxt="n")
axis(2, at = c(1,2), labels=c("female","male"))
# Naive linear regression of the numeric-coded gender on shoe size.
dat.lm <- lm(as.numeric(dat$gender)~dat$shoesize, data=dat)
abline(dat.lm, lwd=4, col='red')

# Chi-square tests: goodness of fit (probabilities rescaled) and 2x2 independence.
chisq.test(c(72,23,16,49), p=rep(40,4), rescale.p=T)
M = matrix(c(52,48,8,42), nrow=2)
chisq.test(M, correct=F)

# Redraw the scatter plot, then again with the linear fit overlaid.
plot(dat$shoesize, dat$gender, pch=20, cex=3, xlab="Shoe size",
     ylab="gender", col='blue', ylim = c(0.5, 2.5), yaxt="n")
axis(2, at = c(1,2), labels=c("female","male"))
plot(dat$shoesize, dat$gender, pch=20, cex=3, xlab="Shoe size",
     ylab="gender", col='blue', ylim = c(0.5, 2.5), yaxt="n")
axis(2, at = c(1,2), labels=c("female","male"))
abline(dat.lm, lwd=4, col='red')

# Odds and log-odds (logit) as p moves across (0.1, 0.9).
p = seq(0.1, 0.9, length.out = 20)
plot(p/(1-p), col="red", pch=20, cex=3)
plot(log(p/(1-p)), col="red", pch=20, cex=3)

# Logistic regression of gender on shoe size; plot the fitted probabilities
# (+1 so they sit on the 1-2 axis used for the gender codes).
dat.lr <- glm(gender~shoesize, family=binomial, data=dat)
x = seq((min(dat$shoesize)-2), (max(dat$shoesize)+2), 0.1)
keisu = coef(dat.lr)
y.prob = 1/(1+exp(-1*(keisu[1]+keisu[2]*x)))
plot(dat$shoesize, dat$gender, pch=20, cex=3, xlab="Shoe size",
     ylab="gender", col='blue', ylim = c(0.5, 2.5), yaxt="n")
axis(2, at = c(1,2), labels=c("female","male"))
lines(x, y.prob+1, lty=2, lwd=4, col='green')
abline(dat.lm, lwd=4, col='red')

anova(dat.lr, test="Chisq")

# Nested logistic models: intercept-only, shoe size, height.
# NOTE(review): `h` is presumably a height column in dat — confirm.
dat.lr0 <- glm(gender~1, family="binomial", data=dat)
dat.lrS <- glm(gender~shoesize, family="binomial", data=dat)
dat.lrH <- glm(gender~h, family="binomial", data=dat)

# NOTE(review): this model uses survival/age/Ncigarettes/NdaysGESTATION,
# which are not in datWA01.txt — a different dataset is presumably loaded
# before this line. stepAIC() requires the MASS package.
dat.glmAllMult = glm(survival~age*Ncigarettes*NdaysGESTATION, family=binomial, data=dat)
stepAIC(dat.glmAllMult)
```

# データ解析基礎論　week 08

```r
source("http://peach.l.chiba-u.ac.jp/course_folder/jn_plot.R")
# Moderated regression; the `protest` data frame and jn_plot() come from the
# sourced file. Renamed the model from `lm` — assigning it to `lm` shadowed
# base::lm for the rest of the session.
protest.lm <- lm(liking ~ sexism * protest, data = protest)
summary(protest.lm)
jn_plot(protest.lm, "protest", "sexism", alpha = .05)
```

# dlls

```import numpy as np
import matplotlib.pylab as plt

h = 1e-4 # 0.0001

for idx in range(x.size):
tmp_val = x[idx]
x[idx] = float(tmp_val) + h
fxhP = f(x) # f(x+h)

x[idx] = tmp_val - h
fxhM = f(x) # f(x-h)
grad[idx] = (fxhP - fxhM) / (2*h)

x[idx] = tmp_val # 値を元に戻す

x = init_x
x_history = []

for i in range(step_num):
x_history.append( x.copy() )

return x, np.array(x_history)

def function_2(x):
return (x[0]**2)/20 + x[1]**2

init_x = np.array([-3.0, 4.0])

lr = 0.9
step_num = 100
x, x_history = gradDesc(function_2, init_x, lr=lr, step_num=step_num)

plt.plot( [-5, 5], [0,0], '--b')
plt.plot( [0,0], [-5, 5], '--b')
plt.plot(x_history[:,0], x_history[:,1], '-o')

plt.xlim(-3.5, 3.5)
plt.ylim(-4.5, 4.5)
plt.xlabel("X0")
plt.ylabel("X1")
plt.show()
```

# 認知情報解析　ch06.1

```r
# Objective used in the optimizer demos: f(x) = x1^2/20 + x2^2 (elongated bowl).
func02R = function(x) {
  1/20 * x[1]^2 + x[2]^2
}
# Numerical (central-difference) gradient of `func` evaluated at matrix `x`.
# BUG FIX: the original fragment used `x`, `grad`, and `func` without defining
# them and had a stray closing brace that broke parsing — wrapped here as a
# reusable function.
numerical.grad <- function(func, x) {
  h <- 1e-4
  grad <- matrix(0, nrow = nrow(x), ncol = ncol(x))
  for (i.col in 1:ncol(x)) {
    for (i.row in 1:nrow(x)) {
      temp.x <- x[i.row, i.col]
      x[i.row, i.col] <- temp.x + h
      plusH <- do.call(func, list(x))
      x[i.row, i.col] <- temp.x - h
      minusH <- do.call(func, list(x))
      grad[i.row, i.col] <- (plusH - minusH) / (2 * h)
      x[i.row, i.col] <- temp.x   # restore before moving on
    }
  }
  return(grad)
}

# Contour plot of func02R over [-10, 10]^2 (needs the plot3D package for mesh()).
library(plot3D)   # was require(): library() errors loudly if the package is absent
x = seq(-10, 10, 0.2)
y = seq(-10, 10, 0.2)
M = mesh(x, y)
Z = as.vector(1/20 * M$x^2) + as.vector(M$y^2)
Z.mesh = matrix(Z, nrow(M$x))
contour(x, y, Z.mesh, drawlabels = F, nlevels = 40)

# Gradient descent on `func`, recording every visited point (row per step).
# BUG FIX: the original loop never updated x — there was no gradient step at
# all, so the "history" was n.iter copies of the start point. The gradient is
# computed by central differences inline so the function is self-contained.
grad.desc <- function(func, init.x, lr, n.iter) {
  x <- init.x
  x.hist <- init.x
  for (i.iter in 1:n.iter) {
    # central-difference gradient of func at the current x
    h <- 1e-4
    grad <- matrix(0, nrow = nrow(x), ncol = ncol(x))
    for (i.col in 1:ncol(x)) {
      for (i.row in 1:nrow(x)) {
        temp.x <- x[i.row, i.col]
        x[i.row, i.col] <- temp.x + h
        plusH <- do.call(func, list(x))
        x[i.row, i.col] <- temp.x - h
        minusH <- do.call(func, list(x))
        grad[i.row, i.col] <- (plusH - minusH) / (2 * h)
        x[i.row, i.col] <- temp.x
      }
    }
    x <- x - lr * grad          # the descent step the original was missing
    x.hist <- rbind(x.hist, x)
  }
  return(x.hist)
}
x.init = matrix(c(-7, 2), nrow = 1)
# BUG FIX: `gd` was plotted but never created — run the descent first.
gd = grad.desc(func02R, x.init, 0.1, 100)
lines(gd, type='o', col='green', pch=20)   # overlay the path on the contour plot
```

# データ解析基礎論A week07

```r
dat <- read.csv("http://www.matsuka.info/data_folder/dktb312.txt")
# Dummy (treatment) coding of the 4-level method factor; 8 observations per level.
dat2 <- data.frame(result = dat$result,
                   c1 = c(rep(0,8), rep(1,8), rep(0,16)),
                   c2 = c(rep(0,16), rep(1,8), rep(0,8)),
                   c3 = c(rep(0,24), rep(1,8)))
dat2.lm <- lm(result~., data=dat2)

# Orthogonal-style contrast coding of the same factor.
dat3 <- data.frame(result = dat$result,
                   c1 = c(rep(-3,8), rep(1,24)),
                   c2 = c(rep(0,8), rep(-2,8), rep(1,16)),
                   c3 = c(rep(0,16), rep(-1,8), rep(1,8)))
dat3.lm <- lm(result~c1+c2+c3, data=dat3)

# Trend analysis within method.X across the 4 durations.
# FIX: was dat$method=="method.x" (lowercase) — the level used everywhere
# else is "method.X", so the lowercase filter selected zero rows.
plot(dat$result~dat$duration, data=dat[dat$method=="method.X",])
result <- dat$result[dat$method=="method.X"]
CL = c(rep(-3,5), rep(-1,5), rep(1,5), rep(3,5))   # linear trend contrast
CQ = c(rep(-1,5), rep(1,5), rep(1,5), rep(-1,5))   # quadratic trend contrast
CC = c(rep(-3,5), rep(1,5), rep(-1,5), rep(3,5))   # cubic trend contrast

# or: let R supply orthogonal polynomial contrasts for the duration factor
dat2 <- dat[dat$method=="method.X",]
dat2.lm <- lm(result~duration, data=dat2,
              contrasts=list(duration="contr.poly"))

# Shift the last 5 observations down by 3, plot the cell means, refit trends.
result2 = result
result2[16:20] = result2[16:20] - 3
plot(tapply(result2, dat$duration[dat$method=="method.X"], mean),
     pch=20, col="red", lwd=3, type="o", cex=3, ylab="mean", xlab="time")
trend2.lm <- lm(result2~CL+CQ+CC)

Cont = contr.poly(4)   # built-in orthogonal polynomial contrasts, 4 levels

## 2x2 between-subjects eye-fixation demo (simulated data).
subj.gender = c(rep("male",30), rep("female",30))
pic.gender = rep(c(rep("male",15), rep("female",15)), 2)
# cell means: 15 m-m (50), 15 m-f (70), 15 f-m (65), 15 f-f (50)
eye.fix = c(round(rnorm(15,50,5)),
            round(rnorm(15,70,5)),
            round(rnorm(15,65,5)),
            round(rnorm(15,50,5)))
datE = data.frame(eye.fix = eye.fix, subj.gender = subj.gender, pic.gender = pic.gender)
# Interaction plot: participant gender on the x-axis, stimulus gender as traces.
interaction.plot(datE$subj.gender, datE$pic.gender, datE$eye.fix,
                 trace.label = "Gender of Stimuli", xlab = "Gender of Participants",
                 ylab = "Number of fixation", lwd = 3, type = "o", pch = c(17,20), cex = 3,
                 col = c('skyblue','coral'), legend = T)
# Two-way ANOVA via the linear model with the interaction term.
datE.lm <- lm(eye.fix~subj.gender*pic.gender, data=datE)

# NOTE(review): the lines below operate on other datasets (pretest / sales /
# price / method / boil) whose loading code is not shown in this chunk —
# confirm which data frames are in scope at this point in the lecture.
dat$pretest = dat$pretest * 0.1

# Regression on standardized variables, then residual diagnostics.
dat.lm01 <- lm(sales~price, data = as.data.frame(scale(dat)))
plot(dat.lm01, which = 1)   # residuals vs fitted
plot(dat.lm01, which = 2)   # normal Q-Q

par(mfrow = c(2,2))
plot(dat.lm01)              # all four diagnostic panels at once

# Reorder the levels of the method factor.
dat$method = factor(dat$method, levels(dat$method)[c(1,3,4,2)])

# Log-pressure vs temperature, with diagnostics.
boil.lm <- lm(log(pressure)~temp, data = boil)
par(mfrow = c(2,2))
plot(boil.lm)

# Refit without observation 12 (flagged as influential in the diagnostics).
boil.lm <- lm(log(pressure)~temp, data = boil[-12,])
plot(boil.lm)