txt = "You say goodbye and I say hello. you say goodbay and I say hello"
txt2corpus <- function(txt){
txt = tolower(txt)
txt = gsub('[.]', ' . sos',txt)
words = unlist(strsplit(c('sos',txt), " "))
uniq.words = unique(words)
n.uniq = length(uniq.words)
n.words = length(words)
corpus = rep(0,n.words)
corpus = match(words,uniq.words)
return(corpus)
}
corp = txt2corpus(txt)
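As a quick check, the tokenizer maps the vocabulary in order of first appearance (sos=1, you=2, say=3, goodbye=4, and=5, i=6, hello=7, .=8), so printing the corpus should give:
print(corp)
# [1] 1 2 3 4 5 6 3 7 8 1 2 3 4 5 6 3 7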
corp2contxt1SRNN = function(corpus){
len.corp = length(corpus)
# creating target matrix
idxT = cbind(1:(len.corp-1), corpus[2:len.corp])
targ1S = matrix(0,nrow=len.corp-1,ncol=length(unique(corpus)))
targ1S[idxT]=1
# creating context matrices
idxC = cbind(1:(len.corp-1),corpus[1:(len.corp-1)])
contxt = matrix(0,nrow=len.corp-1,ncol=length(unique(corpus)))
contxt[idxC]=1
return(list(y=targ1S,x=contxt))
}
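A shape check (a minimal sketch; dat.chk is an illustrative name): x and y are one-hot matrices with one row per (input word, next word) pair, so for the 17-token corpus above both should be 16 x 8.
dat.chk <- corp2contxt1SRNN(corp)
dim(dat.chk$x); dim(dat.chk$y)   # both 16 x 8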
init.RNN <- function(n.uniq,size.hidden){
W.h = matrix(rnorm(size.hidden*size.hidden),nrow=size.hidden)*0.01
W.x = matrix(rnorm(n.uniq*size.hidden),nrow=n.uniq)*0.01
b = matrix(rnorm(size.hidden),nrow=1)*0.01
return(list(W.h = W.h, W.x= W.x, b = b))
}
MatMult.forwd <- function(x, W){
return(x%*%W)
}
MatMult.bckwd <- function(x, W, dout){
dx = dout%*%t(W)
dW = t(x)%*%dout
return(list(dx = dx, dW = dW))
}
affine.forwd <- function(x, W, b){
return(x%*%W + matrix(1, nrow = nrow(x), ncol = 1)%*%b)
}
affine.bckwd <- function(x, W, b, dout){
dx = dout%*%t(W)
dW = t(x)%*%dout
db = colSums(dout)
return(list(dx = dx, dW = dW, db = db))
}
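The backward modules are easy to verify against finite differences. A minimal sketch (num.grad, x0, W0, b0, loss.fun are illustrative names, not part of the course code):
num.grad <- function(f, X, eps = 1e-5){   # central-difference gradient of a scalar function f
  G <- X
  for (i in seq_along(X)){
    Xp <- X; Xp[i] <- Xp[i] + eps
    Xm <- X; Xm[i] <- Xm[i] - eps
    G[i] <- (f(Xp) - f(Xm))/(2*eps)
  }
  return(G)
}
x0 <- matrix(rnorm(6), nrow = 2); W0 <- matrix(rnorm(9), nrow = 3); b0 <- matrix(rnorm(3), nrow = 1)
loss.fun <- function(W){ sum(affine.forwd(x0, W, b0)) }  # scalar loss: sum of outputs
max(abs(num.grad(loss.fun, W0) - affine.bckwd(x0, W0, b0, matrix(1, 2, 3))$dW))  # should be ~0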
softmax.forwd <- function(x, target){
max.x = apply(x,1,max)
C = ncol(x)
x = x - max.x%*%matrix(1,nrow=1,ncol=C)
y = exp(x)/rowSums(exp(x))
delta = 1e-7;
R = nrow(as.matrix(y))
return(-sum(target*log(y + delta))/R)
}
softmax.pred <- function(x, target){
max.x = apply(x,1,max)
C = ncol(x)
x = x - max.x%*%matrix(1,nrow=1,ncol=C)
y = exp(x)/rowSums(exp(x))
return(y)
}
softmax.bckwd <- function(x, target, dout = 1){
max.x = apply(x, 1, max)
R = nrow(x)
C = ncol(x)
x = x - max.x%*%matrix(1,nrow=1,ncol=C)
y = exp(x)/rowSums(exp(x))
return((y-target)/R)
}
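The same trick checks the softmax-with-loss gradient (reuses num.grad from above; the small delta inside softmax.forwd makes the match approximate):
x0 <- matrix(rnorm(6), nrow = 2)
t0 <- matrix(0, 2, 3); t0[cbind(1:2, c(1, 3))] <- 1   # one-hot targets
max(abs(num.grad(function(x) softmax.forwd(x, t0), x0) - softmax.bckwd(x0, t0)))  # should be ~0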
RNN.forward <- function(h.prev, x, network){
b.size = nrow(x)
h = h.prev%*%network$W.h + x%*%network$W.x
hb = h+matrix(1,nrow=b.size,ncol=1)%*%network$b
h.next = tanh(hb)
return(h.next)
}
RNN.backward <- function(dh.next, network, x, h.next, h.prev){
dt = dh.next * (1- h.next^2)
db = colSums(dt)
dW.h = t(h.prev)%*%dt
dh.prev = dt%*%t(network$W.h)
dW.x = t(x)%*%dt
dx = dt%*%t(network$W.x)
return(list(db = db, dW.h = dW.h, dh.prev = dh.prev, dW.x = dW.x, dx=dx))
}
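A quick shape check for one RNN step (illustrative values: batch of 4, vocabulary of 8, hidden size 7):
net.chk <- init.RNN(8, 7)
x.chk <- matrix(0, 4, 8); x.chk[cbind(1:4, 1:4)] <- 1   # four one-hot inputs
h0 <- matrix(0, 4, 7)
h1 <- RNN.forward(h0, x.chk, net.chk)                   # 4 x 7
grads <- RNN.backward(matrix(1, 4, 7), net.chk, x.chk, h1, h0)
dim(grads$dW.x); dim(grads$dW.h)                        # 8 x 7 and 7 x 7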
txt = "You say goodbye and I say hello."
batch.size = 3
time = 3
size.hidden = 7
corp = txt2corpus(txt)
dat<-corp2contxt1SRNN(corp)
network <- init.RNN(ncol(dat$y), size.hidden)
corp.len = nrow(dat$y)
h.prev =array(0, c(batch.size, size.hidden, time))
h.next = array(0, c(batch.size, size.hidden, time))
W.out = matrix(rnorm(size.hidden * ncol(dat$y)), nrow = size.hidden)
b.out = matrix(rnorm(ncol(dat$y)), nrow = 1)
n.rep = 100000;lambda = 0.25;
loss = rep(0,n.rep)
for (i.rep in 1:n.rep){
for (i.t in 1:time){
idx = seq(i.t, corp.len, time)
h.next[, , i.t] = RNN.forward(h.prev[, , i.t], dat$x[idx,], network)
if (i.t < time){
h.prev[, , (i.t+1)] = h.next[, , i.t]
}
}
dWHs = matrix(0,nrow=nrow(network$W.h),ncol=ncol(network$W.h))
dWXs = matrix(0,nrow=nrow(network$W.x),ncol=ncol(network$W.x))
dBs = matrix(0,nrow=nrow(network$b),ncol=ncol(network$b))
dWOs = matrix(0,nrow=nrow(W.out),ncol=ncol(W.out))
dBOs = matrix(0,nrow=nrow(b.out),ncol=ncol(b.out))
d.prev = matrix(0,nrow=batch.size,ncol=size.hidden)
L = 0
for (i.t in time:1){
idx = seq(i.t, corp.len, time)
O = affine.forwd(h.next[,,i.t], W.out, b.out)
L = L + softmax.forwd(O, dat$y[idx,])
ds = softmax.bckwd(O, dat$y[idx,], 1)
dW.o = affine.bckwd(h.next[,,i.t], W.out, b.out, ds)
dWOs = dWOs + dW.o$dW
dBOs = dBOs + dW.o$db
# BPTT: the tanh derivative uses this step's hidden state, h.next[,,i.t]
RNN.d = RNN.backward(dW.o$dx+d.prev, network, dat$x[idx,], h.next[,,i.t], h.prev[,,i.t])
dWHs = dWHs + RNN.d$dW.h
dWXs = dWXs + RNN.d$dW.x
dBs = dBs + RNN.d$db
d.prev = RNN.d$dh.prev
}
loss[i.rep] = L
W.out = W.out - lambda*dWOs
b.out = b.out - lambda*dBOs
network$W.h = network$W.h - lambda*dWHs
network$W.x = network$W.x - lambda*dWXs
network$b = network$b - lambda*dBs
}
plot(loss, type='l')
for (i.t in 1:time){
idx = seq(i.t, corp.len, time)
O = affine.forwd(h.next[,,i.t], W.out, b.out)
print(softmax.pred(O, dat$y[idx,]))
}
Seminar in Cognitive Information Analysis: DL-2 ch. 5
txt = "You say goodbye and I say hello."
txt2corpus <- function(txt){
txt = tolower(txt)
txt = gsub('[.]', ' . sos',txt)
words = unlist(strsplit(c('sos',txt), " "))
uniq.words = unique(words)
n.uniq = length(uniq.words)
n.words = length(words)
corpus = rep(0,n.words)
corpus = match(words,uniq.words)
return(corpus)
}
corp = txt2corpus(txt)
corp2contxt1SRNN = function(corpus){
len.corp = length(corpus)
# creating target matrix
idxT = cbind(1:(len.corp-1), corpus[2:len.corp])
targ1S = matrix(0,nrow=len.corp-1,ncol=length(unique(corpus)))
targ1S[idxT]=1
# creating context matrices
idxC = cbind(1:(len.corp-1),corpus[1:(len.corp-1)])
contxt = matrix(0,nrow=len.corp-1,ncol=length(unique(corpus)))
contxt[idxC]=1
return(list(y=targ1S,x=contxt))
}
dat<-corp2contxt1SRNN(corp)
init.RNN <- function(n.uniq,size.hidden){
W.h = matrix(rnorm(size.hidden*size.hidden),nrow=size.hidden)*0.01
W.x = matrix(rnorm(n.uniq*size.hidden),nrow=n.uniq)*0.01
b = matrix(rnorm(size.hidden),nrow=1)*0.01
return(list(W.h = W.h, W.x= W.x, b = b))
}
RNN.forward <- function(h.prev, x, network){
b.size = nrow(x)
h = h.prev%*%network$W.h + x%*%network$W.x
hb = h+matrix(1,nrow=b.size,ncol=1)%*%network$b
h.next = tanh(hb)
return(h.next)
}
RNN.backward <- function(dh.next, network, x, h.next, h.prev){
dt = dh.next * (1- h.next^2)
db = colSums(dt)
dW.h = t(h.prev)%*%dt
dh.prev = dt%*%t(network$W.h)
dW.x = t(x)%*%dt
dx = dt%*%t(network$W.x)
return(list(db = db, dW.h = dW.h, dh.prev = dh.prev, dW.x = dW.x, dx=dx))
}
affine.forwd <- function(x, W, b){
return(x%*%W + matrix(1, nrow = nrow(x), ncol = 1)%*%b)
}
affine.bckwd <- function(x, W, b, dout){
dx = dout%*%t(W)
dW = t(x)%*%dout
db = colSums(dout)
return(list(dx = dx, dW = dW, db = db))
}
softmax.forwd <- function(x, target){
max.x = apply(x,1,max)
C = ncol(x)
x = x - max.x%*%matrix(1,nrow=1,ncol=C)
y = exp(x)/rowSums(exp(x))
delta = 1e-7;
R = nrow(as.matrix(y))
return(-sum(target*log(y + delta))/R)
}
softmax.bckwd <- function(x, target, dout = 1){
max.x = apply(x, 1, max)
R = nrow(x)
C = ncol(x)
x = x - max.x%*%matrix(1,nrow=1,ncol=C)
y = exp(x)/rowSums(exp(x))
return((y-target)/R)
}
# checking modules
batch.size = 3; time = 3
corp.len = nrow(dat$y)
size.hidden = 5
network <- init.RNN(ncol(dat$y), size.hidden)
h.prev =array(0, c(batch.size, size.hidden, time))
h.next = array(0, c(batch.size, size.hidden, time))
for (i.t in 1:time){
idx = seq(i.t, corp.len, time)
h.next[, , i.t] = RNN.forward(h.prev[, , i.t], dat$x[idx,], network)
if (i.t < time){
h.prev[, , (i.t+1)] = h.next[, , i.t]
}
}
W.out = matrix(rnorm(size.hidden * 8), nrow = size.hidden)
b.out = matrix(rnorm(8), nrow = 1)
O = affine.forwd(h.next[,,3], W.out, b.out)
L = softmax.forwd(O, dat$y[c(3,6,9),])
ds = softmax.bckwd(O, dat$y[c(3,6,9),], 1)
dW.o = affine.bckwd(h.next[,,3], W.out, b.out, ds)
Seminar in Cognitive Information Analysis: DL-V2 ch. 4
# p136
embed.forwd <- function(W, idx){
return(W[idx,])
}
# example inputs (assumed for illustration): a small embedding matrix and two context-word index vectors
W = matrix(rnorm(8*5), nrow=8)*0.01
idx1 = c(2,3,4); idx2 = c(4,5,6)
h1 <- embed.forwd(W,idx1)
h2 <- embed.forwd(W,idx2)
h = (h1 + h2)/2
embed.dot.forwd <- function(W, h, idx){
return(O=rowSums(W[idx,]*h))
}
# network and batch.y are defined in the running example below
s = embed.dot.forwd(network$W.out,h,batch.y)
sigmoid.forwd <- function(O){
return(1/(1+exp(-O)))
}
sigmoidWL.forwd <- function(O,target){
y = 1/(1+exp(-O))
loss=-sum(target*log(y)+(1-target)*log(1 - y))
return(loss)
}
sigmoidWL.backwd <- function(O,target,dout=1){
y = 1/(1+exp(-O))
dW = (y - target)*dout
return(dW)
}
embed.dot.backwd <- function(W,h,idx,dout){
dh = dout * W[idx,]
dW = dout * h
return(list(dh = dh, dW = dW))
}
embed.backwd <- function(W, idx, dout){
dW = matrix(0,nrow(W),ncol(W))
for (i.idx in 1:length(idx)){
dW[idx[i.idx], ] = dW[idx[i.idx], ] + dout[i.idx, ]
}
return(dW)
}
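A minimal demo (W.demo is an illustrative name): rows of W act as word vectors, and embed.backwd accumulates gradients back into whichever rows were picked, so duplicated indices accumulate.
W.demo <- matrix(1:15, nrow = 5)                   # 5 words, 3 dimensions
embed.forwd(W.demo, c(2, 2, 4))                    # rows 2, 2, 4 of W.demo
embed.backwd(W.demo, c(2, 2, 4), matrix(1, 3, 3))  # row 2 sums to 2s, row 4 to 1s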
# 158
txt2corpus <- function(txt){
txt = tolower(txt)
txt = gsub('[.]', ' .',txt)
words = unlist(strsplit(txt, " "))
uniq.words = unique(words)
n.uniq = length(uniq.words)
n.words = length(words)
corpus = rep(0,n.words)
corpus = match(words,uniq.words)
return(corpus)
}
corp2contxt <- function(corpus){
len.corp = length(corpus)
target = corpus[2:(len.corp-1)]
col1 = corpus[1:(len.corp-2)]
col2 = corpus[3:len.corp]
context = cbind(col1,col2)
return(list(context=context,target = target))
}
init.W2V <- function(n.uniq,size.hidden){
W.in = matrix(rnorm(n.uniq*size.hidden),nrow=n.uniq)*0.01
W.out = matrix(rnorm(n.uniq*size.hidden),nrow=n.uniq)*0.01
return(list(W.in = W.in, W.out= W.out))
}
### running example
network<-init.W2V(8,5)
n.batch = 3;
txt = "You say goodbye and I say hello.."
corp = txt2corpus(txt)
corp = c(8,8,corp) # pad the start with token id 8 so early words keep two-sided contexts (the double '.' in txt pads the end)
dat<-corp2contxt(corp)
n.data = length(dat$target)
n.iter = 3000;
lambda=0.1;
loss = rep(0, n.iter)
for (i.iter in 1:n.iter){
samp.idx = sample(1:n.data,n.batch)
batch.c1 = dat$context[samp.idx,1]
batch.c2 = dat$context[samp.idx,2]
batch.y = dat$target[samp.idx]
h1 <- embed.forwd(network$W.in,batch.c1)
h2 <- embed.forwd(network$W.in,batch.c2)
h = 0.5 * (h1 + h2)
# positive only
s = embed.dot.forwd(network$W.out,h,batch.y)
z = sigmoidWL.forwd(s,1)
loss[i.iter] = z
ds = sigmoidWL.backwd(s, 1, 1)
dE = embed.dot.backwd(network$W.out,h, batch.y, ds)
dh = dE$dh*0.5
dIn1 = embed.backwd(network$W.in,dat$context[samp.idx,1], dh)
dIn2 = embed.backwd(network$W.in,dat$context[samp.idx,2], dh)
network$W.out[batch.y,] = network$W.out[batch.y,] - lambda*dE$dW
network$W.in = network$W.in - lambda*dIn1
network$W.in = network$W.in - lambda*dIn2
}
plot(loss, type='l')
samp.idx = c(2:6,8,9,1)
batch.c1 = dat$context[samp.idx,1]
batch.c2 = dat$context[samp.idx,2]
batch.y = dat$target[samp.idx]
h1 <- embed.forwd(network$W.in,batch.c1)
h2 <- embed.forwd(network$W.in,batch.c2)
h = 0.5 * (h1 + h2)
s = MatMult.forwd(h,t(network$W.out))
z = sigmoid.forwd(s)
res=cbind(z,batch.y)
par(mfrow=c(8,1))
for (i in 1:8){
col.spec = rep("black",8)
col.spec[i]="orange"
barplot(res[i, 1:8],col=col.spec)
}
# with negative sampling
# p158
unigram.sampler <- function(corpus, target, power, sample.size){
neg.corpus <- corpus[-which(corpus == target)]
uniq.word = sort(unique(neg.corpus)) # sorted so the order matches table() below
n.word = length(neg.corpus)
prob = (as.vector(table(neg.corpus))/n.word)^power
prob = prob/sum(prob)
sample.idx = sample(uniq.word, sample.size, prob = prob)
return(sort(sample.idx))
}
get.neg.sample <- function(corpus, target, power, sample.size){
sample.id = matrix(0, nrow = length(target), ncol = sample.size)
for (i.targ in 1:length(target)){
sample.id[i.targ,] = unigram.sampler(corpus, target[i.targ], power, sample.size)
}
return(sample.id)
}
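Usage sketch (assumes corp from the running example above): two negative word ids per positive target, sampled from the unigram distribution raised to the 0.75 power.
set.seed(1)
get.neg.sample(corp, c(2, 3, 4), 0.75, 2)   # 3 x 2 matrix of sampled word ids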
init.W2V <- function(n.uniq,size.hidden){
W.in = matrix(rnorm(n.uniq*size.hidden),nrow=n.uniq)*0.01
W.out = matrix(rnorm(n.uniq*size.hidden),nrow=n.uniq)*0.01
return(list(W.in = W.in, W.out= W.out))
}
network<-init.W2V(8,5)
n.batch = 3;
txt = "You say goodbye and I say hello.."
corp = txt2corpus(txt)
corp = c(8,8,corp)
dat<-corp2contxt(corp)
n.data = length(dat$target)
n.iter = 3000;
lambda=0.1;
loss = rep(0, n.iter)
sample.size = 2
for (i.iter in 1:n.iter){
samp.idx = sample(1:n.data,n.batch)
batch.c1 = dat$context[samp.idx,1]
batch.c2 = dat$context[samp.idx,2]
batch.y = dat$target[samp.idx]
h1 <- embed.forwd(network$W.in,batch.c1)
h2 <- embed.forwd(network$W.in,batch.c2)
h = 0.5 * (h1 + h2)
# positive
s = embed.dot.forwd(network$W.out,h,batch.y)
z = sigmoidWL.forwd(s,1)
loss[i.iter] = z
ds = sigmoidWL.backwd(s, 1, 1)
dE = embed.dot.backwd(network$W.out,h, batch.y, ds)
dh = dE$dh*0.5
dIn1 = embed.backwd(network$W.in,dat$context[samp.idx,1], dh)
dIn2 = embed.backwd(network$W.in,dat$context[samp.idx,2], dh)
network$W.out[batch.y,] = network$W.out[batch.y,] - lambda*dE$dW
network$W.in = network$W.in - lambda*dIn1
network$W.in = network$W.in - lambda*dIn2
# negative
neg.set <- get.neg.sample(corp,batch.y, 0.75, sample.size)
for (i.neg in 1:sample.size){
s = embed.dot.forwd(network$W.out,h,neg.set[,i.neg])
z = sigmoidWL.forwd(s,0)
loss[i.iter] = loss[i.iter] + z
ds = sigmoidWL.backwd(s, 0, dout=1)
dE = embed.dot.backwd(network$W.out,h,neg.set[,i.neg], ds)
dh = dE$dh*0.5
dIn1 = embed.backwd(network$W.in, dat$context[samp.idx,1],dh)
dIn2 = embed.backwd(network$W.in, dat$context[samp.idx,2],dh)
network$W.out[neg.set[,i.neg],] = network$W.out[neg.set[,i.neg],] - lambda/sample.size*dE$dW
network$W.in = network$W.in - lambda*dIn1
network$W.in = network$W.in - lambda*dIn2
}
}
plot(loss, type='l')
samp.idx = c(2:6,8,9,1)
batch.c1 = dat$context[samp.idx,1]
batch.c2 = dat$context[samp.idx,2]
batch.y = dat$target[samp.idx]
h1 <- embed.forwd(network$W.in,batch.c1)
h2 <- embed.forwd(network$W.in,batch.c2)
h = 0.5 * (h1 + h2)
s = MatMult.forwd(h,t(network$W.out))
z = sigmoid.forwd(s)
res=cbind(z,batch.y)
par(mfrow=c(8,1))
for (i in 1:8){
col.spec = rep("black",8)
col.spec[i]="orange"
barplot(res[i, 1:8],col=col.spec)
}
par(mfrow=c(4,1))
for (i in 1:4){
col.spec = rep("black",8)
col.spec[i]="orange"
barplot(res[i, 1:8],col=col.spec)
}
Graduate Seminar in Cognitive Information Analysis: ch. 23 & 24
dat<-read.csv("http://peach.l.chiba-u.ac.jp/course_folder/OrdinalProbitData-1grp-1.csv" )
model = "
model {
for ( i in 1:Ntotal ) {
y[i] ~ dcat( pr[i,1:nYlevels] )
pr[i,1] <- pnorm( thresh[1] , mu , 1/sigma^2 )
for ( k in 2:(nYlevels-1) ) {
pr[i,k] <- max( 0 , pnorm( thresh[ k ] , mu , 1/sigma^2 )- pnorm( thresh[k-1] , mu , 1/sigma^2 ) )
}
pr[i,nYlevels] <- 1 - pnorm( thresh[nYlevels-1] , mu , 1/sigma^2 )
}
mu ~ dnorm( (1+nYlevels)/2 , 1/(nYlevels)^2 )
sigma ~ dunif( nYlevels/1000 , nYlevels*10 )
for ( k in 2:(nYlevels-2) ) { # 1 and nYlevels-1 are fixed, not stochastic
thresh[k] ~ dnorm( k+0.5 , 1/2^2 )
}
}
"
writeLines( model , "model.txt" )
y = dat$Y
Ntotal = length(y)
nYlevels = max(y)
thresh = rep(NA,nYlevels-1)
thresh[1] = 1 + 0.5
thresh[nYlevels-1] = nYlevels-1 + 0.5
dataList = list(y = y , nYlevels = nYlevels,thresh = thresh, Ntotal = Ntotal)
parameters = c( "pr" , "thresh","mu","sigma")
jagsModel = jags.model( "model.txt", data=dataList, n.chains=3, n.adapt=500 )
update( jagsModel , n.iter=1000)
codaSamples = coda.samples( jagsModel , variable.names=parameters, n.iter=10000, thin=5)
mcmcMat<-as.matrix(codaSamples)
meanT = rowMeans(cbind(mcmcMat[,"thresh[1]"],mcmcMat[,"thresh[2]"],mcmcMat[,"thresh[3]"],mcmcMat[,"thresh[4]"],mcmcMat[,"thresh[5]"],
mcmcMat[,"thresh[6]"]))
plot(mcmcMat[,"thresh[1]"],meanT,xlim=c(0.5,7))
points(mcmcMat[,"thresh[2]"],meanT,xlim=c(0.5,7))
points(mcmcMat[,"thresh[3]"],meanT,xlim=c(0.5,7))
points(mcmcMat[,"thresh[4]"],meanT,xlim=c(0.5,7))
points(mcmcMat[,"thresh[5]"],meanT,xlim=c(0.5,7))
points(mcmcMat[,"thresh[6]"],meanT,xlim=c(0.5,7))
HDI.plot(mcmcMat[,"mu"],xlab="mu")
HDI.plot(mcmcMat[,"sigma"],xlab="sigma")
model2 = "
model {
for ( i in 1:Ntotal ) {
y[i] ~ dcat( pr[i,1:nYlevels] )
pr[i,1] <- pnorm( thresh[1] , mu[x[i]] , 1/sigma[x[i]]^2 )
for ( k in 2:(nYlevels-1) ) {
pr[i,k] <- max( 0 , pnorm( thresh[ k ] , mu[x[i]] , 1/sigma[x[i]]^2 )
- pnorm( thresh[k-1] , mu[x[i]] , 1/sigma[x[i]]^2 ) )
}
pr[i,nYlevels] <- 1 - pnorm( thresh[nYlevels-1] , mu[x[i]] , 1/sigma[x[i]]^2 )
}
for ( j in 1:2 ) { # 2 groups
mu[j] ~ dnorm( (1+nYlevels)/2 , 1/(nYlevels)^2 )
sigma[j] ~ dunif( nYlevels/1000 , nYlevels*10 )
}
for ( k in 2:(nYlevels-2) ) { # 1 and nYlevels-1 are fixed, not stochastic
thresh[k] ~ dnorm( k+0.5 , 1/2^2 )
}
}
"
writeLines( model2 , "model2.txt" )
dat<-read.csv("http://peach.l.chiba-u.ac.jp/course_folder/OrdinalProbitData1.csv")
y = dat$Y
x = as.numeric(as.factor(dat$X))  # X is a group label; factor-code it first
Ntotal = length(y)
nYlevels = max(y)
thresh = rep(NA,nYlevels-1)
thresh[1] = 1 + 0.5
thresh[nYlevels-1] = nYlevels-1 + 0.5
dataList = list(y = y , x= x ,nYlevels = nYlevels,thresh = thresh, Ntotal = Ntotal)
parameters = c( "pr" , "thresh","mu","sigma")
jagsModel = jags.model( "model2.txt", data=dataList, n.chains=3, n.adapt=500 )
update( jagsModel , n.iter=1000)
codaSamples = coda.samples( jagsModel , variable.names=parameters, n.iter=10000, thin=5)
mcmcMat<-as.matrix(codaSamples)
HDI.plot(mcmcMat[,"mu[1]"])
HDI.plot(mcmcMat[,"mu[2]"])
HDI.plot(mcmcMat[,"mu[1]"]-mcmcMat[,"mu[2]"])
model3 = "
data {
xm <- mean(x)
xsd <- sd(x)
for ( i in 1:Ntotal ) {
zx[i] <- ( x[i] - xm ) / xsd
}
}
model {
for ( i in 1:Ntotal ) {
y[i] ~ dcat( pr[i,1:nYlevels] )
pr[i,1] <- pnorm( thresh[1] , mu[i] , 1/sigma^2 )
for ( k in 2:(nYlevels-1) ) {
pr[i,k] <- max( 0 , pnorm( thresh[ k ] , mu[i] , 1/sigma^2 )
- pnorm( thresh[k-1] , mu[i] , 1/sigma^2 ) )
}
pr[i,nYlevels] <- 1 - pnorm( thresh[nYlevels-1] , mu[i] , 1/sigma^2 )
mu[i] <- zbeta0 + sum( zbeta * zx[i] )
}
# Priors vague on standardized scale:
zbeta0 ~ dnorm( (1+nYlevels)/2 , 1/(nYlevels)^2 )
zbeta ~ dnorm( 0 , 1/(nYlevels)^2 )
zsigma ~ dunif( nYlevels/1000 , nYlevels*10 )
# Transform to original scale:
beta <- ( zbeta / xsd )
beta0 <- zbeta0 - sum( zbeta * xm / xsd )
sigma <- zsigma
for ( k in 2:(nYlevels-2) ) { # 1 and nYlevels-1 are fixed
thresh[k] ~ dnorm( k+0.5 , 1/2^2 )
}
}
"
writeLines( model3, con="model3.txt" )
dat<-read.csv("http://peach.l.chiba-u.ac.jp/course_folder/OrdinalProbitData-LinReg-2.csv")
y = dat$Y
x = as.numeric(dat$X)
Ntotal = length(y)
Nx =1
nYlevels = max(y)
thresh = rep(NA,nYlevels-1)
thresh[1] = 1 + 0.5
thresh[nYlevels-1] = nYlevels-1 + 0.5
dataList = list(y = y , Nx= 1,x= x ,nYlevels = nYlevels,thresh = thresh, Ntotal = Ntotal)
parameters = c("thresh","mu","sigma","beta","beta0")
jagsModel = jags.model( "model3.txt", data=dataList, n.chains=3, n.adapt=500 )
update( jagsModel , n.iter=1000)
codaSamples = coda.samples( jagsModel , variable.names=parameters, n.iter=10000, thin=5)
mcmcMat<-as.matrix(codaSamples)
plot(dat$X,dat$Y,pch=20,cex=3)
for (i.plot in 1:100){
abline(a=mcmcMat[i.plot,"beta0"],b=mcmcMat[i.plot,"beta"],col="orange",lwd=2)
}
HDI.plot(mcmcMat[,"beta"])
#plot(1,1,type='n',xlim=c(0,6),ylim=c(0,6))
#abline(a=0,b=1,lwd=2)
#x.temp = seq(0,6,length.out=100)
#y = dnorm(x.temp,mean=3,sd=1)
#lines(x=-y*3+1,y=x.temp)
#lines(x=c(-1,3),y=c(3,3))
#lines(x=c(-1,0.5),y=c(0.5,0.5),lty=2,col="red")
#lines(x=c(-1,1.5),y=c(1.5,1.5),lty=2,col="red")
#lines(x=c(-1,2.5),y=c(2.5,2.5),lty=2,col="red")
#lines(x=c(-1,3.5),y=c(3.5,3.5),lty=2,col="red")
#lines(x=c(-1,4.5),y=c(4.5,4.5),lty=2,col="red")
#lines(x=c(-1,5.5),y=c(5.5,5.5),lty=2,col="red")
model4="
model {
for ( i in 1:Ncell ) {
y[i] ~ dpois( lambda[i] )
lambda[i] <- exp( a0 + a1[x1[i]] + a2[x2[i]] + a1a2[x1[i],x2[i]] )
}
a0 ~ dnorm( yLogMean , 1/(yLogSD*2)^2 )
for ( j1 in 1:Nx1Lvl ) { a1[j1] ~ dnorm( 0.0 , 1/a1SD^2 ) }
a1SD ~ dgamma(agammaShRa[1],agammaShRa[2])
for ( j2 in 1:Nx2Lvl ) { a2[j2] ~ dnorm( 0.0 , 1/a2SD^2 ) }
a2SD ~ dgamma(agammaShRa[1],agammaShRa[2])
for ( j1 in 1:Nx1Lvl ) { for ( j2 in 1:Nx2Lvl ) {
a1a2[j1,j2] ~ dnorm( 0.0 , 1/a1a2SD^2 )
} }
a1a2SD ~ dgamma(agammaShRa[1],agammaShRa[2])
# Convert a0,a1[],a2[],a1a2[,] to sum-to-zero b0,b1[],b2[],b1b2[,] :
for ( j1 in 1:Nx1Lvl ) { for ( j2 in 1:Nx2Lvl ) {
m[j1,j2] <- a0 + a1[j1] + a2[j2] + a1a2[j1,j2] # cell means
} }
b0 <- mean( m[1:Nx1Lvl,1:Nx2Lvl] )
for ( j1 in 1:Nx1Lvl ) { b1[j1] <- mean( m[j1,1:Nx2Lvl] ) - b0 }
for ( j2 in 1:Nx2Lvl ) { b2[j2] <- mean( m[1:Nx1Lvl,j2] ) - b0 }
for ( j1 in 1:Nx1Lvl ) { for ( j2 in 1:Nx2Lvl ) {
b1b2[j1,j2] <- m[j1,j2] - ( b0 + b1[j1] + b2[j2] )
} }
# Compute predicted proportions:
for ( j1 in 1:Nx1Lvl ) { for ( j2 in 1:Nx2Lvl ) {
expm[j1,j2] <- exp(m[j1,j2])
ppx1x2p[j1,j2] <- expm[j1,j2]/sum(expm[1:Nx1Lvl,1:Nx2Lvl])
} }
for ( j1 in 1:Nx1Lvl ) { ppx1p[j1] <- sum(ppx1x2p[j1,1:Nx2Lvl]) }
for ( j2 in 1:Nx2Lvl ) { ppx2p[j2] <- sum(ppx1x2p[1:Nx1Lvl,j2]) }
}"
writeLines( model4, con="model4.txt" )
dat = read.csv( file="http://peach.l.chiba-u.ac.jp/course_folder/HairEyeColor.csv" )
y=dat$Count
x1=dat$Eye
x2=dat$Hair
x1 = as.numeric(as.factor(x1))
x1levels = levels(as.factor(dat$Eye))
x2 = as.numeric(as.factor(x2))
x2levels = levels(as.factor(dat$Hair))
Nx1Lvl = length(unique(x1))
Nx2Lvl = length(unique(x2))
Ncell = length(y) # number of rows of data, not sum(y)
yLogMean = log(sum(y)/(Nx1Lvl*Nx2Lvl))
yLogSD = log(sd(c(rep(0,Ncell-1),sum(y))))
MeanSD2gamma <- function( mean, sd ) {
shape = mean^2 / sd^2
rate = mean / sd^2
return(data.frame(shape,rate))
}
ModeSD2gamma <- function( mode, sd ) {
rate = ( mode + sqrt( mode^2 + 4 * sd^2 ) )/( 2 * sd^2 )
shape = 1 + mode * rate
return(data.frame(shape,rate))
}
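A quick check of the reparameterization: mean 2 and sd 1 should give shape = 4 and rate = 2, since a gamma's mean is shape/rate and its sd is sqrt(shape)/rate.
MeanSD2gamma(mean = 2, sd = 1)   # shape 4, rate 2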
temp=ModeSD2gamma(mode=yLogSD , sd=2*yLogSD)
agammaShRa = unlist(temp )
data_list = list(
y = y ,x1 = x1 , x2 = x2 ,
Ncell = Ncell , Nx1Lvl = Nx1Lvl , Nx2Lvl = Nx2Lvl ,
yLogMean = yLogMean , yLogSD = yLogSD ,agammaShRa = agammaShRa
)
jagsModel =jags.model("model4.txt", data = data_list, n.chains = 3, n.adapt = 500)
update(jagsModel, 500)
codaSamples = coda.samples(jagsModel, variable.names = c("b0", "b1", "b2", "b1b2", "ppx1p", "ppx2p", "ppx1x2p"),
n.iter = 10000)
mcmcMat<-as.matrix(codaSamples)
EyeBlueHairBlack <- mcmcMat[,"ppx1x2p[1,1]"]
HDI.plot(EyeBlueHairBlack)
EyeBlue_vs_EyeBrown_at_HairBlack <- mcmcMat[,"b1b2[1,1]"] - mcmcMat[,"b1b2[2,1]"] - mcmcMat[,"b1[2]"] + mcmcMat[,"b1[1]"]
HDI.plot(EyeBlue_vs_EyeBrown_at_HairBlack)
EyeBlue_vs_EyeBrown_at_Blond <- mcmcMat[,"b1b2[1,2]"] - mcmcMat[,"b1b2[2,2]"] - mcmcMat[,"b1[2]"] + mcmcMat[,"b1[1]"]
HDI.plot(EyeBlue_vs_EyeBrown_at_Blond)
diff <- EyeBlue_vs_EyeBrown_at_HairBlack - EyeBlue_vs_EyeBrown_at_Blond
HDI.plot(diff)
Basic Self-Study B02
dec2bin<-function(num, digits=8) {
bin=c()
if (num==0){
bin=0
} else {
while(num!=0){
rem=num%%2
num=num%/%2
bin=c(rem,bin)
}
}
if (length(bin) < digits){
res=matrix(0,nrow=1,ncol=digits)
res[(digits-length(bin)+1):digits]=bin
} else {res=bin}
return(res)
}
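Usage examples:
dec2bin(5)        # 0 0 0 0 0 1 0 1
dec2bin(5, 4)     # 0 1 0 1
dec2bin(255)      # 1 1 1 1 1 1 1 1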
Fundamentals of Data Analysis B: Log-linear Analysis
freq<-c(33,37,7,23)
pref<-factor(c('soba','udon','soba','udon'))
region<-factor(c('east','east','west','west'))
dat<-data.frame(pref,region,freq)
dat.table=table(pref,region)
dat.table[cbind(pref,region)]<-freq
dat.loglinCE_A<-loglin(dat.table, list(1), fit=T,param=T)
dat.loglinCE_B<-loglin(dat.table,list(2), fit=T,param=T)
dat.loglinIND<-loglin(dat.table,list(1,2), fit=T,param=T)
dat.loglinSAT<-loglin(dat.table,list(c(1,2)), fit=T,param=T)
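The fitted models can be compared with the likelihood-ratio statistic that loglin() returns; for example, testing the independence model against the saturated model (a sketch):
pchisq(dat.loglinIND$lrt, dat.loglinIND$df, lower.tail = FALSE)   # p-value for lack of fit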
freq<-c(9,5,2,4,16,10,26,28)
gender<-factor(c(rep('female',4),rep('male',4)))
affil<-factor(rep(c('L','L','E','E'),2))
circle<-factor(rep(c('tennis','astro'),4))
dat<-data.frame(gender,affil,circle,freq)
dat.table<-table(gender,affil,circle)
dat.table[cbind(gender,affil,circle)]<-freq
dat.loglin2<-loglin(dat.table,list(1), fit=T,param=T)
dat.loglin3<-loglin(dat.table,list(1,3), fit=T,param=T)
dat.loglin4<-loglin(dat.table,list(1,2,3), fit=T,param=T)
dat.loglin5<-loglin(dat.table,list(c(1,3),2), fit=T,param=T)
dat.loglin6<-loglin(dat.table,list(c(1,3),c(1,2)), fit=T,param=T)
dat.loglin7<-loglin(dat.table,list(c(1,3),c(1,2),c(2,3)), fit=T,param=T)
dat.loglin8<-loglin(dat.table,list(c(1,2,3)), fit=T,param=T)
source('http://peach.l.chiba-u.ac.jp/course_folder/cuUtil02.R')
Fundamentals of Data Analysis B: Test Theory
dat<-read.table("http://peach.l.chiba-u.ac.jp/course_folder/survey_data01.txt")
install.packages("psych")
library("psych")
ca<-alpha(dat)
dat<-read.csv("http://peach.l.chiba-u.ac.jp/course_folder/survey2.csv")
image(cor(dat)[10:1,1:10])
ca1_5 = alpha(dat[,1:5])
ca1_5
ca6_10 = alpha(dat[,6:10])
ca6_10
F1<-factanal(dat[,1:5],1)
F2<-factanal(dat[,6:10],1)
library(sem)
fa.model=cfa(reference.indicator=FALSE)
# cfa() reads the model specification interactively; type these lines at the prompt:
F1: q1,q2,q3,q4,q5
F2: q6,q7,q8,q9,q10
fa.model<-update(fa.model)
# update() is also interactive; type the following to drop the factor covariance:
delete, F1<->F2
fa.result<-sem(fa.model, cov(dat), 300)
summary(fa.result)
install.packages("ltm")
library(ltm)
dat<-read.table("http://peach.l.chiba-u.ac.jp/course_folder/survey_data01.txt")
dat = dat-1 # rescore items to 0/1 (ltm expects dichotomous 0/1 data)
descript(dat)
irt1P<-rasch(dat)
plot(irt1P)
GoF.rasch(irt1P)
person.fit(irt1P)
item.fit(irt1P)
theta = factor.scores(irt1P)
cor(rowSums(theta[[1]][,1:10]),theta[[1]]$z1)
irt2P<-ltm(dat~z1)
plot(irt2P)
person.fit(irt2P)
item.fit(irt2P)
theta2P = factor.scores(irt2P)
cor(rowSums(theta2P[[1]][,1:10]),theta2P[[1]]$z1)
Seminar in Cognitive Information Analysis B: DL2 ch. 3
# p99
ctxt = c(1,rep(0,6))
W = matrix(rnorm(7*3),nrow=7)
h = ctxt%*%W
# p 100
MatMult.forwd <- function(x, W){
return(x%*%W)
}
res <- MatMult.forwd(ctxt,W)
# p105
c1 = c(1,rep(0,6))
c2 = rep(0,7); c2[3]=1
W.in = matrix(rnorm(7*3),nrow=7)
W.out = matrix(rnorm(7*3),nrow=3)
h1 = MatMult.forwd(c1,W.in)
h2 = MatMult.forwd(c2,W.in)
h = 0.5*(h1+h2)
s = MatMult.forwd(h,W.out)
print(s)
# p107
softmax1.pred <- function(x){
max.x = max(x)
x = x - max.x
y = exp(x)/sum(exp(x))
return(y)
}
softmaxCE1.forwd <- function(x, target){
max.x = max(x)
x = x - max.x
y = exp(x)/sum(exp(x))
delta = 1e-7;
return(-sum(target*log(y + delta)))
}
targ=rep(0,7); targ[2]=1
softmaxCE1.forwd(s, targ)   # cross-entropy loss for the score vector s above
# p112
txt = "You say goodbye and I say hello."
txt = tolower(txt)
txt = gsub('[.]', ' .',txt)
words = unlist(strsplit(txt, " "))
uniq.words = unique(words)
n.uniq = length(uniq.words)
n.words = length(words)
corpus = rep(0,n.words)
corpus = match(words,uniq.words)
txt = "You say goodbye and I say hello.";
txt2corpus <- function(txt){
txt = tolower(txt)
txt = gsub('[.]', ' .',txt)
words = unlist(strsplit(txt, " "))
uniq.words = unique(words)
n.uniq = length(uniq.words)
n.words = length(words)
corpus = rep(0,n.words)
corpus = match(words,uniq.words)
return(corpus)
}
corp = txt2corpus(txt)
len.corp = length(corp)
target = corp[2:(len.corp-1)]
col1 = corp[1:(len.corp-2)]
col2 = corp[3:len.corp]
context = cbind(col1,col2)
corp2contxt <- function(corpus){
len.corp = length(corpus)
target = corpus[2:(len.corp-1)]
col1 = corpus[1:(len.corp-2)]
col2 = corpus[3:len.corp]
context = cbind(col1,col2)
return(list(context=context,target = target))
}
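Usage sketch (dat.w2v is an illustrative name): each target word is paired with its left and right neighbor.
dat.w2v <- corp2contxt(corp)
head(cbind(dat.w2v$context, target = dat.w2v$target))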
# p114
len.corp = length(corpus)
idxT = cbind(1:(len.corp-2), corpus[2:(len.corp-1)])
targ1S = matrix(0,nrow=len.corp-2,ncol=length(unique(corpus)))
targ1S[idxT]=1
idxC1 = cbind(1:(len.corp-2),corpus[1:(len.corp-2)])
idxC2 = cbind(1:(len.corp-2),corpus[3:len.corp])
contxt1 = matrix(0,nrow=len.corp-2,ncol=length(unique(corpus)))
contxt2 = matrix(0,nrow=len.corp-2,ncol=length(unique(corpus)))
contxt1[idxC1]=1
contxt2[idxC2]=1
corp2contxt1S = function(corpus){
len.corp = length(corpus)
# creating target matrix
idxT = cbind(1:(len.corp-2), corpus[2:(len.corp-1)])
targ1S = matrix(0,nrow=len.corp-2,ncol=length(unique(corpus)))
targ1S[idxT]=1
# creating context matrices
idxC1 = cbind(1:(len.corp-2),corpus[1:(len.corp-2)])
idxC2 = cbind(1:(len.corp-2),corpus[3:len.corp])
contxt1 = matrix(0,nrow=len.corp-2,ncol=length(unique(corpus)))
contxt2 = matrix(0,nrow=len.corp-2,ncol=length(unique(corpus)))
contxt1[idxC1]=1
contxt2[idxC2]=1
return(list(target=targ1S,contxt1=contxt1,contxt2=contxt2))
}
# p116~
init.W2V <- function(n.uniq,size.hidden){
W.in = matrix(rnorm(n.uniq*size.hidden),nrow=n.uniq)*0.01
W.out = matrix(rnorm(n.uniq*size.hidden),nrow=size.hidden)*0.01
return(list(W.in = W.in, W.out= W.out))
}
softmax.forwd <- function(x, target){
max.x = apply(x,1,max)
C = ncol(x)
x = x - max.x%*%matrix(1,nrow=1,ncol=C)
y = exp(x)/rowSums(exp(x))
delta = 1e-7;
R = nrow(as.matrix(y))
return(-sum(target*log(y + delta))/R)
}
softmax.bckwd <- function(x, target, dout = 1){
max.x = apply(x, 1, max)
R = nrow(x)
C = ncol(x)
x = x - max.x%*%matrix(1,nrow=1,ncol=C)
y = exp(x)/rowSums(exp(x))
return((y-target)/R)
}
MatMult.bckwd <- function(x, W, dout){
dx = dout%*%t(W)
dW = t(x)%*%dout
return(list(dx = dx, dW = dW))
}
softmax.pred <- function(x, target){
max.x = apply(x,1,max)
C = ncol(x)
x = x - max.x%*%matrix(1,nrow=1,ncol=C)
y = exp(x)/rowSums(exp(x))
return(y)
}
network<-init.W2V(7,5)
dat = corp2contxt1S(corp)   # build the one-hot targets/contexts used below
n.batch = 3;
n.data = nrow(dat$target)
n.iter = 1000;
lambda=0.1;
loss = rep(0, n.iter)
for (i.iter in 1:n.iter){
samp.idx = sample(1:n.data,n.batch)
batch.c1 = dat$contxt1[samp.idx,]
batch.c2 = dat$contxt2[samp.idx,]
batch.y = dat$target[samp.idx,]
h1 <- MatMult.forwd(batch.c1, network$W.in)
h2 <- MatMult.forwd(batch.c2, network$W.in)
h = 0.5 * (h1 + h2)
s = MatMult.forwd(h,network$W.out)
z = softmax.forwd(s,batch.y)
loss[i.iter] = z
ds = softmax.bckwd(s, batch.y, 1)
da = MatMult.bckwd(h,network$W.out,ds)
da$dx = da$dx*0.5   # h = 0.5*(h1+h2), so halve the gradient flowing back to each context
dIn1 = MatMult.bckwd(batch.c1,network$W.in,da$dx)
dIn2 = MatMult.bckwd(batch.c2,network$W.in,da$dx)
network$W.out = network$W.out - lambda*da$dW
network$W.in = network$W.in - lambda*dIn1$dW
network$W.in = network$W.in - lambda*dIn2$dW
}
plot(loss, type='l')
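A prediction sketch after training (assumes dat and network from the loop above): feed every context pair forward and inspect the predicted next-word distributions; each row should put most of its mass on the true target column.
h.all = 0.5*(MatMult.forwd(dat$contxt1, network$W.in) + MatMult.forwd(dat$contxt2, network$W.in))
pred = softmax.pred(MatMult.forwd(h.all, network$W.out), dat$target)
round(pred, 2)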
Fundamentals of Data Analysis B: GLM
dat<-read.csv("http://peach.l.chiba-u.ac.jp/course_folder/logisticReg01.txt ")
plot(dat$study, dat$pass, pch=20,ylab="Passed or not",xlab="Hours studied",cex=2,cex.lab=1.5,yaxt='n')
dat.lr <- glm(pass~study,family=binomial, data=dat)
summary(dat.lr)
coef = coefficients(dat.lr)
pred.pass.p = 1/(1+exp(-(coef[1]+coef[2]*0:30)))
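The fitted curve can be overlaid on the scatter plot above (a quick sketch):
lines(0:30, pred.pass.p, lwd = 2, col = "red")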
##
pred.pass.p = 1/(1+exp(-(coef[1]+coef[2]*c(10:15))))
odds=pred.pass.p/(1-pred.pass.p)
exp(coef[2])
dat<-read.csv("http://www.matsuka.info/data_folder/datWA01.txt")
dat.lr<-glm(gender~shoesize,family=binomial,data=dat)
anova(dat.lr, test ="Chisq")
dat.lr0<-glm(gender~1,family="binomial",data=dat)
dat.lrS<-glm(gender~shoesize,family=binomial,data=dat)
dat.lrh<-glm(gender~h,family="binomial",data=dat)
M=matrix(c(52,48,8,42),nrow=2)
rownames(M)<-c("present", "absent")
colnames(M)<-c("smoker",'non-smoker')
dat<-as.data.frame((as.table(M)))
colnames(dat)<-c("cancer","smoking","freq")
dat=dat[rep(1:nrow(dat),dat$freq),1:2]
rownames(dat)<-c()
dat.glm<-glm(cancer~smoking,family=binomial,data=dat)
dat<-read.csv("http://www.matsuka.info/data_folder/cda7-16.csv")
dat.glm<-glm(survival~age, family=binomial,data=dat)
dat.glm2<-glm(survival~Ncigarettes,family=binomial,data=dat)
dat.glm3<-glm(survival~NdaysGESTATION,family=binomial,data=dat)
dat.glmAllAdd=glm(survival~age+Ncigarettes+NdaysGESTATION,family=binomial,data=dat)
dat.glmAllMult=glm(survival~age*Ncigarettes*NdaysGESTATION,family=binomial,data=dat)
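The additive and full-interaction models are nested, so a likelihood-ratio test compares them directly:
anova(dat.glmAllAdd, dat.glmAllMult, test = "Chisq")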
library(MASS)
dat<-read.csv("http://peach.l.chiba-u.ac.jp/course_folder/poissonReg01.txt ")
dat.pr<-glm(eye.count~attr,family = "poisson",data=dat)
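For interpretation, exponentiated Poisson coefficients are multiplicative effects on the expected count:
summary(dat.pr)
exp(coefficients(dat.pr))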
Fundamentals of Data Analysis B: Factor Analysis + Cluster Analysis
source('http://peach.l.chiba-u.ac.jp/course_folder/cuUtil02.R')
dat<-read.csv("http://www.matsuka.info/data_folder/tdkCFA.csv")
dat.model1<-factanal(dat,1)
dat.model2<-factanal(dat,2)
dat.model3<-factanal(dat,3)
dat.model4<-factanal(dat,4)
library(sem)
model01=cfa(reference.indicator=FALSE)
# cfa() reads the model interactively; type at the prompt:
F1: extrovert, cheerful, leadership, antisocial, talkative, motivated, hesitance, popularity
model02=cfa(reference.indicator=FALSE)
F1: extrovert, leadership, motivated, hesitance
F2: cheerful, antisocial, talkative, popularity
mod2<-sem(model02, cov(dat), 100)
summary(mod2)
opt <- options(fit.indices = c("RMSEA"))
cldata<-data.frame(var1=c(4,1,5,1,5), var2=c(1,5,4,3,1))
cldata.cluster=hclust(dist(cldata),method="average")
plot(cldata.cluster,cex=2)
dat<-read.csv("http://matsuka.info/data_folder/tdkClust.csv", header=TRUE, row.names=1)
dat.cluster=hclust(dist(dat),method="average")
plot(dat.cluster,cex=1.5)
dat.kmeans=kmeans(dat, centers=3, nstart=10)
plot(dat,col=dat.kmeans$cluster+2,pch=20,cex=2)
plot(dat[,1:2],col=dat.kmeans$cluster+1,pch=20,cex=5)
text(dat[,1:2],row.names(dat),cex=2)
res<-cu.KMC.rep(dat,10,1000)
dat<-read.csv("http://www.matsuka.info/data_folder/tdkCFA.csv")
res<-cu.KMC.rep(dat,10,1000)
# MDS
dat<-data.frame(p1=c(4,1,5,1,5),p2=c(1,5,4,3,1))
rownames(dat)<-c('a','b','c','d','e')
dat.mds<-cmdscale(dist(dat),2)
plot(dat.mds[,1],dat.mds[,2], type='n')
text(dat.mds[,1],dat.mds[,2],labels=row.names(dat))
dat.cluster=hclust(dist(dat))
plot(dat.cluster,cex=1.5)
dat<-read.csv("http://matsuka.info/data_folder/tdkMDS02.csv", row.name=1)
dat.mds<-cmdscale(dat,2,eig=T)
plot(dat.mds$points[,1],dat.mds$points[,2], type='n')
text(dat.mds$points[,1],dat.mds$points[,2],labels=row.names(dat), cex=2)