library('sm')
Package 'sm', version 2.2-6.0: type help(sm) for summary information
library(MASS)
Attaching package: 'MASS'
The following object is masked from 'package:sm':
    muscle
data(galaxies)
?galaxies
hist(galaxies)
hist(galaxies,freq=F)
hist(galaxies,freq=F,nclass=20)
hist(galaxies,breaks=quantile(galaxies,seq(0,1,len=20)))
data(faithful)
attach(faithful)
?faithful
hist(waiting,freq=F)
hist(waiting,freq=F,nclass=20)
hist(waiting,breaks=quantile(waiting,seq(0,1,len=20)))
hist(eruptions,freq=F)
hist(eruptions,freq=F,nclass=20)
hist(eruptions,breaks=quantile(eruptions,seq(0,1,len=30)))
rmixing=function(n,alpha,l0,l1,p0,p1)
# Generate n observations from a two-component mixture model: with probability
# alpha draw from the law l1 (parameters p1), otherwise from l0 (parameters p0)
{
z=rbinom(n,1,alpha)
f1=eval(parse(text=paste('r',l1,'(',paste(c(n,p1),collapse=','),')',sep='')))
f0=eval(parse(text=paste('r',l0,'(',paste(c(n,p0),collapse=','),')',sep='')))
x=z*f1+(1-z)*f0
return(x)
}
dmixing=function(t,alpha,l0,l1,p0,p1)
# Evaluate the density of the mixture model at the points t
{
res=alpha*eval(parse(text=paste('d',l1,'(t,',paste(p1,collapse=','),')',sep='')))+(1-alpha)*eval(parse(text=paste('d',l0,'(t,',paste(p0,collapse=','),')',sep='')))
return(res)
}
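# Quick sanity check (a sketch, using the parameter values of the example
# below; tt is just a temporary grid): the mixture density alpha*f1+(1-alpha)*f0
# must integrate to 1, which a crude Riemann sum over a wide grid confirms.
tt=seq(-20,20,by=0.001)
sum(dmixing(tt,0.3,'norm','norm',c(8,1),c(0,2)))*0.001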
#Example
n=300
alpha=0.3
l0='norm'
p0=c(8,1)
l1='norm'
p1=c(0,2)
s=seq(-10,10,0.001)
x=rmixing(n,alpha,l0,l1,p0,p1)
#### histogram
par(mfrow=c(1,3))
hist(x,freq=F,ylim=c(0,0.4))
lines(s,dmixing(s,alpha,l0,l1,p0,p1),col='red')
hist(x,freq=F,ylim=c(0,0.4),nclass=20)
lines(s,dmixing(s,alpha,l0,l1,p0,p1),col='red')
hist(x,breaks=quantile(x,seq(0,1,len=20)),ylim=c(0,0.4))
lines(s,dmixing(s,alpha,l0,l1,p0,p1),col='red')
par(mfrow=c(2,3))
plot(density(x,bw=0.001,kernel='rectangular'),ylim=c(0,0.4),xlim=c(-7,12))
lines(s,dmixing(s,alpha,l0,l1,p0,p1),col='red')
plot(density(x,bw=0.01,kernel='rectangular'),ylim=c(0,0.4),xlim=c(-7,12))
lines(s,dmixing(s,alpha,l0,l1,p0,p1),col='red')
plot(density(x,bw=0.1,kernel='rectangular'),ylim=c(0,0.4),xlim=c(-7,12))
lines(s,dmixing(s,alpha,l0,l1,p0,p1),col='red')
plot(density(x,kernel='rectangular'),ylim=c(0,0.4),xlim=c(-7,12))
lines(s,dmixing(s,alpha,l0,l1,p0,p1),col='red')
plot(density(x,bw=10,kernel='rectangular'),ylim=c(0,0.4),xlim=c(-7,12))
lines(s,dmixing(s,alpha,l0,l1,p0,p1),col='red')
plot(density(x,kernel='rectangular',bw=100),ylim=c(0,0.4),xlim=c(-7,12))
lines(s,dmixing(s,alpha,l0,l1,p0,p1),col='red')
par(mfrow=c(1,1))
hist(x,freq=F,ylim=c(0,0.4),xlim=c(-7,12))
lines(density(x,kernel='rectangular'),col='blue')
lines(s,dmixing(s,alpha,l0,l1,p0,p1),col='red')
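# When bw is not specified, density() uses Silverman's rule of thumb (bw.nrd0);
# the bandwidth actually used above can be inspected directly:
density(x,kernel='rectangular')$bw
bw.nrd0(x)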
# Galaxies
hist(galaxies,freq=F,ylim=range(density(galaxies,kernel='rectangular')$y))
lines(density(galaxies,kernel='rectangular'),col='blue')
# Faithful
hist(waiting,freq=F,ylim=range(density(waiting,kernel='rectangular')$y))
lines(density(waiting,kernel='rectangular'),col='blue')
hist(eruptions,freq=F,ylim=range(density(eruptions,kernel='rectangular')$y))
lines(density(eruptions,kernel='rectangular'),col='blue')
par(mfrow=c(2,3))
plot(density(x,bw=0.001,kernel='g'),ylim=c(0,0.4),xlim=c(-7,12))
lines(s,dmixing(s,alpha,l0,l1,p0,p1),col='red')
plot(density(x,bw=0.01,kernel='g'),ylim=c(0,0.4),xlim=c(-7,12))
lines(s,dmixing(s,alpha,l0,l1,p0,p1),col='red')
plot(density(x,bw=0.1,kernel='g'),ylim=c(0,0.4),xlim=c(-7,12))
lines(s,dmixing(s,alpha,l0,l1,p0,p1),col='red')
plot(density(x,kernel='g'),ylim=c(0,0.4),xlim=c(-7,12))
lines(s,dmixing(s,alpha,l0,l1,p0,p1),col='red')
plot(density(x,bw=10,kernel='g'),ylim=c(0,0.4),xlim=c(-7,12))
lines(s,dmixing(s,alpha,l0,l1,p0,p1),col='red')
plot(density(x,kernel='g',bw=100),ylim=c(0,0.4),xlim=c(-7,12))
lines(s,dmixing(s,alpha,l0,l1,p0,p1),col='red')
par(mfrow=c(1,1))
hist(x,freq=F,ylim=c(0,0.4),xlim=c(-7,12))
lines(density(x,kernel='g'),col='blue')
lines(s,dmixing(s,alpha,l0,l1,p0,p1),col='red')
par(mfrow=c(2,3))
plot(density(x,bw=1,kernel='r'),main='Uniform',ylim=c(0,0.3),xlim=c(-7,12))
lines(s,dmixing(s,alpha,l0,l1,p0,p1),col='red')
plot(density(x,bw=1,kernel='g'),main='Gaussian',ylim=c(0,0.3),xlim=c(-7,12))
lines(s,dmixing(s,alpha,l0,l1,p0,p1),col='red')
plot(density(x,bw=1,kernel='e'),main='Epanechnikov',ylim=c(0,0.3),xlim=c(-7,12))
lines(s,dmixing(s,alpha,l0,l1,p0,p1),col='red')
plot(density(x,bw=1,kernel='triangular'),main='Triangular',ylim=c(0,0.3),xlim=c(-7,12))
lines(s,dmixing(s,alpha,l0,l1,p0,p1),col='red')
plot(density(x,bw=1,kernel='b'),main='Biweight',ylim=c(0,0.3),xlim=c(-7,12))
lines(s,dmixing(s,alpha,l0,l1,p0,p1),col='red')
plot(density(x,kernel='cosine',bw=1),main='Cosine',ylim=c(0,0.3),xlim=c(-7,12))
lines(s,dmixing(s,alpha,l0,l1,p0,p1),col='red')
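# With a common bandwidth the kernel shape has only a mild influence: the
# Gaussian and Epanechnikov estimates above are very close (d.g and d.e are
# just temporary objects for this comparison).
d.g=density(x,bw=1,kernel='g',from=-7,to=12,n=512)
d.e=density(x,bw=1,kernel='e',from=-7,to=12,n=512)
max(abs(d.g$y-d.e$y))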
######## Quadratic loss
# number of simulations
J=100
hs=(1:20)/20
s=seq(-10,10,0.01)
h0=5
s0=1001
QUAD_LOSS=function(s,hs,J,n,alpha,l0,l1,p0,p1,h0,s0)
# Monte Carlo study (J replications of samples of size n) of the pointwise bias,
# variance and quadratic loss of the kernel density estimate over the grid s and
# the bandwidths hs; h0 and s0 optionally select a bandwidth and a point to detail
{
ls= length(s)
lh=length(hs)
EST=array(NA,c(J,ls,lh))
for (j in 1:J)
{
x=rmixing(n,alpha,l0,l1,p0,p1)
for (h in 1:lh)
{
EST[j,,h]=sm.density(x,h=hs[h],display='none',ylim=c(0,0.4),nbins=0,eval.points=s)$estimate
}
}
BIAS=apply(EST,c(2,3),mean)-dmixing(s,alpha,l0,l1,p0,p1)
VAR=apply(EST,c(2,3),var)
EQ=BIAS^2+VAR
nl=2
if (!is.null(s0)) nl=nl+1
layout(matrix(c(1:3,rep(4,3),5:(3*nl+1)),byrow=TRUE, ncol=3))
plot(hs,abs(apply(BIAS,2,mean)),type='l',ylab='|BIAS|')
plot(hs,apply(VAR,2,mean),type='l',ylab='VAR')
plot(hs,apply(EQ,2,mean),type='l',ylab='EQ')
abline(v=sm.density(x,method='normal',display="none")$h,col='blue')
abline(v=sm.density(x,method='sj',display="none")$h,col='green')
abline(v=sm.density(x,method='cv',display="none")$h,col='red')
hopt=which.min(apply(EQ,2,mean))
if (is.null(h0)) h0=hopt
EST2=EST[,,h0]
plot(s,EST2[1,],type='l',ylab='Estimates',main=paste('h=',hs[h0],sep=''))
for (j in 1:J) lines(s,EST2[j,])
lines(s,dmixing(s,alpha,l0,l1,p0,p1),col='red')
if (!is.null(h0))
{
plot(s,abs(BIAS[,h0]),type='l',ylab='|BIAS|',main=paste('h=',hs[h0],sep=''))
plot(s,VAR[,h0],type='l',ylab='VAR',main=paste('h=',hs[h0],sep=''))
plot(s,EQ[,h0],type='l',ylab='EQ',main=paste('h=',hs[h0],sep=''))
}
if (!is.null(s0))
{
plot(hs,abs(BIAS[s0,]),type='l',ylab='|BIAS|',main=paste('s=',s[s0],sep=''))
plot(hs,VAR[s0,],type='l',ylab='VAR',main=paste('s=',s[s0],sep=''))
plot(hs,EQ[s0,],type='l',ylab='EQ',main=paste('s=',s[s0],sep=''))
}
return(list(BIAS=BIAS,VAR=VAR,EQ=EQ,hopt=hs[hopt]))
}
RES=QUAD_LOSS(s,hs,J,n,alpha,l0,l1,p0,p1,NULL,NULL)
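# Bandwidth among hs that minimises the simulated average quadratic loss
# (an approximation of the MISE-optimal bandwidth):
RES$hopt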
plot(s,dmixing(s,alpha,l0,l1,p0,p1),col='red',type='l')
lines(density(x,kernel='e'),col='blue')
lines(density(x,bw='nrd',kernel='e'))
lines(density(x,bw='SJ',kernel='e'),col='green')
lines(density(x,bw='ucv',kernel='e'),col='orange')
Warning in bw.ucv(x): minimum occurred at one end of the range
plot(s,dmixing(s,alpha,l0,l1,p0,p1),col='red',type='l')
sm.density(x,method='normal',kernel='e',add=T)
sm.density(x,method='sj',kernel='e',col='green',add=T)
sm.density(x,method='cv',kernel='e',col='orange',add=T)
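# The bandwidths chosen by the three rules can also be compared numerically with
# the corresponding selectors from the stats package (comparable, though not
# identical, to the h values used by sm.density):
c(nrd=bw.nrd(x),sj=bw.SJ(x),ucv=bw.ucv(x))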
# Galaxies
hist(galaxies,freq=F,ylim=range(density(galaxies,kernel='e',bw='ucv')$y))
lines(density(galaxies,kernel='rectangular',bw='ucv'),col='blue')
lines(density(galaxies,kernel='e',bw='ucv'),col='orange')
# Faithful
hist(waiting,freq=F,ylim=range(density(waiting,kernel='e',bw='ucv')$y))
lines(density(waiting,kernel='rectangular',bw='ucv'),col='blue')
lines(density(waiting,kernel='e',bw='ucv'),col='orange')
hist(eruptions,freq=F,ylim=range(density(eruptions,kernel='e',bw='ucv')$y))
lines(density(eruptions,kernel='rectangular',bw='ucv'),col='blue')
lines(density(eruptions,kernel='e',bw='ucv'),col='orange')
## 1.4. Applications
### 1.4.1. Mode estimation
density.mode=function(x,a,b,M,bw='ucv',kernel='e',plot=T)
# Estimate the mode of the density of x (and the maximum density value) from a
# kernel density estimate evaluated on a grid of M points in [a,b]
{
disc=seq(a,b,length.out=M)
dens=density(x,from=a,to=b,n=M,bw=bw,kernel=kernel)$y
mod=disc[(order(dens))[M]]
max=max(dens)
if (plot) {plot(disc,dens,type='l')}
return(list(mode=mod,max=max))
}
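# A quick check of density.mode on the faithful data (only a sketch; the exact
# value depends on the bandwidth): the highest mode of 'waiting' is expected
# to fall near 80.
density.mode(waiting,40,100,1000,bw='SJ',kernel='e',plot=FALSE)$mode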
sm.mode=function(x,a,b,M,method='cv',plot=T)
# Same as density.mode, but based on sm.density and its bandwidth selectors
{
disc=seq(a,b,length.out=M)
display="none"
if (plot) {display="line"}
dens=sm.density(x,eval.points=disc,method=method,nbins=0,display=display)$estimate
mod=disc[(order(dens))[M]]
max=max(dens)
return(list(mode=mod,max=max))
}
plot(s,dmixing(s,alpha,l0,l1,p0,p1),col='red',type='l')
lines(density(x,bw='ucv',kernel='e'),col='orange')
Warning in bw.ucv(x): minimum occurred at one end of the range
re=density.mode(x,-10,10,1000,bw='ucv',kernel='e',F)
Warning in bw.ucv(x): minimum occurred at one end of the range
segments(re$mod,0,re$mod,re$max)
re1=density.mode(x,-10,3,1000,bw='ucv',kernel='e',F)
Warning in bw.ucv(x): minimum occurred at one end of the range
segments(re1$mod,0,re1$mod,re1$max)
sm.clustering.level.sets=function(x,level=0.20,a=min(x),b=max(x),M=1000,method='sj',plot=T)
# Cluster the data by the connected components of a level set of the kernel
# density estimate (the cut level is a quantile of the estimated density values)
{
disc=seq(a,b,length.out=M)
n=length(x)
o=order(x)
x.and.disc=c(x,disc)
#type=
names(x.and.disc)=rep(c('data','disc'),c(n,M))
x1=sort(x.and.disc)
type1=names(x1)
n1=length(x1)
dens=sm.density(x,eval.points=x1,method=method,nbins=0)$estimate
adjusted.level=quantile(dens,level)
is.over.level=dens>adjusted.level
cluster=rep(0,M+n)
k=1
for (j in 1:(M+n))
{
if (j>1) {if (is.over.level[j]==0 & is.over.level[j-1]==1 ) {k=k+1}}
if (is.over.level[j]==1) {cluster[j]=k}
}
if (plot) {plot(x1,dens,type='l')
abline(adjusted.level,0,col='orange')}
k=max(cluster)
cluster.bounds=matrix(NA,2,k)
palette=rainbow(k)
for (j in 1:k)
{
cluster.bounds[,j]=range(x1[cluster==j])
if (plot) { segments(cluster.bounds[1,j],adjusted.level,cluster.bounds[2,j],adjusted.level,col=palette[j])}
}
cluster.on.data=(cluster[type1=='data'])
#cluster.on.data[cluster.on.data==0]=NA
return(list(cluster=cluster.on.data,cluster.bounds=cluster.bounds))
}
sm.clustering.level.sets(x,0.3)
$cluster
[1] 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
[38] 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
[75] 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2
[112] 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2
[149] 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2
[186] 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2
[223] 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2
[260] 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2
[297] 2 0 0 0
$cluster.bounds
[,1] [,2]
[1,] -2.241888 5.605418
[2,] 2.595019 10.471365
sm.clustering.extrema=function(x,tau=10,a=min(x),b=max(x),M=1000,method='sj',plot=T)
# Cluster the data by cutting at the local minima (valleys) of the kernel density
# estimate; tau controls the window used to detect a change of slope
{
disc=seq(a,b,length.out=M)
n=length(x)
dens=sm.density(x,eval.points= disc,method=method,nbins=0)$estimate
inc=rep(0,M)
valleys=rep(0,M)
k=1
for (j in 1:M)
{
i1=max(1,j-tau)
i2=min(M,j+tau)
ratio=(dens[i2]-dens[i1])/(disc[i2]-disc[i1])
if (ratio>0) {inc[j]=1}
if (j>1) {if (inc[j]==0 & inc[j-1]==1 ) {k=k+1}}
valleys[j]=k
}
if (plot) {plot(disc,dens,type='l')}
k=max(valleys)
thresholds=rep(NA,k)
mins=rep(NA,k)
for (j in 1:k)
{
candidates=disc[valleys==j]
thresholds[j]=candidates[which.min(dens[valleys==j])]
mins[j]=min(dens[valleys==j])
#if (plot) {segments(thresholds[j],0,thresholds[j],mins[j],col='orange')}
}
n=length(x)
cluster=rep(0,n)
for (j in 1:k) {if (j==1) {cluster[x==thresholds[j]]=j}
cluster[x>thresholds[j]]=j}
cluster.disc=rep(0,M)
for (j in 1:k) {if (j==1) {cluster.disc[disc==thresholds[j]]=j}
cluster.disc[disc>thresholds[j]]=j}
K=max(cluster.disc)
palette=rainbow(K+1)
if (plot) {for (j in 0:K) {if (sum(cluster.disc==j)!=0) {
disc.j=disc[cluster.disc==j]
dens.j=dens[cluster.disc==j]
n.j=sum(cluster.disc==j)
polygon(c(disc.j[1],disc.j,disc.j[n.j]),c(0,dens.j,0),
col = palette[j+1])}
} }
#cluster.on.data[cluster.on.data==0]=NA
return(list(cluster=cluster,thresholds=thresholds))
}
sm.clustering.extrema(x,10)
$cluster
[1] 2 1 1 2 1 1 2 2 2 2 2 2 2 2 2 2 2 2 1 1 2 1 1 1 2 2 2 1 1 2 1 2 2 2 1 2 2
[38] 1 2 2 1 2 1 2 2 2 1 2 2 1 2 2 2 1 2 2 1 2 1 2 1 2 1 2 2 1 2 2 2 2 1 2 2 1
[75] 2 1 1 2 2 2 2 2 2 1 2 2 2 2 1 2 2 2 2 2 2 1 2 2 2 1 2 1 2 2 1 2 1 2 2 2 1
[112] 2 1 2 1 2 2 1 2 2 1 1 2 2 2 2 2 2 2 1 2 2 2 2 2 2 2 1 2 1 1 2 2 2 2 1 2 1
[149] 1 2 2 2 2 1 2 2 2 2 1 1 2 2 2 2 2 1 1 2 2 2 2 2 1 2 2 2 2 2 2 2 2 2 2 1 2
[186] 1 1 2 2 2 2 2 2 2 2 1 1 2 1 1 2 2 1 1 1 2 2 1 1 2 1 1 2 2 2 2 2 2 1 2 2 2
[223] 1 2 2 2 2 2 2 2 2 2 2 1 2 2 2 2 2 1 1 2 1 2 2 2 2 2 2 2 2 1 1 1 2 2 2 2 2
[260] 2 2 1 2 1 2 2 2 2 2 2 2 1 2 2 1 2 2 1 2 1 1 2 2 2 1 2 1 2 2 2 2 2 2 2 2 1
[297] 2 2 2 2
$thresholds
[1] -3.832976 4.524096 11.599029
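# The two clustering rules can be compared on the data by cross-tabulating their
# cluster labels (a sketch; both calls recompute the density estimate):
table(level.sets=sm.clustering.level.sets(x,0.3,plot=FALSE)$cluster,
      extrema=sm.clustering.extrema(x,10,plot=FALSE)$cluster)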
data("pressure")
t=pressure$temperature
p=pressure$pressure
plot(t,p)
res=sm.regression(t,p,method='cv')
names(res)
[1] "eval.points" "estimate" "model.y" "se" "sigma"
[6] "h" "hweights" "weights" "data" "call"
res$h
[1] 7.034694
res$eval.points
[1] 0.000000 7.346939 14.693878 22.040816 29.387755 36.734694
[7] 44.081633 51.428571 58.775510 66.122449 73.469388 80.816327
[13] 88.163265 95.510204 102.857143 110.204082 117.551020 124.897959
[19] 132.244898 139.591837 146.938776 154.285714 161.632653 168.979592
[25] 176.326531 183.673469 191.020408 198.367347 205.714286 213.061224
[31] 220.408163 227.755102 235.102041 242.448980 249.795918 257.142857
[37] 264.489796 271.836735 279.183673 286.530612 293.877551 301.224490
[43] 308.571429 315.918367 323.265306 330.612245 337.959184 345.306122
[49] 352.653061 360.000000
res$estimate
[1] 1.999996e-04 5.675549e-04 9.324045e-04 1.671898e-03 3.456180e-03
[6] 5.149423e-03 1.085884e-02 1.972357e-02 2.846339e-02 4.836680e-02
[11] 7.041253e-02 9.758724e-02 1.635183e-01 2.291547e-01 3.373181e-01
[16] 5.150429e-01 6.883220e-01 1.018800e+00 1.423762e+00 1.837755e+00
[21] 2.665554e+00 3.527883e+00 4.566661e+00 6.266176e+00 7.944415e+00
[26] 1.035057e+01 1.348534e+01 1.658134e+01 2.152680e+01 2.696677e+01
[31] 3.269061e+01 4.175806e+01 5.088840e+01 6.170955e+01 7.610754e+01
[36] 9.033653e+01 1.096621e+02 1.321099e+02 1.545683e+02 1.863899e+02
[41] 2.194453e+02 2.548252e+02 3.022988e+02 3.495649e+02 4.055299e+02
[46] 4.725901e+02 5.391172e+02 6.237562e+02 7.149016e+02 8.060000e+02
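# Redrawing the fitted curve returned by sm.regression over the raw data
# (sm.regression already plotted it; this is just the manual equivalent):
plot(t,p)
lines(res$eval.points,res$estimate,col='blue')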
n=300
x=runif(n,.1,3)
y=10*exp(-3*x)+7*cos(2*pi*x)/sqrt(x)+rnorm(n,0,2*exp(x/7))
plot(x,y)
res=sm.regression(x,y,h=.007)
res=sm.regression(x,y,h=.07)
res=sm.regression(x,y,h=.7)
res=sm.regression(x,y,method='cv')
z=seq(.1,3,len=1000)
lines(z,10*exp(-3*z)+7*cos(2*pi*z)/sqrt(z),col='red')
sm.regression(x,y,add=T,col='green')
sm.regression(x,y,add=T,col='blue',method='aicc')
names(res)
[1] "eval.points" "estimate" "model.y" "se" "sigma"
[6] "h" "hweights" "weights" "data" "call"
res$h
[1] 0.05477502
res$eval.points
[1] 0.1273684 0.1858097 0.2442510 0.3026922 0.3611335 0.4195747 0.4780160
[8] 0.5364572 0.5948985 0.6533397 0.7117810 0.7702223 0.8286635 0.8871048
[15] 0.9455460 1.0039873 1.0624285 1.1208698 1.1793110 1.2377523 1.2961935
[22] 1.3546348 1.4130761 1.4715173 1.5299586 1.5883998 1.6468411 1.7052823
[29] 1.7637236 1.8221648 1.8806061 1.9390474 1.9974886 2.0559299 2.1143711
[36] 2.1728124 2.2312536 2.2896949 2.3481361 2.4065774 2.4650187 2.5234599
[43] 2.5819012 2.6403424 2.6987837 2.7572249 2.8156662 2.8741074 2.9325487
[50] 2.9909899
res$estimate
[1] 20.11276678 12.45629990 5.71176184 -0.28079430 -4.42235490 -6.28804198
[7] -6.69466615 -6.37143986 -5.26706665 -3.33710116 -1.11549263 0.91240486
[13] 3.06732639 5.34587969 7.02740926 6.76069413 5.49063965 3.73246571
[19] 1.96408487 0.04600724 -1.81258233 -3.40755684 -4.70969353 -5.71062079
[25] -6.14054995 -5.36660502 -3.66340610 -1.43509827 0.66264250 2.38432873
[31] 4.00777965 5.05853088 4.73226831 4.48533112 3.97167792 2.60929584
[37] 1.26231794 -0.34652994 -1.96030997 -3.05810007 -3.23771757 -3.11169427
[43] -2.91278296 -2.36164057 -1.36826755 -0.02259091 1.74685948 3.12868144
[49] 3.93435717 5.22962117
res=sm.regression(x,y,method='cv',eval.points=sort(x))
plot(res$estimate,y[order(x)])
abline(0,1)
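# A crude numerical counterpart of this diagnostic plot: the correlation between
# the fitted values and the observations at the data points.
cor(res$estimate,y[order(x)])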
Compute_CV=function(x,y,method,weights=NA)
# (Weighted) leave-one-out cross-validation criterion for sm.regression, using
# the bandwidth selected on the full sample by the given method
{
hopt=sm.regression(x,y,method=method,display='none')$h
n=length(x)
if (any(is.na(weights))) weights=rep(1,n)
CV=0
for (i in 1:n)
{
CV=CV+(y[i]-sm.regression(x[-i],y[-i],h=hopt,eval.points=x[i],display='none')$estimate)^2*weights[i]
}
return(CV)
}
Compute_CV(x,y,'cv')
[1] 1828.23
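# The criterion also accepts observation weights; for instance (a purely
# illustrative weighting) giving more weight to small x values:
Compute_CV(x,y,'cv',weights=1/x)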
load('Dopage.RData')
hema
[1] 35.81801 57.69442 54.68696 45.73142 57.02237 57.03312 46.94246 51.21867
[9] 37.23734 44.22872 43.19019 55.46140 48.31791 43.52074 44.31600 46.83335
[17] 43.98864 45.58534 40.67590 41.64725 46.58286 45.40957 46.14132 41.32432
[25] 51.92825 57.57791 45.10522 49.81766 54.59558 59.94339 46.68741 45.03787
[33] 43.78030 44.32881 47.85452 55.01740 49.64086 44.75317 53.80269 41.54966
[41] 52.33685 43.40389 57.02227 45.73978 56.13190 56.19256 45.33341 56.38840
[49] 45.36690 55.29222 42.93230 44.22211 48.11121 44.84885 46.35119 49.09645
[57] 46.76331 50.31146 44.09170 59.08955 42.67297 47.43126 54.06605 56.46029
[65] 39.68902 52.62125 45.54387 45.69921 53.13572 43.71261 34.72460 48.33640
[73] 51.22310 39.82190 45.93546
test
[1] "negatif" "positif" "positif" "negatif" "positif" "positif" "negatif"
[8] "negatif" "negatif" "negatif" "negatif" "positif" "positif" "negatif"
[15] "negatif" "positif" "negatif" "negatif" "negatif" "negatif" "negatif"
[22] "negatif" "negatif" "negatif" "negatif" "positif" "negatif" "positif"
[29] "positif" "positif" "negatif" "negatif" "negatif" "negatif" "negatif"
[36] "positif" "negatif" "negatif" "positif" "negatif" "negatif" "negatif"
[43] "positif" "negatif" "positif" "positif" "negatif" "positif" "negatif"
[50] "positif" "negatif" "negatif" "positif" "negatif" "negatif" "negatif"
[57] "negatif" "negatif" "negatif" "positif" "negatif" "negatif" "positif"
[64] "positif" "negatif" "positif" "negatif" "negatif" "positif" "negatif"
[71] "negatif" "negatif" "positif" "negatif" "negatif"
####### ROC
ROC=function(z,p1,plot=FALSE)
# Empirical ROC curve and AUC (trapezoidal rule) of the scores p1 for the binary
# labels z (z==1 is the positive class)
{
p=c(-0.0001,sort(unique(p1)),1.0001)
lp=length(p)
ROC=matrix(NA,lp,2)
for (s in 1:lp)
{
ROC[s,1]=mean(p1[z==0]>=p[s])
ROC[s,2]=mean(p1[z==1]>=p[s])
}
#AUC=sum((ROC[-(lp),2])*(ROC[-(lp),1]-ROC[-(1),1]))
AUC=sum((ROC[-(lp),2]+ROC[-1,2])*(ROC[-(lp),1]-ROC[-(1),1])/2)
colnames(ROC)=c("FPR","TPR")
if (plot)
{
plot(ROC,type='l',main=paste('AUC =', AUC))
abline(0,1,col='orange')
}
return(list(ROC=ROC,AUC=AUC))
}
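# A standalone check of the ROC function on simulated scores (hypothetical toy
# example, z.toy/p.toy are illustrative names: scores of the positive class are
# stochastically larger, so the AUC should clearly exceed 0.5).
z.toy=rbinom(200,1,0.5)
p.toy=pnorm(rnorm(200,mean=2*z.toy))
ROC(z.toy,p.toy,plot=TRUE)$AUC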
#######
Classif_NP=function(X,Y,X0=NA,plot=FALSE)
# Nonparametric classifier based on kernel regression of the class indicators:
# leave-one-out posterior probabilities and classes for the training data, and
# (optionally) predictions at the new points X0
{
X=as.matrix(X)
n=length(Y)
V=sort(unique(Y))
n_V=length(V)
Prob=matrix(NA,n,n_V)
colnames(Prob)=V
Class=rep(NA,n)
if (!is.na(max(X0)))
{
X0=as.matrix(X0)
P0=matrix(NA,nrow(X0),n_V)
Class0=rep(NA,n)
}
for (v in 1:n_V)
{
z=as.numeric(Y==V[v])
for (i in 1:n )
{
value=sm.regression(X[-i,],z[-i], eval.points=X,eval.grid=FALSE,display='none',method='cv')$estimate[i]
if (!is.na(value)) {Prob[i,v]=value}
if (is.na(value)) {Prob[i,v]=mean(z[-i])}
}
if (!is.na(max(X0))) {P0[,v]=sm.regression(X,z,eval.points=X0,eval.grid=FALSE,display='none',method='cv')$estimate
P0[is.na(P0[,v]),v]=mean(z)}
}
if (n_V==2) {Roc=ROC(Y==V[2],Prob[,2],plot)}
Class=V[apply(Prob,1,which.max)]
V_est=sort(unique(Class))
if (length(V_est)==n_V){M_table=table(Y,Class)}
else {
M_table=matrix(0,n_V,n_V)
M_table0=table(Y,Class)
for (j in 1:length(V_est)) {M_table[,which(V==V_est[j])]=M_table0[,j]}
}
Err=1-(sum(diag(M_table))/sum(M_table))
if (!is.na(max(X0))) {Class0=V[apply(P0,1,which.max)]}
if (!is.na(max(X0))) {return(list(Class=Class, Prob=Prob, M_table=M_table, Err=Err, Class0=Class0,Prob0=P0,Auc=ifelse(n_V==2,Roc$AUC,NA)))}
else {return(list(Class=Class, Prob=Prob, M_table=M_table, Err=Err,Auc=ifelse(n_V==2,Roc$AUC,NA)))}
}
(R=Classif_NP(hema,test))
$Class
[1] "negatif" "positif" "positif" "negatif" "positif" "positif" "negatif"
[8] "positif" "negatif" "negatif" "negatif" "positif" "negatif" "negatif"
[15] "negatif" "negatif" "negatif" "negatif" "negatif" "negatif" "negatif"
[22] "negatif" "negatif" "negatif" "positif" "positif" "negatif" "negatif"
[29] "positif" "positif" "negatif" "negatif" "negatif" "negatif" "negatif"
[36] "positif" "negatif" "negatif" "positif" "negatif" "positif" "negatif"
[43] "positif" "negatif" "positif" "positif" "negatif" "positif" "negatif"
[50] "positif" "negatif" "negatif" "negatif" "negatif" "negatif" "negatif"
[57] "negatif" "negatif" "negatif" "positif" "negatif" "negatif" "positif"
[64] "positif" "negatif" "positif" "negatif" "negatif" "positif" "negatif"
[71] "negatif" "negatif" "negatif" "negatif" "negatif"
$Prob
negatif positif
[1,] 1.00154507 -0.0015450745
[2,] -0.04382121 1.0438212062
[3,] 0.15434486 0.8456551429
[4,] 0.90091255 0.0990874512
[5,] -0.01638242 1.0163824152
[6,] -0.01690106 1.0169010600
[7,] 0.81826853 0.1817314683
[8,] 0.44026871 0.5597312871
[9,] 1.00271923 -0.0027192346
[10,] 0.97256267 0.0274373255
[11,] 0.99912319 0.0008768104
[12,] 0.08722628 0.9127737158
[13,] 0.74966874 0.2503312641
[14,] 0.99257122 0.0074287785
[15,] 0.96949129 0.0305087136
[16,] 0.85954431 0.1404556941
[17,] 0.98031247 0.0196875314
[18,] 0.90954426 0.0904557440
[19,] 1.01159987 -0.0115998736
[20,] 1.01219351 -0.0121935133
[21,] 0.84465421 0.1553457920
[22,] 0.91948677 0.0805132308
[23,] 0.87502350 0.1249764977
[24,] 1.01244526 -0.0124452622
[25,] 0.37195543 0.6280445651
[26,] -0.03976347 1.0397634697
[27,] 0.93550456 0.0644954388
[28,] 0.70593566 0.2940643421
[29,] 0.16263064 0.8373693607
[30,] -0.10987017 1.1098701736
[31,] 0.83712560 0.1628744014
[32,] 0.93883629 0.0611637069
[33,] 0.98622358 0.0137764204
[34,] 0.96902890 0.0309710958
[35,] 0.74592644 0.2540735614
[36,] 0.12494026 0.8750597443
[37,] 0.58845052 0.4115494755
[38,] 0.95204114 0.0479588551
[39,] 0.23653296 0.7634670375
[40,] 1.01233017 -0.0123301720
[41,] 0.33199241 0.6680075853
[42,] 0.99507654 0.0049234647
[43,] -0.01637771 1.0163777104
[44,] 0.90040899 0.0995910059
[45,] 0.03615177 0.9638482341
[46,] 0.03197847 0.9680215341
[47,] 0.92364019 0.0763598122
[48,] 0.01908133 0.9809186730
[49,] 0.92182592 0.0781740837
[50,] 0.10130621 0.8986937855
[51,] 1.00314294 -0.0031429360
[52,] 0.97278976 0.0272102447
[53,] 0.76563576 0.2343642391
[54,] 0.94776350 0.0522365039
[55,] 0.86088772 0.1391122774
[56,] 0.63806219 0.3619378147
[57,] 0.83158518 0.1684148177
[58,] 0.52611805 0.4738819468
[59,] 0.97711055 0.0228894489
[60,] -0.07255244 1.0725524359
[61,] 1.00631785 -0.0063178519
[62,] 0.78037033 0.2196296735
[63,] 0.21164314 0.7883568648
[64,] 0.01457321 0.9854267891
[65,] 1.00865833 -0.0086583277
[66,] 0.35111770 0.6488822958
[67,] 0.91193400 0.0880660046
[68,] 0.90284398 0.0971560181
[69,] 0.30071516 0.6992848429
[70,] 0.98798550 0.0120145043
[71,] 1.00131155 -0.0013115545
[72,] 0.70514409 0.2948559066
[73,] 0.69715038 0.3028496153
[74,] 1.00909475 -0.0090947457
[75,] 0.88832274 0.1116772601
$M_table
Class
Y negatif positif
negatif 47 3
positif 5 20
$Err
[1] 0.1066667
$Auc
[1] 0.7456
plot(hema,R$Prob[,2])
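# Classif_NP assigns 'positif' when its estimated probability exceeds the one of
# 'negatif', i.e. roughly when it exceeds 1/2 (the two estimates sum to about 1);
# adding that threshold to the plot:
abline(h=0.5,col='red')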
Classif_NP(hema,test,c(37,42,58,57))
$Class
[1] "negatif" "positif" "positif" "negatif" "positif" "positif" "negatif"
[8] "positif" "negatif" "negatif" "negatif" "positif" "negatif" "negatif"
[15] "negatif" "negatif" "negatif" "negatif" "negatif" "negatif" "negatif"
[22] "negatif" "negatif" "negatif" "positif" "positif" "negatif" "negatif"
[29] "positif" "positif" "negatif" "negatif" "negatif" "negatif" "negatif"
[36] "positif" "negatif" "negatif" "positif" "negatif" "positif" "negatif"
[43] "positif" "negatif" "positif" "positif" "negatif" "positif" "negatif"
[50] "positif" "negatif" "negatif" "negatif" "negatif" "negatif" "negatif"
[57] "negatif" "negatif" "negatif" "positif" "negatif" "negatif" "positif"
[64] "positif" "negatif" "positif" "negatif" "negatif" "positif" "negatif"
[71] "negatif" "negatif" "negatif" "negatif" "negatif"
$Prob
negatif positif
[1,] 1.00154507 -0.0015450745
[2,] -0.04382121 1.0438212062
[3,] 0.15434486 0.8456551429
[4,] 0.90091255 0.0990874512
[5,] -0.01638242 1.0163824152
[6,] -0.01690106 1.0169010600
[7,] 0.81826853 0.1817314683
[8,] 0.44026871 0.5597312871
[9,] 1.00271923 -0.0027192346
[10,] 0.97256267 0.0274373255
[11,] 0.99912319 0.0008768104
[12,] 0.08722628 0.9127737158
[13,] 0.74966874 0.2503312641
[14,] 0.99257122 0.0074287785
[15,] 0.96949129 0.0305087136
[16,] 0.85954431 0.1404556941
[17,] 0.98031247 0.0196875314
[18,] 0.90954426 0.0904557440
[19,] 1.01159987 -0.0115998736
[20,] 1.01219351 -0.0121935133
[21,] 0.84465421 0.1553457920
[22,] 0.91948677 0.0805132308
[23,] 0.87502350 0.1249764977
[24,] 1.01244526 -0.0124452622
[25,] 0.37195543 0.6280445651
[26,] -0.03976347 1.0397634697
[27,] 0.93550456 0.0644954388
[28,] 0.70593566 0.2940643421
[29,] 0.16263064 0.8373693607
[30,] -0.10987017 1.1098701736
[31,] 0.83712560 0.1628744014
[32,] 0.93883629 0.0611637069
[33,] 0.98622358 0.0137764204
[34,] 0.96902890 0.0309710958
[35,] 0.74592644 0.2540735614
[36,] 0.12494026 0.8750597443
[37,] 0.58845052 0.4115494755
[38,] 0.95204114 0.0479588551
[39,] 0.23653296 0.7634670375
[40,] 1.01233017 -0.0123301720
[41,] 0.33199241 0.6680075853
[42,] 0.99507654 0.0049234647
[43,] -0.01637771 1.0163777104
[44,] 0.90040899 0.0995910059
[45,] 0.03615177 0.9638482341
[46,] 0.03197847 0.9680215341
[47,] 0.92364019 0.0763598122
[48,] 0.01908133 0.9809186730
[49,] 0.92182592 0.0781740837
[50,] 0.10130621 0.8986937855
[51,] 1.00314294 -0.0031429360
[52,] 0.97278976 0.0272102447
[53,] 0.76563576 0.2343642391
[54,] 0.94776350 0.0522365039
[55,] 0.86088772 0.1391122774
[56,] 0.63806219 0.3619378147
[57,] 0.83158518 0.1684148177
[58,] 0.52611805 0.4738819468
[59,] 0.97711055 0.0228894489
[60,] -0.07255244 1.0725524359
[61,] 1.00631785 -0.0063178519
[62,] 0.78037033 0.2196296735
[63,] 0.21164314 0.7883568648
[64,] 0.01457321 0.9854267891
[65,] 1.00865833 -0.0086583277
[66,] 0.35111770 0.6488822958
[67,] 0.91193400 0.0880660046
[68,] 0.90284398 0.0971560181
[69,] 0.30071516 0.6992848429
[70,] 0.98798550 0.0120145043
[71,] 1.00131155 -0.0013115545
[72,] 0.70514409 0.2948559066
[73,] 0.69715038 0.3028496153
[74,] 1.00909475 -0.0090947457
[75,] 0.88832274 0.1116772601
$M_table
Class
Y negatif positif
negatif 47 3
positif 5 20
$Err
[1] 0.1066667
$Class0
[1] "negatif" "negatif" "positif" "positif"
$Prob0
[,1] [,2]
[1,] 1.00181472 -0.001814722
[2,] 1.01045534 -0.010455342
[3,] -0.04880557 1.048805567
[4,] -0.01445100 1.014450998
$Auc
[1] 0.7456
load('Randonnee.RData')
library(knitr)
library(kableExtra)
kable(cbind(long,lat,alti),'html',caption="Data given in Randonnee.RData") %>%
kable_styling() %>%
scroll_box(width = "100%", height='7cm')
long | lat | alti |
---|---|---|
99.0624679 | 53.3076627 | 1503.552 |
-6.8700422 | 28.2190488 | 1565.064 |
-3.9917884 | 92.2394410 | 1523.288 |
... | ... | ... |
30.8308143 | 36.5810263 | 1589.775 |
67.7793316 | 16.1500442 | 1585.947 |
106.0849460 | 98.0246542 | 1485.666 |
11.2598220 | 15.8606408 | 1565.633 |
23.1088913 | 76.0537079 | 1479.768 |
10.4403996 | -7.6325232 | 1517.407 |
2.7615352 | 104.7978228 | 1529.142 |
89.0893202 | -8.5448018 | 1500.948 |
98.6259430 | -7.0139099 | 1510.369 |
14.6402051 | 103.9925252 | 1488.091 |
54.7124719 | 26.8612291 | 1572.300 |
47.1933835 | 32.0922527 | 1525.702 |
85.3700834 | 103.6307418 | 1515.363 |
1.7721502 | 15.0269578 | 1563.127 |
12.5727107 | 8.6170472 | 1526.653 |
45.5713691 | 53.7627569 | 1508.066 |
44.2215382 | 67.0370509 | 1497.606 |
74.5813477 | 101.3129812 | 1528.128 |
30.0115687 | 11.3819921 | 1541.190 |
90.4531909 | 61.6848080 | 1554.888 |
20.0863997 | 46.7913048 | 1570.847 |
47.1320663 | 38.5187942 | 1531.695 |
21.5359278 | 2.8504028 | 1525.109 |
102.1136188 | 50.2343568 | 1501.812 |
48.5367774 | 12.0062433 | 1535.235 |
-2.3180114 | 55.7169349 | 1506.025 |
50.6102194 | 88.1279888 | 1510.649 |
77.8522285 | 1.2872542 | 1504.101 |
48.3893854 | 104.5696926 | 1492.684 |
66.3933251 | 101.9618268 | 1509.666 |
85.9375359 | 28.1935550 | 1555.860 |
67.8682558 | 91.8400077 | 1543.947 |
102.2557618 | 50.6659800 | 1493.771 |
89.4257922 | 46.7036010 | 1519.883 |
61.1501857 | 38.6312221 | 1523.934 |
3.1689991 | 21.9017038 | 1571.345 |
54.2624584 | 75.0700301 | 1520.234 |
-2.1329539 | 0.6990997 | 1517.794 |
14.5326373 | 14.6623044 | 1569.137 |
92.0923677 | 14.5583923 | 1540.265 |
50.9768518 | 11.4237147 | 1523.812 |
25.7224895 | 62.8373531 | 1499.869 |
9.0913331 | 91.3946745 | 1497.405 |
56.7395689 | 65.3898101 | 1526.841 |
-6.9715546 | 108.3857356 | 1505.603 |
86.1255730 | 37.8573761 | 1513.962 |
7.6243359 | -5.6062683 | 1508.418 |
14.3570259 | 78.8911665 | 1492.319 |
78.8684186 | 86.3794514 | 1701.078 |
50.1203243 | 6.5330506 | 1525.042 |
78.2810920 | 101.1884724 | 1548.463 |
91.9315635 | 32.0111449 | 1514.694 |
-1.8781652 | 45.9827446 | 1553.455 |
19.2320025 | 48.2957502 | 1577.783 |
52.5425185 | 5.6003292 | 1518.864 |
5.5162289 | 101.0176554 | 1511.460 |
68.3529132 | 96.7494079 | 1542.238 |
29.1369224 | 15.5334408 | 1544.346 |
-3.6228530 | 94.7946062 | 1485.693 |
80.2698530 | -8.4745485 | 1514.719 |
63.7084272 | 92.6554637 | 1534.721 |
5.0221692 | 5.5771130 | 1521.852 |
23.1965916 | 22.3894481 | 1595.744 |
107.4721211 | 61.1590065 | 1511.029 |
91.2873364 | 85.5051300 | 1557.369 |
35.2431964 | 29.0336821 | 1547.582 |
67.6516760 | 10.0403570 | 1512.336 |
88.2066043 | 14.8927116 | 1537.623 |
69.6267306 | 84.3787444 | 1594.476 |
library(plotly)
Loading required package: ggplot2
Attaching package: 'ggplot2'
The following object is masked _by_ '.GlobalEnv':
Position
Attaching package: 'plotly'
The following object is masked from 'package:ggplot2':
last_plot
The following object is masked from 'package:MASS':
select
The following object is masked from 'package:stats':
filter
The following object is masked from 'package:graphics':
layout
library(sm)
longitude=seq(min(long),max(long),length=100)   # regular evaluation grid in longitude
latitude=seq(min(lat),max(lat),length=100)      # regular evaluation grid in latitude
RES=sm.regression(cbind(long,lat),alti,eval.points=cbind(longitude,latitude), eval.grid=TRUE,method='cv',display='none')$estimate   # kernel regression of altitude on (long,lat), CV bandwidth, evaluated on the grid
Est_altitude=matrix(RES,100,100)
DT=data.frame(longitude,latitude,Est_altitude)
plot_ly(DT,x=~latitude,y=~longitude,z = ~Est_altitude, type = "surface")
#%>% layout(xaxis = list(autorange = "reversed"))
Estimated.Altitude=sm.regression(cbind(long,lat),alti,eval.points=Position, eval.grid=FALSE,method='cv',display='none')$estimate   # same regression, evaluated at the points stored in Position
lo=Position[,1]
la=Position[,2]
DT2=data.frame(Time,lo,la,Estimated.Altitude)
plot_ly(DT,x=~latitude,y=~longitude,z = ~Est_altitude, type = "surface")%>%
add_trace(x=~la,y=~lo,z = ~Estimated.Altitude, type = "scatter3d",mode='markers')   # estimated surface with the evaluated points overlaid
plot_ly(DT2,x=~Time,y=~Estimated.Altitude, type = 'scatter', mode = 'lines')   # estimated altitude plotted against Time
#plot_ly(DT2,x=~la,y=~lo,z = ~RES, type = "scatter3d",trace='add')
#%>% layout(xaxis = list(autorange = "reversed"))
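# Aside (sketch): with method='cv', sm.regression selects its smoothing bandwidth by
# cross-validation internally. The same choice can be inspected directly with sm::h.select
# and passed back through the h argument; h.cv and RES.check below are illustrative names,
# and the refit should reproduce the estimate computed above.
h.cv=h.select(cbind(long,lat),alti,method='cv')
h.cv
RES.check=sm.regression(cbind(long,lat),alti,h=h.cv,eval.points=cbind(longitude,latitude),eval.grid=TRUE,display='none')$estimate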
load('Jussac.RData')
library(knitr)
library(kableExtra)
kable(cbind(Type,LCB,LMS,LBM,LP,LM,LAM),'html',caption="Data given in Jussac.RData") %>%
kable_styling() %>%
scroll_box(width = "100%", height='7cm')
Type | LCB | LMS | LBM | LP | LM | LAM |
---|---|---|---|---|---|---|
1 | 129 | 64 | 95 | 17.5 | 11.2 | 13.8 |
1 | 154 | 74 | 76 | 20.0 | 14.2 | 16.5 |
1 | 170 | 87 | 71 | 17.9 | 12.3 | 15.9 |
1 | 188 | 94 | 73 | 19.5 | 13.3 | 14.8 |
1 | 161 | 81 | 55 | 17.1 | 12.1 | 13.0 |
1 | 164 | 90 | 58 | 17.5 | 12.7 | 14.7 |
1 | 203 | 109 | 65 | 20.7 | 14.0 | 16.8 |
1 | 178 | 97 | 57 | 17.3 | 12.8 | 14.3 |
1 | 212 | 114 | 65 | 20.5 | 14.3 | 15.5 |
1 | 221 | 123 | 62 | 21.2 | 15.2 | 17.0 |
1 | 183 | 97 | 52 | 19.3 | 12.9 | 13.5 |
1 | 212 | 112 | 65 | 19.7 | 14.2 | 16.0 |
1 | 220 | 117 | 70 | 19.8 | 14.3 | 15.6 |
1 | 216 | 113 | 72 | 20.5 | 14.4 | 17.7 |
1 | 216 | 112 | 75 | 19.6 | 14.0 | 16.4 |
1 | 205 | 110 | 68 | 20.8 | 14.1 | 16.4 |
1 | 228 | 122 | 78 | 22.5 | 14.2 | 17.8 |
1 | 218 | 112 | 65 | 20.3 | 13.9 | 17.0 |
1 | 190 | 93 | 78 | 19.7 | 13.2 | 14.0 |
1 | 212 | 111 | 73 | 20.5 | 13.7 | 16.6 |
1 | 201 | 105 | 70 | 19.8 | 14.3 | 15.9 |
1 | 196 | 106 | 67 | 18.5 | 12.6 | 14.2 |
1 | 158 | 71 | 71 | 16.7 | 12.5 | 13.3 |
1 | 255 | 126 | 86 | 21.4 | 15.0 | 18.0 |
1 | 234 | 113 | 83 | 21.3 | 14.8 | 17.0 |
1 | 205 | 105 | 70 | 19.0 | 12.4 | 14.9 |
1 | 186 | 97 | 62 | 19.0 | 13.2 | 14.2 |
1 | 241 | 119 | 87 | 21.0 | 14.7 | 18.3 |
1 | 220 | 111 | 88 | 22.5 | 15.4 | 18.0 |
1 | 242 | 120 | 85 | 19.9 | 15.3 | 17.6 |
3 | 199 | 105 | 73 | 23.4 | 15.0 | 19.1 |
3 | 227 | 117 | 77 | 25.0 | 15.3 | 18.6 |
3 | 228 | 122 | 82 | 24.7 | 15.0 | 18.5 |
3 | 232 | 123 | 83 | 25.3 | 16.8 | 15.5 |
3 | 231 | 121 | 78 | 23.5 | 16.5 | 19.6 |
3 | 215 | 118 | 74 | 25.7 | 15.7 | 19.0 |
3 | 184 | 100 | 69 | 23.3 | 15.8 | 19.7 |
3 | 175 | 94 | 73 | 22.2 | 14.8 | 17.0 |
3 | 239 | 124 | 77 | 25.0 | 16.8 | 27.0 |
3 | 203 | 109 | 70 | 23.3 | 15.0 | 18.7 |
3 | 226 | 118 | 72 | 26.0 | 16.0 | 19.4 |
3 | 226 | 119 | 77 | 26.5 | 16.8 | 19.3 |
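# A quick exploratory look before classifying (sketch): scatterplot matrix of the six
# measurements, coloured by group (Type takes the values shown in the table above).
pairs(cbind(LCB,LMS,LBM,LP,LM,LAM),col=Type,pch=19)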
Quant=cbind(LCB,LMS,LBM,LP,LM,LAM)   # the six quantitative measurements
choice=combn(1:6,2)                  # all pairs of measurements
NC=ncol(choice)
for (j in 1:NC)
{
x=Quant[,choice[,j]]
ju=Jussac[choice[,j]]                # the Jussac specimen restricted to the same pair
print(colnames(x))
print(Classif_NP(x,Type,ju))         # nonparametric classification based on this pair of measurements
}
[1] "LCB" "LMS"
$Class
[1] Chien Chien Chien Chien Chien Chien Chien Chien Chien Loup Chien Chien
[13] Chien Chien Chien Chien Loup Chien Chien Chien Chien Chien Chien Chien
[25] Chien Chien Chien Chien Chien Chien Chien Chien Loup Chien Chien Chien
[37] Chien Chien Chien Chien Chien Chien
Levels: Chien Jussac Loup
$Prob
Chien Loup
[1,] 0.9968071 0.003192929
[2,] 1.0396700 -0.039670038
[3,] 0.8711861 0.128813903
[4,] 0.9114322 0.088567841
[5,] 0.9256400 0.074360023
[6,] 0.6479006 0.352099411
[7,] 0.5958009 0.404199089
[8,] 0.6427308 0.357269248
[9,] 0.5284826 0.471517429
[10,] 0.1008899 0.899110081
[11,] 0.7340450 0.265955012
[12,] 0.6196508 0.380349220
[13,] 0.5273338 0.472666184
[14,] 0.6415822 0.358417824
[15,] 0.6826343 0.317365693
[16,] 0.5871850 0.412814999
[17,] 0.4072262 0.592773809
[18,] 0.7127609 0.287239090
[19,] 0.9767716 0.023228367
[20,] 0.6613262 0.338673787
[21,] 0.7241818 0.275818245
[22,] 0.6002263 0.399773727
[23,] 1.2721424 -0.272142439
[24,] 0.6115807 0.388419343
[25,] 0.9283272 0.071672824
[26,] 0.7841586 0.215841352
[27,] 0.7815437 0.218456342
[28,] 0.7142495 0.285750511
[29,] 0.7817515 0.218248484
[30,] 0.7313537 0.268646280
[31,] 0.7280969 0.271903065
[32,] 0.6883376 0.311662387
[33,] 0.4915187 0.508481306
[34,] 0.5081294 0.491870592
[35,] 0.5773703 0.422629739
[36,] 0.6483357 0.351664251
[37,] 0.7237995 0.276200468
[38,] 0.8072750 0.192724995
[39,] 0.5818795 0.418120549
[40,] 0.7581497 0.241850325
[41,] 0.6250374 0.374962581
[42,] 0.5827946 0.417205426
$M_table
Class
Y Chien Jussac Loup
Chien 28 0 2
Jussac 0 0 0
Loup 11 0 1
$Err
[1] 0.3095238
$Class0
[1] Chien
Levels: Chien Jussac Loup
$Prob0
[,1] [,2]
[1,] 0.9431089 0.05689111
$Auc
[1] 0.5861111
[1] "LCB" "LBM"
$Class
[1] Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien
[13] Chien Chien Loup Chien Loup Chien Loup Chien Chien Chien Chien Chien
[25] Chien Chien Chien Chien Chien Chien Chien Loup Chien Chien Loup Chien
[37] Chien Chien Loup Chien Chien Loup
Levels: Chien Jussac Loup
$Prob
Chien Loup
[1,] 1.7941288 -0.794128819
[2,] 0.7788844 0.221115627
[3,] 0.6426749 0.357325061
[4,] 0.5500336 0.449966389
[5,] 1.0404019 -0.040401922
[6,] 0.9633000 0.036699981
[7,] 0.9217973 0.078202680
[8,] 0.9908199 0.009180114
[9,] 0.9671192 0.032880827
[10,] 1.1715196 -0.171519632
[11,] 1.1499001 -0.149900088
[12,] 0.9671192 0.032880827
[13,] 0.6564391 0.343560892
[14,] 0.5907119 0.409288116
[15,] 0.4829629 0.517037142
[16,] 0.7948145 0.205185547
[17,] 0.3265101 0.673489942
[18,] 0.9592956 0.040704405
[19,] 0.4434532 0.556546752
[20,] 0.5615024 0.438497562
[21,] 0.6804689 0.319531134
[22,] 0.7788681 0.221131923
[23,] 0.8067945 0.193205498
[24,] 1.1761676 -0.176167625
[25,] 0.5417162 0.458283805
[26,] 0.6968803 0.303119737
[27,] 0.8959084 0.104091638
[28,] 0.9177806 0.082219445
[29,] 0.7820513 0.217948687
[30,] 0.7579148 0.242085246
[31,] 0.6762616 0.323738441
[32,] 0.4509495 0.549050524
[33,] 0.6399355 0.360064477
[34,] 0.6827756 0.317224357
[35,] 0.4315022 0.568497838
[36,] 0.5926531 0.407346880
[37,] 0.8065273 0.193472736
[38,] 0.8378498 0.162150157
[39,] 0.4297505 0.570249523
[40,] 0.7669815 0.233018465
[41,] 0.6363004 0.363699568
[42,] 0.4614977 0.538502267
$M_table
Class
Y Chien Jussac Loup
Chien 27 0 3
Jussac 0 0 0
Loup 8 0 4
$Err
[1] 0.2619048
$Class0
[1] Chien
Levels: Chien Jussac Loup
$Prob0
[,1] [,2]
[1,] 0.6338983 0.3661017
$Auc
[1] 0.5805556
[1] "LCB" "LP"
$Class
[1] Chien Loup Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien
[13] Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien
[25] Chien Chien Chien Chien Chien Chien Loup Loup Loup Loup Chien Loup
[37] Loup Loup Loup Loup Loup Loup
Levels: Chien Jussac Loup
$Prob
Chien Loup
[1,] 1.000000000 -3.402009e-26
[2,] 0.134541725 8.654583e-01
[3,] 0.998576475 1.423525e-03
[4,] 0.958623019 4.137698e-02
[5,] 1.000104133 -1.041328e-04
[6,] 0.999686013 3.139871e-04
[7,] 0.848168511 1.518315e-01
[8,] 1.003595517 -3.595517e-03
[9,] 0.971498030 2.850197e-02
[10,] 0.913305985 8.669402e-02
[11,] 0.988090217 1.190978e-02
[12,] 1.011132675 -1.113268e-02
[13,] 1.015954426 -1.595443e-02
[14,] 0.985735396 1.426460e-02
[15,] 1.014525335 -1.452533e-02
[16,] 0.848469751 1.515302e-01
[17,] 0.595281242 4.047188e-01
[18,] 1.000917407 -9.174067e-04
[19,] 0.939660213 6.033979e-02
[20,] 0.971498030 2.850197e-02
[21,] 0.988473935 1.152607e-02
[22,] 1.010433450 -1.043345e-02
[23,] 1.000263286 -2.632863e-04
[24,] 0.810934779 1.890652e-01
[25,] 0.921095265 7.890473e-02
[26,] 1.012330725 -1.233072e-02
[27,] 0.992210077 7.789923e-03
[28,] 0.968746819 3.125318e-02
[29,] 0.544725908 4.552741e-01
[30,] 1.073350138 -7.335014e-02
[31,] 0.092848287 9.071517e-01
[32,] 0.063628544 9.363715e-01
[33,] 0.122885364 8.771146e-01
[34,] -0.002361877 1.002362e+00
[35,] 0.545244135 4.548052e-01
[36,] -0.045940159 1.045940e+00
[37,] -0.212893305 1.212893e+00
[38,] 0.292479009 7.075210e-01
[39,] -0.058326860 1.058327e+00
[40,] 0.197557601 8.024424e-01
[41,] -0.019099470 1.019099e+00
[42,] -0.036862879 1.036863e+00
$M_table
Class
Y Chien Jussac Loup
Chien 29 0 1
Jussac 0 0 0
Loup 1 0 11
$Err
[1] 0.04761905
$Class0
[1] Chien
Levels: Chien Jussac Loup
$Prob0
[,1] [,2]
[1,] 0.9630371 0.0369629
$Auc
[1] 0.6555556
[1] "LCB" "LM"
$Class
[1] Chien Loup Chien Chien Chien Chien Chien Chien Chien Loup Chien Chien
[13] Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien
[25] Chien Chien Chien Chien Loup Chien Loup Chien Chien Loup Loup Chien
[37] Loup Chien Loup Loup Loup Loup
Levels: Chien Jussac Loup
$Prob
Chien Loup
[1,] 1.0000000000 -3.590287e-21
[2,] 0.0957844879 9.042155e-01
[3,] 1.0000000002 -2.100731e-10
[4,] 0.9998079685 1.920315e-04
[5,] 1.0000000000 -4.829945e-12
[6,] 0.9999999031 9.694703e-08
[7,] 1.0071266387 -7.126639e-03
[8,] 0.9999999452 5.481227e-08
[9,] 0.9319331918 6.806681e-02
[10,] 0.4877520748 5.122479e-01
[11,] 1.0000005158 -5.158245e-07
[12,] 0.9775954241 2.240458e-02
[13,] 0.9395988870 6.040111e-02
[14,] 0.8977249684 1.022750e-01
[15,] 1.0026028477 -2.602848e-03
[16,] 0.9847949805 1.520502e-02
[17,] 1.0507846013 -5.078460e-02
[18,] 1.0040736371 -4.073637e-03
[19,] 0.9999946113 5.388734e-06
[20,] 1.0047089428 -4.708943e-03
[21,] 0.7036228145 2.963772e-01
[22,] 1.0000000021 -2.121634e-09
[23,] 0.9999999999 8.075307e-11
[24,] 1.0472226527 -4.722265e-02
[25,] 0.8365726205 1.634274e-01
[26,] 0.9999999996 4.329199e-10
[27,] 0.9999448330 5.516703e-05
[28,] 0.9778640777 2.213592e-02
[29,] 0.3437279543 6.562720e-01
[30,] 0.6803879730 3.196120e-01
[31,] 0.4163297477 5.836703e-01
[32,] 0.5796864845 4.203135e-01
[33,] 0.7574011078 2.425989e-01
[34,] -0.0003649075 1.000365e+00
[35,] 0.0083537536 9.916462e-01
[36,] 0.9394903785 6.050963e-02
[37,] -0.1559031242 1.155903e+00
[38,] 0.5086558818 4.913441e-01
[39,] 0.0014215707 9.985784e-01
[40,] 0.4490770128 5.509230e-01
[41,] 0.2380195946 7.619804e-01
[42,] -0.0009418316 1.000942e+00
$M_table
Class
Y Chien Jussac Loup
Chien 27 0 3
Jussac 0 0 0
Loup 4 0 8
$Err
[1] 0.1666667
$Class0
[1] Chien
Levels: Chien Jussac Loup
$Prob0
[,1] [,2]
[1,] 1.002257 -0.002257412
$Auc
[1] 0.7333333
[1] "LCB" "LAM"
$Class
[1] Chien Loup Loup Chien Chien Chien Chien Chien Chien Chien Chien Chien
[13] Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien
[25] Chien Chien Chien Chien Chien Chien Loup Loup Loup Chien Loup Loup
[37] Loup Chien Loup Loup Loup Loup
Levels: Chien Jussac Loup
$Prob
Chien Loup
[1,] 1.000107378 -1.073780e-04
[2,] -35.064982969 3.606498e+01
[3,] 0.485695218 5.143048e-01
[4,] 0.977496896 2.250310e-02
[5,] 1.000069523 -6.952280e-05
[6,] 0.987691434 1.230857e-02
[7,] 0.813406745 1.865933e-01
[8,] 0.993403714 6.596286e-03
[9,] 0.938884536 6.111546e-02
[10,] 0.896018217 1.039818e-01
[11,] 1.001407574 -1.407574e-03
[12,] 0.975308174 2.469183e-02
[13,] 0.634098528 3.659015e-01
[14,] 0.657943120 3.420569e-01
[15,] 0.950372911 4.962709e-02
[16,] 0.972855994 2.714401e-02
[17,] 0.685738029 3.142620e-01
[18,] 0.914538832 8.546117e-02
[19,] 1.000988869 -9.888690e-04
[20,] 0.973178194 2.682180e-02
[21,] 0.983060307 1.693969e-02
[22,] 1.000750338 -7.503382e-04
[23,] 0.999997845 2.155482e-06
[24,] 1.458273181 -4.582732e-01
[25,] 0.813096944 1.869031e-01
[26,] 0.982951464 1.704854e-02
[27,] 0.999256095 7.439048e-04
[28,] 0.614743986 3.852560e-01
[29,] 0.541680862 4.583191e-01
[30,] 0.999177628 8.223715e-04
[31,] -0.012323031 1.012323e+00
[32,] 0.385451937 6.145481e-01
[33,] 0.460535104 5.394649e-01
[34,] 1.019302353 -1.930235e-02
[35,] -0.095647511 1.095648e+00
[36,] 0.067755790 9.322442e-01
[37,] -0.157020719 1.157021e+00
[38,] 0.999275365 1.198849e-03
[39,] -0.003647941 1.003648e+00
[40,] 0.222875772 7.771242e-01
[41,] -0.079138579 1.079139e+00
[42,] -0.023749794 1.023750e+00
$M_table
Class
Y Chien Jussac Loup
Chien 28 0 2
Jussac 0 0 0
Loup 2 0 10
$Err
[1] 0.0952381
$Class0
[1] Chien
Levels: Chien Jussac Loup
$Prob0
[,1] [,2]
[1,] 0.9635584 0.03644162
$Auc
[1] 0.6736111
[1] "LMS" "LBM"
$Class
[1] Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien
[13] Chien Chien Chien Chien Loup Chien Chien Chien Chien Chien Chien Loup
[25] Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien Loup Chien
[37] Chien Chien Loup Chien Chien Loup
Levels: Chien Jussac Loup
$Prob
Chien Loup
[1,] 1.4394796 -0.43947957
[2,] 0.9412932 0.05870681
[3,] 0.7681495 0.23185050
[4,] 0.6305956 0.36940441
[5,] 1.0984759 -0.09847590
[6,] 0.9652619 0.03473807
[7,] 0.9126600 0.08734002
[8,] 1.0103298 -0.01032976
[9,] 0.9351001 0.06489987
[10,] 1.1592268 -0.15922680
[11,] 1.1505786 -0.15057861
[12,] 0.9323131 0.06768694
[13,] 0.6128769 0.38712307
[14,] 0.5801157 0.41988428
[15,] 0.5226151 0.47738492
[16,] 0.7723066 0.22769338
[17,] 0.1353025 0.86469749
[18,] 0.9323131 0.06768694
[19,] 0.6789587 0.32104128
[20,] 0.5629352 0.43706480
[21,] 0.6653289 0.33467105
[22,] 0.7972834 0.20271660
[23,] 1.1124567 -0.11245670
[24,] 0.3078150 0.69218497
[25,] 0.7703063 0.22969370
[26,] 0.6653289 0.33467105
[27,] 0.8853278 0.11467215
[28,] 0.8371818 0.16281818
[29,] 1.3792849 -0.37928491
[30,] 0.6559248 0.34407519
[31,] 0.6730716 0.32692842
[32,] 0.5068424 0.49315757
[33,] 0.5144496 0.48555044
[34,] 0.5664894 0.43351060
[35,] 0.3889101 0.61108988
[36,] 0.5094019 0.49059809
[37,] 0.8043109 0.19568910
[38,] 0.8338612 0.16613882
[39,] 0.2902466 0.70975344
[40,] 0.7421588 0.25784122
[41,] 0.6004308 0.39956921
[42,] 0.4459567 0.55404327
$M_table
Class
Y Chien Jussac Loup
Chien 28 0 2
Jussac 0 0 0
Loup 9 0 3
$Err
[1] 0.2619048
$Class0
[1] Chien
Levels: Chien Jussac Loup
$Prob0
[,1] [,2]
[1,] 0.6363852 0.3636148
$Auc
[1] 0.5694444
[1] "LMS" "LP"
$Class
[1] Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien
[13] Chien Chien Chien Chien Loup Chien Chien Chien Chien Chien Chien Chien
[25] Chien Chien Chien Chien Loup Chien Loup Loup Loup Loup Chien Loup
[37] Loup Loup Loup Loup Loup Loup
Levels: Chien Jussac Loup
$Prob
Chien Loup
[1,] 7.073171e-01 0.000000e+00
[2,] 8.213317e-01 1.786683e-01
[3,] 9.997883e-01 2.116830e-04
[4,] 9.534227e-01 4.657729e-02
[5,] 1.000008e+00 -7.720626e-06
[6,] 1.000076e+00 -7.645688e-05
[7,] 9.303569e-01 6.964305e-02
[8,] 1.000220e+00 -2.199508e-04
[9,] 9.938211e-01 6.178891e-03
[10,] 9.789227e-01 2.107728e-02
[11,] 9.891281e-01 1.087194e-02
[12,] 1.003492e+00 -3.491864e-03
[13,] 1.005449e+00 -5.448592e-03
[14,] 9.910253e-01 8.974663e-03
[15,] 1.003504e+00 -3.503567e-03
[16,] 9.318504e-01 6.814963e-02
[17,] 4.384134e-01 5.615085e-01
[18,] 9.960116e-01 3.988394e-03
[19,] 8.916444e-01 1.083556e-01
[20,] 9.812575e-01 1.874246e-02
[21,] 9.879072e-01 1.209282e-02
[22,] 1.001629e+00 -1.628801e-03
[23,] 1.000000e+00 -2.196221e-07
[24,] 9.886692e-01 1.133081e-02
[25,] 8.768034e-01 1.231966e-01
[26,] 1.001951e+00 -1.950877e-03
[27,] 9.983103e-01 1.689712e-03
[28,] 9.754970e-01 2.450298e-02
[29,] 4.012648e-01 5.987352e-01
[30,] 1.009789e+00 -9.789196e-03
[31,] 7.264794e-02 9.273521e-01
[32,] 1.302973e-12 1.000000e+00
[33,] 7.922616e-02 9.207738e-01
[34,] -8.258978e-03 1.008259e+00
[35,] 5.348084e-01 4.651249e-01
[36,] 1.650124e-03 9.983499e-01
[37,] -1.899749e-01 1.189975e+00
[38,] 2.093153e-01 7.906847e-01
[39,] 1.195267e-02 9.880473e-01
[40,] 2.817952e-01 7.182048e-01
[41,] -2.661182e-03 1.002661e+00
[42,] -7.817637e-03 1.007818e+00
$M_table
Class
Y Chien Jussac Loup
Chien 28 0 2
Jussac 0 0 0
Loup 1 0 11
$Err
[1] 0.07142857
$Class0
[1] Chien
Levels: Chien Jussac Loup
$Prob0
[,1] [,2]
[1,] 0.8624162 0.1375838
$Auc
[1] 0.7611111
[1] "LMS" "LM"
$Class
[1] Chien Loup Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien
[13] Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien
[25] Chien Chien Chien Chien Loup Chien Loup Chien Chien Loup Loup Loup
[37] Loup Loup Loup Loup Loup Loup
Levels: Chien Jussac Loup
$Prob
Chien Loup
[1,] 1.01069929 -0.0106992918
[2,] 0.20243149 0.7975685129
[3,] 1.03903304 -0.0390330372
[4,] 0.88527350 0.1147264983
[5,] 1.02504741 -0.0250474141
[6,] 0.99924296 0.0007570422
[7,] 0.88909139 0.1109086065
[8,] 1.02490376 -0.0249037615
[9,] 0.88382512 0.1161748792
[10,] 0.62430414 0.3756958554
[11,] 1.00802892 -0.0080289183
[12,] 0.87914247 0.1208575348
[13,] 0.95234038 0.0476596154
[14,] 0.81700784 0.1829921648
[15,] 0.95849324 0.0415067641
[16,] 0.87294003 0.1270599691
[17,] 1.12950539 -0.1295053881
[18,] 0.99524501 0.0047549934
[19,] 0.90767345 0.0923265534
[20,] 1.03841469 -0.0384146895
[21,] 0.66283640 0.3371636016
[22,] 1.11796989 -0.1179698916
[23,] 0.94184526 0.0581547444
[24,] 0.77059645 0.2294035459
[25,] 0.62919578 0.3708042178
[26,] 1.24724022 -0.2472402166
[27,] 0.94168638 0.0583136194
[28,] 0.80929593 0.1907040664
[29,] 0.24280533 0.7571946659
[30,] 0.52344695 0.4765530515
[31,] 0.43399122 0.5660087778
[32,] 0.53197113 0.4680288681
[33,] 0.84729678 0.1527032214
[34,] -0.05142195 1.0514219525
[35,] 0.05283981 0.9471601899
[36,] 0.36298431 0.6370156877
[37,] -0.24535586 1.2453558641
[38,] 0.39492693 0.6050730672
[39,] -0.02294511 1.0229451116
[40,] 0.49977694 0.5002230628
[41,] 0.22050620 0.7794938022
[42,] -0.16475575 1.1647557466
$M_table
Class
Y Chien Jussac Loup
Chien 28 0 2
Jussac 0 0 0
Loup 2 0 10
$Err
[1] 0.0952381
$Class0
[1] Chien
Levels: Chien Jussac Loup
$Prob0
[,1] [,2]
[1,] 0.7724708 0.2275292
$Auc
[1] 0.6416667
[1] "LMS" "LAM"
$Class
[1] Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien
[13] Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien Loup
[25] Chien Chien Chien Loup Loup Chien Loup Loup Chien Chien Loup Loup
[37] Loup Chien Loup Loup Loup Loup
Levels: Chien Jussac Loup
$Prob
Chien Loup
[1,] 1.000000e+00 -2.109889e-08
[2,] 9.531285e-01 4.687147e-02
[3,] 6.717125e-01 3.282875e-01
[4,] 9.598445e-01 4.015551e-02
[5,] 1.000071e+00 -7.064228e-05
[6,] 1.001949e+00 -1.949123e-03
[7,] 9.339045e-01 6.609551e-02
[8,] 9.985148e-01 1.485209e-03
[9,] 8.548089e-01 1.451911e-01
[10,] 7.678428e-01 2.321572e-01
[11,] 1.001966e+00 -1.965804e-03
[12,] 9.749288e-01 2.507116e-02
[13,] 6.305578e-01 3.694422e-01
[14,] 7.021250e-01 2.978750e-01
[15,] 9.828330e-01 1.716703e-02
[16,] 9.934115e-01 6.588502e-03
[17,] 7.567813e-01 2.432187e-01
[18,] 9.347093e-01 6.529070e-02
[19,] 1.002546e+00 -2.546269e-03
[20,] 9.842375e-01 1.576249e-02
[21,] 9.258548e-01 7.414517e-02
[22,] 9.939020e-01 6.098045e-03
[23,] 1.000000e+00 -4.012877e-07
[24,] 4.076343e-01 5.923657e-01
[25,] 9.319928e-01 6.800715e-02
[26,] 9.987467e-01 1.253331e-03
[27,] 9.999496e-01 5.036889e-05
[28,] 3.295119e-01 6.590382e-01
[29,] 4.972126e-01 5.027874e-01
[30,] 8.256132e-01 1.743868e-01
[31,] 1.035903e-01 8.964097e-01
[32,] 3.922032e-01 6.077968e-01
[33,] 5.883643e-01 4.116357e-01
[34,] 1.023880e+00 -2.388002e-02
[35,] -2.143617e-01 1.214362e+00
[36,] 1.511650e-01 8.488350e-01
[37,] -3.599950e-01 1.359995e+00
[38,] 8.232866e-01 1.767134e-01
[39,] -8.677953e-05 1.000087e+00
[40,] 3.213723e-01 6.786277e-01
[41,] -1.039122e-01 1.103912e+00
[42,] -1.564087e-02 1.015641e+00
$M_table
Class
Y Chien Jussac Loup
Chien 27 0 3
Jussac 0 0 0
Loup 3 0 9
$Err
[1] 0.1428571
$Class0
[1] Chien
Levels: Chien Jussac Loup
$Prob0
[,1] [,2]
[1,] 0.6542453 0.3457547
$Auc
[1] 0.7875
[1] "LBM" "LP"
$Class
[1] Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien
[13] Chien Chien Chien Chien Loup Chien Chien Chien Chien Chien Chien Chien
[25] Chien Chien Chien Chien Chien Chien Loup Loup Loup Loup Chien Loup
[37] Loup Chien Loup Loup Loup Loup
Levels: Chien Jussac Loup
$Prob
Chien Loup
[1,] 1.000019753 -1.975302e-05
[2,] 0.980853685 1.914632e-02
[3,] 1.000447584 -4.475844e-04
[4,] 1.002423405 -2.423405e-03
[5,] 1.000000089 -8.922781e-08
[6,] 0.999999942 5.771260e-08
[7,] 0.945526700 5.447330e-02
[8,] 1.000000085 -8.491915e-08
[9,] 0.975391525 2.460848e-02
[10,] 0.859826131 1.401739e-01
[11,] 0.999976613 2.338708e-05
[12,] 1.003043320 -3.043320e-03
[13,] 0.989718663 1.028134e-02
[14,] 0.858619108 1.413809e-01
[15,] 1.008214265 -8.214265e-03
[16,] 0.834794690 1.652053e-01
[17,] 0.029962841 9.931038e-01
[18,] 0.990913428 9.086572e-03
[19,] 1.028070078 -2.807008e-02
[20,] 0.855753065 1.442469e-01
[21,] 0.989718663 1.028134e-02
[22,] 1.001245139 -1.245139e-03
[23,] 1.000148314 -1.483141e-04
[24,] 0.987108335 1.289166e-02
[25,] 0.922798455 7.720155e-02
[26,] 1.002633329 -2.633329e-03
[27,] 1.000175568 -1.755679e-04
[28,] 1.018471071 -1.847107e-02
[29,] 0.685209489 3.147905e-01
[30,] 1.030694845 -3.069484e-02
[31,] 0.128269574 8.717304e-01
[32,] 0.022513394 9.774866e-01
[33,] 0.144506967 8.554930e-01
[34,] -0.012779433 1.012779e+00
[35,] 0.575555485 4.244445e-01
[36,] -0.003354162 1.003354e+00
[37,] 0.080760580 9.192394e-01
[38,] 0.548653198 4.513468e-01
[39,] 0.022513394 9.774866e-01
[40,] 0.058112137 9.418879e-01
[41,] -0.012248954 1.012249e+00
[42,] -0.020606376 1.020606e+00
$M_table
Class
Y Chien Jussac Loup
Chien 29 0 1
Jussac 0 0 0
Loup 2 0 10
$Err
[1] 0.07142857
$Class0
[1] Chien
Levels: Chien Jussac Loup
$Prob0
[,1] [,2]
[1,] 0.8792215 0.1207785
$Auc
[1] 0.6111111
[1] "LBM" "LM"
$Class
[1] Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien
[13] Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien
[25] Chien Chien Chien Chien Chien Chien Loup Loup Chien Chien Loup Loup
[37] Loup Loup Loup Loup Loup Loup
Levels: Chien Jussac Loup
$Prob
Chien Loup
[1,] 1.006267278 -6.267278e-03
[2,] 0.837977693 1.620223e-01
[3,] 1.000017102 -1.710184e-05
[4,] 0.997925637 2.074363e-03
[5,] 1.000000003 -3.104118e-09
[6,] 0.999999970 3.018117e-08
[7,] 1.006374765 -6.374765e-03
[8,] 0.999999883 1.169261e-07
[9,] 0.941696974 5.830303e-02
[10,] 0.514238557 4.857614e-01
[11,] 1.000000012 -1.152800e-08
[12,] 0.975014745 2.498525e-02
[13,] 0.810642337 1.893577e-01
[14,] 0.676109358 3.238906e-01
[15,] 0.944273438 5.572656e-02
[16,] 0.976878679 2.312132e-02
[17,] 0.883728678 1.162713e-01
[18,] 1.009338134 -9.338134e-03
[19,] 1.035786657 -3.578666e-02
[20,] 0.996829363 3.170637e-03
[21,] 0.810642337 1.893577e-01
[22,] 1.000027833 -2.783275e-05
[23,] 0.999999293 7.065808e-07
[24,] 0.933654853 6.634515e-02
[25,] 0.761215898 2.387841e-01
[26,] 1.000010233 -1.023271e-05
[27,] 0.999925709 7.429136e-05
[28,] 1.440744196 -4.407442e-01
[29,] 1.351458374 -3.514584e-01
[30,] 0.708027304 2.919727e-01
[31,] 0.269828188 7.301718e-01
[32,] 0.309883664 6.901163e-01
[33,] 0.771335724 2.286643e-01
[34,] 0.816009272 1.839907e-01
[35,] 0.002536301 9.974637e-01
[36,] 0.029838852 9.701611e-01
[37,] 0.235212890 7.647871e-01
[38,] 0.426114402 5.738856e-01
[39,] -0.002457640 1.002458e+00
[40,] 0.427975936 5.720241e-01
[41,] 0.001168787 9.988312e-01
[42,] -0.002457640 1.002458e+00
$M_table
Class
Y Chien Jussac Loup
Chien 30 0 0
Jussac 0 0 0
Loup 2 0 10
$Err
[1] 0.04761905
$Class0
[1] Chien
Levels: Chien Jussac Loup
$Prob0
[,1] [,2]
[1,] 0.9682595 0.03174052
$Auc
[1] 0.7722222
[1] "LBM" "LAM"
$Class
[1] Loup Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien
[13] Chien Loup Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien
[25] Chien Chien Chien Chien Chien Chien Loup Loup Chien Chien Loup Loup
[37] Loup Chien Loup Loup Loup Loup
Levels: Chien Jussac Loup
$Prob
Chien Loup
[1,] -3.579416170 4.579416e+00
[2,] 0.765246547 2.347535e-01
[3,] 0.884158467 1.158415e-01
[4,] 0.954840827 4.515917e-02
[5,] 1.008238835 -8.238835e-03
[6,] 0.995831740 4.168260e-03
[7,] 0.787938023 2.120620e-01
[8,] 1.001041265 -1.041265e-03
[9,] 0.971226766 2.877323e-02
[10,] 0.749698151 2.503018e-01
[11,] 1.000000000 -1.142315e-45
[12,] 0.922289623 7.771038e-02
[13,] 0.930774329 6.922567e-02
[14,] 0.487317728 5.126823e-01
[15,] 0.781505021 2.184950e-01
[16,] 0.836444002 1.635560e-01
[17,] 0.522478742 4.775213e-01
[18,] 0.740526358 2.594736e-01
[19,] 0.796968174 2.030318e-01
[20,] 0.754847777 2.451522e-01
[21,] 0.894496897 1.055031e-01
[22,] 1.030437062 -3.043706e-02
[23,] 1.049693908 -4.969391e-02
[24,] 0.768927177 2.310728e-01
[25,] 0.773897253 2.261027e-01
[26,] 0.988721815 1.127819e-02
[27,] 1.015506234 -1.550623e-02
[28,] 0.765370904 2.346291e-01
[29,] 0.883021908 1.169781e-01
[30,] 0.781165533 2.188345e-01
[31,] 0.101888153 8.981118e-01
[32,] 0.334254184 6.657458e-01
[33,] 0.576304456 4.236955e-01
[34,] 1.077379764 -7.737976e-02
[35,] -0.002347413 1.002347e+00
[36,] 0.144088231 8.559118e-01
[37,] -0.140710392 1.140710e+00
[38,] 0.732115031 2.678850e-01
[39,] -2.348756299 3.348756e+00
[40,] 0.267267277 7.327327e-01
[41,] -0.014428943 1.014429e+00
[42,] 0.094590505 9.054095e-01
$M_table
Class
Y Chien Jussac Loup
Chien 28 0 2
Jussac 0 0 0
Loup 3 0 9
$Err
[1] 0.1190476
$Class0
[1] Chien
Levels: Chien Jussac Loup
$Prob0
[,1] [,2]
[1,] 0.755392 0.244608
$Auc
[1] 0.7152778
[1] "LP" "LM"
$Class
[1] Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien
[13] Chien Chien Chien Chien Loup Chien Chien Chien Chien Chien Chien Chien
[25] Chien Chien Chien Chien Loup Chien Loup Loup Loup Loup Loup Loup
[37] Loup Chien Loup Loup Loup Loup
Levels: Chien Jussac Loup
$Prob
Chien Loup
[1,] 1.000022595 -2.259530e-05
[2,] 1.005035517 -5.035517e-03
[3,] 1.000076728 -7.672811e-05
[4,] 0.993512566 6.487434e-03
[5,] 1.000050860 -5.085980e-05
[6,] 1.000121969 -1.219693e-04
[7,] 0.885991413 1.140086e-01
[8,] 1.000142292 -1.422916e-04
[9,] 0.944521550 5.547845e-02
[10,] 0.807924675 1.920753e-01
[11,] 0.996321263 3.678737e-03
[12,] 1.033042623 -3.304262e-02
[13,] 1.034794443 -3.479444e-02
[14,] 0.949579590 5.042041e-02
[15,] 1.020030080 -2.003008e-02
[16,] 0.870812350 1.291876e-01
[17,] -0.213390647 1.213391e+00
[18,] 0.954680594 4.531941e-02
[19,] 0.987922901 1.207710e-02
[20,] 0.909319997 9.068000e-02
[21,] 1.034794443 -3.479444e-02
[22,] 0.999862706 1.372936e-04
[23,] 1.000097186 -9.718628e-05
[24,] 0.761915520 2.380845e-01
[25,] 0.787581288 2.124187e-01
[26,] 0.999371075 6.289251e-04
[27,] 0.999394667 6.053326e-04
[28,] 0.860148610 1.398514e-01
[29,] 0.374506283 6.254937e-01
[30,] 1.285507830 -2.855078e-01
[31,] 0.331165858 6.688341e-01
[32,] 0.005905740 9.940943e-01
[33,] 0.062633030 9.373670e-01
[34,] -0.005396005 1.005396e+00
[35,] 0.360209395 6.397906e-01
[36,] -0.041047550 1.041048e+00
[37,] 0.359614887 6.403851e-01
[38,] 0.711055638 2.889444e-01
[39,] 0.003254312 9.967457e-01
[40,] 0.352040844 6.479592e-01
[41,] -0.041956042 1.041956e+00
[42,] -0.048680399 1.048680e+00
$M_table
Class
Y Chien Jussac Loup
Chien 28 0 2
Jussac 0 0 0
Loup 1 0 11
$Err
[1] 0.07142857
$Class0
[1] Chien
Levels: Chien Jussac Loup
$Prob0
[,1] [,2]
[1,] 0.9381193 0.06188067
$Auc
[1] 0.6972222
[1] "LP" "LAM"
$Class
[1] Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien
[13] Chien Chien Chien Chien Loup Chien Chien Chien Chien Chien Chien Chien
[25] Chien Chien Chien Chien Loup Chien Loup Loup Loup Loup Loup Loup
[37] Loup Chien Loup Loup Loup Loup
Levels: Chien Jussac Loup
$Prob
Chien Loup
[1,] 1.000255e+00 -2.546781e-04
[2,] 9.951940e-01 4.806018e-03
[3,] 1.007967e+00 -7.966628e-03
[4,] 9.901074e-01 9.892572e-03
[5,] 1.000183e+00 -1.825095e-04
[6,] 1.000664e+00 -6.638512e-04
[7,] 9.202638e-01 7.973624e-02
[8,] 1.000364e+00 -3.638306e-04
[9,] 8.987586e-01 1.012414e-01
[10,] 8.221209e-01 1.778791e-01
[11,] 9.978787e-01 2.121330e-03
[12,] 9.953817e-01 4.618319e-03
[13,] 9.821855e-01 1.781447e-02
[14,] 1.000442e+00 -4.418637e-04
[15,] 1.013108e+00 -1.310780e-02
[16,] 8.768309e-01 1.231691e-01
[17,] 4.819352e-01 5.180648e-01
[18,] 9.914723e-01 8.527663e-03
[19,] 9.869788e-01 1.302125e-02
[20,] 9.430705e-01 5.692945e-02
[21,] 9.878487e-01 1.215130e-02
[22,] 9.999518e-01 4.818591e-05
[23,] 1.000129e+00 -1.289290e-04
[24,] 7.882186e-01 2.117814e-01
[25,] 7.955892e-01 2.044108e-01
[26,] 9.983084e-01 1.691617e-03
[27,] 9.982625e-01 1.737471e-03
[28,] 8.686287e-01 1.313713e-01
[29,] 4.802339e-01 5.197661e-01
[30,] 1.086552e+00 -8.655181e-02
[31,] 2.067179e-01 7.932821e-01
[32,] 8.261469e-02 9.173853e-01
[33,] 1.328166e-01 8.671834e-01
[34,] -3.359782e-05 1.001543e+00
[35,] 2.860779e-02 9.713922e-01
[36,] -1.051961e-02 1.010520e+00
[37,] 3.371560e-02 9.662844e-01
[38,] 9.912356e-01 8.764378e-03
[39,] -6.391207e-01 1.639121e+00
[40,] 3.150334e-01 6.849666e-01
[41,] -5.278270e-02 1.052783e+00
[42,] -5.132187e-02 1.051322e+00
$M_table
Class
Y Chien Jussac Loup
Chien 28 0 2
Jussac 0 0 0
Loup 1 0 11
$Err
[1] 0.07142857
$Class0
[1] Chien
Levels: Chien Jussac Loup
$Prob0
[,1] [,2]
[1,] 0.9514529 0.04854711
$Auc
[1] 0.6611111
[1] "LM" "LAM"
$Class
[1] Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien
[13] Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien Chien
[25] Loup Chien Chien Loup Chien Chien Loup Loup Loup Loup Loup Loup
[37] Loup Chien Loup Loup Loup Loup
Levels: Chien Jussac Loup
$Prob
Chien Loup
[1,] 1.000000000 -8.444785e-21
[2,] 0.946957631 5.304237e-02
[3,] 0.999999984 1.584453e-08
[4,] 0.999991162 8.838320e-06
[5,] 1.000000000 -1.652703e-17
[6,] 0.999999999 1.364058e-09
[7,] 0.997233368 2.766632e-03
[8,] 1.000000000 1.116284e-10
[9,] 1.013049223 -1.304922e-02
[10,] 0.834250689 1.657493e-01
[11,] 1.000000000 -5.188620e-12
[12,] 0.994780937 5.219063e-03
[13,] 1.001479611 -1.479611e-03
[14,] 0.871425895 1.285741e-01
[15,] 1.011524881 -1.152488e-02
[16,] 0.989412666 1.058733e-02
[17,] 1.015272729 -1.527273e-02
[18,] 1.018226781 -1.822678e-02
[19,] 0.999999992 7.655264e-09
[20,] 1.035495613 -3.549561e-02
[21,] 0.965453913 3.454609e-02
[22,] 1.000000000 -1.570561e-11
[23,] 1.000000000 -7.055253e-15
[24,] 0.563149934 4.368501e-01
[25,] 0.047757173 9.522453e-01
[26,] 1.000000000 -1.376477e-10
[27,] 0.999999959 4.091530e-08
[28,] 0.447797917 5.522021e-01
[29,] 0.589745874 4.102541e-01
[30,] 0.846136437 1.538636e-01
[31,] -0.295358448 1.295358e+00
[32,] 0.334758812 6.652412e-01
[33,] 0.410713121 5.892869e-01
[34,] 0.055612685 9.443873e-01
[35,] -0.015501642 1.015502e+00
[36,] 0.200930943 7.990691e-01
[37,] -0.219924842 1.219925e+00
[38,] 1.000049336 -4.930692e-05
[39,] -2.310839328 3.310839e+00
[40,] 0.240858384 7.591416e-01
[41,] 0.014449831 9.855502e-01
[42,] 0.006676652 9.933233e-01
$M_table
Class
Y Chien Jussac Loup
Chien 28 0 2
Jussac 0 0 0
Loup 1 0 11
$Err
[1] 0.07142857
$Class0
[1] Chien
Levels: Chien Jussac Loup
$Prob0
[,1] [,2]
[1,] 0.9988722 0.001127843
$Auc
[1] 0.7194444
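# Possible summary of the pairwise results (sketch, reusing Quant, choice, NC, Type and
# Jussac as defined above, and the $Err / $Auc components returned by Classif_NP):
Summary_pairs=data.frame(pair=apply(choice,2,function(v) paste(colnames(Quant)[v],collapse='-')),Err=NA,Auc=NA)
for (j in 1:NC)
{
res=Classif_NP(Quant[,choice[,j]],Type,Jussac[choice[,j]])
Summary_pairs$Err[j]=res$Err
Summary_pairs$Auc[j]=res$Auc
}
Summary_pairs[order(Summary_pairs$Err),]   # pairs ranked by apparent error rate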
fdaplot=function(X,ylim=NULL)
# plot each row of X as a curve (simple display of a sample of functional data)
{
matplot(t(X),type='l',ylim=ylim)
}
library("fda")
Loading required package: splines
Loading required package: fds
Loading required package: rainbow
Loading required package: pcaPP
Loading required package: RCurl
Loading required package: deSolve
Attaching package: 'fda'
The following object is masked from 'package:graphics':
matplot
TECATOR=read.table('npfda-spectrometric.dat')
CURVES=as.matrix(TECATOR[,-101])   # spectrometric curves (functional predictors)
Y=TECATOR[,101]                    # scalar response (column 101)
fdaplot(CURVES)
source('npfda.R')                  # nonparametric functional regression routines (funopare.kernel.cv, ...)
R0=funopare.kernel.cv(Y,CURVES, CURVES,0,10,c(0,1))   # derivative-based semi-metric of order 0
R0$Mse
[1] 117.2638
plot(R0$Estimated.values,Y)
abline(0,1)
R1=funopare.kernel.cv(Y,CURVES, CURVES,1,10,c(0,1))
R1$Mse
[1] 44.27052
plot(R1$Estimated.values,Y)
abline(0,1)
R2=funopare.kernel.cv(Y,CURVES, CURVES,2,10,c(0,1))
R2$Mse
[1] 5.784178
plot(R2$Estimated.values,Y)
abline(0,1)
R3=funopare.kernel.cv(Y,CURVES, CURVES,3,10,c(0,1))
R3$Mse
[1] 9.118275
plot(R3$Estimated.values,Y)
abline(0,1)
Rpca=funopare.kernel.cv(Y,CURVES, CURVES,semimetric='pca',10)
Rpca$Mse
[1] 122.7028
plot(Rpca$Estimated.values,Y)
abline(0,1)
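# Quick comparison (sketch) of the cross-validated MSEs obtained above with the
# derivative-based semi-metrics of order 0 to 3 and with the PCA-based semi-metric.
MSE=c(deriv0=R0$Mse,deriv1=R1$Mse,deriv2=R2$Mse,deriv3=R3$Mse,pca=Rpca$Mse)
MSE
barplot(MSE,ylab='MSE')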
load("PHONEMES.RData")
par(mfrow=c(2,3))
Phon=sort(unique(PHONEME))
for (j in Phon)
{
fdaplot(CURVES[PHONEME==j,],ylim=range(CURVES))
title(j)
}
#######
Classif_NP_fun=function(X,Y,X0=NA,plot=FALSE,semimetric,...)
# Nonparametric classification of curves: one kernel regression (funopare.kernel.cv)
# per class indicator, then each curve is assigned to the class with the largest
# estimated probability. X0 may contain test curves to be classified as well.
{
X=as.matrix(X)
X0=as.matrix(X0)
n=length(Y)
V=sort(unique(Y))                # class labels
n_V=length(V)
Prob=matrix(NA,n,n_V)            # estimated class probabilities (learning sample)
colnames(Prob)=V
Class=rep(NA,n)
# X0 is treated as a test sample only if it is non-missing and differs from X
test_sample=FALSE
if (!is.na(max(X0)) & (any(dim(X)!=dim(X0))))
{
test_sample=TRUE
}
if (!is.na(max(X0)) & (all(dim(X)==dim(X0))))
{
if (any(X!=X0)){test_sample=TRUE}
}
if (test_sample)
{
P0=matrix(NA,nrow(X0),n_V)       # predicted class probabilities for the test curves
Class0=rep(NA,n)
}
for (v in 1:n_V)
{
z=as.numeric(Y==V[v])            # indicator of class v
Prob[,v]=funopare.kernel.cv(z,X,X,semimetric=semimetric,...)$Estimated.values
if (test_sample) {P0[,v]=funopare.kernel.cv(z,X,X0,semimetric=semimetric,...)$Predicted.values}
}
if (n_V==2) {Roc=ROC(Y==V[2],Prob[,2],plot)}   # ROC curve and AUC in the two-class case
Class=V[apply(Prob,1,which.max)]               # predicted class on the learning sample
V_est=sort(unique(Class))
if (length(V_est)==n_V){M_table=table(Y,Class)}
else {
# pad the confusion matrix with zero columns when some classes are never predicted
M_table=matrix(0,n_V,n_V)
M_table0=table(Y,Class)
for (j in 1:length(V_est)) {M_table[,which(V==V_est[j])]=M_table0[,j]}
}
Err=1-(sum(diag(M_table))/sum(M_table))        # apparent misclassification rate
if (test_sample) {Class0=V[apply(P0,1,which.max)]}
if (test_sample) {return(list(Class=Class, Prob=Prob, M_table=M_table, Err=Err, Class0=Class0,Prob0=P0,Auc=ifelse(n_V==2,Roc$AUC,NA)))}
else {return(list(Class=Class, Prob=Prob, M_table=M_table, Err=Err,Auc=ifelse(n_V==2,Roc$AUC,NA)))}
}
learn=1:1000
RE=Classif_NP_fun(CURVES[learn,],PHONEME[learn],CURVES[-learn,],semimetric = 'mplsr',q=4)
RE$M_table
Class
Y AA AO DCL IY SH
AA 153 47 0 0 0
AO 26 173 1 0 0
DCL 0 0 199 1 0
IY 0 0 1 199 0
SH 0 0 0 0 200
table(PHONEME[-learn],RE$Class0)
AA AO DCL IY SH
AA 152 48 0 0 0
AO 29 171 0 0 0
DCL 1 0 199 0 0
IY 0 0 0 200 0
SH 0 0 0 0 200
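# Test misclassification rate (sketch) computed from the confusion table just above
# (here 1 - 922/1000 = 0.078 on the held-out curves).
M_test=table(PHONEME[-learn],RE$Class0)
1-sum(diag(M_test))/sum(M_test)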