---
title: "TP_REG"
output: html_document
date: "2025-01-28"
---
```{r setup, include=FALSE}
knitr::opts_chunk$set(echo = TRUE)
library(sm)
Compute_CV=function(x,y,method,weights=NA)
# Leave-one-out cross-validation score for a kernel regression of y on x.
# method  : bandwidth-selection method passed to sm.regression
#           ('cv', 'df' or 'aicc'); the bandwidth is chosen once on the
#           full sample and reused for every leave-one-out fit.
# weights : optional per-observation weights (NA = unit weights).
# Returns the weighted sum of squared leave-one-out prediction errors.
{
# Bandwidth selected by `method` on the full sample.
hopt=sm.regression(x,y,method=method,display='none')$h
n=length(x)
# Scalar-NA sentinel check: the original `if (is.na(weights))` raises an
# error in R >= 4.2 when a weights vector is supplied (condition of
# length > 1), so test only the default scalar sentinel.
if (length(weights)==1 && is.na(weights)) weights=rep(1,n)
CV=0
for (i in seq_len(n))
{
# Refit without observation i (same bandwidth) and predict at x[i].
pred_i=sm.regression(x[-i],y[-i],h=hopt,eval.points=x[i],display='none')$estimate
CV=CV+(y[i]-pred_i)^2*weights[i]
}
return(CV)
}
####
ROC=function(z,p1)
# Empirical ROC curve and AUC for scores p1 against binary labels z
# (z == 1 is the positive class). One point per distinct threshold,
# using the strict rule score > threshold.
# Plots the curve and returns list(ROC = FPR/TPR matrix, AUC = area).
{
p=sort(unique(p1))
lp=length(p)
ROC=matrix(NA,lp,2)
for (s in seq_len(lp))
{
# Column 1: false-positive rate, column 2: true-positive rate.
ROC[s,1]=mean(p1[z==0]>p[s])
ROC[s,2]=mean(p1[z==1]>p[s])
}
# Rectangle rule over FPR. The widths start from FPR = 1 (threshold below
# min(p1)): the original sum(ROC[-lp,2]*(ROC[-lp,1]-ROC[-1,1])) dropped
# that leading rectangle and under-estimated the AUC (perfectly separated
# classes gave 0.5 instead of 1). This form matches the Mann-Whitney
# pair-count AUC for untied scores.
AUC=sum(ROC[,2]*(c(1,ROC[-lp,1])-ROC[,1]))
colnames(ROC)=c("FPR","TPR")
plot(ROC,type='l',main=paste('AUC =', AUC))
return(list(ROC=ROC,AUC=AUC))
}
#######
Classif_NP=function(X,Y,X0=NA)
# Nonparametric (kernel) classifier built on sm.regression.
# For each class v, the indicator 1{Y == v} is regressed on X; each point
# is assigned to the class with the largest estimated probability.
# X  : covariates (vector or matrix with n rows).
# Y  : class labels of length n.
# X0 : optional new covariates to classify (default NA = none).
# Returns list(Class, Prob, M_table, err) — leave-one-out in-sample
# classes, probability matrix, confusion table and error rate — plus
# Class0/Prob0 when X0 is supplied. In the binary case the ROC curve of
# the second class' probabilities is also drawn.
{
X=as.matrix(X)
n=length(Y)
V=sort(unique(Y))
n_V=length(V)
Prob=matrix(NA,n,n_V)
colnames(Prob)=V
Class=rep(NA,n)
# X0 counts as "supplied" when it contains no NA; anyNA() replaces the
# original is.na(max(X0)) sentinel test with the same semantics and is
# computed once instead of four times.
has_X0=!anyNA(X0)
if (has_X0)
{
X0=as.matrix(X0)
P0=matrix(NA,nrow(X0),n_V)
# Bug fix: was rep(NA, n) — the prediction vector must have one entry
# per row of X0, not per training observation.
Class0=rep(NA,nrow(X0))
}
for (v in 1:n_V)
{
z=as.numeric(Y==V[v])
# Leave-one-out estimate of P(Y = V[v] | X = x_i) for every i.
for (i in 1:n )
{
Prob[i,v]=sm.regression(X[-i,],z[-i], eval.points=X,eval.grid=FALSE,display='none')$estimate[i]
}
if (has_X0) {P0[,v]=sm.regression(X,z,eval.points=X0,eval.grid=FALSE,display='none')$estimate}
}
if (n_V==2) {ROC(Y==V[2],Prob[,2])}
Class=V[apply(Prob,1,which.max)]
M_table=table(Y,Class)
if (has_X0)
{
Class0=V[apply(P0,1,which.max)]
return(list(Class=Class, Prob=Prob, M_table=M_table, err=1-sum(diag(M_table))/n, Class0=Class0,Prob0=P0))
}
return(list(Class=Class, Prob=Prob, M_table=M_table, err=1-sum(diag(M_table))/n))
}
```
# Regression
## Simulation
### Regression 1
\[Y=10e^{-3X}+e^{-\frac{X}{2}}\epsilon, \]
avec $X \sim \mathcal{E}(2)$ (taux 2, cf. `rexp(n,2)`) et $\epsilon \sim \mathcal{N}(0,1)$.
```{r}
# Simulate model 1: Y = 10*exp(-3X) + exp(-X/2)*eps.
# x is drawn from an exponential distribution with rate 2.
n=100
x=rexp(n,2)
e=rnorm(n,0,exp(-x/2))   # heteroscedastic noise, sd = exp(-x/2)
y=10*exp(-3*x)+e
# True regression curve on [0, 3] with the simulated sample overlaid.
plot(function(x) 10*exp(-3*x),0,3,ylab='y')
lines(x,y,col="red",type="p")
# Kernel fits for three fixed bandwidths (under/medium/over-smoothing).
sm.regression(x,y,h=0.01,col="blue",eval.points=sort(x),add=T,nbins=0)
sm.regression(x,y,h=.1,col="orange",eval.points=sort(x),add=T,nbins=0)
sm.regression(x,y,h=1,col="green",eval.points=sort(x),add=T,nbins=0)
legend(x="topright",leg=c('h=0.01','h=0.1','h=1'), col=c("blue","orange","green"),lty=1)
# Same data with automatically selected bandwidths (cv / df / aicc).
plot(function(x) 10*exp(-3*x),0,3)
lines(x,y,col="red",type="p")
reg3=sm.regression(x,y,method="cv",col="blue",eval.points=sort(x),add=T,nbins=0)
reg1=sm.regression(x,y,method="df",col="orange",add=T,eval.points=sort(x),nbins=0)
reg2=sm.regression(x,y,method="aicc",col="green",add=T,eval.points=sort(x),nbins=0)
legend(x="topright",leg=c('cv',"df",'aicc'), col=c("blue","orange","green"),lty=1)
# Fitted values against observed y (estimates are ordered by sorted x,
# hence the y[order(x)] pairing); the identity line is the ideal fit.
plot(reg1$estimate,y[order(x)],col="orange")
lines(reg2$estimate,y[order(x)],col="green",type='p')
lines(reg3$estimate,y[order(x)],col="blue",type='p')
abline(0,1)
```
#### Erreur quadratique et Validation croisée
```{r}
# Quadratic error: in-sample MSE of each bandwidth-selection method
# (estimates are ordered by sorted x, hence the y[order(x)] pairing).
mean((reg1$estimate-y[order(x)])^2)
mean((reg2$estimate-y[order(x)])^2)
mean((reg3$estimate-y[order(x)])^2)
# Cross-validation: leave-one-out score of each method, per observation.
Compute_CV(x,y,'cv')/n
Compute_CV(x,y,'df')/n
Compute_CV(x,y,'aicc')/n
# CV of the constant predictor mean(y[-j]) — baseline to beat.
n=length(x)
CV_y=0
for (j in 1:n)
{
CV_y=CV_y+(y[j]-mean(y[-j]))^2
}
CV_y/n
# Uncorrected (population) variance of the noise e, for reference.
var(e)*(n-1)/n
```
### Regression 2
\[Y=7\cos(7X)+10e^{-3X}+3e^{-\frac{X}{2}}\epsilon, \]
avec $X \sim \mathcal{E}(2)$ (taux 2, cf. `rexp(n,2)`) et $\epsilon \sim \mathcal{N}(0,1)$.
```{r}
# Simulate model 2: Y = 7*cos(7X) + 10*exp(-3X) + 3*exp(-X/2)*eps.
# x is drawn from an exponential distribution with rate 2.
n=1000
x=rexp(n,2)
e=3*rnorm(n,0,exp(-x/2))   # heteroscedastic noise, sd = 3*exp(-x/2)
y=7*cos(7*x)+10*exp(-3*x)+e
# True regression curve on [0, 3] with the simulated sample overlaid.
plot(function(x) 10*exp(-3*x)+7*cos(7*x),0,3)
lines(x,y,col="red",type="p")
# Kernel fits for three fixed bandwidths.
sm.regression(x,y,h=0.01,col="blue",eval.points=sort(x),add=T,nbins=0)
sm.regression(x,y,h=.1,col="orange",eval.points=sort(x),add=T,nbins=0)
sm.regression(x,y,h=1,col="green",eval.points=sort(x),add=T,nbins=0)
legend(x="topright",leg=c('h=0.01','h=0.1','h=1'), col=c("blue","orange","green"),lty=1)
# NOTE(review): the next plot is issued twice; the first copy receives no
# fitted curves — confirm whether it is intentional.
plot(function(x) 10*exp(-3*x)+7*cos(7*x),0,3)
lines(x,y,col="red",type="p")
# Automatically selected bandwidths (cv / df / aicc).
plot(function(x) 10*exp(-3*x)+7*cos(7*x),0,3)
lines(x,y,col="red",type="p")
reg3=sm.regression(x,y,method="cv",col="blue", eval.points=sort(x),add=T,nbins=0)
legend(x="topright",leg=c('cv',"df",'aicc'), col=c("blue","orange","green"),lty=1)
reg1=sm.regression(x,y,method="df",col="orange",eval.points=sort(x),add=T,nbins=0)
reg2=sm.regression(x,y,method="aicc",col="green", eval.points=sort(x),add=T,nbins=0)
# Fitted values against observed y; the identity line is the ideal fit.
plot(reg1$estimate,y[order(x)],col="orange")
lines(reg2$estimate,y[order(x)],col="green",type='p')
lines(reg3$estimate,y[order(x)],col="blue",type='p')
abline(0,1)
```
#### Erreur quadratique et Validation croisée
```{r}
# Quadratic error: in-sample MSE of each bandwidth-selection method.
mean((reg1$estimate-y[order(x)])^2)
mean((reg2$estimate-y[order(x)])^2)
mean((reg3$estimate-y[order(x)])^2)
# Cross-validation: leave-one-out score of each method, per observation.
Compute_CV(x,y,'cv')/n
Compute_CV(x,y,'df')/n
Compute_CV(x,y,'aicc')/n
# CV of the constant predictor mean(y[-j]) — baseline to beat.
n=length(x)
CV_y=0
for (j in 1:n)
{
CV_y=CV_y+(y[j]-mean(y[-j]))^2
}
CV_y/n
# Uncorrected (population) variance of the noise e, for reference.
var(e)*(n-1)/n
```
## Real world data
```{r}
# Load the MASS::mcycle data (motorcycle-accident head acceleration
# measured over time) and set up the regression variables.
library(MASS)
data(mcycle)
head(mcycle)
x=mcycle$times   # time after impact (ms)
y=mcycle$accel   # head acceleration (g)
n=length(x)
```
```{r}
# Plot the data with kernel fits for three fixed bandwidths.
plot(x, y, xlab = "Time (ms)", ylab = "Acceleration (g)")
sm.regression(x,y,h=0.1,col="blue",eval.points=sort(x),add=T,nbins=0)
sm.regression(x,y,h=1,col="orange",eval.points=sort(x),add=T,nbins=0)
sm.regression(x,y,h=10,col="green",eval.points=sort(x),add=T,nbins=0)
legend(x="topright",leg=c('h=0.1','h=1','h=10'), col=c("blue","orange","green"),lty=1)
# Automatically selected bandwidths (cv / df / aicc).
plot(x,y,col="red",type="p")
reg3=sm.regression(x,y,method="cv",col="blue", eval.points=sort(x),add=T,nbins=0)
legend(x="topright",leg=c('cv',"df",'aicc'), col=c("blue","orange","green"),lty=1)
reg1=sm.regression(x,y,method="df",col="orange",eval.points=sort(x),add=T,nbins=0)
reg2=sm.regression(x,y,method="aicc",col="green", eval.points=sort(x),add=T,nbins=0)
# Fitted values against observed y (estimates follow sorted x, hence
# y[order(x)]); the identity line is the ideal fit.
plot(reg1$estimate,y[order(x)],col="orange")
lines(reg2$estimate,y[order(x)],col="green",type='p')
lines(reg3$estimate,y[order(x)],col="blue",type='p')
abline(0,1)
```
#### Erreur quadratique et Validation croisée
```{r}
# Quadratic error: in-sample MSE of each bandwidth-selection method.
mean((reg1$estimate-y[order(x)])^2)
mean((reg2$estimate-y[order(x)])^2)
mean((reg3$estimate-y[order(x)])^2)
# Cross-validation: leave-one-out score of each method, per observation.
Compute_CV(x,y,'cv')/n
Compute_CV(x,y,'df')/n
Compute_CV(x,y,'aicc')/n
# CV of the constant predictor mean(y[-j]) — baseline to beat.
n=length(x)
CV_y=0
for (j in 1:n)
{
CV_y=CV_y+(y[j]-mean(y[-j]))^2
}
CV_y/n
```
# Classification supervisée
## Simulation
```{r}
rmixing2=function(n,alpha,l0,l1,p0,p1)
# Draw n observations from the two-component mixture
#   alpha * L1(p1) + (1 - alpha) * L0(p0),
# where l0/l1 are distribution roots ('norm', 'exp', ...) and p0/p1 their
# parameter vectors passed to the r<root> sampler.
# Returns list(x = sample, z = component labels, z == 1 meaning L1).
{
z=rbinom(n,1,alpha)
# Build the sampler calls r<root>(n, p...) with do.call instead of the
# original eval(parse(text = ...)) string construction; the RNG call
# order (z, then f1, then f0) is preserved.
f1=do.call(paste0('r',l1),c(list(n),as.list(p1)))
f0=do.call(paste0('r',l0),c(list(n),as.list(p0)))
x=z*f1+(1-z)*f0
return(list(x=x,z=z))
}
dmixing2=function(t,alpha,l0,l1,p0,p1)
# Density of the two-component mixture alpha*L1(p1) + (1-alpha)*L0(p0)
# at the points t, together with the posterior component probabilities.
# Returns list(mix  = mixture density,
#              p0_t = P(Z = 0 | t) = (1-alpha)*d0(t)/mix,
#              p1_t = P(Z = 1 | t) = alpha*d1(t)/mix).
{
# Evaluate each component density once via do.call (the original
# re-parsed and re-evaluated the d<root> calls three times with
# eval(parse(text = ...))).
d1=do.call(paste0('d',l1),c(list(t),as.list(p1)))
d0=do.call(paste0('d',l0),c(list(t),as.list(p0)))
mix=alpha*d1+(1-alpha)*d0
# Bug fix: the original returned d1/mix and d0/mix, omitting the mixture
# weights, so p0_t + p1_t did not sum to 1 and thresholding p1_t at 0.5
# was not the Bayes rule. Bayes' formula requires the prior weights.
p1_t=alpha*d1/mix
p0_t=(1-alpha)*d0/mix
return(list(mix=mix,p0_t=p0_t,p1_t=p1_t))
}
# Example: mixture of N(4, 1) (component 0, weight 0.7) and N(0, 2)
# (component 1, weight alpha = 0.3); parameter vectors are passed to
# rnorm/dnorm as (mean, sd).
n=300
alpha=0.3
l0='norm'
p0=c(4,1)
l1='norm'
p1=c(0,2)
s=seq(-10,10,0.001)   # NOTE(review): grid defined but unused in this chunk
r=rmixing2(n,alpha,l0,l1,p0,p1)
x=r$x
z=r$z
# Component probabilities evaluated at the sample points.
p0_x=dmixing2(x,alpha,l0,l1,p0,p1)$p0_t
p1_x=dmixing2(x,alpha,l0,l1,p0,p1)$p1_t
u=sort(x)
plot(u,p0_x[order(x)],ylab='p0_x',type='l')
plot(u,p1_x[order(x)],ylab='p1_x',type='l')
# Kernel classifier vs. the classifier built from the model probabilities.
Classif_NP(x,z)
ROC(z,p1_x)
Class_Bayes=as.numeric(p1_x>0.5)
# Confusion table and error rate of the probability-threshold classifier.
(M_table=table(z,Class_Bayes))
(err=1-sum(diag(M_table))/n)
```
## Real world data
### Dopage
```{r}
# Doping data: classify the test outcome (`test`) from the `hema`
# measurement loaded from Dopage.RData.
load('Dopage.RData')
x=hema
z=test
Classif_NP(x,z)
```
```{r}
# NOTE(review): this chunk is an exact duplicate of the previous one —
# confirm whether a variation (e.g. different covariate) was intended.
load('Dopage.RData')
x=hema
z=test
Classif_NP(x,z)
```
### Iris
```{r}
# Run the nonparametric classifier on each of the four iris measurements
# separately, predicting the species; print the confusion table and
# error rate for each covariate.
data('iris')
for (j in colnames(iris)[1:4])
{
print(j)
# Index the data frame directly instead of the original attach()/get()
# pair: same values, without leaving 'iris' on the search path.
x=iris[[j]]
z=iris$Species
r=Classif_NP(x,z)
print(r$M_table)
print(r$err)
}
```