\documentclass[letter]{report}
\usepackage[margin=0.5in]{geometry}
\usepackage{Sweave}
\usepackage{graphicx}
\usepackage[english]{babel}
\usepackage[utf8]{inputenc}
\usepackage[T1]{fontenc}
\usepackage{amsmath}
\usepackage{amsfonts}
\usepackage{verbatim}
\usepackage{float}
\usepackage{hyperref}
\usepackage{scrtime}
\begin{document}
\title{GAL Buckle 95}
\author{François Pelletier}
\maketitle
\tableofcontents
\chapter{Initialization}
\section{Loading the packages}
<<>>=
# Working directory: adjust to the local path of the repository
setwd("/home/francois/git/GAL-Buckle95")
library(actuar)
library(MASS)
library(xtable)
library(moments)
library(parallel) # provides mclapply, used in the goodness-of-fit tests below
library(FourierStuff)
library(GeneralizedAsymmetricLaplace)
library(GMMStuff)
library(OptionPricingStuff)
library(QuadraticEstimatingEquations)
@
\section{Constants and data}
<<>>=
# Number of decimal places displayed
options(digits=6)
# Significance level for confidence intervals
alpha.confint <- 0.05
# Significance level for hypothesis tests
alpha.test <- 0.05
# Load the data
RETURNS <- head(read.csv("abbeyn.csv",sep="\t",header=TRUE)[,1],-1)
# Sample size
n <- length(RETURNS)
# Sample name
strData <- "Buckle95"
@
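The first four sample moments are the quantities matched by the QEE and GMM
estimators below; an illustrative summary (an addition, not part of the
original analysis) can be obtained with the already loaded \texttt{moments}
package.
<<>>=
# Illustrative summary of the sample moments (added for reference)
c(mean=mean(RETURNS), sd=sd(RETURNS),
  skewness=skewness(RETURNS), kurtosis=kurtosis(RETURNS))
@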
\section{Normality test}
<<>>=
EppsPulley.test(RETURNS)
@
\chapter{Estimation}
\section{Scaled data}
<<>>=
sRET <- as.vector(scale(RETURNS))
@
\section{First QEE estimation}
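As a reminder of the technique (a sketch only; the exact weights used by
\texttt{obj.gauss}, \texttt{obj.Crowder} and \texttt{obj.Crowder.Mod} are those
defined in the \texttt{QuadraticEstimatingEquations} package), quadratic
estimating equations combine linear and quadratic residuals,
\[
\sum_{i=1}^{n}\Big\{ a_i(\theta)\,\big(y_i-\mu(\theta)\big)
 + b_i(\theta)\,\big[(y_i-\mu(\theta))^2-\sigma^2(\theta)\big]\Big\} = 0,
\]
where the Gaussian variant picks the weights $a_i,b_i$ as under normality,
while Crowder's weights also involve the skewness and kurtosis, which is why
\texttt{skewnessQEE} and \texttt{kurtosisQEE} enter the corresponding calls.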
<<>>=
## Starting point
pt.depart <- startparamGAL(sRET)
## Moment functions
meanQEE <- function(param) mGAL(param,1)
varianceQEE <- function(param) cmGAL(param,2)
sdQEE <- function(param) sqrt(cmGAL(param,2))
skewnessQEE <- function(param) cmGAL(param,3)
kurtosisQEE <- function(param) cmGAL(param,4)
## Derivative functions
dmeanQEE <- function(param) dmGAL(param,1)
dsdQEE <- function(param) dmGAL(param,2)
## Gaussian estimation
optim1 <- optim(pt.depart,obj.gauss,gr=NULL,sRET,
meanQEE,varianceQEE,dmeanQEE,dsdQEE)
pt.optim1 <- optim1$par
## Crowder estimation
optim2 <- optim(pt.depart,obj.Crowder,gr=NULL,sRET,
meanQEE,varianceQEE,skewnessQEE,kurtosisQEE,dmeanQEE,dsdQEE)
pt.optim2 <- optim2$par
## Modified Crowder estimation
optim3 <- optim(pt.depart,obj.Crowder.Mod,gr=NULL,sRET,
meanQEE,varianceQEE,dmeanQEE,dsdQEE)
pt.optim3 <- optim3$par
@
\section{Results of the first QEE estimation}
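The helper \texttt{covariance.QEE} presumably combines the sensitivity matrix
$M$ and the variability matrix $V$ into the usual sandwich estimator, from
which \texttt{confidence.interval.QEE} builds Wald-type intervals (up to the
exact normalization by $n$):
\[
\widehat{\Sigma} = \frac{1}{n}\,M^{-1} V (M^{-1})^{\top},
\qquad
\hat\theta_j \pm z_{1-\alpha/2}\sqrt{\widehat{\Sigma}_{jj}}.
\]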
<<>>=
cov.optim1 <- covariance.QEE(M.gauss(pt.optim1,sRET,
meanQEE,varianceQEE,dmeanQEE,dsdQEE),
V.gauss(pt.optim1,sRET,meanQEE,varianceQEE,
skewnessQEE,kurtosisQEE,dmeanQEE,dsdQEE),n)
cov.optim2 <- covariance.QEE(M.Crowder(pt.optim2,sRET,
varianceQEE,skewnessQEE,kurtosisQEE,dmeanQEE,dsdQEE),
V.Crowder(pt.optim2,sRET,varianceQEE,
skewnessQEE,kurtosisQEE,dmeanQEE,dsdQEE),n)
cov.optim3 <- covariance.QEE(M.Crowder.Mod(pt.optim3,sRET,
varianceQEE,skewnessQEE,kurtosisQEE,dmeanQEE,dsdQEE),
V.Crowder.Mod(pt.optim3,sRET,varianceQEE,dmeanQEE,dsdQEE),n)
confidence.interval.QEE(pt.optim1,cov.optim1,n)
confidence.interval.QEE(pt.optim2,cov.optim2,n)
confidence.interval.QEE(pt.optim3,cov.optim3,n)
@
\section{Second QEE estimation}
The second pass restarts the optimization from the first-step estimates and
weights the estimating equations with the generalized inverse of the
variability matrix $V$ evaluated at those estimates.
<<>>=
## Gaussian estimation
optim4 <- optim(pt.optim1,obj.gauss,gr=NULL,sRET,
meanQEE,varianceQEE,dmeanQEE,dsdQEE,
ginv(V.gauss(pt.optim1,sRET,meanQEE,
varianceQEE,skewnessQEE,kurtosisQEE,
dmeanQEE,dsdQEE)))
pt.optim4 <- optim4$par
## Crowder estimation
optim5 <- optim(pt.optim2,obj.Crowder,gr=NULL,sRET,
meanQEE,varianceQEE,skewnessQEE,kurtosisQEE,dmeanQEE,dsdQEE,
ginv(V.Crowder(pt.optim2,sRET,varianceQEE,skewnessQEE,
kurtosisQEE,dmeanQEE,dsdQEE)))
pt.optim5 <- optim5$par
## Modified Crowder estimation
optim6 <- optim(pt.optim3,obj.Crowder.Mod,gr=NULL,sRET,
meanQEE,varianceQEE,dmeanQEE,dsdQEE,
ginv(V.Crowder.Mod(pt.optim3,sRET,varianceQEE,
dmeanQEE,dsdQEE)))
pt.optim6 <- optim6$par
@
\section{Results of the second QEE estimation}
<<>>=
cov.optim4 <- covariance.QEE(M.gauss(pt.optim4,sRET,
meanQEE,varianceQEE,dmeanQEE,dsdQEE),
V.gauss(pt.optim4,sRET,meanQEE,varianceQEE,
skewnessQEE,kurtosisQEE,dmeanQEE,dsdQEE),n)
cov.optim5 <- covariance.QEE(M.Crowder(pt.optim5,sRET,
varianceQEE,skewnessQEE,kurtosisQEE,dmeanQEE,dsdQEE),
V.Crowder(pt.optim5,sRET,varianceQEE,skewnessQEE,
kurtosisQEE,dmeanQEE,dsdQEE),n)
cov.optim6 <- covariance.QEE(M.Crowder.Mod(pt.optim6,sRET,
varianceQEE,skewnessQEE,kurtosisQEE,dmeanQEE,dsdQEE),
V.Crowder.Mod(pt.optim6,sRET,varianceQEE,dmeanQEE,dsdQEE),n)
confidence.interval.QEE(pt.optim4,cov.optim4,n)
confidence.interval.QEE(pt.optim5,cov.optim5,n)
confidence.interval.QEE(pt.optim6,cov.optim6,n)
@
\section{GMM estimation}
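With moment conditions built from the mean and the variance of the GAL law
(\texttt{meanvariance.gmm.vector}), the GMM estimator minimizes the quadratic
form
\[
\hat\theta = \arg\min_{\theta}\; \bar g(\theta)^{\top} W\, \bar g(\theta),
\qquad
\bar g(\theta) = \frac{1}{n}\sum_{i=1}^{n} g(x_i;\theta),
\]
here started with $W = I_2$; the iterative variant re-estimates $W$ from the
covariance of the moment conditions at each step.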
<<>>=
## Standard GMM
optim7 <- optim.GMM(pt.depart,
conditions.vector=meanvariance.gmm.vector,
data=sRET,W=diag(2),
meanf=meanQEE,variancef=varianceQEE)
pt.optim7 <- optim7$par
cov.optim7 <- mean.variance.GMM.gradient.GAL(pt.optim7,sRET) %*%
covariance.GMM(pt.optim7,meanvariance.gmm.vector,sRET,
meanf=meanQEE,variancef=varianceQEE) %*%
t(mean.variance.GMM.gradient.GAL(pt.optim7,sRET)) / n
## Iterative GMM
optim8 <- iterative.GMM(pt.depart,
conditions.vector=meanvariance.gmm.vector,
data=sRET,W=diag(2),
meanf=meanQEE,variancef=varianceQEE)
pt.optim8 <- optim8$par
cov.optim8 <- mean.variance.GMM.gradient.GAL(pt.optim8,sRET) %*%
optim8$cov %*%
t(mean.variance.GMM.gradient.GAL(pt.optim8,sRET)) / n
confidence.interval.QEE(pt.optim7,cov.optim7,n)
confidence.interval.QEE(pt.optim8,cov.optim8,n)
@
\chapter{Comparison of results}
<<>>=
# Aggregate the estimators (to simplify the computations)
pts.estim <- cbind(pt.optim1,pt.optim2,pt.optim3,pt.optim4,
pt.optim5,pt.optim6,pt.optim7,pt.optim8)
l.pts.estim <- as.list(data.frame(pts.estim))
@
\section{Distribution function}
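Three approximations of the fitted distribution function are compared. The
first recovers the CDF from the characteristic function $\varphi$;
\texttt{cftocdf} presumably implements an inversion of the Gil-Pelaez type,
\[
F(x) = \frac{1}{2} - \frac{1}{\pi}\int_{0}^{\infty}
\frac{\operatorname{Im}\!\big[e^{-itx}\varphi(t)\big]}{t}\,dt .
\]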
<<>>=
# Evaluation points
xi <- seq(2*min(sRET),2*max(sRET),length.out=2^6)
# Distribution function by integration of the characteristic function
dist1 <- cbind(cftocdf(xi,cfGAL,param=pt.optim1),
cftocdf(xi,cfGAL,param=pt.optim2),
cftocdf(xi,cfGAL,param=pt.optim3),
cftocdf(xi,cfGAL,param=pt.optim4),
cftocdf(xi,cfGAL,param=pt.optim5),
cftocdf(xi,cfGAL,param=pt.optim6),
cftocdf(xi,cfGAL,param=pt.optim7),
cftocdf(xi,cfGAL,param=pt.optim8))
# Distribution function by saddlepoint approximation
dist2 <- cbind(psaddleapproxGAL(xi,pt.optim1),
psaddleapproxGAL(xi,pt.optim2),
psaddleapproxGAL(xi,pt.optim3),
psaddleapproxGAL(xi,pt.optim4),
psaddleapproxGAL(xi,pt.optim5),
psaddleapproxGAL(xi,pt.optim6),
psaddleapproxGAL(xi,pt.optim7),
psaddleapproxGAL(xi,pt.optim8))
# Distribution function by integration of the density function
dist3 <- cbind(pGAL(xi,pt.optim1),
pGAL(xi,pt.optim2),
pGAL(xi,pt.optim3),
pGAL(xi,pt.optim4),
pGAL(xi,pt.optim5),
pGAL(xi,pt.optim6),
pGAL(xi,pt.optim7),
pGAL(xi,pt.optim8))
@
\pagebreak
\subsection{Plots}
<<results=tex>>=
for (i in 1:8)
{
  file <- paste(strData,"-repart-",i,".pdf",sep="")
  pdf(file=file,paper="special",width=6,height=6)
  plot.ecdf(sRET,main=paste("Distribution function",i))
  # CF inversion (green), saddlepoint (red), density integration (pink), standard normal (blue)
  lines(xi,dist1[,i],col="green")
  lines(xi,dist2[,i],col="red")
  lines(xi,dist3[,i],col="pink")
  lines(xi,pnorm(xi),type="l",col="blue")
  dev.off()
  cat("\\includegraphics[height=4in,width=4in]{",
      file,"}\n",sep="")
}
@
\subsection{Statistics}
$\chi^2$ test, integration method
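The statistic presumably compares the observed histogram counts $O_k$ with the
expected counts $E_k$ under the fitted GAL law,
\[
Q = \sum_{k} \frac{(O_k - E_k)^2}{E_k},
\]
the two variants below differing in how $E_k$ is computed
(characteristic-function integration versus saddlepoint approximation).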
<<results=tex>>=
chisquare.test1 <- function(param,DATA.hist,FUN,method)
{
chisquare.test(DATA.hist,FUN,param,method=method)
}
xtable(do.call(rbind,lapply(l.pts.estim,chisquare.test1,hist(sRET),
cfGAL,"integral")),digits=6)
@
$\chi^2$ test, saddlepoint method
<<results=tex>>=
xtable(do.call(rbind,lapply(l.pts.estim,chisquare.test1,hist(sRET),
pGAL,"saddlepoint")),digits=6)
@
Kolmogorov-Smirnov statistic
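The Kolmogorov-Smirnov statistic is the largest gap between the empirical
distribution function and the fitted GAL distribution function,
\[
D_n = \sup_{x}\big|F_n(x) - F_{\mathrm{GAL}}(x;\hat\theta)\big| .
\]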
<<results=tex>>=
ks.test1 <- function(param,x,y) ks.test(x,y,param)
xtable(do.call(rbind,mclapply(l.pts.estim,ks.test1,sRET,"pGAL")),digits=6)
@
Minimum distance statistic
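A sketch of the idea (the exact weighting used by \texttt{md.test} is the one
defined in the package): the statistic measures the discrepancy between the
empirical characteristic function \texttt{empCF} and the fitted \texttt{cfGAL}
over the grid \texttt{tvariate1}, roughly of the form
\[
d(\hat\theta) = \sum_{j}\big|\varphi_n(t_j) - \varphi_{\mathrm{GAL}}(t_j;\hat\theta)\big|^{2}.
\]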
<<results=tex>>=
tvariate1 <- seq(-.1,.1,by=0.01)
xtable(do.call(rbind,mclapply(l.pts.estim,
md.test,sRET,tvariate1,cfGAL,empCF)),digits=6)
@
\section{Density function}
The saddlepoint approximation of the density is integrated in order to
normalize it into a function that integrates to 1.
<<>>=
f_integrale_saddle <- function(param,f,lower,upper)
integrate(f,lower,upper,param)$value
norm_int_saddle <- sapply(l.pts.estim,f_integrale_saddle,
f=dsaddleapproxGAL,lower=-Inf,upper=Inf)
@
Sequence of points for the plots
<<>>=
x_sRET <- seq(min(sRET)-sd(sRET),max(sRET)+sd(sRET),length.out=50)
@
Plot of the density function
<<results=tex>>=
colors2 <- c("black","red","green","blue","grey")
for (i in 1:dim(pts.estim)[2])
{
  file <- paste(strData,"-densite-", i, ".pdf", sep="")
  pdf(file=file, paper="special", width=6, height=6)
  # Empirical kernel density estimate
  plot(density(sRET),ylim=c(0,.7),type="l",
       main=paste("Density of",strData, i),xlab=strData,
       ylab="f",lwd=2,lty=1)
  # Fitted GAL density
  points(x_sRET,
         dGAL(x_sRET,pts.estim[,i]),
         type="b",ylim=c(0,4),col="red",pch=19,lwd=2,lty=2)
  # Normalized saddlepoint approximation
  points(x_sRET,
         dsaddleapproxGAL(x_sRET,pts.estim[,i])/norm_int_saddle[i],
         type="b",ylim=c(0,4),col="green",pch=20,lwd=2,lty=3)
  # Standard normal benchmark
  lines(x_sRET,dnorm(x_sRET),type="b",col="blue",
        pch=21,lwd=2,lty=4)
  # Density recovered from the characteristic function by FFT
  points(seq(-2,4,length.out=1000)[seq(40,1000,by=40)],
         cftodensity.fft(cfGAL,1000,-2,4,pts.estim[,i])$dens[seq(40,1000,by=40)],
         type="b",col="grey",pch=23,lty=6)
  legend(quantile(sRET,0.9),0.7, c("emp","est.GAL","saddle","norm.appx","fft"),
         cex=0.8, col=colors2, pch=c(NA,19,20,21,23), lty=c(1,2,3,4,6), title="Curves")
  dev.off()
  cat("\\includegraphics[height=4in, width=4in]{"
      ,file, "}\n", sep="")
}
@
\subsection{Tests with constraints}
Wald test
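Here $R$ selects the last two parameters and $r=0$; \texttt{Wald.Test}
presumably computes the usual statistic
\[
W = n\,(R\hat\theta - r)^{\top}\big[R\,\widehat{\Sigma}\,R^{\top}\big]^{-1}(R\hat\theta - r)
\;\xrightarrow{d}\; \chi^{2}_{2},
\]
with $\widehat{\Sigma}$ the asymptotic covariance of $\hat\theta$ built from the
moment covariance $V$ and the gradient $D$ computed below.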
<<>>=
R <- matrix(c(0,0,1,0,
0,0,0,1),ncol=4)
r <- matrix(c(0,0),ncol=1)
V <- lapply(l.pts.estim,covariance.GMM,meanvariance.gmm.vector,
sRET,meanQEE,varianceQEE)
D <- lapply(l.pts.estim,mean.variance.GMM.gradient.GAL,sRET)
xtable(mapply(Wald.Test,l.pts.estim,n,list(R),list(r),V,D),
caption="Test de Wald", digits=2)
@
\subsection{True parameters}
Since the estimation was carried out on centered and scaled data, we use a
property of the GAL distribution to recover the parameters of the unscaled
returns.
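A sketch of the property being used, assuming the usual GAL parametrization
$(\theta,\sigma,\mu,\tau)$ handled by \texttt{scaleGAL}: if
$Z \sim \mathrm{GAL}(\theta,\sigma,\mu,\tau)$ and $X = aZ + b$ with $a>0$, then
\[
X \sim \mathrm{GAL}\big(a\theta + b,\; a\sigma,\; a\mu,\; \tau\big),
\]
so the original-scale parameters are recovered with
$a = \operatorname{sd}(\texttt{RETURNS})$ and $b = \operatorname{mean}(\texttt{RETURNS})$.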
<<>>=
pts.estim.ns <- apply(pts.estim,2,scaleGAL,type="mu",
mean(RETURNS),sd(RETURNS))
@
<<results=tex>>=
xtable(pts.estim.ns,
caption="Parameters of the uncentered, unscaled data",
digits=4)
@
\section{Option prices}
\subsection{Input data}
<<>>=
# Risk-free rate (daily)
rfrate <- .05/365
# Time to expiry (days)
Time_to_expiry <- 30
# Discretization step of the strike grid
pas <- 1/300
# Initial price
stock0 <- 299
# In-the-money strike prices (put)
strike1 <- stock0*seq(0.90,1,pas)
# Out-of-the-money strike prices (put)
strike2 <- stock0*seq(1+pas,1.10,pas)
# Combined strike prices
strike <- c(strike1,strike2)
# Damping parameter (Carr-Madan)
alpha <- 3
@
\subsection{Risk-neutral parameters}
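A note on the construction (an assumption about \texttt{riskneutralparGAL},
whose exact definition lives in \texttt{OptionPricingStuff}): the risk-neutral
parameters are presumably chosen so that the discounted price is a martingale,
i.e. the cumulant generating function $\kappa$ of the daily log-return under
the risk-neutral measure satisfies
\[
\kappa(1) = \log \mathrm{E}^{Q}\big[e^{X_1}\big] = r,
\]
with $r$ the daily risk-free rate \texttt{rfrate}.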
<<>>=
pts.estim.ns.rn <- apply(pts.estim.ns,2,riskneutralparGAL,rfrate)
l.pts.estim.ns.rn <- as.list(data.frame(pts.estim.ns.rn))
@
<<results=tex>>=
xtable(pts.estim.ns.rn,caption="Risk-neutral parameters",digits=4)
@
\subsection{Epps method}
<<>>=
f_putEpps <- function(param,strikeprice,char.fn,eval.time,expiry.time,rate,...)
  putEpps(strikeprice,char.fn,param,eval.time,expiry.time,rate,...)
prix_put_Epps <- as.data.frame(cbind(strike/stock0,
sapply(l.pts.estim.ns.rn,f_putEpps,strike/stock0,cfLM,0,Time_to_expiry,rfrate)))
@
<<results=tex>>=
xtable(prix_put_Epps,
caption="Unit price of the put option, Epps method",digits=6)
@
\subsection{Carr-Madan method}
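For reference, the Carr-Madan representation of the damped call price, which
\texttt{callCarrMadan} presumably implements: with $k=\log K$ and damping
parameter $\alpha$,
\[
C(k) = \frac{e^{-\alpha k}}{\pi}\int_{0}^{\infty} e^{-ivk}\,\psi_T(v)\,dv,
\qquad
\psi_T(v) = \frac{e^{-rT}\,\varphi_T\!\big(v-(\alpha+1)i\big)}
{\alpha^{2}+\alpha-v^{2}+i(2\alpha+1)v},
\]
where $\varphi_T$ is the characteristic function of the log-price at expiry.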
<<>>=
f_callCarrMadan <- function(param,strikeprice,char.fn,eval.time,expiry.time,rate,alpha,...)
{
  callCarrMadan(strikeprice,char.fn,param,eval.time,expiry.time,rate,alpha,...)
}
prix_call_CarrMadan <- as.data.frame(cbind(strike/stock0,
sapply(l.pts.estim.ns.rn,f_callCarrMadan,strike/stock0,cfLM,0,Time_to_expiry,rfrate,alpha)))
@
<<results=tex>>=
xtable(prix_call_CarrMadan,
caption="Unit price of the call option, Carr-Madan method",digits=6)
@
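As an illustrative consistency check (an addition, not part of the original
analysis), the unit call prices from Carr-Madan and the unit put prices from
the Epps method can be compared through put-call parity,
$C - P = 1 - (K/S_0)e^{-rT}$; the gap mixes numerical error with the
differences between the two pricing methods.
<<>>=
# Put-call parity gap between the Carr-Madan calls and the Epps puts (illustrative)
parity_rhs <- 1 - (strike/stock0)*exp(-rfrate*Time_to_expiry)
parity_gap <- prix_call_CarrMadan[,-1] - prix_put_Epps[,-1] - parity_rhs
summary(as.vector(as.matrix(parity_gap)))
@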
\subsection{Heston method}
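A sketch of the representation that \texttt{putHeston} presumably uses
(Heston-type probabilities, with the second term taken under an
Esscher-transformed measure, consistent with the
\texttt{pnormapproxEsscherLM} argument):
\[
P(K) = K e^{-rT}\, Q\big(S_T \le K\big) - S_0\, \widetilde{Q}\big(S_T \le K\big).
\]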
<<>>=
prix_put_Heston <- as.data.frame(cbind(strike/stock0,
sapply(l.pts.estim.ns.rn,putHeston,strike/stock0,pnormapproxEsscherLM,0,Time_to_expiry,rfrate)))
@
<<results=tex>>=
xtable(prix_put_Heston,
caption="Unit price of the put option, Heston method",digits=6)
@
\subsection{Heston method with saddlepoint approximation}
<<>>=
prix_put_Heston_Saddle <- as.data.frame(cbind(strike/stock0,
sapply(l.pts.estim.ns.rn,putHestonSaddle,strike/stock0,
psaddleapproxGAL,psaddleapproxEsscherGAL,0,Time_to_expiry,rfrate)))
@
<<results=tex>>=
xtable(prix_put_Heston_Saddle,
caption="Unit price of the put option, Heston method with saddlepoint approximation",
digits=6)
@
\end{document}