# MLE examples: Weibull shape, truncated/censored normal, HAC regression,
# and time-series unit-root tests.
# Required packages: maxLik (maxNR/maxBFGS/maxBHHH), msm (rtnorm),
# car, sandwich, urca, tseries.
library(maxLik)
library(msm)
library(car)
library(sandwich)
library(urca)
library(tseries)

# MLE of the Weibull shape parameter (scale fixed at 1) ----
mydata <- rweibull(200, shape = 4, scale = 1)

# Negative Weibull(shape = theta, scale = 1) log-likelihood:
#   log L = n*log(theta) + (theta - 1)*sum(log x) - sum(x^theta)
# nlm() minimizes, so return the negated value.
myfun <- function(theta, x) {
  -(length(x) * log(theta) + (theta - 1) * sum(log(x)) - sum(x^theta))
}
nlm(myfun, p = 0.1, hessian = TRUE, x = mydata)

########################################
# Same log-likelihood, maximized directly with optimize().
myfun <- function(theta, x) {
  length(x) * log(theta) + (theta - 1) * sum(log(x)) - sum(x^theta)
}
result <- optimize(myfun, interval = c(0, 20), x = mydata, maximum = TRUE)

# maxLik optimizers take a function of the parameter only.
loglik <- function(theta) {
  length(mydata) * log(theta) + (theta - 1) * sum(log(mydata)) - sum(mydata^theta)
}
result <- maxNR(loglik, start = 0.1)    # Newton-Raphson
result <- maxBFGS(loglik, start = 0.1)  # BFGS

# BHHH needs the vector of per-observation log-likelihood contributions,
# not their sum (a scalar loglik makes maxBHHH() fail).
loglik_obs <- function(theta) {
  log(theta) + (theta - 1) * log(mydata) - mydata^theta
}
result <- maxBHHH(loglik_obs, start = 0.1)

# Sampling distribution of the Weibull-shape MLE ----
repetition <- 2000
samplesize <- 500
out <- numeric(repetition)
for (i in seq_len(repetition)) {
  mydata <- rweibull(samplesize, shape = 4, scale = 1)  # generate Weibull rv
  loglik <- function(theta) {                           # log-likelihood
    length(mydata) * log(theta) + (theta - 1) * sum(log(mydata)) -
      sum(mydata^theta)
  }
  out[i] <- maxBFGS(loglik, start = 3)$estimate  # shape-parameter estimate
}
out <- sqrt(samplesize) * (out - 4)  # sqrt(n)*(theta_hat - theta0): asymptotic distribution of the MLE
hist(out)

###########################################
# Simulate and estimate a right-truncated normal ----
tp <- 1      # truncation point (upper bound)
mu <- 0
sigma <- 1
repetition <- 2000
samplesize <- 500
out <- numeric(repetition)
for (i in seq_len(repetition)) {
  # Generate right-truncated normal draws.
  mydata <- rtnorm(samplesize, mean = mu, sd = sigma, upper = tp)
  # Truncated-normal density: (1/sigma)*phi((x-theta)/sigma) / Phi((tp-theta)/sigma)
  loglik <- function(theta) {
    sum(log((1 / sigma) * dnorm((mydata - theta) / sigma, mean = 0, sd = 1) *
              (pnorm((tp - theta) / sigma, mean = 0, sd = 1))^(-1)))
  }
  out[i] <- maxNR(loglik, start = -0.1)$estimate
}
out <- sqrt(samplesize) * (out - 0)  # asymptotic distribution of the MLE, not variance-normalized
hist(out, breaks = 50)

###########################################
# Simulate and estimate a left-censored normal ----
cp <- -1     # censoring point
mu <- 0
sigma <- 1
repetition <- 2000
samplesize <- 500
out <- numeric(repetition)
for (i in seq_len(repetition)) {
  mydata <- rnorm(samplesize, mean = mu, sd = sigma)
  mydata[mydata <= cp] <- cp          # generate left-censored normal
  y <- subset(mydata, mydata > cp)    # uncensored observations
  # Tobit-type log-likelihood: normal density for uncensored points plus
  # Phi((cp - theta)/sigma) mass for each censored point.
  loglik <- function(theta) {
    sum(log((1 / sigma) * dnorm((y - theta) / sigma, mean = 0, sd = 1))) +
      (samplesize - length(y)) * log(pnorm((cp - theta) / sigma, mean = 0, sd = 1))
  }
  out[i] <- maxBFGS(loglik, start = -0.1)$estimate
}
out <- sqrt(samplesize) * (out - 0)  # asymptotic distribution of the MLE, not variance-normalized
hist(out, breaks = 50)
# NOTE: a stray duplicated "out[i] <- maxNR(...)" sat here outside the loop in
# the original, overwriting one entry with a stale i; it has been removed.

################################################
################################################
# Linear regression with HAC standard errors ----
# Generate and estimate a linear model.
y <- rnorm(100)
x1 <- rnorm(100)
x2 <- rnorm(100)
result <- lm(y ~ x1 + x2)

# Joint Wald test of beta_0 = beta_1 = beta_2 = 0 using a Newey-West HAC
# covariance matrix. linearHypothesis() replaces the deprecated
# linear.hypothesis() from older versions of car.
linearHypothesis(result,
                 hypothesis.matrix = diag(1, 3, 3),
                 rhs = c(0, 0, 0),
                 test = "Chisq",
                 vcov = NeweyWest(result, lag = 4, prewhite = FALSE))
# Kernel HAC covariance with Andrews bandwidth selection.
kernHAC(result, bw = bwAndrews, kernel = "Quadratic Spectral",
        sandwich = TRUE, prewhite = FALSE)

#################################################
#################################################
# Time series: unit-root tests and AR estimation ----
mydata <- arima.sim(n = 500, list(ar = 0.7, ma = c(0.5, 0.3)))  # ARMA(1,2)
ts.plot(mydata)
acf(mydata)            # plot the autocorrelation function
result <- acf(mydata)

result <- ur.pp(mydata, type = "Z-tau", model = "constant", use.lag = 20)
summary(result)  # Phillips-Perron test (H0: unit root)

result <- ur.kpss(mydata, type = "tau", lags = "short")
summary(result)  # KPSS test ("tau" = trend stationarity; "mu" would test level)

# Fit an AR(1) model. aic = FALSE is required: with the default aic = TRUE,
# ar() may select order 0 by AIC even when order.max = 1.
result <- ar(mydata, method = "ols", aic = FALSE, order.max = 1)
summary(result)
result$ar  # display the estimated coefficients
# Ljung-Box test on residuals; drop the leading NAs that ar() produces
# (Box.test does not remove them) and adjust df for the fitted AR order.
Box.test(na.omit(result$resid), lag = 10, type = "Ljung-Box", fitdf = 1)

result <- ar(mydata, aic = TRUE)  # use AIC to select the order of the AR model
result$ar
Box.test(na.omit(result$resid), lag = 10, type = "Ljung-Box",
         fitdf = result$order)