rm(list = ls())   # clear the workspace

# Simulate a random variable: 500 draws from N(0, sd = 2)
x <- rnorm(500, 0, 2)

# GMM / method of moments: sample mean and variance --------------------------
mean(x)
var(x)

# Log-likelihood --------------------------------------------------------------

# Normal log-likelihood written out from the density
loglik <- function(a, mu, sigma2){
  sum( -log( sqrt(2*pi*sigma2) ) - (a - mu)^2/(2*sigma2) )
}

# Equivalent version using dnorm() (overwrites the definition above)
loglik <- function(a, mu, sigma2){
  sum( log( dnorm(a, mu, sqrt(sigma2)) ) )
}

# The log-likelihood is highest for candidate means near the true mean of 0
loglik(x, -5, var(x))
loglik(x, -1, var(x))
loglik(x,  1, var(x))
loglik(x,  5, var(x))

# Profile the log-likelihood over a grid of candidate means
play <- seq(-10, 10, by = 0.1)
ll   <- rep(NA, length(play))
for (i in 1:length(play)){
  ll[i] <- loglik(x, play[i], var(x))
}
plot(play, ll)

# Raw likelihood (product of densities) -- numerically tiny for n = 500
likelihood <- function(a, mu, sigma2){
  prod( dnorm(a, mu, sqrt(sigma2)) )
}
likelihood(x, 0, var(x))

# MLE -------------------------------------------------------------------------

# Negative log-likelihood for optim(); the variance is parameterised as
# exp(par[2]) so the optimiser can search over the whole real line
mle <- function(par, a){
  mu <- par[1]
  s2 <- exp(par[2])
  ll <- loglik(a, mu, s2)
  # print( round( c(ll, mu, s2), 3) )
  -ll
}

fit.mle <- optim(c(0, 1), mle, a = x, method = "BFGS")
fit.mle
fit.mle$par             # estimates of mu and log(sigma2)
exp(fit.mle$par[2])     # back-transform to get the variance estimate

# Regression estimation --------------------------------------------------------

rm(list = ls())

# Simulate data from y = 2 + 3x + e
n <- 500
x <- rnorm(n, 0, 10)
e <- rnorm(n, 0, 9)
y <- 2 + 3*x + e

lm(y ~ x)               # benchmark: built-in OLS

data <- data.frame(x, y)
rm(n, e, x, y)

# OLS by numerical minimisation of the sum of squared residuals
min.square <- function(par, inde, dep){
  X   <- cbind(1, inde)
  y   <- dep
  b   <- par
  out <- sum( (y - X %*% b)^2 )
  out
}
fit1 <- optim(par = c(1, 1), min.square, inde = data$x, dep = data$y, method = "BFGS")
fit1$par

# OLS by the closed-form formula (X'X)^{-1} X'y
ols <- function(inde, dep){
  X   <- cbind(1, inde)
  y   <- dep
  out <- solve(t(X) %*% X) %*% t(X) %*% y
  out
}
fit2 <- ols(inde = data$x, dep = data$y)
fit2

# Regression by maximum likelihood: normal errors, variance = exp(par[3])
mle <- function(par, inde, dep){
  X      <- cbind(1, inde)
  y      <- dep
  beta   <- c(par[1], par[2])
  sigma2 <- exp(par[3])
  ll <- sum( -log( sqrt(2*pi*sigma2) ) - (y - X %*% beta)^2/(2*sigma2) )
  -ll
}
fit3 <- optim(par = c(1, 1, 1), mle, inde = data$x, dep = data$y,
              method = "BFGS", hessian = TRUE)
fit3
sqrt(diag(solve(fit3$hessian)))   # standard errors from the inverse Hessian
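
# Sanity check (sketch): the MLE regression coefficients and their standard
# errors should roughly agree with summary(lm(y ~ x)) on the same data, and
# the error variance is recovered by back-transforming par[3]. This block
# assumes fit3 and data from above are still in memory.
fit.ols <- lm(y ~ x, data = data)
summary(fit.ols)$coefficients     # OLS estimates and standard errors

fit3$par[1:2]                     # MLE intercept and slope
exp(fit3$par[3])                  # MLE error-variance estimate
se <- sqrt(diag(solve(fit3$hessian)))
se[1:2]                           # MLE standard errors for the coefficients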