Verbeek 5ed. Modern Econometrics Using R


Chapter 6. An Introduction to Maximum Likelihood

Figure 6.1

Sample loglikelihood function for N = 100 and N1 = 44.

library(ggplot2)

N <- 100  
N1 <- 44  

p_values <- seq(0.1, 0.9, by = 0.01)

# Calculate the log-likelihood
log_likelihood <- sapply(p_values, function(p) {
  N1 * log(p) + (N - N1) * log(1 - p)
})

# Create a data frame for plotting
plot_data <- data.frame(p = p_values, logL = log_likelihood)

ggplot(plot_data, aes(x = p, y = logL)) +
  geom_line() +
  theme_minimal() +
  labs(x = "p", y = "log L", 
       title = "Sample loglikelihood function for N = 100 and N1 = 44")
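
As a quick check (not part of the figure code above), the maximizer of this loglikelihood can also be located numerically with optimize(); analytically it equals N1/N = 0.44.

# locate the maximizer of the loglikelihood numerically (own check)
loglik_p <- function(p) N1 * log(p) + (N - N1) * log(1 - p)
opt <- optimize(loglik_p, interval = c(0.01, 0.99), maximum = TRUE)
opt$maximum   # should be very close to N1/N = 0.44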

likelihood.normal.mu = function(mu, sig2=1, x) {
  # mu      mean of the normal distribution
  # sig2    variance of the normal distribution (held fixed)
  # x       vector of data
  n = length(x)
  a1 = (2*pi*sig2)^-(n/2)
  a2 = -1/(2*sig2)
  y = (x-mu)^2
  ans = a1*exp(a2*sum(y))
  return(ans) 
}

# generate N(0,1) data
n = 100
set.seed(123)
x = rnorm(n, mean=0, sd=1)

# compute normal likelihood as function of mu
mu.vals = seq(-1,1, length.out=100)
like.vals = rep(0,length(mu.vals))
for (i in 1:length(like.vals)) {
  like.vals[i] = likelihood.normal.mu(mu.vals[i], sig2=1, x=x)
}

plot(mu.vals, like.vals, type="l", col="blue", lwd=2)
abline(v=0, col="red", lwd=2)

mean(x)
## [1] 0.09040591
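
A side remark (not from the text): these likelihood values are astronomically small, since the leading factor (2*pi*sig2)^(-n/2) is already about 1e-40 for n = 100 and sig2 = 1, and the exponential factor shrinks it further. This is the usual numerical motivation for working with the loglikelihood instead.

# the likelihood itself is tiny; its grid maximizer is the grid point
# closest to the sample mean (own check)
max(like.vals)
mu.vals[which.max(like.vals)]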

# log-likelihood functions

# Bernoulli log-likelihood
# x = 0, 1
# f(x,theta) = (theta^x)*(1-theta)^(1-x)

loglike.Bernoulli = function(theta, x) {
  # theta   success probability parameter
  # x       vector of data
  n = length(x)
  ans = log(theta)*sum(x)+log(1-theta)*(n-sum(x))
  return(ans)
}

# plot Bernoulli log-likelihood
par(mfrow=c(1,2))
x1 = rep(0,5)
theta.vals = seq(0.1,0.99, length.out=10)
loglike.vals = loglike.Bernoulli(theta.vals, x1)
plot(theta.vals, loglike.vals, type="b", col="blue", lwd=2,
     main="Bernoulli log-likelihood for x=(0,0,0,0,0)")

x2 = rep(1,5)
loglike.vals = loglike.Bernoulli(theta.vals, x2)
plot(theta.vals, loglike.vals, type="b", col="blue", lwd=2,
     main="Bernoulli log-likelihood for x=(1,1,1,1,1)")

par(mfrow=c(1,1))
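
For completeness (our own sketch, not in the text): with 0/1 data the MLE of theta is the sample proportion sum(x)/n, so for a mixed sample the closed-form estimate and a numerical maximization with optimize() should agree.

# closed-form vs. numerical MLE for a simulated 0/1 sample (own sketch)
set.seed(123)
x3 = rbinom(20, size = 1, prob = 0.3)
mean(x3)                                  # closed-form MLE: sample proportion
optimize(loglike.Bernoulli, interval = c(0.01, 0.99),
         x = x3, maximum = TRUE)$maximum  # should be very close to mean(x3)
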
# normal log-likelihood
loglike.normal.mu = function(mu, sig2=1, x) {
  # mu      mean of the normal distribution
  # sig2    variance of the normal distribution (held fixed)
  # x       vector of data
  n = length(x)
  a1 = -(n/2)*log(2*pi)-(n/2)*log(sig2)
  a2 = -1/(2*sig2)
  y = (x-mu)^2
  ans = a1+a2*sum(y)
  return(ans) 
}

# generate N(0,1) data
n = 100
set.seed(123)
x = rnorm(n, mean=0, sd=1)

# compute normal likelihood as function of mu
mu.vals = seq(-1,1, length.out=100)
loglike.vals = rep(0,length(mu.vals))
for (i in 1:length(loglike.vals)) {
  loglike.vals[i] = loglike.normal.mu(mu.vals[i], sig2=1, x=x)
}

plot(mu.vals, loglike.vals, type="l", col="blue", lwd=2)
abline(v=0, col="red", lwd=2)
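
As a check (own addition): maximizing the loglikelihood numerically over mu with optimize() should reproduce the analytical MLE, which for known sig2 is simply the sample mean.

# numerical maximizer of the loglikelihood (own check)
optimize(loglike.normal.mu, interval = c(-1, 1),
         sig2 = 1, x = x, maximum = TRUE)$maximum   # approximately mean(x)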

# Bernoulli log-likelihood written in terms of counts rather than raw 0/1 data
# (this redefines the data-based loglike.Bernoulli used above)
loglike.Bernoulli = function(p, n1, N) {
  # p: success probability parameter
  # n1: number of successes
  # N: total number of trials
  ans = log(p) * n1 + log(1 - p) * (N - n1)
  return(ans)
}

# plot Bernoulli log-likelihood
n1 = 56
N = 100   # total number of trials
theta.vals = seq(0.4, 0.6, length.out=100)
loglike.vals = sapply(theta.vals, function(p) loglike.Bernoulli(p, n1, N))
plot(theta.vals, loglike.vals, type="l", col="blue",
     main="Bernoulli log-likelihood for n1=56, N=100")