
\documentclass{article}
\usepackage{amsmath}
\usepackage{amssymb}

\title{Master’s Comprehensive Exam \\ Statistical Inference}
\date{February 9, 2019}

\begin{document}
\maketitle

\section*{Instructions}
Solve six of the nine problems given below. On the third page, clearly indicate which six problems you would like to be graded. Number your pages and begin each problem at the top of a new sheet. Submit the problems in order. Write your name on the back of each sheet containing your work. You must show all of your work to receive credit.

\section*{Problems}

\begin{enumerate}
    \item Suppose $X_1, \dots, X_n$ is a random sample from the normal distribution with mean $\mu$ and variance $\sigma^2$.
    \begin{enumerate}
        \item Transform a single observation $X_i$ to obtain a random variable having the standard normal ($Z$) distribution, and verify your answer.
        \item Call the variable you created in part (a) above $Y$. Derive the distribution of $Y^2$.
        \item Find the distribution of $\sum (X_i - \bar{X})^2$.
        \item Find $E\left[ (X_i - \bar{X})^2 \right]$ in two ways, verifying that your answers are equivalent.
    \end{enumerate}

    \item Let $X_1, \dots, X_n$ be i.i.d. Bernoulli$(p)$ random variables.
    \begin{enumerate}
        \item Find the MLE for $p$, denoted $\hat{p}$.
        \item Find the exact distribution of $\hat{p}$. Show work.
        \item What is the asymptotic distribution of $\hat{p}$?
        \item Find a sufficient statistic for $p$ using
        \begin{itemize}
            \item[i)] The factorization theorem
            \item[ii)] The definition of sufficiency
        \end{itemize}
    \end{enumerate}

    \item Let $X_1, \dots, X_n$ be a random sample from the Poisson$(\lambda)$ distribution.
    \begin{enumerate}
        \item Find the UMVUE for $\lambda$.
        \item Show that your answer is statistically independent of $\bar{X}$.
    \end{enumerate}

    \item Let $X_1, \dots, X_n$ be a random sample from the $\text{Gamma}(\theta, 1)$ distribution, with pdf
    \[
    f(x;\theta) = \frac{1}{\Gamma(\theta)} x^{\theta-1} e^{-x}, \quad x > 0, \theta > 0.
    \]
    \begin{enumerate}
        \item Find an unbiased estimator for $\frac{1}{\theta}$ based on $\bar{X}$. Call this estimator $W$.
        \item Find the asymptotic distribution of $W$. That is, find the distribution of $\sqrt{n}(W - \frac{1}{\theta})$.
        \item Generate a small sample (exact) confidence interval for $\theta$ based on your answer in (a).
        \item Generate a large sample (approximate) confidence interval for $\theta$ based on your answer in (b).
    \end{enumerate}

    \item Let $X_1, X_2, \dots$ be a sequence of i.i.d. random variables, and assume that $\mu = \mathbb{E}[X_i]$ is finite. Show that the sample mean $\bar{X}_n = \frac{1}{n} \sum_{i=1}^{n} X_i$ converges to $\mu$ in probability as $n \to \infty$.

    \item Let $X_1, X_2, \dots, X_n$ be independent and identically distributed with
    \[
    f(x) =
    \begin{cases}
    2x, & 0 \leq x \leq 1, \\
    0, & \text{otherwise}.
    \end{cases}
    \]
    Consider unbiased estimation of $\theta = \mathbb{E}[X]$. Use $Y = \min(X_1, X_2, \dots, X_n)$.
    \begin{enumerate}
        \item Find the UMVUE of $\theta$.
        \item Derive a lower bound for the variance of any unbiased estimator of $\theta$.
    \end{enumerate}

    \item Let $X_1, \dots, X_n$ be a random sample from $\text{Bernoulli}(\theta)$. Determine the level-$\alpha$ likelihood ratio test for $H_0: \theta = \theta_0$ versus $H_1: \theta \neq \theta_0$.

    \item Suppose $X$ is one observation from a population whose pdf is indexed by $\theta \in \{0, 1\}$:
    \[
    f(x \mid \theta) =
    \begin{cases}
    1, & 0 \leq x \leq 1 \text{ and } \theta = 0, \\
    2x, & 0 \leq x \leq 1 \text{ and } \theta = 1, \\
    0, & \text{otherwise}.
    \end{cases}
    \]
    \begin{enumerate}
        \item For testing $H_0: \theta = 0$ versus $H_1: \theta = 1$, find the size and sketch the power function of the test that rejects $H_0$ if $X > c$.
        \item Find the most powerful level $\alpha$ test of $H_0: \theta = 0$ versus $H_1: \theta = 1$.
        \item Is there a UMP test of $H_0: \theta = 0$ versus $H_1: \theta = 1$? Prove or disprove.
    \end{enumerate}

    \item Let $X \sim \text{Gamma}(\alpha, \beta)$.
    Define the random variable $Y = \frac{1}{X}$. Then $Y$ has an inverse gamma distribution, and we write $Y \sim \text{Inverse-Gamma}(\alpha, \beta)$. Show that $Y$ has the inverse gamma pdf,
    \[
    f_Y(y) = \frac{\beta^\alpha}{\Gamma(\alpha)} \, y^{-(\alpha+1)} e^{-\beta/y}, \quad y > 0,
    \]
    where $\beta$ is the rate parameter of the gamma distribution.
    
    Now let $X_1, \dots, X_n$ be a random sample from the $N(\mu, \sigma^2)$ distribution with $\mu$ known and $\sigma^2$ unknown. Show that the inverse gamma family is conjugate for $\sigma^2$. In particular, find the posterior distribution corresponding to the prior $p(\sigma^2) \propto (\sigma^2)^{-(\alpha+1)} e^{-\beta/\sigma^2}$.
\end{enumerate}

\end{document}
