\documentclass[12pt]{article}
\usepackage{amsmath, amssymb}
\usepackage{fullpage}

\begin{document}

\title{Master's Comprehensive Exam -- Statistical Inference}
\date{February 11, 2017}
\author{Name: \underline{\hspace{5cm}}}
\maketitle

\noindent\textbf{Instructions:} Solve seven out of the ten problems given below. Clearly indicate which seven problems you would like to be graded. Please put your name on every sheet of paper you turn in, and submit the questions in order.

\bigskip
\noindent Circle seven problems chosen: \quad 1 \quad 2 \quad 3 \quad 4 \quad 5 \quad 6 \quad 7 \quad 8 \quad 9 \quad 10

\bigskip

\begin{enumerate}
    \item Let $X_1, \dots, X_n$ be a random sample from a population with Bernoulli($p$) distribution.
    \begin{enumerate}
        \item Derive the distribution of $Y = \sum X_i$.
        \item Find $E(Y)$ using the definition of expectation.
        \item Find $\operatorname{Var}(Y)$ using the moment generating function (mgf) of $Y$.
    \end{enumerate}

    \item A random variable $X$ is said to have a Pareto distribution with parameters $\alpha$ and $x_m$ if it has pdf
    \[ f(x) = \frac{\alpha x_m^{\alpha}}{x^{\alpha + 1}}, \quad x \geq x_m. \]
    \begin{enumerate}
        \item Verify that $f(x)$ is a pdf.
        \item Let $X_1, \dots, X_n$ be a random sample from the distribution above. Find the maximum likelihood estimator (MLE) for $\alpha$.
        \item Find the Cram\'er--Rao lower bound for estimating $\alpha$.
        \item Discuss the efficiency of the MLE.
    \end{enumerate}

    \item
    \begin{enumerate}
        \item The Extreme Value family of distributions is denoted by $\text{EV}(\gamma)$. The value of the parameter $\gamma$ determines the functional form of the cdf. Show that $\text{EV}(\gamma)$ is a continuous family in $\gamma$, in the sense that the cdf converges as $\gamma \to 0$.
        \item An expression of the Weibull distribution is
        \[ F(x) = 1 - e^{-(x/\lambda)^k}. \]
        Use this parameterization to find the MLE for the scale parameter $\lambda$.
    \end{enumerate}

    \item Let $X_1, \dots, X_n$ be a random sample from the $U(0,\theta)$ distribution.
    \begin{enumerate}
        \item Find an unbiased estimator for $\theta$ based on $Y_n = \max(X_1, \dots, X_n)$. Call this estimator $\hat{\theta}$.
        \item Find the asymptotic distribution of $\hat{\theta}$.
        \item Generate a small sample (exact) confidence interval for $\theta$ based on your answer in (a).
        \item Generate a large sample (approximate) confidence interval for $\theta$ based on your answer in (b).
    \end{enumerate}

    \item Let $X_1, \dots, X_n$ be a random sample from a normal distribution with mean $0$ and unknown variance $\sigma^2$.
    \begin{enumerate}
        \item Show that this distribution is a member of the regular exponential family.
        \item Find the UMVUE for $\sigma^2$.
        \item Determine the form of the uniformly most powerful (UMP) test of $H_0: \sigma^2 = \sigma_0^2$ versus $H_1: \sigma^2 \neq \sigma_0^2$.
        \item Is the test found in part (c) equivalent to the test usually used for hypotheses concerning variance?
    \end{enumerate}

    \item Let $X$ be a discrete random variable with the geometric distribution.
    \begin{enumerate}
        \item Summation and differentiation can be interchangeable if the series converges uniformly on every closed bounded subinterval. Prove this uniform convergence.
        \item Use part (a) to find $E(X)$.
    \end{enumerate}

    \item The random variable $X$ has pdf $f(x)$. One observation is obtained on the random variable $X$, and a test of $H_0$ versus $H_1$ needs to be constructed.
    \begin{enumerate}
        \item Find the UMP level-$\alpha$ test by answering the following questions:
        \begin{itemize}
            \item Identify the rejection region for the UMP test.
            \item Describe the form of this rejection region (i.e., increasing or decreasing, etc.).
            \item Clearly define the significance level $\alpha$.
            \item Derive the Type II error probability.
        \end{itemize}
    \end{enumerate}

    \item Consider a sample $X_1, \dots, X_n$ from a population with pdf
    \[ f(x;\theta,\lambda) = \frac{\theta}{\lambda^\theta} x^{\theta-1}, \quad 0 \leq x \leq \lambda, \]
    where both $\theta$ and $\lambda$ are unknown.
    \begin{enumerate}
        \item Find the maximum likelihood estimators for the unknown parameters.
        \item Find the likelihood ratio test of $H_0: \theta=\theta_0$ versus $H_1: \theta \neq \theta_0$.
    \end{enumerate}

    \item Let random variables $X_i$ satisfy
    \[ X_i = \mu_i + \epsilon_i, \quad i=1,\dots,n, \]
    where $\epsilon_i$ are independent $N(0,\sigma^2)$ random variables, $\mu_i$ are iid, and $\epsilon_i$ and $\mu_i$ are independent. Find the approximate mean and variance for $\bar{X}$.

    \item Suppose that $X_1, \dots, X_n$ are iid Poisson($\lambda$). Consider unbiased estimation of $e^{-2\lambda}$.
    \begin{enumerate}
        \item Construct an unbiased estimator $\delta(X)$ based on $X_1,\dots,X_n$.
        \item Apply the Rao--Blackwell technique to $\delta(X)$ to obtain the UMVUE.
        \item Derive the Cram\'er--Rao lower bound for an unbiased estimator of $e^{-2\lambda}$.
    \end{enumerate}
\end{enumerate}

\end{document}
