new ;
output file = ma_ml.out reset ;
library optmum ;

N = 50 ;
let beta0 = .5 ;
beta1 = 3 ;
sigma1 = 1 ;
beta3 = 1 ;
beta4 = 4 ;
sigma2 = 2 ;
rho = .8 ;

x = seqa(1,1,N)/N .* rndn(N,1) ;
/* x is the "regressor" in the selection equation; it could be anything,
   but if it is not centered around 0 the estimates will not be well
   identified.  As in OLS we would like "variation in the regressor",
   which here means a good spread of 0s and 1s in the selection
   indicator. */

z = ones(N,1) + 5*rndn(N,1) ;
/* z is the regressor in the linear (outcome) equation */

Nsim = 100 ;
"Number of simulations:" Nsim ;

/* matrix to hold the estimated values */
results = zeros(Nsim,3) ;

/* starting values */
let sv = 1 1 1 ;

estim = 0 ;
do while estim < Nsim ;
    estim = estim + 1 ;

    u = sigma1*rndn(N,1) ;
    epsilon = sqrt(1-rho^2)*sigma2*rndn(N,1) ;
    v = (rho*sigma2/sigma1)*u + epsilon ;   /* so that sd(v) = sigma2 and corr(u,v) = rho */

    /* generate a probit-type selection indicator */
    z0 = beta0 + beta1*x + u ;
    z1 = (z0 .> 0) ;

    /* generate the linear (outcome) relation */
    y = beta3 + beta4*z + v ;

    __output = 0 ;
    {b_ml,f,g,retcode} = optmum(&logl,sv) ;
    __output = 1 ;

    results[estim,.] = b_ml' ;
endo ;

" " ;
/* results of the last estimation */
"estimated parameters:" ;
"beta3:"  b_ml[1] ;  "true beta3 is:" beta3 ;
"beta4:"  b_ml[2] ;  "true beta4:"    beta4 ;
"sigma2:" b_ml[3] ;  "true sigma2:"   sigma2 ;
"" ;

"calculate Hessian" ;
h = hessp(&logl,b_ml) ;   /* Hessian of -L (we minimize -L), so inv(h) estimates the variance */
"variance matrix" ;
vmat = inv(h) ;
vmat ;
"" ;
"beta3-hat, t-value:" b_ml[1] b_ml[1]/sqrt(vmat[1,1]) ;
"beta4-hat, t-value:" b_ml[2] b_ml[2]/sqrt(vmat[2,2]) ;
" " ;

/* averages and std dev of the estimates across simulations */
"mean estimates" meanc(results) ;
"std of estimates" stdc(results) ;
" " ;

proc logl(b);
    local r,L,muvec,sigma2,beta0,beta1,beta2,beta3,beta4,i ;

    /* You will need beta0 and beta1 to do the ML controlling for
       selection; this version only shows how OLS/ML is biased when
       selection is ignored.
       beta0 = b[1] ;
       beta1 = b[2] ;
       muvec = beta0 + beta1*x ;
    */

    beta3  = b[1] ;
    beta4  = b[2] ;
    sigma2 = b[3] ;

    /* Normal log-likelihood of the selected observations only (z1[i] = 1).
       The source listing stops at the loop header; the loop body below is a
       reconstruction under that assumption, which is what produces the
       selection bias this exercise illustrates. */
    L = 0 ;
    i = 0 ;
    do while i < N ;
        i = i + 1 ;
        if z1[i] == 1 ;
            r = y[i] - beta3 - beta4*z[i] ;
            L = L - ln(sigma2) - 0.5*(r/sigma2)^2 ;
        endif ;
    endo ;
    retp(-L) ;   /* optmum minimizes, so return -L */
endp ;
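
/* -------------------------------------------------------------------
   Sketch, not part of the run above: a log-likelihood that DOES control
   for selection (Heckman-type ML), as the comment inside logl suggests.
   The proc name loglsel and the parameter ordering
   b = (beta0, beta1, beta3, beta4, sigma2, rho) are illustrative;
   sigma1 is normalized to 1, as in the DGP above, and no transformation
   is imposed to keep sigma2 > 0 or |rho| < 1.
   ------------------------------------------------------------------- */
proc loglsel(b);
    local beta0,beta1,beta3,beta4,sigma2,rho,index,res,L0,L1,L ;
    beta0  = b[1] ;
    beta1  = b[2] ;
    beta3  = b[3] ;
    beta4  = b[4] ;
    sigma2 = b[5] ;
    rho    = b[6] ;

    index = beta0 + beta1*x ;         /* selection index               */
    res   = y - beta3 - beta4*z ;     /* outcome residual              */

    /* non-selected observations contribute Pr(z1 = 0) = Phi(-index)   */
    L0 = (1-z1).*ln(cdfn(-index)) ;

    /* selected observations contribute the density of y times
       Pr(z1 = 1 | y), i.e. Pr(u > -index | v = res)                   */
    L1 = z1.*( ln(pdfn(res/sigma2)) - ln(sigma2)
             + ln(cdfn((index + (rho/sigma2)*res)/sqrt(1-rho^2))) ) ;

    L = sumc(L0 + L1) ;
    retp(-L) ;                        /* optmum minimizes -L           */
endp ;

/* illustrative call (starting values are guesses):
   let sv2 = 0 1 1 1 1 0 ;
   {b_sel,f,g,retcode} = optmum(&loglsel,sv2) ;
*/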