@ -------------------------------------------------------------------- @
@ Setup: globals, data load, and starting values for a 2-regime        @
@ Markov-switching AR(1) model estimated by maximum likelihood.        @
@ -------------------------------------------------------------------- @

ks=1;    @ ks=1: unconstrained parameterization for the optimizer;     @
         @ ks=2: constrained (natural) parameterization (set later)    @
output file=junk reset;
capt = 204;    @ capt is the sample size @

@ Load raw series ((capt+1) x 2) and build 100*log growth rates @
load bbbb[capt+1,2] = 2004.txt;
bbbb=bbbb[.,2];                            @ keep second column only @
bbbb = bbbb[1:capt,1]~bbbb[2:capt+1,1];    @ pair each value with its successor @
y = 100*ln(bbbb[.,2]./bbbb[.,1]);
y=y[1:200];       @ NOTE(review): hard-coded 200 truncates the 204-obs sample -- confirm intended @
capt = rows(y);   @ capt redefined to the truncated sample size @

nth = 6;    @ nth is the number of params to be estimated @

@ Starting values: regime means, AR coefficient, scale, transition params @
mu1 = 1; mu2 = 0; phi1 = .1; sig = 1; p11 = 3.5; p22 = 1.5;
th = mu1|mu2|phi1|sig|p11|p22;

nk = 2;    @ nk is the first observation for which the likelihood will be evaluated @
n = 4;     @ n is the dimension of the state vector @
captst = capt - nk +1;    @ captst is the effective sample size @
skif = zeros(captst,n);   @ skif is the matrix of filtered probs @
skis = zeros(captst,n);   @ skis is the matrix of smoothed probs @

@ hp selects, for each of the 4 composite states, which regime applies  @
@ to the current and lagged observation (presumably (s_t, s_t-1)       @
@ combinations -- TODO confirm against the filter's use of hp in ofn)  @
let hp[4,4]= 1.0000 0.0000 1.0000 0.0000 0.0000 1.0000 0.0000 1.0000 1.0000 1.0000 0.0000 0.0000 0.0000 0.0000 1.0000 1.0000 ;

fm = zeros(4,4);    @ fm holds the 4x4 transition matrix; filled inside ofn @
@ ==================================================================== @
@ ofn(th): evaluates filtered probabilities and the sample log         @
@ likelihood for parameter vector th; returns the NEGATIVE log         @
@ likelihood (optmum minimizes).                                       @
@ Reads globals y, nk, capt, n, ks, hp; writes globals fm and skif.    @
@ ==================================================================== @
proc ofn(th);    @ this proc evaluates filter probs and likelihood @
   local ncount,mu,phi,sig,pm,eta,iz,chsi,it,f,fit,fx,hw,fj,
         ij,ap,const,pq,hk,ihk;

   @ Convert parameter vector to convenient form @
   mu = th[1:2,1];
   phi = 1 | -th[3];              @ AR(1) lag polynomial (1, -phi1) @
   sig = th[4,1]*ones(n,1);
   if ks==2;
      @ ks=2: th already holds the variance and probabilities directly @
      sig=sig;
      pm=th[5 6];
   elseif ks==1;
      @ ks=1: map unconstrained params to valid values --              @
      @ square for the variance, x^2/(1+x^2) to force probs into (0,1) @
      sig = sig^2;
      pm = (th[5]^2/(1+th[5]^2))|th[6]^2/(1+th[6]^2);
   endif;

   @ Regime-dependent intercept for each composite state @
   const = phi .*. mu;     @ 4x1 Kronecker of lag polynomial and means @
   const = const'*hp;      @ 1xn row, matched to hp's state ordering @

   @ Convert data to AR resids, then to Gaussian density values       @
   @ (one column per composite state, via ExE broadcasting)           @
   eta = y[nk:capt,1];
   eta = eta ~y[1:capt-1,1];           @ (y_t, y_t-1) pairs @
   eta = ((eta*phi - const)^2)./sig';
   eta = (1./sqrt(sig')).*exp(-eta/2); @ NOTE(review): omits 1/sqrt(2*pi); constant shift of loglik only @

   @ Calculate ergodic probabilities @
   @ fm columns: transitions out of each composite state, using pm[1]=p11, pm[2]=p22 @
   fm[1,1]=pm[1];fm[2,1]=1-pm[1]; fm[1,3]=pm[1];fm[2,3]=1-pm[1];
   fm[3,2]=1-pm[2];fm[4,2]=pm[2]; fm[3,4]=1-pm[2];fm[4,4]=pm[2];
   ap = (eye(n)-fm)|ones(1,n);         @ stack (I-P) with adding-up constraint @
   chsi = sumc((invpd(ap'*ap)'));      @ NOTE(review): intended as least-squares solution for the @
                                       @ ergodic distribution -- confirm vs. (A'A)^-1 A' e formula @
   chsi = maxc(chsi'|zeros(1,n));      @ clip tiny negative rounding errors to zero @

   @ Filter iteration (Hamilton filter) @
   f = 0;
   it = 1;
   do until it > captst;
      fx = chsi .* eta[it,.]';    @ joint density of state and observation @
      fit = sumc(fx);             @ likelihood contribution of observation it @
      skif[it,.] = fx'/fit;       @ filtered state probabilities @
      f = f + ln(fit);
      chsi = fm*fx/fit;           @ one-step-ahead predicted state probabilities @
      it = it+1;
   endo;
   retp(-f);    @ negative loglik: optmum minimizes @
endp;

/*=========================================================================*/
@ Next call the GAUSS numerical optimizer @
output off;
library optmum pgraph;
optset;
{x,f,g,h} =optmum(&ofn,th);
output file=junk on;
@ Report the MLE in the unconstrained parameterization used by optmum @
"";"";"MLE as parameterized for numerical optimization ";
"Coefficients:";x';
"";"Value of log likelihood:";;-f;    @ ofn returned -loglik, so negate for display @

@ Transform x in place to the natural parameterization for reporting @
"";"Vector is reparameterized to report final results as follows";
"Means for each state:";x[1:2,1]';
"Autoregressive coefficients:";x[3]';
x[4] = x[4]^2;    @ scale -> variance @
"Variances:";x[4];
x[5 6] = (x[5]^2/(1+x[5]^2))|x[6]^2/(1+x[6]^2);    @ unconstrained -> probabilities @

@ Standard errors from the Hessian evaluated in the constrained (ks=2) @
@ parameterization, so they apply to the transformed coefficients      @
ks = 2;
h = (hessp(&ofn,x));
va = eigrs(h);    @ NOTE(review): va is never used; eigrs(h) is recomputed in the test below @
call ofn(x);      @ re-run filter at the MLE so global skif holds final filtered probs @
if minc(eigrs(h)) <= 0;
   "Negative of Hessian is not positive definite";
   "Either you have not found local maximum, or else estimates are up "
   "against boundary condition. In latter case, impose the restricted "
   "params rather than estimate them to calculate standard errors";
else;
   h = invpd(h);            @ inverse Hessian = asymptotic covariance matrix @
   std = diag(h)^.5;
   "For vector of coefficients parameterized as follows,";x';
   "the standard errors are";std';
endif;
@ Calculate smoothed probs if desired (backward recursion through the  @
@ filtered probabilities; only runs in the ks=2 pass)                  @
if ks == 2;
   skis[captst,.] = skif[captst,.];    @ smoothed = filtered at the last date @
   it = 1;
   do until it == captst;
      if minc(skif[captst-it,.]') > 1.e-150;
         @ standard backward update: reweight filtered probs by the    @
         @ ratio of smoothed to predicted probs one period ahead       @
         skis[captst-it,.] = skif[captst-it,.].*
             ((skis[captst-it+1,.]./(skif[captst-it,.]*fm'))*fm);
      else;
         @ adjust code so as not to divide by zero @
         hk = skif[captst-it,.]*fm';    @ predicted probs for date captst-it+1 @
         ihk = 1;
         do until ihk > n;
            if hk[1,ihk] > 1.e-150;
               hk[1,ihk] = skis[captst-it+1,ihk]/hk[1,ihk];
            else;
               hk[1,ihk] = 0;    @ zero-probability state: drop its contribution @
            endif;
            ihk = ihk + 1;
         endo;
         skis[captst-it,.] = skif[captst-it,.].*(hk*fm);
      endif;
      it = it+1;
   endo;
endif;

@ Plot probability of the second regime (columns 2 and 4 of the        @
@ composite state).                                                    @
@ NOTE(review): y-values use skif (filtered) while the x-axis length   @
@ comes from skis (same rows, but smoothed skis[.,2]+skis[.,4] may     @
@ have been intended) -- confirm.                                      @
@ NOTE(review): x-axis seqa(53.5,.25,...) presumably encodes quarterly @
@ dates starting mid-1953 -- verify against the data file.             @
graphset;
xy(seqa(53.5,.25,rows(skis)),skif[.,2]+skif[.,4]);