<!-- Blogger navbar boilerplate (auto-generated): defines an onload attribute
     setter (with an attachEvent fallback for old IE), loads the Google API
     platform script, then embeds the Blogger navbar iframe into
     #navbar-iframe-container via gapi.iframes. Left byte-identical. -->
<body><script type="text/javascript"> function setAttributeOnload(object, attribute, val) { if(window.addEventListener) { window.addEventListener('load', function(){ object[attribute] = val; }, false); } else { window.attachEvent('onload', function(){ object[attribute] = val; }); } } </script> <div id="navbar-iframe-container"></div> <script type="text/javascript" src="https://apis.google.com/js/platform.js"></script> <script type="text/javascript"> gapi.load("gapi.iframes:gapi.iframes.style.bubble", function() { if (gapi.iframes && gapi.iframes.getContext) { gapi.iframes.getContext().openChild({ url: 'https://www.blogger.com/navbar/9732461?origin\x3dhttps://jaszz.blogspot.com', where: document.getElementById("navbar-iframe-container"), id: "navbar-iframe" }); } }); </script>
Thursday, May 08, 2008
~ 1:29 PM ~
Programming

To give you a feel for what I've been doing in programming for the past 3 weeks, here's the program I've written so far. It's not yet complete, because something's wrong at the end... =(



/* Estimation of a simple linear regression model using Gibbs sampling.
   The error variance (sighat2) is treated as unknown.
   GAUSS script; uses the pgraph publication-graphics library. */

library pgraph;
graphset;       /* reset graphics globals to their defaults */
gausset;        /* reset GAUSS globals to their defaults */

fonts("simplex complex microb simgrma");

pqgwin many;    /* open a separate graphics window per graph */

/* Fixed seed so all random draws are reproducible. */
seed = 123456.0;
print "seed =" seed;

/* Number of grid points for the beta density plots. */
numb = 200;

/* Grid for sigma: from lsig to usig in steps of 0.01 (nums points). */
lsig = 0.1;
usig = 0.8;
nums = ((usig-lsig)/0.01)+1;
cbeta2 = zeros(numb,1);     /* accumulator for the beta2 conditional pdfs */
csig = zeros (nums, 1);     /* accumulator for sigma; unused in the code visible here */

/************************1. Generating data = y x1 x2 x3 x4 x5**********************************/

nobs = 1000;

/* NOTE(review): this y is immediately overwritten below, but the draw still
   advances the RNG seed state, so deleting it would shift every later draw. */
y = rndns (nobs, 1, seed);
x1 = ones (nobs,1);                 /* intercept column */
x2 = rndns (nobs, 1,seed);
x3 = rndns (nobs, 1, seed);
x4 = rndns (nobs, 1, seed);
x5 = rndns (nobs, 1, seed);
/* NOTE(review): `.*.` is GAUSS's Kronecker product; with a scalar 0.3 it
   reduces to elementwise scaling, but plain `.*` (or `*`) was probably
   intended. Behavior is the same, so it is left untouched here. */
u = rndns (nobs, 1, seed).*.0.3;

/* True model: beta = (5.0, 0.2, 0.3, 0.4, 0.5)', errors u. */
y = 5.0*x1 + 0.2*x2 + 0.3*x3 + 0.4*x4 + 0.5*x5 + u;

/****************Regressor matrix used in the estimation****************/
x = x1~x2~x3~x4~x5;

__output = 0;   /* suppress the printed OLS output */
_olsres = 1;    /* ask ols() to compute residuals */

/* bdt = OLS coefficients, stderrph = coefficient std. errors, sighat =
   residual standard deviation, resid = residual vector; the z* slots
   are return values this script does not use. */
{z1,z2,bdt,z4,z5,stderrph,sighat,z8,z9,resid,z11} = ols(0,x,y);

/******Specification of sample statistics*****/
df = nobs - 5;          /* degrees of freedom: n minus the 5 regressors */
invxx = inv(x'x);
x22 = invxx[2,2];       /* diagonal elements of (X'X)^-1: variance factors */
x33 = invxx[3,3];       /* for the corresponding coefficients              */
x44 = invxx[4,4];
x55 = invxx[5,5];
b2= bdt[2];             /* OLS estimates of the slope coefficients */
b3=bdt[3];
b4=bdt[4];
b5=bdt[5];

/*** 2. Exact marginal posterior density of beta2 under the noninformative
**** Jeffreys prior: a univariate Student t with df = nobs-5 degrees of
**** freedom, location b2 and scale sighat*sqrt(x22).
**** Reads globals b2, sighat, x22, df, pi.
***/
proc margb2 (beta2);
local scl2, tval, normc, kern;

scl2 = sighat*sqrt(x22);
tval = (beta2 - b2)/scl2;
normc = gamma((df+1)/2)/(scl2*sqrt(df*pi)*gamma(df/2));
kern = (1 + (tval.^2)/df).^(-(df+1)/2);

retp(normc*kern);
endp;

/* Evaluation grid for beta2 (numb points from -0.1 in steps of 0.01) and
   its exact marginal posterior; the plot is left commented out. */
beta2 = seqa(-0.1,0.01, numb);
exactb2 = margb2(beta2);
/*xy(beta2,exactb2);*/

/* Exact marginal posterior density of beta3: Student t with df = nobs-5
   degrees of freedom, centred at the OLS estimate b3, scale
   sighat*sqrt(x33) (Jeffreys prior). Reads globals b3, sighat, x33, df, pi. */
proc margb3 (beta3);
local sc3, u3, c3, kern3;

sc3 = sighat*sqrt(x33);
u3 = (beta3-b3)/sc3;
c3 = gamma((df+1)/2)/(sc3*sqrt(df*pi)*gamma(df/2));
kern3 = (1 + (u3.^2)/df).^(-(df+1)/2);

retp (c3*kern3);
endp;

/* Evaluation grid for beta3 (numb points from -0.8 in steps of 0.01) and
   its exact marginal posterior; the plot is left commented out.
   NOTE(review): the beta2 grid starts at -0.1 but this one at -0.8 —
   confirm the intended plotting ranges. */
beta3 = seqa(-0.8,0.01,numb);
exactb3 = margb3(beta3);
/*xy(beta3,exactb3);*/

/***** 3. Exact marginal posterior of sigma under the noninformative
***** Jeffreys prior: an inverted-gamma density with df = nobs-5 degrees
***** of freedom, built from the OLS residual std. dev. sighat.
***** Reads globals df, sighat.
****/
proc margsig(sig);
local shalf, kcon, dens;

shalf = df*(sighat.^2)/2;
kcon = (2/(gamma(df/2)))*shalf^(df/2);
dens = kcon*(sig.^(-(1+df))).*exp(-shalf./(sig.^2));

retp (dens);
endp;

/* Evaluation grid for sigma over [lsig, usig] and its exact inverted-gamma
   marginal posterior; the plot is left commented out. */
sig = seqa(lsig,0.01,nums);
exactsig = margsig(sig);
/*xy(sig,exactsig);*/

/***** 4.
****** Generation of values of beta and sigma via Gibbs sampling
*******/

/*** Burn-in sample = 100 draws, followed by a further 100 retained draws. ***/

bur = 100;
repl = bur +100;
it = 10;            /* printed below; not used elsewhere in the code visible here */

print " ";
print "Initial No. of Gibbs values = " repl;
print "No. of iterations per repl. = " it;
print " ";

/* Storage: row j+1 holds the j-th Gibbs draw (row 1 is the starting value). */
bev = zeros (201,5);
sigv = zeros (201,1);

/*****4a Starting values for the Gibbs algorithm (burn-in = 100; a further
100 iterations are retained). ***/

sigv[1] = 0.2;                      /* starting value for sigma */
stnbe = rndns(200,5,seed);          /* standard normals: one row per iteration */
meanb = (invxx)*x'y;                /* conditional posterior mean of beta = OLS estimate */
/* beta | sigma, y ~ N(meanb, sigma^2 (X'X)^-1); draw via a Cholesky factor.
   NOTE(review): GAUSS chol() returns the upper-triangular R with R'R = varb,
   so R'*z (not R*z) reproduces covariance varb — confirm the intended factor. */
varb = (sigv[1])^2 * inv(x'x);
bgen = chol(varb)*stnbe[1,.]'+ meanb;
bev[2,.]=bgen';

/* sigma | beta, y: sqrt(SSR / chi-square).
   NOTE(review): the chi-square uses 200 standard normals, while the
   conditional posterior of sigma^2 has nobs (=1000) degrees of freedom —
   confirm the intended df. This single chisq is also reused by the loop
   below; that reuse is flagged (and fixed) at the loop itself. */
a = (y-x*bev[2,.]')'(y-x*bev[2,.]');
zvec = rndns (200,1,seed);
chisq = zvec'zvec;
sigv[2]=sqrt(a/chisq);

/* Main Gibbs loop: alternate draws of beta | sigma and sigma | beta.
   FIX(review): the original reused the single chisq drawn before the loop,
   so every sigma draw shared one chi-square variate and the sigma chain was
   not a valid Gibbs sequence; the chi-square denominator is now redrawn on
   every pass. */
j = 2;
do while j<=repl;
/* beta | sigma, y ~ N(meanb, sigma^2 (X'X)^-1), via the Cholesky factor. */
varb = (sigv[j])^2 * inv(x'x);
bgen = chol(varb)*stnbe[j,.]'+ meanb;
bev[j+1,.]=bgen';

/* sigma | beta, y: sqrt(SSR / chi-square), chi-square redrawn each pass.
   NOTE(review): 200 normals gives 200 df; the conditional posterior of
   sigma^2 has nobs (=1000) df — confirm the intended degrees of freedom. */
a = (y-x*bev[j+1,.]')'(y-x*bev[j+1,.]');
zvec = rndns (200,1,seed);
chisq = zvec'zvec;
sigv[j+1]=sqrt(a/chisq);

j = j+1;
endo;

print "bev" bev;
print "";
print "sigv" sigv;


/*******Formation of the Beta2 conditional density (analytically normalized).
  Rao-Blackwellization: average the normal conditional pdf of beta2 over the
  100 retained sigma draws (j = 102..repl).
  FIX(review): the original defined proc condb2 INSIDE the do-loop; GAUSS
  does not allow procedure definitions inside a loop, which is the likely
  "something's wrong at the end". The proc is now defined once before the
  loop, and the loop-invariant beta2 grid is hoisted out of the loop. ****/
meanb2 = meanb[2];      /* posterior mean of beta2 (numerically equal to b2) */
meanb3 = meanb[3];      /* kept for parity with the original; unused below */

/* Normal conditional pdf of beta2 given sigma. Reads globals b2 and varb2;
   varb2 is updated on every loop pass before this proc is called.
   NOTE(review): centring at b2 relies on b2 == meanb2 (OLS estimate equals
   the conditional posterior mean) — confirm. */
proc condb2(beta2);
local z, cpdf;

z = (beta2 - b2)./sqrt(varb2);
cpdf = ((2*pi*varb2)^(-1/2))*exp(-0.5*(z.^2));

retp (cpdf);
endp;

/* Loop-invariant evaluation grid for beta2. */
beta2 = seqa(-0.1,0.01, numb);

j=102;
do while j<=repl;

/* Conditional variance of beta2 given the j-th sigma draw. */
varb = (sigv[j])^2 * inv(x'x);
varb2 = varb[2,2];
varb3 = varb[3,3];      /* kept for parity with the original; unused below */

cpdfbeta2 = condb2(beta2);
cbeta2 = cbeta2 + cpdfbeta2;

j= j+1;

endo;

/* Average over the 100 retained draws and plot against the exact marginal. */
densb2 = (1/100)*cbeta2;
xy(beta2, densb2~exactb2);






*
*
*
I LOVE GAUSS!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

About JaSzZ~



Jason Ng
Monash University (Australia)
2nd Year PhD (Financial Econometrics)
23 years old

Speak



Bloggers Alike

| Jean | Yihao | Jervis | Regina | Yunxuan | Benjamin | Ivy | Ida | Fawn | Jane | Jasmine | Gary | Jon Chua | Woot Woot | Eleanor | Justin Teh | Jeremiah | Yeong Ru | Sandy|
Journey

; December 2004; January 2005; February 2005; March 2005; April 2005; May 2005; June 2005; July 2005; August 2005; September 2005; October 2005; November 2005; December 2005; January 2006; February 2006; March 2006; April 2006; May 2006; June 2006; July 2006; August 2006; September 2006; October 2006; November 2006; December 2006; January 2007; February 2007; March 2007; April 2007; May 2007; June 2007; July 2007; August 2007; September 2007; October 2007; November 2007; December 2007; January 2008; February 2008; March 2008; April 2008; May 2008; June 2008; July 2008; August 2008; September 2008; October 2008; November 2008; December 2008; January 2009; February 2009; March 2009; April 2009; May 2009


credits


; j-wen
; deviantart
; brushes
; blogskins
; blogger