| author | Jeffrey B. Arnold <jeffrey.arnold@gmail.com> | 2012-08-20 03:25:58 -0400 |
|---|---|---|
| committer | Jeffrey B. Arnold <jeffrey.arnold@gmail.com> | 2012-08-20 03:25:58 -0400 |
| commit | dd48362999fbb799ff1361a8e2ab95d57b4e2eed (patch) | |
| tree | 9ae8111debfd8cb0499a83b14c20e4d986765b75 /tests/examplefiles/example.bug | |
| parent | 4c7c09d398e60f69fde2ccabad11f84d0b571112 (diff) | |
| download | pygments-dd48362999fbb799ff1361a8e2ab95d57b4e2eed.tar.gz | |
cleaning up BugsLexer, JagsLexer
Diffstat (limited to 'tests/examplefiles/example.bug')
| mode | path | lines |
|---|---|---|
| -rw-r--r-- | tests/examplefiles/example.bug | 47 |
1 file changed, 23 insertions, 24 deletions
diff --git a/tests/examplefiles/example.bug b/tests/examplefiles/example.bug
index b5b2fe7f..9ccd531d 100644
--- a/tests/examplefiles/example.bug
+++ b/tests/examplefiles/example.bug
@@ -1,55 +1,54 @@
 # Alligators: multinomial - logistic regression
 # http://www.openbugs.info/Examples/Aligators.html
 model {
-
-    # PRIORS
+    # PRIORS
     alpha[1] <- 0; # zero contrast for baseline food
     for (k in 2 : K) {
-        alpha[k] ~ dnorm(0, 0.00001) # vague priors
+        alpha[k] ~ dnorm(0, 0.00001) # vague priors
     }
     # Loop around lakes:
     for (k in 1 : K){
-        beta[1, k] <- 0
+        beta[1, k] <- 0
     } # corner-point contrast with first lake
     for (i in 2 : I) {
-        beta[i, 1] <- 0 ; # zero contrast for baseline food
-        for (k in 2 : K){
-            beta[i, k] ~ dnorm(0, 0.00001) # vague priors
-        }
+        beta[i, 1] <- 0 ; # zero contrast for baseline food
+        for (k in 2 : K){
+            beta[i, k] ~ dnorm(0, 0.00001) # vague priors
+        }
     }
     # Loop around sizes:
     for (k in 1 : K){
-        gamma[1, k] <- 0 # corner-point contrast with first size
+        gamma[1, k] <- 0 # corner-point contrast with first size
     }
     for (j in 2 : J) {
-        gamma[j, 1] <- 0 ; # zero contrast for baseline food
-        for ( k in 2 : K){
-            gamma[j, k] ~ dnorm(0, 0.00001) # vague priors
-        }
+        gamma[j, 1] <- 0 ; # zero contrast for baseline food
+        for ( k in 2 : K){
+            gamma[j, k] ~ dnorm(0, 0.00001) # vague priors
+        }
     }
 
     # LIKELIHOOD
     for (i in 1 : I) { # loop around lakes
-        for (j in 1 : J) { # loop around sizes
+        for (j in 1 : J) { # loop around sizes
 
-            # Fit standard Poisson regressions relative to baseline
-            lambda[i, j] ~ dflat() # vague priors
-            for (k in 1 : K) { # loop around foods
-                X[i, j, k] ~ dpois(mu[i, j, k])
-                log(mu[i, j, k]) <- lambda[i, j] + alpha[k] + beta[i, k] + gamma[j, k]
-                culmative.X[i, j, k] <- culmative(X[i, j, k], X[i, j, k])
-            }
-        }
+            # Fit standard Poisson regressions relative to baseline
+            lambda[i, j] ~ dflat() # vague priors
+            for (k in 1 : K) { # loop around foods
+                X[i, j, k] ~ dpois(mu[i, j, k])
+                log(mu[i, j, k]) <- lambda[i, j] + alpha[k] + beta[i, k] + gamma[j, k]
+                culmative.X[i, j, k] <- culmative(X[i, j, k], X[i, j, k])
+            }
+        }
     }
 
     # TRANSFORM OUTPUT TO ENABLE COMPARISON
     # WITH AGRESTI'S RESULTS
     for (k in 1 : K) { # loop around foods
         for (i in 1 : I) { # loop around lakes
-            b[i, k] <- beta[i, k] - mean(beta[, k]); # sum to zero constraint
+            b[i, k] <- beta[i, k] - mean(beta[, k]); # sum to zero constraint
         }
         for (j in 1 : J) { # loop around sizes
-            g[j, k] <- gamma[j, k] - mean(gamma[, k]); # sum to zero constraint
+            g[j, k] <- gamma[j, k] - mean(gamma[, k]); # sum to zero constraint
         }
     }
 }
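As a quick sanity check on the cleaned-up example, the file can be run through the BugsLexer named in the commit message. This is a minimal sketch, not part of the commit: the file path comes from the diffstat above, and treating "no Error tokens" as the success criterion is an assumption on my part.

```python
# Minimal sketch: lex tests/examplefiles/example.bug with Pygments' BugsLexer
# and report any Error tokens. The "no Error tokens" criterion is an assumption,
# not something this commit specifies.
from pygments.lexers import BugsLexer
from pygments.token import Error

with open('tests/examplefiles/example.bug') as f:
    code = f.read()

# get_tokens() yields (token_type, value) pairs for the whole file
bad = [value for token_type, value in BugsLexer().get_tokens(code) if token_type is Error]
print('error tokens:', bad if bad else 'none')
```

Swapping in JagsLexer and a JAGS example file would exercise the other lexer mentioned in the commit message in the same way.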
