Merge branch 'issue-17' into develop
* issue-17:
  Increment version number to 1.4.4.9032
  Translated rest of BLR tutorial (#17)
wleoncio committed Nov 6, 2023
2 parents 7a69ad7 + fae59e0 commit 3a3d287
Showing 2 changed files with 66 additions and 64 deletions.
2 changes: 1 addition & 1 deletion DESCRIPTION
@@ -1,7 +1,7 @@
Package: EMJMCMC
Type: Package
Title: Flexible Bayesian Nonlinear Model Configuration
Version: 1.4.4.9031
Version: 1.4.4.9032
Date: 2022-03-06
Authors@R:
c(
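
For reference, the bumped development version can be sanity-checked after installing this branch. A minimal sketch in base R, assuming the package installs under its DESCRIPTION name:

# report the installed version; expected to print '1.4.4.9032'
utils::packageVersion("EMJMCMC")
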
128 changes: 65 additions & 63 deletions tests/testthat/test-BLR-tutorial.R
@@ -82,68 +82,70 @@ test_that("Results for the Jeffrey's prior are sensible", {
expect_length(res4J, 4L)
})

# # simulate Gaussian responses from a model with two-way interactions
# # and an age effect which is an extension of S.4 of the paper
# Xp = data.frame(X)
# Xp$age = rpois(sample_size,lambda = 34)
# Xp$Y=rnorm(n = sample_size,mean = 1+0.7*(Xp$X1*Xp$X4) +
# 0.89*(Xp$X8*Xp$X11)+1.43*(Xp$X5*Xp$X9) + 2*Xp$age, sd = 1)


# teid = sample.int(size =100,n = sample_size,replace = F)
# test = Xp[teid,]
# train = Xp[-teid,]



# # specify the initial formula
# formula1 = as.formula(paste("Y ~ 1 +",
# paste0(colnames(test)[-c(51,52)],collapse = "+")))
# # specify the link function
# g = function(x) x
# # specify the parameters of the custom estimator function
# estimator.args = list(data = train, n = dim(train)[1],
# m =stri_count_fixed(as.character(formula1)[3],"+"),k.max = 15)
# # specify the parameters of gmjmcmc algorithm
# gmjmcmc.params = list(allow_offsprings=1,mutation_rate = 250,
# last.mutation=10000, max.tree.size = 5, Nvars.max =15,
# p.allow.replace=0.9,p.allow.tree=0.01,p.nor=0.01,p.and = 0.9)
# # specify some advanced parameters of mjmcmc
# mjmcmc.params = list(max.N.glob=10, min.N.glob=5, max.N=3, min.N=1,
# printable = F)
# # run the inference of BLR with a non-binary covariate and predictions
# res.alt = pinferunemjmcmc(n.cores = 30, report.level = 0.2,
# num.mod.best = 100,simplify = T,predict = T,test.data = test,
# link.function = g,
# runemjmcmc.params = list(formula = formula1,latnames = c("I(age)"),
# data = train,estimator = estimate.logic.lm.jef,
# estimator.args =estimator.args,
# recalc_margin = 249, save.beta = T,interact = T,outgraphs=F,
# interact.param = gmjmcmc.params,
# n.models = 10000,unique = F,max.cpu = 4,max.cpu.glob = 4,
# create.table = F,create.hash = T,pseudo.paral = T,burn.in = 100,
# print.freq = 1000,
# advanced.param = mjmcmc.params))


# print(base::rbind(c("expressions","probabilities"),res.alt$feat.stat))



# print(sqrt(mean((res.alt$predictions-test$Y)^2)))
# print(mean(abs((res.alt$predictions-test$Y))))


# library(HDeconometrics)
# ridge = ic.glmnet(x = train[,-51],y=train$Y,family = "gaussian",
# alpha = 0)
# predict.ridge = predict(ridge$glmnet,newx = as.matrix(test[,-51]),
# type = "response")[,which(ridge$glmnet$lambda == ridge$lambda)]
# print(sqrt(mean((predict.ridge-test$Y)^2)))
# print(mean(abs((predict.ridge-test$Y))))
# simulate Gaussian responses from a model with two-way interactions
# and an age effect which is an extension of S.4 of the paper
Xp <- data.frame(X)
Xp$age <- rpois(sample_size, lambda = 34)
Xp$Y <- rnorm(
  n = sample_size,
  mean = 1 + 0.7 * (Xp$X1 * Xp$X4) + 0.89 * (Xp$X8 * Xp$X11) +
    1.43 * (Xp$X5 * Xp$X9) + 2 * Xp$age,
  sd = 1
)
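# hold out 100 observations as a test set; the remaining rows are used for training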
teid <- sample.int(size = 100, n = sample_size, replace = FALSE)
test <- Xp[teid, ]
train <- Xp[-teid, ]

# specify the initial formula
formula1 <- as.formula(
  paste("Y ~ 1 +", paste0(colnames(test)[-c(51, 52)], collapse = "+"))
)
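# columns 51 and 52 (age and Y) are excluded from the linear predictor; age enters via latnames below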
# specify the link function
g <- function(x) x
# specify the parameters of the custom estimator function
estimator.args <- list(
  data = train,
  n = dim(train)[1],
  m = stri_count_fixed(as.character(formula1)[3], "+"),
  k.max = 15
)
# specify the parameters of gmjmcmc algorithm
gmjmcmc.params <- list(
  allow_offsprings = 1, mutation_rate = 250, last.mutation = 10000,
  max.tree.size = 5, Nvars.max = 15, p.allow.replace = 0.9, p.allow.tree = 0.01,
  p.nor = 0.01, p.and = 0.9
)
# specify some advanced parameters of mjmcmc
mjmcmc.params <- list(
  max.N.glob = 10, min.N.glob = 5, max.N = 3, min.N = 1, printable = FALSE
)
# run the inference of BLR with a non-binary covariate and predictions
res.alt <- pinferunemjmcmc(
  n.cores = 30, report.level = 0.2, num.mod.best = 100, simplify = TRUE,
  predict = TRUE, test.data = test, link.function = g,
  runemjmcmc.params = list(
    formula = formula1, latnames = c("I(age)"), data = train,
    estimator = estimate.logic.lm.jef, estimator.args = estimator.args,
    recalc_margin = 249, save.beta = TRUE, interact = TRUE, outgraphs = FALSE,
    interact.param = gmjmcmc.params, n.models = 10000, unique = FALSE,
    max.cpu = 4, max.cpu.glob = 4, create.table = FALSE, create.hash = TRUE,
    pseudo.paral = TRUE, burn.in = 100, print.freq = 1000,
    advanced.param = mjmcmc.params
  )
)
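# res.alt$feat.stat pairs the discovered feature expressions with their posterior probabilities (tested below)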

# tmean = 1+2*test$age+0.7*(test$X1*test$X4) +
# 0.89*(test$X8*test$X11)+1.43*(test$X5*test$X9)
# print(sqrt(mean((tmean -test$Y)^2)))
# print(mean(abs((tmean -test$Y))))
test_that("Output with a non-binary covariate is correct", {
  expect_equal(
    res.alt$feat.stat[, 1],
    c("I(age)", "I(((X5))&((X9)))", "I(((X8))&((X11)))", "I(X4)", "I(X1)")
  )
  expect_equal(
    res.alt$feat.stat[, 2],
    c("1", "0.999999999999968", "0.999999999999968", "0.999982534893339", "0.999948879594117")
  )
  expect_equal(
    sqrt(mean((res.alt$predictions - test$Y)^2)), 1.030494, tolerance = 1e-6
  )
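  # the true data-generating mean gives a reference RMSE/MAE for the predictions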
  tmean <- 1 + 2 * test$age + 0.7 * (test$X1 * test$X4) +
    0.89 * (test$X8 * test$X11) + 1.43 * (test$X5 * test$X9)
  expect_equal(sqrt(mean((tmean - test$Y)^2)), 0.9839559, tolerance = 1e-6)
  expect_equal(mean(abs(tmean - test$Y)), 0.8127523, tolerance = 1e-6)
})
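
The translated tests can be run in isolation with testthat's single-file runner. A sketch, assuming the working directory is the package root and the package code is loaded (e.g. via devtools::load_all()):

# run only the translated BLR tutorial test file
testthat::test_file("tests/testthat/test-BLR-tutorial.R")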
