confint.cyclopsFit profiles the data likelihood to construct confidence intervals of arbitrary level. Usually it only makes sense to do this for variables that have not been regularized.

# S3 method for cyclopsFit
confint(
  object,
  parm,
  level = 0.95,
  overrideNoRegularization = FALSE,
  includePenalty = TRUE,
  rescale = FALSE,
  ...
)

Arguments

object

A fitted Cyclops model object

parm

A specification of which parameters require confidence intervals, either a vector of numbers or a vector of covariateId names

level

Numeric: confidence level required

overrideNoRegularization

Logical: Enable confidence interval estimation for regularized parameters

includePenalty

Logical: Include regularized covariate penalty in profile

rescale

Logical: rescale coefficients for unnormalized covariate values

...

Additional argument(s) for methods

Value

A matrix with columns reporting the lower and upper confidence limits for each parameter. These columns are labelled as (1 - level)/2 and 1 - (1 - level)/2 in percent (by default 2.5% and 97.5%).
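
For instance, the limits can be read off the returned matrix by column name. The sketch below assumes a fitted model object `fit` like the one produced in the Examples section and the default level = 0.95, so the columns are labelled "2.5 %" and "97.5 %":

#Sketch: extract the interval limits by column name (assumes `fit` as in the
#Examples below; not run here, so no output is shown)
ci <- confint(fit, parm = c(0))
ci[, "2.5 %"]   #lower limits
ci[, "97.5 %"]  #upper limits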

Examples

#Generate some simulated data:
sim <- simulateCyclopsData(nstrata = 1, nrows = 1000, ncovars = 2, eCovarsPerRow = 0.5, 
                           model = "poisson")
#> Sparseness = 75.9 %
cyclopsData <- convertToCyclopsData(sim$outcomes, sim$covariates, modelType = "pr", 
                                    addIntercept = TRUE)
#> Sorting covariates by covariateId and rowId

#Define the prior and control objects to use cross-validation for finding the 
#optimal hyperparameter:
prior <- createPrior("laplace", exclude = 0, useCrossValidation = TRUE)
control <- createControl(cvType = "auto", noiseLevel = "quiet")

#Fit the model
fit <- fitCyclopsModel(cyclopsData, prior = prior, control = control)
#> Using cross-validation selector type byRow
#> Performing 10-fold cross-validation [seed = 1698789245] with data partitions of sizes 100 100 100 100 100 100 100 100 100 100
#> Using 1 thread(s)
#> Starting var = 0.241 (default)
#> Running at Laplace(2.88076) None  Grid-point #1 at 0.241 	Fold #1 Rep #1 pred log like = 558.834
#> Running at Laplace(2.88076) None  Grid-point #1 at 0.241 	Fold #2 Rep #1 pred log like = 629.538
#> Running at Laplace(2.88076) None  Grid-point #1 at 0.241 	Fold #3 Rep #1 pred log like = 578.237
#> Running at Laplace(2.88076) None  Grid-point #1 at 0.241 	Fold #4 Rep #1 pred log like = 557.823
#> Running at Laplace(2.88076) None  Grid-point #1 at 0.241 	Fold #5 Rep #1 pred log like = 639.232
#> Running at Laplace(2.88076) None  Grid-point #1 at 0.241 	Fold #6 Rep #1 pred log like = 660.216
#> Running at Laplace(2.88076) None  Grid-point #1 at 0.241 	Fold #7 Rep #1 pred log like = 493.902
#> Running at Laplace(2.88076) None  Grid-point #1 at 0.241 	Fold #8 Rep #1 pred log like = 647.66
#> Running at Laplace(2.88076) None  Grid-point #1 at 0.241 	Fold #9 Rep #1 pred log like = 873.862
#> Running at Laplace(2.88076) None  Grid-point #1 at 0.241 	Fold #10 Rep #1 pred log like = 578.915
#> AvgPred = 621.822 with stdev = 97.0547
#> Completed at 0.241
#> Next point at 2.41 with value 0 and continue = 1
#> search[ 0.241 ] = 621.822(97.0547)
#> 
#> Running at Laplace(0.910975) None  Grid-point #2 at 2.41 	Fold #1 Rep #1 pred log like = 558.859
#> Running at Laplace(0.910975) None  Grid-point #2 at 2.41 	Fold #2 Rep #1 pred log like = 629.492
#> Running at Laplace(0.910975) None  Grid-point #2 at 2.41 	Fold #3 Rep #1 pred log like = 578.231
#> Running at Laplace(0.910975) None  Grid-point #2 at 2.41 	Fold #4 Rep #1 pred log like = 557.829
#> Running at Laplace(0.910975) None  Grid-point #2 at 2.41 	Fold #5 Rep #1 pred log like = 639.243
#> Running at Laplace(0.910975) None  Grid-point #2 at 2.41 	Fold #6 Rep #1 pred log like = 660.233
#> Running at Laplace(0.910975) None  Grid-point #2 at 2.41 	Fold #7 Rep #1 pred log like = 493.917
#> Running at Laplace(0.910975) None  Grid-point #2 at 2.41 	Fold #8 Rep #1 pred log like = 647.677
#> Running at Laplace(0.910975) None  Grid-point #2 at 2.41 	Fold #9 Rep #1 pred log like = 873.817
#> Running at Laplace(0.910975) None  Grid-point #2 at 2.41 	Fold #10 Rep #1 pred log like = 578.913
#> AvgPred = 621.821 with stdev = 97.0402
#> Completed at 2.41
#> Next point at 0.0241 with value 0 and continue = 1
#> search[ 0.241 ] = 621.822(97.0547)
#> search[ 2.41 ] = 621.821(97.0402)
#> 
#> Running at Laplace(9.10975) None  Grid-point #3 at 0.0241 	Fold #1 Rep #1 pred log like = 558.791
#> Running at Laplace(9.10975) None  Grid-point #3 at 0.0241 	Fold #2 Rep #1 pred log like = 629.679
#> Running at Laplace(9.10975) None  Grid-point #3 at 0.0241 	Fold #3 Rep #1 pred log like = 578.253
#> Running at Laplace(9.10975) None  Grid-point #3 at 0.0241 	Fold #4 Rep #1 pred log like = 557.797
#> Running at Laplace(9.10975) None  Grid-point #3 at 0.0241 	Fold #5 Rep #1 pred log like = 639.192
#> Running at Laplace(9.10975) None  Grid-point #3 at 0.0241 	Fold #6 Rep #1 pred log like = 660.188
#> Running at Laplace(9.10975) None  Grid-point #3 at 0.0241 	Fold #7 Rep #1 pred log like = 493.85
#> Running at Laplace(9.10975) None  Grid-point #3 at 0.0241 	Fold #8 Rep #1 pred log like = 647.622
#> Running at Laplace(9.10975) None  Grid-point #3 at 0.0241 	Fold #9 Rep #1 pred log like = 873.997
#> Running at Laplace(9.10975) None  Grid-point #3 at 0.0241 	Fold #10 Rep #1 pred log like = 578.914
#> AvgPred = 621.828 with stdev = 97.0986
#> Completed at 0.0241
#> Next point at 0.00241 with value 0 and continue = 1
#> search[ 0.0241 ] = 621.828(97.0986)
#> search[ 0.241 ] = 621.822(97.0547)
#> search[ 2.41 ] = 621.821(97.0402)
#> 
#> Running at Laplace(28.8076) None  Grid-point #4 at 0.00241 	Fold #1 Rep #1 pred log like = 558.629
#> Running at Laplace(28.8076) None  Grid-point #4 at 0.00241 	Fold #2 Rep #1 pred log like = 630.036
#> Running at Laplace(28.8076) None  Grid-point #4 at 0.00241 	Fold #3 Rep #1 pred log like = 578.386
#> Running at Laplace(28.8076) None  Grid-point #4 at 0.00241 	Fold #4 Rep #1 pred log like = 557.741
#> Running at Laplace(28.8076) None  Grid-point #4 at 0.00241 	Fold #5 Rep #1 pred log like = 638.953
#> Running at Laplace(28.8076) None  Grid-point #4 at 0.00241 	Fold #6 Rep #1 pred log like = 660.184
#> Running at Laplace(28.8076) None  Grid-point #4 at 0.00241 	Fold #7 Rep #1 pred log like = 493.637
#> Running at Laplace(28.8076) None  Grid-point #4 at 0.00241 	Fold #8 Rep #1 pred log like = 647.6
#> Running at Laplace(28.8076) None  Grid-point #4 at 0.00241 	Fold #9 Rep #1 pred log like = 874.31
#> Running at Laplace(28.8076) None  Grid-point #4 at 0.00241 	Fold #10 Rep #1 pred log like = 578.792
#> AvgPred = 621.827 with stdev = 97.2197
#> Completed at 0.00241
#> Next point at 0.0010937 with value 621.827 and continue = 0
#> search[ 0.00241 ] = 621.827(97.2197)
#> search[ 0.0241 ] = 621.828(97.0986)
#> search[ 0.241 ] = 621.822(97.0547)
#> search[ 2.41 ] = 621.821(97.0402)
#> 
#> 
#> Maximum predicted log likelihood (621.827) estimated at:
#> 	0.0010937 (variance)
#> 	42.7628 (lambda)
#> 
#> Fitting model at optimal hyperparameter
#> Using prior: Laplace(42.7628) None 

#Find out what the optimal hyperparameter was:
getHyperParameter(fit)
#> [1] 0.0010937

#Extract the current log-likelihood and coefficients
logLik(fit)
#> 'log Lik.' -2134.836 (df=3)
coef(fit)
#> (Intercept)           1           2 
#> -3.70113314  0.01674055  0.00000000 

#We can only retrieve the confidence interval for unregularized coefficients:
confint(fit, c(0))
#> Using 1 thread(s)
#>             covariate     2.5 %    97.5 % evaluations
#> (Intercept)         0 -3.729976 -3.673209          23
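
The remaining arguments can be combined in the same call. The calls below are a sketch only, using the `fit` object from above; they are not run here, so no output is shown:

#Request a different confidence level (sketch, output not shown):
confint(fit, c(0), level = 0.99)

#Profiling a regularized coefficient (here covariateId 1) requires explicitly
#overriding the safeguard; such intervals are rarely meaningful and should be
#interpreted with caution:
confint(fit, c(1), overrideNoRegularization = TRUE)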