
Print the sensitivity metrics of a SensMLP object. These metrics are the mean sensitivity, the standard deviation of the sensitivities and the mean squared sensitivity of each input variable.
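As an illustration only (this is not the package implementation), the reported measures correspond to column-wise statistics of a hypothetical matrix raw_sens of partial derivatives, with one row per sample and one column per input variable:

# Illustrative sketch; `raw_sens` is a hypothetical N x p matrix of sensitivities
mean_sens    <- colMeans(raw_sens)          # "mean" column
std_sens     <- apply(raw_sens, 2, sd)      # "std" column
mean_sens_sq <- sqrt(colMeans(raw_sens^2))  # "meanSensSQ" column (root mean square)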

Usage

# S3 method for summary.SensMLP
print(x, round_digits = NULL, boot.alpha = NULL, ...)

Arguments

x

summary.SensMLP object created by the summary method applied to a SensMLP object

round_digits

integer; number of decimal places to round the metrics to. Defaults to NULL.

boot.alpha

float; significance level used to display the statistical metrics. If NULL, the boot.alpha value inherited from x is used. Defaults to NULL.

...

additional parameters
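As a quick, non-authoritative sketch of how these arguments are typically passed (assuming sens is a SensMLP object such as the one created in the example below):

# Illustrative calls, not run
s <- summary(sens)
print(s, round_digits = 3)                     # round the reported metrics to 3 decimals
print(s, round_digits = 3, boot.alpha = 0.05)  # significance level for the statistical metrics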

References

Pizarroso J, Portela J, Muñoz A (2022). NeuralSens: Sensitivity Analysis of Neural Networks. Journal of Statistical Software, 102(7), 1-36.

Examples

## Load data -------------------------------------------------------------------
data("DAILY_DEMAND_TR")
fdata <- DAILY_DEMAND_TR

## Parameters of the NNET ------------------------------------------------------
hidden_neurons <- 5
iters <- 250
decay <- 0.1

################################################################################
#########################  REGRESSION NNET #####################################
################################################################################
## Regression dataframe --------------------------------------------------------
# Scale the data
fdata.Reg.tr <- fdata[,2:ncol(fdata)]
fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10
fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000

# Normalize the data for some models
preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale"))
nntrData <- predict(preProc, fdata.Reg.tr)

## TRAIN nnet NNET --------------------------------------------------------
# Create a formula to train NNET
form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ")
form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ "))

set.seed(150)
nnetmod <- nnet::nnet(form,
                      data = nntrData,
                      linout = TRUE,
                      size = hidden_neurons,
                      decay = decay,
                      maxit = iters)
#> # weights:  21
#> initial  value 2487.870002 
#> iter  10 value 1587.516208
#> iter  20 value 1349.706741
#> iter  30 value 1333.940734
#> iter  40 value 1329.097060
#> iter  50 value 1326.518168
#> iter  60 value 1323.148574
#> iter  70 value 1322.378769
#> iter  80 value 1322.018091
#> final  value 1321.996301 
#> converged
# Try SensAnalysisMLP
sens <- NeuralSens::SensAnalysisMLP(nnetmod, trData = nntrData, plot = FALSE)
print(summary(sens))
#> Sensitivity analysis of 2-5-1 MLP network.
#> 
#> Sensitivity measures of each output:
#> $.outcome
#>           mean      std meanSensSQ
#> WD    2.709330 1.182700   2.956103
#> TEMP -1.520092 4.172399   4.439684
#> 
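# The arguments documented above can be passed through print();
# for instance, rounding the reported metrics (output omitted):
print(summary(sens), round_digits = 2)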