tlverse / sl3

@@ -23,7 +23,7 @@
Loading
23 23
#'    each bin has the same number of observations, use "equal_mass" (based on
24 24
#'    \code{\link[ggplot2]{cut_number}}).
25 25
#'   }
26 -
#'   \item{\code{n_bins = c(5, 10)}}{Only used if \code{type} is set to
26 +
#'   \item{\code{n_bins = c(3, 5)}}{Only used if \code{type} is set to
27 27
#'    \code{"equal_range"} or \code{"equal_mass"}. This \code{numeric} value
28 28
#'    indicates the number of bins that the support of the outcome variable is
29 29
#'    to be divided into.
@@ -32,6 +32,15 @@
Loading
32 32
#'    sequence of values of the regularization parameter of the Lasso regression,
33 33
#'    to be passed to \code{\link[hal9001]{fit_hal}}.
34 34
#'   }
35 +
#'   \item{\code{trim_dens = 1/sqrt(n)}}{A \code{numeric} giving the minimum
36 +
#'     allowed value of the resultant density predictions. Any predicted
37 +
#'     density values below this tolerance threshold are set to the indicated
38 +
#'     minimum. The default is to use the inverse of the square root of the
39 +
#'     sample size of the prediction set, i.e., 1/sqrt(n); another notable
40 +
#'     choice is 1/sqrt(n)/log(n). If there are observations in the prediction
41 +
#'     set with values of \code{new_A} outside of the support of the training
42 +
#'     set, their predictions are similarly truncated.
43 +
#'   }
35 44
#'   \item{\code{...}}{ Other parameters passed directly to
36 45
#'    \code{\link[haldensify]{haldensify}}. See its documentation for details.
37 46
#'   }
@@ -41,9 +50,10 @@
Loading
41 50
  classname = "Lrnr_haldensify", inherit = Lrnr_base,
42 51
  portable = TRUE, class = TRUE,
43 52
  public = list(
44 -
    initialize = function(grid_type = c("equal_range", "equal_mass"),
45 -
                          n_bins = c(5, 10),
53 +
    initialize = function(grid_type = "equal_range",
54 +
                          n_bins = c(3, 5),
46 55
                          lambda_seq = exp(seq(-1, -13, length = 1000L)),
56 +
                          trim_dens = NULL,
47 57
                          ...) {
48 58
      params <- args_to_list()
49 59
      super$initialize(params = params, ...)
@@ -77,25 +87,42 @@
Loading
77 87
        args$family <- outcome_type$glm_family(return_object = TRUE)$family
78 88
      }
79 89
90 +
      # extract input data
80 91
      args$W <- as.matrix(task$X)
81 92
      args$A <- as.numeric(outcome_type$format(task$Y))
82 -
      args$use_future <- FALSE
83 93
94 +
      # handle weights
84 95
      if (task$has_node("weights")) {
85 96
        args$wts <- task$weights
86 97
      }
87 98
99 +
      # extract offset
88 100
      if (task$has_node("offset")) {
89 101
        args$offset <- task$offset
90 102
      }
91 103
92 -
      fit_object <- call_with_args(haldensify::haldensify, args)
104 +
      # fit haldensify conditional density estimator
105 +
      fit_object <- call_with_args(
106 +
        haldensify::haldensify, args,
107 +
        other_valid = c("max_degree", "smoothness_orders", "num_knots",
108 +
                        "adaptive_smoothing", "reduce_basis", "use_min"),
109 +
        ignore = c("cv_select", "weights", "family", "fit_type", "trim_dens")
110 +
      )
93 111
      return(fit_object)
94 112
    },
95 113
    .predict = function(task = NULL) {
114 +
      # set density trimming to haldensify::predict default if NULL
115 +
      if (is.null(self$params[["trim_dens"]])) {
116 +
        trim_dens <- 1 / sqrt(task$nrow)
117 +
      } else {
118 +
        trim_dens <- self$params[["trim_dens"]]
119 +
      }
120 +
121 +
      # predict density
96 122
      predictions <- predict(self$fit_object,
97 123
        new_A = as.numeric(task$Y),
98 -
        new_W = as.matrix(task$X)
124 +
        new_W = as.matrix(task$X),
125 +
        trim_dens = trim_dens
99 126
      )
100 127
      return(predictions)
101 128
    },

@@ -6,7 +6,6 @@
Loading
6 6
#' @docType class
7 7
#'
8 8
#' @importFrom R6 R6Class
9 -
#' @importFrom arm bayesglm
10 9
#'
11 10
#' @export
12 11
#'
Files Coverage
R 75.78%
Project Totals (91 files) 75.78%
1
codecov:
2
  token: 0e172078-76a1-4b94-bd7f-2e10d0647b3d
3

4
comment: false
5

6
ignore:
7
  - "R/utils.R"
8

9
coverage:
10
  status:
11
    project:
12
      default:
13
        against: parent
14
        target: auto
15
        threshold: 1%
16
        branches:
17
          - master
18
        if_no_uploads: error
19
        if_not_found: success
20
        if_ci_failed: error
21
        only_pulls: false
22
        flags:
23
          - integration
24
        paths:
25
          - folder
26

27
    patch:
28
      default:
29
        against: parent
30
        target: 80%
31
        branches: null
32
        if_no_uploads: success
33
        if_not_found: success
34
        if_ci_failed: error
35
        only_pulls: false
36
        flags:
37
          - integration
38
        paths:
39
          - folder
40

41
    changes:
42
      default:
43
        against: parent
44
        branches: null
45
        if_no_uploads: error
46
        if_not_found: success
47
        if_ci_failed: error
48
        only_pulls: false
49
        flags:
50
          - integration
51
        paths:
52
          - folder
53

54
  flags:
55
    integration:
56
      branches:
57
        - master
58
      ignore:
59
        - app/ui
60

61
  ignore: # files and folders for processing
62
    - tests/*
63

64
  fixes:
65
    - "old_path::new_path"
Sunburst
The inner-most circle is the entire project; moving away from the center are folders and then, finally, a single file. The size and color of each slice represent the number of statements and the coverage, respectively.
Icicle
The top section represents the entire project, proceeding with folders and finally individual files. The size and color of each slice represent the number of statements and the coverage, respectively.
Grid
Each block represents a single file in the project. The size and color of each block represent the number of statements and the coverage, respectively.
Loading