QuantGen / BGData
Showing 1 of 4 files from the diff.
Other files ignored by Codecov: NEWS.md and man/getG.Rd have changed.

@@ -2,7 +2,7 @@
     formatC(x, width = as.integer(log10(total) + 1L), format = "d", flag = "0")
 }
 
-getG <- function(X, center = TRUE, scale = TRUE, scaleG = TRUE, minVar = 1e-05, i = seq_len(nrow(X)), j = seq_len(ncol(X)), i2 = NULL, chunkSize = 5000L, nCores = getOption("mc.cores", 2L), verbose = FALSE) {
+getG <- function(X, center = TRUE, scale = TRUE, impute = TRUE, scaleG = TRUE, minVar = 1e-05, i = seq_len(nrow(X)), j = seq_len(ncol(X)), i2 = NULL, chunkSize = 5000L, nCores = getOption("mc.cores", 2L), verbose = FALSE) {
 
     # compute XY' rather than XX'
     hasY <- !is.null(i2)
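
The new impute argument defaults to TRUE, so existing callers keep the old behavior while new callers can turn per-chunk imputation off. A minimal usage sketch, not part of the diff; geno, its dimensions, and the NA pattern are made up for illustration:

    # hypothetical toy genotype matrix (rows = individuals, columns = markers)
    geno <- matrix(sample(c(0L, 1L, 2L, NA), 200L, replace = TRUE), nrow = 10L)
    G_imputed <- getG(geno, impute = TRUE)                 # same as the previous default behavior
    G_raw     <- getG(geno, impute = FALSE, nCores = 1L)   # skip per-chunk imputation entirely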
@@ -116,9 +116,9 @@
 
             # center, scale and impute without duplications
             # set nCores to 1 here because section is already parallelized
-            X1 <- preprocess(X1, center = center.chunk, scale = scale.chunk, impute = TRUE, nCores = 1)
+            X1 <- preprocess(X1, center = center.chunk, scale = scale.chunk, impute = impute, nCores = 1)
             if (hasY) {
-                X2 <- preprocess(X2, center = center.chunk, scale = scale.chunk, impute = TRUE, nCores = 1)
+                X2 <- preprocess(X2, center = center.chunk, scale = scale.chunk, impute = impute, nCores = 1)
             }
 
             if (hasY) {
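
Inside each chunk, the previously hard-coded impute = TRUE is replaced by the forwarded argument. As a rough, standalone illustration of what column-wise centering, scaling and imputation amount to (plain base R, assuming mean imputation as one common choice; this is not the package's preprocess() implementation):

    toy <- matrix(c(0, 1, NA, 2, 0, 1), nrow = 3)       # one missing genotype
    mu  <- colMeans(toy, na.rm = TRUE)                   # column means ignoring NAs
    sdv <- apply(toy, 2, sd, na.rm = TRUE)               # column standard deviations
    Z   <- sweep(sweep(toy, 2, mu, "-"), 2, sdv, "/")    # center and scale
    Z[is.na(Z)] <- 0                                     # a mean-imputed value becomes 0 after centering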
@@ -160,7 +160,7 @@
 
 }
 
-getG_symDMatrix <- function(X, center = TRUE, scale = TRUE, scaleG = TRUE, minVar = 1e-05, blockSize = 5000L, folderOut = paste0("symDMatrix_", randomString()), vmode = "double", i = seq_len(nrow(X)), j = seq_len(ncol(X)), chunkSize = 5000L, nCores = getOption("mc.cores", 2L), verbose = FALSE) {
+getG_symDMatrix <- function(X, center = TRUE, scale = TRUE, impute = TRUE, scaleG = TRUE, minVar = 1e-05, blockSize = 5000L, folderOut = paste0("symDMatrix_", randomString()), vmode = "double", i = seq_len(nrow(X)), j = seq_len(ncol(X)), chunkSize = 5000L, nCores = getOption("mc.cores", 2L), verbose = FALSE) {
 
     i <- convertIndex(X, i, "i")
     j <- convertIndex(X, j, "j")
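
getG_symDMatrix() gains the same impute argument and forwards it to getG() in the block loop further down. A hedged usage sketch; geno, the block size, and the output folder name are assumptions for illustration:

    # build a file-backed symmetric G in 1000-row blocks, without per-chunk imputation
    G <- getG_symDMatrix(geno, impute = FALSE, blockSize = 1000L,
                         folderOut = "G_symDMatrix", nCores = 1L)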
@@ -226,7 +226,7 @@
             }
             if (colIndex >= rowIndex) {
                 blockName <- paste0("data_", padDigits(rowIndex, nBlocks), "_", padDigits(colIndex, nBlocks), ".bin")
-                block <- as.ff(getG(X, center = center, scale = scale, scaleG = FALSE, minVar = minVar, i = blockIndices[[rowIndex]], j = j, i2 = blockIndices[[colIndex]], chunkSize = chunkSize, nCores = nCores, verbose = FALSE), filename = paste0(folderOut, "/", blockName), vmode = vmode)
+                block <- as.ff(getG(X, center = center, scale = scale, impute = impute, scaleG = FALSE, minVar = minVar, i = blockIndices[[rowIndex]], j = j, i2 = blockIndices[[colIndex]], chunkSize = chunkSize, nCores = nCores, verbose = FALSE), filename = paste0(folderOut, "/", blockName), vmode = vmode)
                 # Change ff path to a relative one
                 physical(block)[["filename"]] <- blockName
                 rowArgs[[colIndex]] <- block
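
The loop fills only the upper triangle (colIndex >= rowIndex), computing each block as a cross-product between two row subsets via getG(..., i = ..., i2 = ...). A conceptual plain-R sketch of the same tiling idea, using made-up data and base R rather than the package code:

    Z   <- scale(matrix(rnorm(80), nrow = 8))            # stand-in for the preprocessed genotype matrix
    idx <- split(seq_len(nrow(Z)), rep(1:2, each = 4))   # two row blocks
    G11 <- tcrossprod(Z[idx[[1]], , drop = FALSE])       # diagonal block
    G12 <- tcrossprod(Z[idx[[1]], , drop = FALSE],
                      Z[idx[[2]], , drop = FALSE])       # off-diagonal block, analogous to i / i2 above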
Files                       Coverage
R                           71.29%
src                         96.35%
Project Totals (12 files)   78.91%