ropensci / elastic

Compare c845453 ... +2 ... 96fad25


@@ -1,6 +1,7 @@
 make_bulk_df_generator <- function(fun) {
   function(conn, x, index = NULL, type = NULL, chunk_size = 1000,
-    doc_ids = NULL, raw = FALSE, quiet = FALSE, query = list(), ...) {
+    doc_ids = NULL, raw = FALSE, quiet = FALSE, query = list(),
+    digits = NA, ...) {
 
     is_conn(conn)
     assert(quiet, "logical")
@@ -36,7 +37,7 @@
     for (i in seq_along(data_chks)) {
       if (!quiet) setTxtProgressBar(pb, i)
       resl[[i]] <- docs_bulk(conn, fun(x[data_chks[[i]], , drop = FALSE],
-        index, id_chks[[i]], type), query = query, ...)
+        index, id_chks[[i]], type, digits = digits), query = query, ...)
     }
     return(resl)
   }
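
For context, a minimal standalone sketch (not part of this diff) of what the new `digits` argument controls once it reaches jsonlite: it caps the number of decimal digits kept when numeric fields are serialized, while the default `NA` keeps full precision. The data frame here is illustrative only.

```r
library(jsonlite)

df <- data.frame(mpg = 21.123456789)
toJSON(df, auto_unbox = TRUE, digits = NA)  # full precision: [{"mpg":21.123456789}]
toJSON(df, auto_unbox = TRUE, digits = 2)   # rounded:        [{"mpg":21.12}]
```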

@@ -1,5 +1,5 @@
 make_bulk_ <- function(df, index, counter, es_ids, type = NULL, path = NULL,
-  action = "index") {
+  action = "index", digits = NA) {
 
   if (!is.character(counter)) {
     if (max(counter) >= 10000000000) {
@@ -15,7 +15,7 @@
     sprintf(metadata_fmt, action, index, counter)
   }
   data <- jsonlite::toJSON(df, collapse = FALSE, na = "null",
-    auto_unbox = TRUE)
+    auto_unbox = TRUE, digits = digits)
   tmpf <- if (is.null(path)) tempfile("elastic__") else path
   write_utf8(paste(metadata, data, sep = "\n"), tmpf)
   invisible(tmpf)
@@ -24,7 +24,7 @@
 bulk_ci_generator <- function(action = "index", es_ids = TRUE) {
   tt <- function(conn, x, index = NULL, type = NULL, chunk_size = 1000,
     doc_ids = NULL, es_ids = TRUE, raw = FALSE, quiet = FALSE,
-    query = list(), ...) {
+    query = list(), digits = NA, ...) {
 
     is_conn(conn)
     assert(quiet, "logical")
@@ -57,7 +57,7 @@
       if (!quiet) setTxtProgressBar(pb, i)
       resl[[i]] <- docs_bulk(conn,
         make_bulk_(x[data_chks[[i]], , drop = FALSE],
-        index, id_chks[[i]], es_ids, type, action = action),
+        index, id_chks[[i]], es_ids, type, action = action, digits = digits),
         query = query, ...)
     }
     return(resl)
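
To illustrate the change to `make_bulk_()`: it pairs an action metadata line with the row's JSON, and `digits` is now applied when that JSON is rendered. The snippet below is a standalone sketch, not the package's internal code; the metadata line and index name are hypothetical.

```r
library(jsonlite)

doc <- data.frame(mpg = 21.123456789, cyl = 6)
metadata <- '{"index":{"_index":"mtcars"}}'  # hypothetical bulk action metadata
body <- toJSON(doc, collapse = FALSE, na = "null", auto_unbox = TRUE, digits = 2)
cat(metadata, body, sep = "\n")
#> {"index":{"_index":"mtcars"}}
#> {"mpg":21.12,"cyl":6}
```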

@@ -27,6 +27,9 @@
 #' options include: pipeline, refresh, routing, _source, _source_excludes,
 #' _source_includes, timeout, wait_for_active_shards. See the docs bulk
 #' ES page for details
+#' @param digits digits used by the parameter of the same name by
+#' [jsonlite::toJSON()] to convert data to JSON before being submitted to
+#' your ES instance. default: `NA`
 #' @param ... Pass on curl options to [crul::HttpClient]
 #'
 #' @details More on the Bulk API:
@@ -231,21 +234,24 @@
 #' invisible(docs_bulk(x, mtcars, index = "hello", quiet = FALSE))
 #' }
 docs_bulk <- function(conn, x, index = NULL, type = NULL, chunk_size = 1000,
-  doc_ids = NULL, es_ids = TRUE, raw = FALSE, quiet = FALSE, query = list(), ...) {
+  doc_ids = NULL, es_ids = TRUE, raw = FALSE, quiet = FALSE, query = list(),
+  digits = NA, ...) {
 
   UseMethod("docs_bulk", x)
 }
 
 #' @export
 docs_bulk.default <- function(conn, x, index = NULL, type = NULL, chunk_size = 1000,
-  doc_ids = NULL, es_ids = TRUE, raw = FALSE, quiet = FALSE, query = list(), ...) {
+  doc_ids = NULL, es_ids = TRUE, raw = FALSE, quiet = FALSE, query = list(),
+  digits = NA, ...) {
 
   stop("no 'docs_bulk' method for class ", class(x), call. = FALSE)
 }
 
 #' @export
 docs_bulk.data.frame <- function(conn, x, index = NULL, type = NULL, chunk_size = 1000,
-  doc_ids = NULL, es_ids = TRUE, raw = FALSE, quiet = FALSE, query = list(), ...) {
+  doc_ids = NULL, es_ids = TRUE, raw = FALSE, quiet = FALSE, query = list(),
+  digits = NA, ...) {
 
   is_conn(conn)
   assert(quiet, "logical")
@@ -276,15 +282,15 @@
   for (i in seq_along(data_chks)) {
     if (!quiet) setTxtProgressBar(pb, i)
     resl[[i]] <- docs_bulk(conn, make_bulk(x[data_chks[[i]], , drop = FALSE],
-      index, id_chks[[i]], es_ids, type), query = query, ...)
+      index, id_chks[[i]], es_ids, type, digits = digits), query = query, ...)
   }
   return(resl)
 }
 
 #' @export
 docs_bulk.list <- function(conn, x, index = NULL, type = NULL, chunk_size = 1000,
                            doc_ids = NULL, es_ids = TRUE, raw = FALSE,
-                           quiet = FALSE, query = list(), ...) {
+                           quiet = FALSE, query = list(), digits = NA, ...) {
 
   is_conn(conn)
   assert(quiet, "logical")
@@ -317,15 +323,15 @@
   for (i in seq_along(data_chks)) {
     if (!quiet) setTxtProgressBar(pb, i)
     resl[[i]] <- docs_bulk(conn, make_bulk(x[data_chks[[i]]], index,
-      id_chks[[i]], es_ids, type), query = query, ...)
+      id_chks[[i]], es_ids, type, digits = digits), query = query, ...)
   }
   return(resl)
 }
 
 #' @export
 docs_bulk.character <- function(conn, x, index = NULL, type = NULL, chunk_size = 1000,
                                 doc_ids = NULL, es_ids = TRUE, raw=FALSE,
-                                quiet = FALSE, query = list(), ...) {
+                                quiet = FALSE, query = list(), digits = NA, ...) {
 
   is_conn(conn)
   stopifnot(file.exists(x))
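
A hedged usage sketch of the new argument at the top level (assumes a running Elasticsearch instance reachable via `connect()`; the index name is arbitrary):

```r
library(elastic)

x <- connect()
# round all numeric fields to 3 decimal places before bulk indexing
docs_bulk(x, mtcars, index = "mtcars", quiet = TRUE, digits = 3)
# the default digits = NA keeps full numeric precision
docs_bulk(x, mtcars, index = "mtcars", quiet = TRUE, digits = NA)
```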

@@ -133,21 +133,21 @@
 #'   path = tempfile(fileext = ".json"), quiet = FALSE)
 #' }
 docs_bulk_prep <- function(x, index, path, type = NULL, chunk_size = 1000,
-  doc_ids = NULL, quiet = FALSE) {
+  doc_ids = NULL, quiet = FALSE, digits = NA) {
 
   UseMethod("docs_bulk_prep")
 }
 
 #' @export
 docs_bulk_prep.default <- function(x, index, path, type = NULL,
-  chunk_size = 1000, doc_ids = NULL, quiet = FALSE) {
+  chunk_size = 1000, doc_ids = NULL, quiet = FALSE, digits = NA) {
 
   stop("no 'docs_bulk_prep' method for class ", class(x), call. = FALSE)
 }
 
 #' @export
 docs_bulk_prep.data.frame <- function(x, index, path, type = NULL,
-  chunk_size = 1000, doc_ids = NULL, quiet = FALSE) {
+  chunk_size = 1000, doc_ids = NULL, quiet = FALSE, digits = NA) {
 
   assert(quiet, "logical")
   check_doc_ids(x, doc_ids)
@@ -175,15 +175,16 @@
     if (!quiet) setTxtProgressBar(pb, i)
     resl[[i]] <- make_bulk(
       x[data_chks[[i]], , drop = FALSE], index, id_chks[[i]], es_ids, type,
-      path = if (length(data_chks) > 1) adjust_path(path, i) else path
+      path = if (length(data_chks) > 1) adjust_path(path, i) else path,
+      digits = digits
     )
   }
   return(unlist(resl))
 }
 
 #' @export
 docs_bulk_prep.list <- function(x, index, path, type = NULL,
-  chunk_size = 1000, doc_ids = NULL, quiet = FALSE) {
+  chunk_size = 1000, doc_ids = NULL, quiet = FALSE, digits = NA) {
 
   assert(quiet, "logical")
   check_doc_ids(x, doc_ids)
@@ -213,7 +214,8 @@
     if (!quiet) setTxtProgressBar(pb, i)
     resl[[i]] <- make_bulk(
       x[data_chks[[i]]], index, id_chks[[i]], es_ids, type,
-      path = if (length(data_chks) > 1) adjust_path(path, i) else path
+      path = if (length(data_chks) > 1) adjust_path(path, i) else path,
+      digits = digits
     )
   }
   return(unlist(resl))
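
And the same idea for `docs_bulk_prep()`, which only writes bulk files to disk and needs no connection; this is a sketch with an arbitrary index name and a temporary file path.

```r
library(elastic)

f <- docs_bulk_prep(mtcars, index = "mtcars",
  path = tempfile(fileext = ".json"), quiet = TRUE, digits = 2)
# first two lines: action metadata, then the document JSON with rounded numbers
readLines(f, n = 2)
```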

@@ -14,7 +14,7 @@
 #' @param parent ID of the parent document. Is is only used for routing and
 #' when for the upsert request
 #' @param refresh Refresh the index after performing the operation. See
-#' <http://bit.ly/2ezW9Zr> for details
+#' <https://bit.ly/2ezW9Zr> for details
 #' @param retry_on_conflict Specify how many times should the operation be
 #' retried when a conflict occurs (default: 0)
 #' @param routing (character) Specific routing value
@@ -29,16 +29,16 @@
 #' a number (in milliseconds) or any valid time value (e.g, 86400000, 1d).
 #' @param wait_for_active_shards The number of shard copies required to be
 #' active before proceeding with the update operation.
-#' See <http://bit.ly/2fbqkZ1> for details.
+#' See <https://bit.ly/2fbqkZ1> for details.
 #' @param source Allows to control if and how the updated source should be
 #' returned in the response. By default the updated source is not returned.
-#' See <http://bit.ly/2efmYiE> filtering for details
+#' See <https://bit.ly/2efmYiE> filtering for details
 #' @param detect_noop (logical) Specifying `TRUE` will cause Elasticsearch
 #' to check if there are changes and, if there aren't, turn the update request
 #' into a noop.
 #' @param callopts Curl options passed on to [crul::HttpClient]
 #' @param ... Further args to query DSL
-#' @references <http://bit.ly/2eVYqLz>
+#' @references <https://bit.ly/2eVYqLz>
 #' @examples \dontrun{
 #' (x <- connect())
 #' if (!index_exists(x, 'plos')) {

(4 additional file diffs not expanded in this view)

Everything is accounted for! No changes detected that need to be reviewed.

What changes does Codecov check for?
- Lines, not adjusted in diff, that have changed coverage data.
- Files that introduced coverage data that had none before.
- Files that have missing coverage data that once were tracked.

Files                       Change    Coverage
R                           -0.01%    2.06%
Project Totals (50 files)             2.06%