USGS-R / sbtools

@@ -5,6 +5,8 @@
 #' @template item_with_parent
 #'
 #' @param files A string vector of paths to files to be uploaded
+#' @param scrape_files logical should the files be scraped for metadata?
+#' If TRUE, sciencebase will attempt to create extensions based on the files.
 #' @export
 #' @return An object of class \code{sbitem}
 #' @examples \dontrun{
@@ -13,14 +15,21 @@
 #' file <- system.file("examples", "books.json", package = "sbtools")
 #' item_upload_create(user_id(), file)
 #' }
-item_upload_create = function(parent_id, files, ..., session=current_session()){
+item_upload_create = function(parent_id, files, ..., scrape_files = TRUE, session=current_session()){
 	
 	if(length(files) > 50){
 		warning('Trying to attach a large number of files to a SB item. SB imposes file limits which may cause this to fail')
 	}
 	
 	item <- as.sbitem(parent_id)
-	r = sbtools_POST(url = paste0(pkg.env$url_upload_create, item$id, '?title=title'), 
+	
+	params <- '?title=title'
+	
+	if(!scrape_files) {
+		params <- paste0(params, '&scrapeFile=false')
+	}
+	
+	r = sbtools_POST(url = paste0(pkg.env$url_upload_create, item$id, params), 
 									 ...,
 									 body = multi_file_body(files), 
 									 session = session)
@@ -31,7 +40,9 @@
 		stop('Not authenticated or lack of permission to parent object\nAunthenticate with the authenticate_sb function.')
 	}
 	
-	return(as.sbitem(content(r)))
+	item <- as.sbitem(content(r))
+	
+	return(check_upload(item, files))
 }
 
 #' 
@@ -39,7 +50,7 @@
 #' @description Adds a file to an item
 #'
 #' @template manipulate_item
-#' @param files A file path to upload.
+#' @inheritParams item_upload_create
 #'
 #' @return An object of class \code{sbitem}
 #'
@@ -52,19 +63,40 @@
 #' item_append_files(res$id, "foobar.txt")
 #' }
 #' @export
-item_append_files = function(sb_id, files, ..., session=current_session()){
+item_append_files = function(sb_id, files, ..., scrape_files = TRUE, session=current_session()){
 	
 	if(length(files) > 50){
 		warning('Trying to attach a large number of files to a SB item. SB imposes file limits which may cause this to fail')
 	}
 	
 	item <- as.sbitem(sb_id)
-	r = sbtools_POST(url = paste0(pkg.env$url_upload,'?id=', item$id), ...,
+	
+	params <- paste0("?id=", item$id)
+	
+	if(!scrape_files) {
+		params <- paste0(params, "&scrapeFile=false")
+	}
+	
+	r = sbtools_POST(url = paste0(pkg.env$url_upload, params), ...,
 									 body = multi_file_body(files), 
 									 session = session)
   
-  return(as.sbitem(content(r)))
+	item <- as.sbitem(content(r))
+	
+	return(check_upload(item, files))
+	
+}
+
+check_upload <- function(item, files) {
+	
+	if(!all(basename(files) %in% sapply(item$files, function(x) x$name))) {
+		warning("Not all files ended up in the item files. \n",
+		"This indicates that a sciencebase extension was created with the file. \n",
+		"set 'scrape_files' to FALSE to avoid this behavior. \n",
+		"NOTE: 'scrape_files' will default to FALSE in a future version of sbtools.")
+	}
 	
+	item
 }
 
 multi_file_body <- function(files){
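
The new scrape_files argument defaults to TRUE, preserving current behavior. A minimal usage sketch, assuming an authenticated session (via authenticate_sb()); the parent item ID and file names below are placeholders:

	# Create a child item and attach files without letting ScienceBase
	# scrape them into extensions; ID and file names are placeholders.
	item <- item_upload_create("my-parent-item-id", "data.zip", scrape_files = FALSE)

	# Append more files to the same item, again with scraping turned off.
	item <- item_append_files(item$id, c("readme.txt", "metadata.xml"),
	                          scrape_files = FALSE)

With the default scrape_files = TRUE, the new check_upload() helper warns when an uploaded file does not show up under the item's files, which indicates ScienceBase turned it into an extension.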

@@ -70,7 +70,9 @@
 	flist = merge(flist, data.frame(fname=names, dest=destinations, stringsAsFactors=FALSE))
 
 	for(i in 1:nrow(flist)){
-		GET(url=flist[i,]$url, ..., write_disk(flist[i,]$dest, overwrite = overwrite_file), handle=session)
+		GET(url=flist[i,]$url, ..., 
+				write_disk(flist[i,]$dest, overwrite = overwrite_file), 
+				handle=session, timeout = httr::timeout(default_timeout()))
 	}
 
 	return(path.expand(flist$dest))
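
Every request in the hunks above and below now passes timeout = httr::timeout(default_timeout()); httr::timeout() takes a number of seconds and aborts a request that has not completed by then. The default_timeout() helper itself is not shown in this diff; a minimal sketch of what such a helper might look like, assuming an option-based override (the option name "sbtools_timeout" and the 10-second fallback are assumptions for illustration, not the package's actual values):

	# Hypothetical helper: request timeout in seconds, overridable via an option.
	default_timeout <- function() {
		getOption("sbtools_timeout", default = 10)
	}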

@@ -11,7 +11,9 @@
 sb_ping <- function(...) {
 
 	tryCatch({
-		x <- GET(paste0(pkg.env$url_item, 'ping'), ...)
+		x <- GET(paste0(pkg.env$url_item, 'ping'), 
+						 timeout = httr::timeout(default_timeout()),
+						 ...)
 		res = jsonlite::fromJSON(content(x, "text"))
 		if(is(res, 'list') & !is.null(res$result) & res$result == 'OK'){
 			return(TRUE)

@@ -35,8 +35,9 @@
 	h = handle(pkg.env$url_base)
 	
 	## authenticate
-	resp = GET(pkg.env$url_base, accept_json(), authenticate(username, password, type='basic'),
-						 handle=h)
+	resp = GET(pkg.env$url_base, accept_json(), 
+						 authenticate(username, password, type='basic'),
+						 handle=h, timeout = httr::timeout(default_timeout()))
 	
 	if(!any(resp$cookies$name %in% 'JSESSIONID')){
 		stop('Unable to authenticate to SB. Check username and password')

@@ -13,7 +13,8 @@
 	supported_types <- c('text/plain', 'application/json')
 	check_session(session)
 	
-	r = POST(url=url, ..., httrUserAgent(), accept_json(), body=body, handle=session) 
+	r = POST(url=url, ..., httrUserAgent(), accept_json(), body=body, handle=session, 
+					 timeout = httr::timeout(default_timeout())) 
 	handle_errors(r, url, "POST", supported_types)	
 	# if (!strsplit(headers(r)[['content-type']], '[;]')[[1]][1] %in% supported_types)
 	# 	stop('POST failed to ',url,'. check authorization and/or content')
@@ -37,9 +38,12 @@
 sbtools_GET <- function(url, ..., session = NULL) {
 	supported_types <- c('text/plain','text/csv','text/tab-separated-values','application/json','application/x-gzip', 'application/pdf')
 	r <- tryCatch({
-		GET(url = url, ..., httrUserAgent(), handle = session)
+		GET(url = url, ..., httrUserAgent(), handle = session, timeout = httr::timeout(default_timeout()))
 	}, error = function(e) {
-		if(grepl("Item not found", e)) stop(e)
+		if(grepl("Item not found", e)) {
+			warning(e)
+			return(list(status = 404))
+		}
 		
 		if(!is.null(session) && !inherits(session, "curl_handle")) stop("Session is not valid.")
 		
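
For sbtools_GET, an "Item not found" failure no longer stops execution: the error handler warns and the tryCatch evaluates to list(status = 404), so downstream code can branch on the status. A self-contained illustration of that tryCatch pattern (not package code):

	# tryCatch() returns whatever the error handler returns, so a sentinel
	# value such as list(status = 404) can stand in for the failed response.
	r <- tryCatch(stop("Item not found"),
	              error = function(e) {
	                warning(conditionMessage(e))
	                list(status = 404)
	              })
	r$status  # 404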
@@ -68,7 +72,7 @@
 #' @keywords internal
 sbtools_PUT <- function(url, body, ..., session) {
 	check_session(session)
-	r = PUT(url = url, ..., httrUserAgent(), body = body, handle = session)
+	r = PUT(url = url, ..., httrUserAgent(), body = body, handle = session, timeout = httr::timeout(default_timeout()))
 	handle_errors(r, url, "PUT", NULL)
 	session_age_reset()
 	return(r)
@@ -88,7 +92,8 @@
 #' @keywords internal
 sbtools_DELETE <- function(url, ..., session) {
 	check_session(session)
-	r = DELETE(url = url, ..., httrUserAgent(), accept_json(), handle = session)
+	r = DELETE(url = url, ..., httrUserAgent(), accept_json(), 
+						 handle = session, timeout = httr::timeout(default_timeout()))
 	handle_errors(r, url, "DELETE", NULL)
 	session_age_reset()
 	return(r)
@@ -97,7 +102,8 @@
 # HEAD fxn
 sbtools_HEAD <- function(url, ..., session) {
 	session_val(session)
-	r <- tryCatch(HEAD(url = url, ..., httrUserAgent(), handle = session),
+	r <- tryCatch(HEAD(url = url, ..., httrUserAgent(), handle = session,
+										 timeout = httr::timeout(default_timeout())),
 					 error = function(e) {
 							warning(paste("Something went wrong with request: \n",
 														e))

@@ -86,6 +86,8 @@
 #' @export
 query_sb = function(query_list, ..., limit=20, session = current_session()){
 	
+	tryCatch({
+		
 	if(!is(query_list, 'list')){
 		stop('query_list must be a list of query parameters')
 	}
@@ -113,11 +115,11 @@
 	}
 	
 	tryCatch({
-	result = query_items(query_list, ..., session=session)
+		result <- query_items(query_list, ..., session=session)
 	}, error = function(e) {
 		result <- list(status = 404)
 		warning(paste("unhandled error with sciencebase request. \n", 
-						"Error was: \n", e))
+									"Error was: \n", e))
 	})
 	
 	if(is(result, "list") && result$status == 404) {
@@ -151,6 +153,18 @@
 	if(length(out) > limit){
 		out = out[1:limit]
 	}
-	return(out)
+	},
+	error = function(e) {
+		warning(paste("Something unexpected went wrong with a web request \n",
+									"Original error was:\n", e))
+		return(NULL)
+	})
+	
+	if(exists("out")) {
+		return(out)
+	} else {
+		NULL
+	}
+	
 }
 
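With the outer tryCatch added here, query_sb() now warns and returns NULL on an unexpected web failure instead of raising an error. A short caller-side sketch, with an illustrative query:

	# query_sb() returns NULL on an unhandled web failure, so guard on it.
	res <- query_sb(list(q = "water"), limit = 5)
	if (is.null(res)) {
		message("ScienceBase query failed; try again later.")
	}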

@@ -14,7 +14,8 @@
 #' @export
 session_details <- function(..., session = current_session()) {
 	x <- GET(paste0(pkg.env$url_base, "jossoHelper/sessionInfo"), 
-					 handle = session, ...)
+					 handle = session, timeout = httr::timeout(default_timeout()),
+					 ...)
 	stop_for_status(x)
 	jsonlite::fromJSON(content(x, "text"))
 }
Coverage by files
R: 50.99%
Project totals (51 files): 50.99%