USGS-R / sbtools

Compare 49d6e87 ... +9 ... a0b423f


@@ -13,7 +13,8 @@
Loading
13 13
	supported_types <- c('text/plain', 'application/json')
14 14
	check_session(session)
15 15
	
16 -
	r = POST(url=url, ..., httrUserAgent(), accept_json(), body=body, handle=session) 
16 +
	r = POST(url=url, ..., httrUserAgent(), accept_json(), body=body, handle=session, 
17 +
					 timeout = httr::timeout(default_timeout())) 
17 18
	handle_errors(r, url, "POST", supported_types)	
18 19
	# if (!strsplit(headers(r)[['content-type']], '[;]')[[1]][1] %in% supported_types)
19 20
	# 	stop('POST failed to ',url,'. check authorization and/or content')
@@ -37,9 +38,12 @@
Loading
37 38
sbtools_GET <- function(url, ..., session = NULL) {
38 39
	supported_types <- c('text/plain','text/csv','text/tab-separated-values','application/json','application/x-gzip', 'application/pdf')
39 40
	r <- tryCatch({
40 -
		GET(url = url, ..., httrUserAgent(), handle = session)
41 +
		GET(url = url, ..., httrUserAgent(), handle = session, timeout = httr::timeout(default_timeout()))
41 42
	}, error = function(e) {
42 -
		if(grepl("Item not found", e)) stop(e)
43 +
		if(grepl("Item not found", e))  {
44 +
			warning(e)
45 +
			return(list(status = 404))
46 +
		}
43 47
		
44 48
		if(!is.null(session) && !inherits(session, "curl_handle")) stop("Session is not valid.")
45 49
		
@@ -68,7 +72,7 @@
Loading
68 72
#' @keywords internal
69 73
sbtools_PUT <- function(url, body, ..., session) {
70 74
	check_session(session)
71 -
	r = PUT(url = url, ..., httrUserAgent(), body = body, handle = session)
75 +
	r = PUT(url = url, ..., httrUserAgent(), body = body, handle = session, timeout = httr::timeout(default_timeout()))
72 76
	handle_errors(r, url, "PUT", NULL)
73 77
	session_age_reset()
74 78
	return(r)
@@ -88,7 +92,8 @@
Loading
88 92
#' @keywords internal
89 93
sbtools_DELETE <- function(url, ..., session) {
90 94
	check_session(session)
91 -
	r = DELETE(url = url, ..., httrUserAgent(), accept_json(), handle = session)
95 +
	r = DELETE(url = url, ..., httrUserAgent(), accept_json(), 
96 +
						 handle = session, timeout = httr::timeout(default_timeout()))
92 97
	handle_errors(r, url, "DELETE", NULL)
93 98
	session_age_reset()
94 99
	return(r)
@@ -97,7 +102,8 @@
Loading
97 102
# HEAD fxn
98 103
sbtools_HEAD <- function(url, ..., session) {
99 104
	session_val(session)
100 -
	r <- tryCatch(HEAD(url = url, ..., httrUserAgent(), handle = session),
105 +
	r <- tryCatch(HEAD(url = url, ..., httrUserAgent(), handle = session,
106 +
										 timeout = httr::timeout(default_timeout())),
101 107
					 error = function(e) {
102 108
							warning(paste("Something went wrong with request: \n",
103 109
														e))

@@ -5,6 +5,8 @@
Loading
5 5
#' @template item_with_parent
6 6
#'
7 7
#' @param files A string vector of paths to files to be uploaded
8 +
#' @param scrape_files logical should the files be scraped for metadata? 
9 +
#' If TRUE, sciencebase will attempt to create extensions based on the files.
8 10
#' @export
9 11
#' @return An object of class \code{sbitem}
10 12
#' @examples \dontrun{
@@ -13,14 +15,21 @@
Loading
13 15
#' file <- system.file("examples", "books.json", package = "sbtools")
14 16
#' item_upload_create(user_id(), file)
15 17
#' }
16 -
item_upload_create = function(parent_id, files, ..., session=current_session()){
18 +
item_upload_create = function(parent_id, files, ..., scrape_files = TRUE, session=current_session()){
17 19
	
18 20
	if(length(files) > 50){
19 21
		warning('Trying to attach a large number of files to a SB item. SB imposes file limits which may cause this to fail')
20 22
	}
21 23
	
22 24
	item <- as.sbitem(parent_id)
23 -
	r = sbtools_POST(url = paste0(pkg.env$url_upload_create, item$id, '?title=title'), 
25 +
	
26 +
	params <- '?title=title'
27 +
	
28 +
	if(!scrape_files) {
29 +
		params <- paste0(params, '&scrapeFile=false')
30 +
	}
31 +
	
32 +
	r = sbtools_POST(url = paste0(pkg.env$url_upload_create, item$id, params), 
24 33
									 ...,
25 34
									 body = multi_file_body(files), 
26 35
									 session = session)
@@ -31,15 +40,17 @@
Loading
31 40
		stop('Not authenticated or lack of permission to parent object\nAunthenticate with the authenticate_sb function.')
32 41
	}
33 42
	
34 -
	return(as.sbitem(content(r)))
43 +
	item <- as.sbitem(content(r))
44 +
	
45 +
	return(check_upload(item, files))
35 46
}
36 47
37 48
#' 
38 49
#' @title Upload File to Item
39 50
#' @description Adds a file to an item
40 51
#'
41 52
#' @template manipulate_item
42 -
#' @param files A file path to upload.
53 +
#' @inheritParams item_upload_create
43 54
#'
44 55
#' @return An object of class \code{sbitem}
45 56
#'
@@ -52,19 +63,40 @@
Loading
52 63
#' item_append_files(res$id, "foobar.txt")
53 64
#' }
54 65
#' @export
55 -
item_append_files = function(sb_id, files, ..., session=current_session()){
66 +
item_append_files = function(sb_id, files, ..., scrape_files = TRUE, session=current_session()){
56 67
	
57 68
	if(length(files) > 50){
58 69
		warning('Trying to attach a large number of files to a SB item. SB imposes file limits which may cause this to fail')
59 70
	}
60 71
	
61 72
	item <- as.sbitem(sb_id)
62 -
	r = sbtools_POST(url = paste0(pkg.env$url_upload,'?id=', item$id), ...,
73 +
	
74 +
	params <- paste0("?id=", item$id)
75 +
	
76 +
	if(!scrape_files) {
77 +
		params <- paste0(params, "&scrapeFile=false")
78 +
	}
79 +
	
80 +
	r = sbtools_POST(url = paste0(pkg.env$url_upload, params), ...,
63 81
									 body = multi_file_body(files), 
64 82
									 session = session)
65 83
  
66 -
  return(as.sbitem(content(r)))
84 +
	item <- as.sbitem(content(r))
85 +
	
86 +
	return(check_upload(item, files))
87 +
	
88 +
}
89 +
90 +
check_upload <- function(item, files) {
91 +
	
92 +
	if(!all(basename(files) %in% sapply(item$files, function(x) x$name))) {
93 +
		warning("Not all files ended up in the item files. \n",
94 +
		"This indicates that a sciencebase extension was created with the file. \n",
95 +
		"set 'scrape_files' to FALSE to avoid this behavior. \n",
96 +
		"NOTE: 'scrape_files' will default to FALSE in a future version of sbtools.")
97 +
	}
67 98
	
99 +
	item
68 100
}
69 101
70 102
multi_file_body <- function(files){

@@ -70,7 +70,9 @@
Loading
70 70
	flist = merge(flist, data.frame(fname=names, dest=destinations, stringsAsFactors=FALSE))
71 71
72 72
	for(i in 1:nrow(flist)){
73 -
		GET(url=flist[i,]$url, ..., write_disk(flist[i,]$dest, overwrite = overwrite_file), handle=session)
73 +
		GET(url=flist[i,]$url, ..., 
74 +
				write_disk(flist[i,]$dest, overwrite = overwrite_file), 
75 +
				handle=session, timeout = httr::timeout(default_timeout()))
74 76
	}
75 77
76 78
	return(path.expand(flist$dest))

@@ -11,7 +11,9 @@
Loading
11 11
sb_ping <- function(...) {
12 12
13 13
	tryCatch({
14 -
		x <- GET(paste0(pkg.env$url_item, 'ping'), ...)
14 +
		x <- GET(paste0(pkg.env$url_item, 'ping'), 
15 +
						 timeout = httr::timeout(default_timeout()),
16 +
						 ...)
15 17
		res = jsonlite::fromJSON(content(x, "text"))
16 18
		if(is(res, 'list') & !is.null(res$result) & res$result == 'OK'){
17 19
			return(TRUE)

@@ -35,8 +35,9 @@
Loading
35 35
	h = handle(pkg.env$url_base)
36 36
	
37 37
	## authenticate
38 -
	resp = GET(pkg.env$url_base, accept_json(), authenticate(username, password, type='basic'),
39 -
						 handle=h)
38 +
	resp = GET(pkg.env$url_base, accept_json(), 
39 +
						 authenticate(username, password, type='basic'),
40 +
						 handle=h, timeout = httr::timeout(default_timeout()))
40 41
	
41 42
	if(!any(resp$cookies$name %in% 'JSESSIONID')){
42 43
		stop('Unable to authenticate to SB. Check username and password')

Click to load this diff.
Loading diff...

Click to load this diff.
Loading diff...

Learn more Showing 1 file with coverage changes found.

Changes in R/AAA.R
-1
+1
Loading file...
Files Coverage
Project Totals (51 files) 50.99%
Loading