(even with good connects) download times of
45 minutes, easily. Retries are found not
to be necessary; the time limit is set to 60 minutes.
One does not want to
try again in an unsupervised manner after
dropping 500MBs of download because of a
time limit, retries hence set to 0.
git-svn-id: svn://svn.r-forge.r-project.org/svnroot/cran2deb@357 edb9625f-4e0d-4859-8d74-9fd3b1da38cb
# Download tuning for curl.
# Some Bioconductor packages are very large; allow up to 60 minutes of
# transfer time rather than curl's short default.
curl.maxtime <- 60 * 60  # maximum download time in seconds

# Connections are generally reliable, and an unsupervised retry after
# dropping hundreds of MB mid-download is undesirable — so never retry.
curl.retries <- 0
setup <- function() {
# set up the working directory
tmp <- tempfile('cran2deb')
setup <- function() {
# set up the working directory
tmp <- tempfile('cran2deb')
# dodgy network connections (hello BT 'OpenWorld', bad ISP)
url <- paste(available[pkgname,'Repository'], fn, sep='/')
# don't log the output -- we don't care!
# dodgy network connections (hello BT 'OpenWorld', bad ISP)
url <- paste(available[pkgname,'Repository'], fn, sep='/')
# don't log the output -- we don't care!
- ret <- system(paste('curl','-o',shQuote(archive),'-m 720 --retry 5',shQuote(url)))
+ ret <- system(paste('curl','-o',shQuote(archive),
+ paste('-m',curl.maxtime,'--retry',curl.retries,sep=' '),
+ shQuote(url)))
if (ret != 0) {
fail('failed to download',url)
}
if (ret != 0) {
fail('failed to download',url)
}