author    | Michael Vogt <michael.vogt@ubuntu.com> | 2007-08-03 15:01:05 +0200
committer | Michael Vogt <michael.vogt@ubuntu.com> | 2007-08-03 15:01:05 +0200
commit    | 7fc9db7c75948504393971681054c5dbea137c5d (patch)
tree      | c0c188462ce8f5313f97a6c0ae35a25bef47d239 /methods/https.cc
parent    | 5b28c8040795b53a4df54f33e57976720558ed32 (diff)
parent    | c3279b9d100fb62f412267512458b58dda40f95b (diff)
* apt-inst/contrib/extracttar.cc:
- fix fd leak for zero-size files (thanks to Bill Broadley for
reporting this bug)
* apt-pkg/acquire-item.cc:
- remove zero-size files on an If-Modified-Since (IMS) hit
* methods/https.cc:
- only send LastModified if we actually have one
- send range request with if-range
- delete failed downloads
* apt-pkg/deb/dpkgpm.{cc,h}:
- merged the dpkg-log branch; this lets you specify a
Dir::Log::Terminal file to log dpkg output to, as sketched
after this list (ABI break)
* merged apt--sha256 branch to fully support the new
sha256 checksums in the Packages and Release files
(ABI break)
* Applied patch from Aurelien Jarno <aurel32@debian.org> to fix
downloading from the wrong directory on non-Linux architectures
(closes: #435597)
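
As a rough illustration of the new dpkg logging option mentioned above, a caller could resolve Dir::Log::Terminal through apt's configuration system and append dpkg's terminal output to the resulting file. This is a minimal sketch, not the merged dpkg-log code; the FindFile() lookup, the empty-value "disabled" convention, and the append mode are assumptions made for illustration.

```cpp
// Sketch only: resolve the Dir::Log::Terminal path (assumed to be looked up
// with Configuration::FindFile) and open it for appending dpkg output.
#include <apt-pkg/configuration.h>
#include <cstdio>
#include <string>

static FILE *OpenTerminalLog()
{
   // Assumption: an unset or empty value means terminal logging is disabled.
   std::string logfile = _config->FindFile("Dir::Log::Terminal", "");
   if (logfile.empty() == true)
      return NULL;
   // Append so output from successive dpkg runs lands in the same file.
   return fopen(logfile.c_str(), "a");
}
```

In an apt.conf fragment this would presumably be enabled with something like Dir::Log::Terminal "term.log";, with a relative value resolved against the Dir::Log scope; the exact default and resolution rules are not spelled out in this changelog entry.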
Diffstat (limited to 'methods/https.cc')
-rw-r--r-- | methods/https.cc | 28
1 file changed, 20 insertions, 8 deletions
diff --git a/methods/https.cc b/methods/https.cc
index d48ac97fb..e6717e63a 100644
--- a/methods/https.cc
+++ b/methods/https.cc
@@ -107,6 +107,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    struct stat SBuf;
    struct curl_slist *headers=NULL;
    char curl_errorstr[CURL_ERROR_SIZE];
+   long curl_responsecode;
 
    // TODO:
    //       - http::Timeout
@@ -159,8 +160,11 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    curl_easy_setopt(curl, CURLOPT_HTTPHEADER, headers);
 
    // set time values
-   curl_easy_setopt(curl, CURLOPT_TIMECONDITION, CURL_TIMECOND_IFMODSINCE);
-   curl_easy_setopt(curl, CURLOPT_TIMEVALUE, Itm->LastModified);
+   if(Itm->LastModified > 0)
+   {
+      curl_easy_setopt(curl, CURLOPT_TIMECONDITION, CURL_TIMECOND_IFMODSINCE);
+      curl_easy_setopt(curl, CURLOPT_TIMEVALUE, Itm->LastModified);
+   }
 
    // speed limit
    int dlLimit = _config->FindI("Acquire::http::Dl-Limit",0)*1024;
@@ -178,8 +182,14 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    curl_easy_setopt(curl, CURLOPT_ERRORBUFFER, curl_errorstr);
 
    // In this case we send an if-range query with a range header
-   if (stat(Itm->DestFile.c_str(),&SBuf) >= 0 && SBuf.st_size > 0)
-      curl_easy_setopt(curl, CURLOPT_RESUME_FROM, (long)SBuf.st_size);
+   if (stat(Itm->DestFile.c_str(),&SBuf) >= 0 && SBuf.st_size > 0)
+   {
+      char Buf[1000];
+      sprintf(Buf,"Range: bytes=%li-\r\nIf-Range: %s\r\n",
+              (long)SBuf.st_size - 1,
+              TimeRFC1123(SBuf.st_mtime).c_str());
+      headers = curl_slist_append(headers, Buf);
+   }
 
    // go for it - if the file exists, append on it
    File = new FileFd(Itm->DestFile, FileFd::WriteAny);
@@ -190,14 +200,17 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
 
    // get it!
    CURLcode success = curl_easy_perform(curl);
-
+   curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &curl_responsecode);
 
    // cleanup
-   if(success != 0) {
+   if(success != 0)
+   {
+      unlink(File->Name().c_str());
       _error->Error(curl_errorstr);
       Fail();
       return true;
    }
+   File->Close();
 
    if (Res.Size == 0)
       Res.Size = File->Size();
@@ -210,7 +223,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    Res.Filename = File->Name();
    Res.LastModified = Buf.st_mtime;
    Res.IMSHit = false;
-   if (Itm->LastModified != 0 && Buf.st_mtime >= Itm->LastModified)
+   if (curl_responsecode == 304)
    {
       Res.IMSHit = true;
       Res.LastModified = Itm->LastModified;
@@ -227,7 +240,6 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    URIDone(Res);
 
    // cleanup
-   File->Close();
    Res.Size = 0;
    delete File;
    curl_slist_free_all(headers);
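
To make the https.cc changes in this diff easier to follow in isolation, below is a standalone libcurl sketch of the same request-side techniques: set the If-Modified-Since condition only when a LastModified time is actually known, resume a partial file with Range plus If-Range headers, and read the HTTP status afterwards so that a 304 can be treated as an IMS hit. The URL, file names, and the fixed If-Range date are placeholders; unlike the patch, which packs both headers into one string with embedded CRLFs and starts the range one byte before the partial file's size, this sketch uses two header list entries and the full size.

```cpp
// Standalone sketch (placeholder URL and file names) of the request-side
// techniques used in the patch above.
#include <curl/curl.h>
#include <sys/stat.h>
#include <cstdio>
#include <ctime>

int main()
{
   curl_global_init(CURL_GLOBAL_DEFAULT);
   CURL *curl = curl_easy_init();
   if (curl == NULL)
      return 1;

   struct curl_slist *headers = NULL;
   struct stat SBuf;
   time_t LastModified = 0;   // 0 = no cached timestamp available

   curl_easy_setopt(curl, CURLOPT_URL, "https://example.org/dists/Release");

   // 1) Only request If-Modified-Since handling when we have a timestamp.
   if (LastModified > 0)
   {
      curl_easy_setopt(curl, CURLOPT_TIMECONDITION, CURL_TIMECOND_IFMODSINCE);
      curl_easy_setopt(curl, CURLOPT_TIMEVALUE, (long)LastModified);
   }

   // 2) Resume an existing partial download with Range + If-Range so a file
   //    that changed on the server is sent in full rather than patched.
   if (stat("Release.partial", &SBuf) == 0 && SBuf.st_size > 0)
   {
      char Buf[200];
      snprintf(Buf, sizeof(Buf), "Range: bytes=%ld-", (long)SBuf.st_size);
      headers = curl_slist_append(headers, Buf);
      // The patch uses the partial file's mtime in RFC 1123 form here.
      headers = curl_slist_append(headers,
                                  "If-Range: Fri, 03 Aug 2007 13:01:05 GMT");
   }
   curl_easy_setopt(curl, CURLOPT_HTTPHEADER, headers);

   CURLcode success = curl_easy_perform(curl);

   // 3) Ask curl for the HTTP status; 304 means the cached copy is current,
   //    which is the IMS hit the patch now detects via CURLINFO_RESPONSE_CODE.
   long responsecode = 0;
   curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &responsecode);
   printf("curl result %d, HTTP status %ld\n", (int)success, responsecode);

   curl_slist_free_all(headers);
   curl_easy_cleanup(curl);
   curl_global_cleanup();
   return 0;
}
```

Reading CURLINFO_RESPONSE_CODE directly avoids guessing an IMS hit from file timestamps, which is what the condition removed in the last hunk did.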