Diffstat (limited to 'methods/https.cc')
-rw-r--r--  methods/https.cc  53
1 file changed, 39 insertions, 14 deletions
diff --git a/methods/https.cc b/methods/https.cc
index d48ac97fb..b2bbbddb1 100644
--- a/methods/https.cc
+++ b/methods/https.cc
@@ -107,6 +107,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
struct stat SBuf;
struct curl_slist *headers=NULL;
char curl_errorstr[CURL_ERROR_SIZE];
+ long curl_responsecode;
// TODO:
// - http::Timeout
@@ -114,6 +115,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
// - error checking/reporting
// - more debug options? (CURLOPT_DEBUGFUNCTION?)
+ curl_easy_reset(curl);
SetupProxy();
// callbacks
@@ -124,6 +126,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
curl_easy_setopt(curl, CURLOPT_PROGRESSDATA, this);
curl_easy_setopt(curl, CURLOPT_NOPROGRESS, false);
curl_easy_setopt(curl, CURLOPT_FAILONERROR, true);
+ curl_easy_setopt(curl, CURLOPT_FILETIME, true);
// FIXME: https: offer various options of verification
bool peer_verify = _config->FindB("Acquire::https::Verify-Peer", false);
@@ -158,10 +161,6 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
}
curl_easy_setopt(curl, CURLOPT_HTTPHEADER, headers);
- // set time values
- curl_easy_setopt(curl, CURLOPT_TIMECONDITION, CURL_TIMECOND_IFMODSINCE);
- curl_easy_setopt(curl, CURLOPT_TIMEVALUE, Itm->LastModified);
-
// speed limit
int dlLimit = _config->FindI("Acquire::http::Dl-Limit",0)*1024;
if (dlLimit > 0)
@@ -177,44 +176,71 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
// error handling
curl_easy_setopt(curl, CURLOPT_ERRORBUFFER, curl_errorstr);
- // In this case we send an if-range query with a range header
- if (stat(Itm->DestFile.c_str(),&SBuf) >= 0 && SBuf.st_size > 0)
- curl_easy_setopt(curl, CURLOPT_RESUME_FROM, (long)SBuf.st_size);
+ // if we have the file send an if-range query with a range header
+ if (stat(Itm->DestFile.c_str(),&SBuf) >= 0 && SBuf.st_size > 0)
+ {
+ char Buf[1000];
+ sprintf(Buf,"Range: bytes=%li-\r\nIf-Range: %s\r\n",
+ (long)SBuf.st_size - 1,
+ TimeRFC1123(SBuf.st_mtime).c_str());
+ headers = curl_slist_append(headers, Buf);
+ }
+ else if(Itm->LastModified > 0)
+ {
+ curl_easy_setopt(curl, CURLOPT_TIMECONDITION, CURL_TIMECOND_IFMODSINCE);
+ curl_easy_setopt(curl, CURLOPT_TIMEVALUE, Itm->LastModified);
+ }
// go for it - if the file exists, append on it
File = new FileFd(Itm->DestFile, FileFd::WriteAny);
- File->Seek(File->Size());
+ if (File->Size() > 0)
+ File->Seek(File->Size() - 1);
// keep apt updated
Res.Filename = Itm->DestFile;
// get it!
CURLcode success = curl_easy_perform(curl);
+ curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &curl_responsecode);
+ long curl_servdate;
+ curl_easy_getinfo(curl, CURLINFO_FILETIME, &curl_servdate);
// cleanup
- if(success != 0) {
+ if(success != 0)
+ {
+ unlink(File->Name().c_str());
_error->Error(curl_errorstr);
Fail();
return true;
}
+ File->Close();
- if (Res.Size == 0)
- Res.Size = File->Size();
+ // Timestamp
+ struct utimbuf UBuf;
+ if (curl_servdate != -1) {
+ UBuf.actime = curl_servdate;
+ UBuf.modtime = curl_servdate;
+ utime(File->Name().c_str(),&UBuf);
+ }
// check the downloaded result
struct stat Buf;
if (stat(File->Name().c_str(),&Buf) == 0)
{
- Res.Size = Buf.st_size;
Res.Filename = File->Name();
Res.LastModified = Buf.st_mtime;
Res.IMSHit = false;
- if (Itm->LastModified != 0 && Buf.st_mtime >= Itm->LastModified)
+ if (curl_responsecode == 304)
{
+ unlink(File->Name().c_str());
Res.IMSHit = true;
Res.LastModified = Itm->LastModified;
+ Res.Size = 0;
+ URIDone(Res);
+ return true;
}
+ Res.Size = Buf.st_size;
}
// take hashes
@@ -227,7 +253,6 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
URIDone(Res);
// cleanup
- File->Close();
Res.Size = 0;
delete File;
curl_slist_free_all(headers);
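
The change replaces the unconditional CURLOPT_RESUME_FROM/If-Modified-Since setup with two conditional paths: a Range/If-Range header pair when a partial file already exists, and CURLOPT_TIMECONDITION plus a check for HTTP 304 when only a cached LastModified time is known. The following is a minimal standalone sketch (not part of the patch) of that second pattern; the URL and the cached timestamp are placeholders.

// if_modified_since_sketch.cc -- illustrative only, assumptions: URL and
// cached mtime are made up; error handling is reduced to the essentials.
#include <curl/curl.h>
#include <cstdio>

int main()
{
   curl_global_init(CURL_GLOBAL_DEFAULT);
   CURL *curl = curl_easy_init();
   if (curl == NULL)
      return 1;

   long last_modified = 1200000000L;   // assumed mtime of the local copy (epoch seconds)
   curl_easy_setopt(curl, CURLOPT_URL, "https://example.org/Packages.gz");
   // ask the server for its Last-Modified date, as the patch does with CURLOPT_FILETIME
   curl_easy_setopt(curl, CURLOPT_FILETIME, 1L);
   // only transfer the body if the remote file is newer than our copy
   curl_easy_setopt(curl, CURLOPT_TIMECONDITION, CURL_TIMECOND_IFMODSINCE);
   curl_easy_setopt(curl, CURLOPT_TIMEVALUE, last_modified);

   CURLcode rc = curl_easy_perform(curl);

   long response_code = 0;
   long server_date = -1;
   curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &response_code);
   curl_easy_getinfo(curl, CURLINFO_FILETIME, &server_date);

   if (rc == CURLE_OK && response_code == 304)
      printf("IMS hit: local copy is still current\n");
   else if (rc == CURLE_OK)
      printf("fetched new copy, server Last-Modified: %ld\n", server_date);
   else
      fprintf(stderr, "transfer failed: %s\n", curl_easy_strerror(rc));

   curl_easy_cleanup(curl);
   curl_global_cleanup();
   return 0;
}

Reading back CURLINFO_FILETIME is what lets the patch stamp the downloaded file with the server's Last-Modified date via utime(), so that a later run can issue a meaningful If-Modified-Since or If-Range request.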