* merged from apt--mvo

diff --git a/methods/https.cc b/methods/https.cc
index d48ac97fb2df5d6d8434ff946ad619a4055b6df2..3b2b0bb19e3dfb8c2ac6139ee0955f1a54fc5fc9 100644
--- a/methods/https.cc
+++ b/methods/https.cc
@@ -107,6 +107,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    struct stat SBuf;
    struct curl_slist *headers=NULL;  
    char curl_errorstr[CURL_ERROR_SIZE];
+   long curl_responsecode;
 
    // TODO:
    //       - http::Timeout
@@ -114,6 +115,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    //       - error checking/reporting
    //       - more debug options? (CURLOPT_DEBUGFUNCTION?)
 
+   curl_easy_reset(curl);
    SetupProxy();
 
    // callbacks
@@ -124,6 +126,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    curl_easy_setopt(curl, CURLOPT_PROGRESSDATA, this);
    curl_easy_setopt(curl, CURLOPT_NOPROGRESS, false);
    curl_easy_setopt(curl, CURLOPT_FAILONERROR, true);
+   curl_easy_setopt(curl, CURLOPT_FILETIME, true);
 
    // FIXME: https: offer various options of verification
    bool peer_verify = _config->FindB("Acquire::https::Verify-Peer", false);
@@ -159,8 +162,11 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    curl_easy_setopt(curl, CURLOPT_HTTPHEADER, headers);
 
    // set time values
-   curl_easy_setopt(curl, CURLOPT_TIMECONDITION, CURL_TIMECOND_IFMODSINCE);
-   curl_easy_setopt(curl, CURLOPT_TIMEVALUE, Itm->LastModified);
+   if(Itm->LastModified > 0)
+   {
+      curl_easy_setopt(curl, CURLOPT_TIMECONDITION, CURL_TIMECOND_IFMODSINCE);
+      curl_easy_setopt(curl, CURLOPT_TIMEVALUE, Itm->LastModified);
+   }
 
    // speed limit
    int dlLimit = _config->FindI("Acquire::http::Dl-Limit",0)*1024;
@@ -178,8 +184,14 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    curl_easy_setopt(curl, CURLOPT_ERRORBUFFER, curl_errorstr);
 
    // In this case we send an if-range query with a range header
-  if (stat(Itm->DestFile.c_str(),&SBuf) >= 0 && SBuf.st_size > 0)
-     curl_easy_setopt(curl, CURLOPT_RESUME_FROM, (long)SBuf.st_size);
+   if (stat(Itm->DestFile.c_str(),&SBuf) >= 0 && SBuf.st_size > 0)
+   {
+      char Buf[1000];
+      sprintf(Buf,"Range: bytes=%li-\r\nIf-Range: %s\r\n",
+             (long)SBuf.st_size - 1,
+             TimeRFC1123(SBuf.st_mtime).c_str());
+      headers = curl_slist_append(headers, Buf);
+   }
 
    // go for it - if the file exists, append on it
    File = new FileFd(Itm->DestFile, FileFd::WriteAny);
@@ -190,18 +202,32 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
 
    // get it!
    CURLcode success = curl_easy_perform(curl);
+   curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &curl_responsecode);
 
+   long curl_servdate;
+   curl_easy_getinfo(curl, CURLINFO_FILETIME, &curl_servdate);
 
    // cleanup
-   if(success != 0) {
+   if(success != 0) 
+   {
+      unlink(File->Name().c_str());
       _error->Error(curl_errorstr);
       Fail();
       return true;
    }
+   File->Close();
 
    if (Res.Size == 0)
       Res.Size = File->Size();
 
+   // Timestamp
+   struct utimbuf UBuf;
+   if (curl_servdate != -1) {
+       UBuf.actime = curl_servdate;
+       UBuf.modtime = curl_servdate;
+       utime(File->Name().c_str(),&UBuf);
+   }
+
    // check the downloaded result
    struct stat Buf;
    if (stat(File->Name().c_str(),&Buf) == 0)
@@ -210,7 +236,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
       Res.Filename = File->Name();
       Res.LastModified = Buf.st_mtime;
       Res.IMSHit = false;
-      if (Itm->LastModified != 0 && Buf.st_mtime >= Itm->LastModified)
+      if (curl_responsecode == 304)
       {
         Res.IMSHit = true;
         Res.LastModified = Itm->LastModified;
@@ -227,7 +253,6 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    URIDone(Res);
 
    // cleanup
-   File->Close();
    Res.Size = 0;
    delete File;
    curl_slist_free_all(headers);
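
The hunks above combine a handful of libcurl features: CURLOPT_FILETIME/CURLINFO_FILETIME to recover the server's Last-Modified date, CURLINFO_RESPONSE_CODE to detect a 304 IMS hit, CURLOPT_TIMECONDITION/CURLOPT_TIMEVALUE for the If-Modified-Since request, and a raw Range header for resuming partial downloads. The standalone sketch below is not part of the commit; it only illustrates that pattern under assumed placeholders (the URL, the destination file name, the cached_mtime value, and the write_cb helper are all hypothetical), with error handling reduced to the essentials.

   // Illustrative sketch only, not part of the patch: shows the libcurl calls
   // the hunks above rely on. URL, output path and timestamps are placeholders.
   #include <curl/curl.h>
   #include <sys/stat.h>
   #include <utime.h>
   #include <cstdio>
   #include <ctime>

   static size_t write_cb(char *data, size_t size, size_t nmemb, void *userp)
   {
      // write the received body into the FILE* handed over via CURLOPT_WRITEDATA
      return fwrite(data, size, nmemb, static_cast<FILE *>(userp));
   }

   int main()
   {
      const char *url  = "https://example.org/Packages.gz";   // placeholder
      const char *dest = "Packages.gz.partial";               // placeholder

      curl_global_init(CURL_GLOBAL_DEFAULT);
      CURL *curl = curl_easy_init();
      if (curl == NULL)
         return 1;

      FILE *out = fopen(dest, "ab");                 // append: keep partial data
      struct curl_slist *headers = NULL;
      char errbuf[CURL_ERROR_SIZE] = "";

      curl_easy_reset(curl);                         // start from a clean handle
      curl_easy_setopt(curl, CURLOPT_URL, url);
      curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, write_cb);
      curl_easy_setopt(curl, CURLOPT_WRITEDATA, out);
      curl_easy_setopt(curl, CURLOPT_FAILONERROR, 1L);
      curl_easy_setopt(curl, CURLOPT_FILETIME, 1L);  // ask curl to record the server date
      curl_easy_setopt(curl, CURLOPT_ERRORBUFFER, errbuf);

      // If-Modified-Since, only when a cached timestamp exists (placeholder value)
      time_t cached_mtime = 0;
      if (cached_mtime > 0)
      {
         curl_easy_setopt(curl, CURLOPT_TIMECONDITION, CURL_TIMECOND_IFMODSINCE);
         curl_easy_setopt(curl, CURLOPT_TIMEVALUE, (long)cached_mtime);
      }

      // Resume a partial file with a Range header, as the patch does via curl_slist_append()
      struct stat sb;
      if (stat(dest, &sb) == 0 && sb.st_size > 0)
      {
         char buf[200];
         snprintf(buf, sizeof(buf), "Range: bytes=%ld-", (long)sb.st_size);
         headers = curl_slist_append(headers, buf);
      }
      curl_easy_setopt(curl, CURLOPT_HTTPHEADER, headers);

      CURLcode rc = curl_easy_perform(curl);

      long response_code = 0;
      long server_date = -1;
      curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &response_code);
      curl_easy_getinfo(curl, CURLINFO_FILETIME, &server_date);
      fclose(out);

      if (rc != CURLE_OK)
         fprintf(stderr, "download failed: %s\n", errbuf);
      else if (response_code == 304)
         printf("not modified, keep the cached file\n");
      else if (server_date != -1)
      {
         // propagate the server's Last-Modified to the local file, like the patch
         struct utimbuf ub;
         ub.actime = ub.modtime = (time_t)server_date;
         utime(dest, &ub);
      }

      curl_slist_free_all(headers);
      curl_easy_cleanup(curl);
      curl_global_cleanup();
      return rc == CURLE_OK ? 0 : 1;
   }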