diff --git a/methods/https.cc b/methods/https.cc
index e6717e63ae3f9b6aa4513508aca04b2a3571787a..b0b05a47e19bfe9845a2838da2fbb171551808fc 100644
--- a/methods/https.cc
+++ b/methods/https.cc
@@ -110,11 +110,11 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    long curl_responsecode;
 
    // TODO:
-   //       - http::Timeout
    //       - http::Pipeline-Depth
    //       - error checking/reporting
    //       - more debug options? (CURLOPT_DEBUGFUNCTION?)
 
+   curl_easy_reset(curl);
    SetupProxy();
 
    // callbacks
@@ -125,6 +125,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    curl_easy_setopt(curl, CURLOPT_PROGRESSDATA, this);
    curl_easy_setopt(curl, CURLOPT_NOPROGRESS, false);
    curl_easy_setopt(curl, CURLOPT_FAILONERROR, true);
+   curl_easy_setopt(curl, CURLOPT_FILETIME, true);
 
    // FIXME: https: offer various options of verification
    bool peer_verify = _config->FindB("Acquire::https::Verify-Peer", false);
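The https handle is reused across fetches, which is why the curl_easy_reset() added at the top of Fetch() matters: it wipes whatever options the previous request left behind before the per-item setup starts, and CURLOPT_FILETIME asks libcurl to capture the server's Last-Modified date so it can be read back once the transfer is done. A minimal sketch of that reset-then-configure pattern on a reused handle, using placeholder URLs rather than anything from apt:

    // Hypothetical standalone example; not apt code.
    #include <curl/curl.h>
    #include <cstdio>

    int main()
    {
       curl_global_init(CURL_GLOBAL_DEFAULT);
       CURL *curl = curl_easy_init();

       const char *urls[] = { "https://example.org/a", "https://example.org/b" };
       for (const char *url : urls)
       {
          // wipe options left over from the previous request; live
          // connections and session caches survive the reset
          curl_easy_reset(curl);

          curl_easy_setopt(curl, CURLOPT_URL, url);
          curl_easy_setopt(curl, CURLOPT_FAILONERROR, 1L);
          // record the document date so it can be read back with
          // CURLINFO_FILETIME after the transfer
          curl_easy_setopt(curl, CURLOPT_FILETIME, 1L);

          if (curl_easy_perform(curl) != CURLE_OK)
             fprintf(stderr, "fetch of %s failed\n", url);
       }

       curl_easy_cleanup(curl);
       curl_global_cleanup();
       return 0;
    }
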
@@ -159,13 +160,6 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    }
    curl_easy_setopt(curl, CURLOPT_HTTPHEADER, headers);
 
-   // set time values
-   if(Itm->LastModified > 0)
-   {
-      curl_easy_setopt(curl, CURLOPT_TIMECONDITION, CURL_TIMECOND_IFMODSINCE);
-      curl_easy_setopt(curl, CURLOPT_TIMEVALUE, Itm->LastModified);
-   }
-
    // speed limit
    int dlLimit = _config->FindI("Acquire::http::Dl-Limit",0)*1024;
    if (dlLimit > 0)
@@ -174,6 +168,11 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    // set header
    curl_easy_setopt(curl, CURLOPT_USERAGENT,"Debian APT-CURL/1.0 ("VERSION")");
 
+   // set timeout
+   int timeout = _config->FindI("Acquire::http::Timeout",120);
+   curl_easy_setopt(curl, CURLOPT_TIMEOUT, timeout);
+   curl_easy_setopt(curl, CURLOPT_CONNECTTIMEOUT, timeout);
+
    // debug
    if(_config->FindB("Debug::Acquire::https", false))
       curl_easy_setopt(curl, CURLOPT_VERBOSE, true);
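The timeout item from the old TODO list is now real code: the method reads Acquire::http::Timeout (note the http, not https, key) with a default of 120 seconds and applies it to both the connect phase and the whole transfer. Outside of apt's _config tree the equivalent is just two setopt calls; a rough sketch with the value hard-coded as a stand-in for the configuration lookup:

    // Hypothetical standalone example; not apt code.
    #include <curl/curl.h>

    int main()
    {
       curl_global_init(CURL_GLOBAL_DEFAULT);
       CURL *curl = curl_easy_init();
       curl_easy_setopt(curl, CURLOPT_URL, "https://example.org/");

       long timeout = 120;  // stand-in for _config->FindI("Acquire::http::Timeout", 120)
       curl_easy_setopt(curl, CURLOPT_TIMEOUT, timeout);        // cap on the whole transfer, in seconds
       curl_easy_setopt(curl, CURLOPT_CONNECTTIMEOUT, timeout); // cap on the connect phase only

       CURLcode rc = curl_easy_perform(curl);
       curl_easy_cleanup(curl);
       curl_global_cleanup();
       return rc == CURLE_OK ? 0 : 1;
    }
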
@@ -181,7 +180,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    // error handling
    curl_easy_setopt(curl, CURLOPT_ERRORBUFFER, curl_errorstr);
 
-   // In this case we send an if-range query with a range header
+   // if we have the file send an if-range query with a range header
    if (stat(Itm->DestFile.c_str(),&SBuf) >= 0 && SBuf.st_size > 0)
    {
       char Buf[1000];
@@ -189,11 +188,17 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
              (long)SBuf.st_size - 1,
              TimeRFC1123(SBuf.st_mtime).c_str());
       headers = curl_slist_append(headers, Buf);
+   } 
+   else if(Itm->LastModified > 0)
+   {
+      curl_easy_setopt(curl, CURLOPT_TIMECONDITION, CURL_TIMECOND_IFMODSINCE);
+      curl_easy_setopt(curl, CURLOPT_TIMEVALUE, Itm->LastModified);
    }
 
    // go for it - if the file exists, append on it
    File = new FileFd(Itm->DestFile, FileFd::WriteAny);
-   File->Seek(File->Size());
+   if (File->Size() > 0)
+      File->Seek(File->Size() - 1);
    
    // keep apt updated
    Res.Filename = Itm->DestFile;
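The time-condition block deleted further up reappears here as the else branch: when a non-empty DestFile is already on disk, the request resumes with a Range header starting at the last byte already held, guarded by If-Range carrying the partial file's mtime, and the output file is positioned to match; only when no partial file exists does it fall back to a plain If-Modified-Since request through CURLOPT_TIMECONDITION. A sketch of the same branching with the on-disk state passed in as plain parameters (the helper name and signature are illustrative, not apt's):

    // Hypothetical helper; not apt code.
    #include <curl/curl.h>
    #include <cstdio>
    #include <ctime>

    // Configure either a resume request (Range + If-Range) or a plain
    // conditional GET (If-Modified-Since), mirroring the branch above.
    static struct curl_slist *SetupConditional(CURL *curl, struct curl_slist *headers,
                                               long partial_size, time_t partial_mtime,
                                               time_t last_modified)
    {
       if (partial_size > 0)
       {
          // a partial file exists: re-request from its last byte, but only
          // if the server copy still matches the date of what we have
          char date[64], buf[128];
          struct tm tm;
          gmtime_r(&partial_mtime, &tm);
          strftime(date, sizeof(date), "%a, %d %b %Y %H:%M:%S GMT", &tm);
          snprintf(buf, sizeof(buf), "Range: bytes=%li-", partial_size - 1);
          headers = curl_slist_append(headers, buf);
          snprintf(buf, sizeof(buf), "If-Range: %s", date);
          headers = curl_slist_append(headers, buf);
       }
       else if (last_modified > 0)
       {
          // nothing on disk yet: a 304 answer is enough to know we are current
          curl_easy_setopt(curl, CURLOPT_TIMECONDITION, CURL_TIMECOND_IFMODSINCE);
          curl_easy_setopt(curl, CURLOPT_TIMEVALUE, (long)last_modified);
       }
       return headers;
    }

    int main()
    {
       CURL *curl = curl_easy_init();
       struct curl_slist *headers = NULL;
       // example values: a 4096-byte partial file last touched "now"
       headers = SetupConditional(curl, headers, 4096, time(NULL), 0);
       curl_easy_setopt(curl, CURLOPT_HTTPHEADER, headers);
       // ... set CURLOPT_URL and perform the transfer here ...
       curl_slist_free_all(headers);
       curl_easy_cleanup(curl);
       return 0;
    }
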
@@ -202,6 +207,9 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    CURLcode success = curl_easy_perform(curl);
    curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &curl_responsecode);
 
+   long curl_servdate;
+   curl_easy_getinfo(curl, CURLINFO_FILETIME, &curl_servdate);
+
    // cleanup
    if(success != 0) 
    {
@@ -212,22 +220,31 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    }
    File->Close();
 
-   if (Res.Size == 0)
-      Res.Size = File->Size();
+   // Timestamp
+   struct utimbuf UBuf;
+   if (curl_servdate != -1) {
+       UBuf.actime = curl_servdate;
+       UBuf.modtime = curl_servdate;
+       utime(File->Name().c_str(),&UBuf);
+   }
 
    // check the downloaded result
    struct stat Buf;
    if (stat(File->Name().c_str(),&Buf) == 0)
    {
-      Res.Size = Buf.st_size;
       Res.Filename = File->Name();
       Res.LastModified = Buf.st_mtime;
       Res.IMSHit = false;
       if (curl_responsecode == 304)
       {
+        unlink(File->Name().c_str());
         Res.IMSHit = true;
         Res.LastModified = Itm->LastModified;
+        Res.Size = 0;
+        URIDone(Res);
+        return true;
       }
+      Res.Size = Buf.st_size;
    }
 
    // take hashes
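
After the transfer the new code reads the server date captured through CURLOPT_FILETIME and stamps it onto the downloaded file with utime(), so the LastModified reported back to apt (taken from the stat() on the finished file) reflects the server's idea of the file's age; a 304 answer is short-circuited as an IMS hit, with the leftover file unlinked and Res.Size forced to 0 before URIDone(). A compact sketch of that post-transfer half, assuming a hard-coded destination path and a request made conditional up front:

    // Hypothetical standalone example; not apt code.
    #include <curl/curl.h>
    #include <utime.h>
    #include <unistd.h>
    #include <cstdio>
    #include <ctime>

    int main()
    {
       const char *dest = "/tmp/example.download";   // placeholder path
       FILE *out = fopen(dest, "wb");
       if (out == NULL)
          return 1;

       curl_global_init(CURL_GLOBAL_DEFAULT);
       CURL *curl = curl_easy_init();
       curl_easy_setopt(curl, CURLOPT_URL, "https://example.org/");
       curl_easy_setopt(curl, CURLOPT_WRITEDATA, out);   // default callback fwrite()s here
       curl_easy_setopt(curl, CURLOPT_FILETIME, 1L);     // capture the Last-Modified date

       // make the request conditional so a 304 is possible
       curl_easy_setopt(curl, CURLOPT_TIMECONDITION, CURL_TIMECOND_IFMODSINCE);
       curl_easy_setopt(curl, CURLOPT_TIMEVALUE, (long)time(NULL));

       CURLcode rc = curl_easy_perform(curl);
       fclose(out);

       long response = 0, servdate = -1;
       curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &response);
       curl_easy_getinfo(curl, CURLINFO_FILETIME, &servdate);   // -1 when the server sent no date

       if (rc == CURLE_OK && response == 304)
       {
          // not modified: the file we opened holds nothing useful, drop it
          unlink(dest);
       }
       else if (rc == CURLE_OK && servdate != -1)
       {
          // give the downloaded file the server's modification time
          struct utimbuf ub;
          ub.actime = ub.modtime = servdate;
          utime(dest, &ub);
       }

       curl_easy_cleanup(curl);
       curl_global_cleanup();
       return rc == CURLE_OK ? 0 : 1;
    }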