[apt.git] / methods / https.cc
index fc649d6c217f800aad3ba147e9a8ff1320394f38..6de18b8e09db126e2a41f7055c7d050388da2ce3 100644
--- a/methods/https.cc
+++ b/methods/https.cc
    ##################################################################### */
                                                                        /*}}}*/
 // Include Files                                                       /*{{{*/
+#include <config.h>
+
 #include <apt-pkg/fileutl.h>
 #include <apt-pkg/acquire-method.h>
 #include <apt-pkg/error.h>
 #include <apt-pkg/hashes.h>
 #include <apt-pkg/netrc.h>
+#include <apt-pkg/configuration.h>
 
 #include <sys/stat.h>
 #include <sys/time.h>
 
 #include <errno.h>
 #include <string.h>
 #include <iostream>
-#include <apti18n.h>
 #include <sstream>
 
 #include "config.h"
 #include "https.h"
-
+#include <apti18n.h>
                                                                        /*}}}*/
 using namespace std;
 
@@ -51,7 +53,7 @@ HttpsMethod::progress_callback(void *clientp, double dltotal, double dlnow,
 {
    HttpsMethod *me = (HttpsMethod *)clientp;
    if(dltotal > 0 && me->Res.Size == 0) {
-      me->Res.Size = (unsigned long)dltotal;
+      me->Res.Size = (unsigned long long)dltotal;
       me->URIStart(me->Res);
    }
    return 0;
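
Aside: the widened cast keeps downloads over 4 GiB from truncating where unsigned long is 32 bits. For context, a minimal sketch of how a classic (pre-7.32) libcurl progress callback of this shape is wired up; apart from the CURLOPT_* names, everything here is illustrative:

    #include <curl/curl.h>

    // Classic libcurl progress callback: byte counts arrive as doubles and
    // must be cast to an integer type wide enough for files over 4 GiB.
    static int progress_cb(void *clientp, double dltotal, double /*dlnow*/,
                           double /*ultotal*/, double /*ulnow*/)
    {
       unsigned long long *size = static_cast<unsigned long long *>(clientp);
       if (dltotal > 0 && *size == 0)
          *size = static_cast<unsigned long long>(dltotal);
       return 0;   // a non-zero return aborts the transfer
    }

    int main()
    {
       CURL *curl = curl_easy_init();
       unsigned long long size = 0;
       curl_easy_setopt(curl, CURLOPT_URL, "https://example.org/");
       curl_easy_setopt(curl, CURLOPT_NOPROGRESS, 0L);               // enable the callback
       curl_easy_setopt(curl, CURLOPT_PROGRESSFUNCTION, progress_cb);
       curl_easy_setopt(curl, CURLOPT_PROGRESSDATA, &size);          // delivered as clientp
       curl_easy_perform(curl);
       curl_easy_cleanup(curl);
       return 0;
    }
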
@@ -98,7 +100,6 @@ void HttpsMethod::SetupProxy()                                       /*{{{*/
    depth. */
 bool HttpsMethod::Fetch(FetchItem *Itm)
 {
-   stringstream ss;
    struct stat SBuf;
    struct curl_slist *headers=NULL;  
    char curl_errorstr[CURL_ERROR_SIZE];
@@ -197,6 +198,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
       if (_config->FindB("Acquire::https::No-Store",
                _config->FindB("Acquire::http::No-Store",false)) == true)
         headers = curl_slist_append(headers,"Cache-Control: no-store");
+      stringstream ss;
       ioprintf(ss, "Cache-Control: max-age=%u", _config->FindI("Acquire::https::Max-Age",
                _config->FindI("Acquire::http::Max-Age",0)));
       headers = curl_slist_append(headers, ss.str().c_str());
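
Aside: declaring ss at the point of use (rather than at function scope, as the earlier hunk removes) guarantees a fresh, empty stream for the header being formatted. This is safe because curl_slist_append() copies its argument. A sketch of the pattern, with the helper name being illustrative:

    #include <curl/curl.h>
    #include <sstream>

    // Hypothetical helper: format "Cache-Control: max-age=N" into a fresh
    // stream and append it. curl_slist_append() duplicates the string, so
    // the temporary from ss.str() may be destroyed immediately afterwards.
    static struct curl_slist *append_max_age(struct curl_slist *headers, unsigned int age)
    {
       std::ostringstream ss;
       ss << "Cache-Control: max-age=" << age;
       return curl_slist_append(headers, ss.str().c_str());
    }

The finished list is handed to libcurl via CURLOPT_HTTPHEADER and released after the transfer with curl_slist_free_all().
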
@@ -244,11 +246,11 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    if (stat(Itm->DestFile.c_str(),&SBuf) >= 0 && SBuf.st_size > 0)
    {
       char Buf[1000];
-      sprintf(Buf,"Range: bytes=%li-\r\nIf-Range: %s\r\n",
-             (long)SBuf.st_size - 1,
-             TimeRFC1123(SBuf.st_mtime).c_str());
+      sprintf(Buf, "Range: bytes=%li-", (long) SBuf.st_size - 1);
+      headers = curl_slist_append(headers, Buf);
+      sprintf(Buf, "If-Range: %s", TimeRFC1123(SBuf.st_mtime).c_str());
       headers = curl_slist_append(headers, Buf);
-   } 
+   }
    else if(Itm->LastModified > 0)
    {
       curl_easy_setopt(curl, CURLOPT_TIMECONDITION, CURL_TIMECOND_IFMODSINCE);
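
Aside: the rewritten resume logic reflects how curl_slist headers work: each list entry is one header line, and libcurl adds the line terminators itself, so packing Range and If-Range into a single string with embedded \r\n was never reliable. A sketch of the two-entry form (helper name and buffer size illustrative, the -1 mirroring the code above):

    #include <curl/curl.h>
    #include <cstdio>

    // One curl_slist entry per header line; libcurl appends CRLF itself.
    static struct curl_slist *append_resume_headers(struct curl_slist *headers,
                                                    long have, const char *mtime)
    {
       char buf[1000];
       snprintf(buf, sizeof(buf), "Range: bytes=%li-", have - 1);
       headers = curl_slist_append(headers, buf);   // first header: Range
       snprintf(buf, sizeof(buf), "If-Range: %s", mtime);
       headers = curl_slist_append(headers, buf);   // second header: If-Range
       return headers;
    }
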
@@ -270,14 +272,17 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    long curl_servdate;
    curl_easy_getinfo(curl, CURLINFO_FILETIME, &curl_servdate);
 
+   File->Close();
+
    // cleanup
    if(success != 0) 
    {
       _error->Error("%s", curl_errorstr);
+      // unlink, no need keep 401/404 page content in partial/
+      unlink(File->Name().c_str());
       Fail();
       return true;
    }
-   File->Close();
 
    // Timestamp
    struct utimbuf UBuf;
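
Aside: closing File before the error check (instead of only on the success path, as before) releases the descriptor on every path, and the failure branch can then unlink the partial file so an HTML error body from a 401/404 response is not left in partial/ to be mistaken for resumable data. The shape of the pattern, in a stripped-down sketch (names illustrative):

    #include <unistd.h>

    // Always close first; then, on failure, remove whatever was written so
    // a server error page cannot survive as a stale partial download.
    static bool finish_download(int fd, const char *path, bool ok)
    {
       close(fd);          // unconditional: success and failure paths alike
       if (!ok)
       {
          unlink(path);    // discard the bogus body (e.g. a 404 page)
          return false;
       }
       return true;
    }
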
@@ -309,7 +314,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    // take hashes
    Hashes Hash;
    FileFd Fd(Res.Filename, FileFd::ReadOnly);
-   Hash.AddFD(Fd.Fd(), Fd.Size());
+   Hash.AddFD(Fd);
    Res.TakeHashes(Hash);
    
    // keep apt updated
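
Aside: the FileFd overload of Hashes::AddFD() shown above lets the hashing code determine the file's size itself instead of the caller passing Fd.Fd() and Fd.Size() separately. A sketch of standalone usage, assuming the SHA256 summation member and Result().Value() accessors that apt's Hashes class exposes at this vintage:

    #include <apt-pkg/fileutl.h>
    #include <apt-pkg/hashes.h>
    #include <iostream>

    int main(int argc, char **argv)
    {
       if (argc < 2)
          return 1;
       FileFd Fd(argv[1], FileFd::ReadOnly);
       Hashes Hash;
       Hash.AddFD(Fd);                                  // hashes the whole file
       std::cout << Hash.SHA256.Result().Value() << std::endl;
       return 0;
    }
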