git.saurik.com Git - apt.git/commitdiff
calculate hashes while downloading in https
author: David Kalnischkies <david@kalnischkies.de>
Sat, 11 Apr 2015 08:23:52 +0000 (10:23 +0200)
committer: David Kalnischkies <david@kalnischkies.de>
Sat, 18 Apr 2015 23:13:09 +0000 (01:13 +0200)
We do this in HTTP already to give the CPU some exercise while the disk
is heavily spinning (or flashing?) to store the data, avoiding the need
to reread the entire file later on just to calculate the hashes – which
happens outside of the eyes of progress reporting, so you might end up
with a bunch of https workers 'stuck' at 100% while they were busy
calculating hashes.

This is a bummer for everyone using apt as a connection speedtest, as the
https method now appears slower (it isn't really – it just no longer
reports completion too early).

methods/http.cc
methods/http.h
methods/https.cc
methods/https.h
methods/server.cc
methods/server.h
test/integration/test-apt-download-progress

index e4773b0e245a924bc4440352406b0101b4472987..af3d5ccb673c8882df511ca41c86cf2141eac2bb 100644 (file)
@@ -484,16 +484,14 @@ APT_PURE bool HttpServerState::IsOpen()                                   /*{{{*/
    return (ServerFd != -1);
 }
                                                                        /*}}}*/
-bool HttpServerState::InitHashes(FileFd &File, HashStringList const &ExpectedHashes)/*{{{*/
+bool HttpServerState::InitHashes(HashStringList const &ExpectedHashes) /*{{{*/
 {
    delete In.Hash;
    In.Hash = new Hashes(ExpectedHashes);
-
-   // Set the expected size and read file for the hashes
-   File.Truncate(StartPos);
-   return In.Hash->AddFD(File, StartPos);
+   return true;
 }
                                                                        /*}}}*/
+
 APT_PURE Hashes * HttpServerState::GetHashes()                         /*{{{*/
 {
    return In.Hash;
index 6dc872659d3bab0956d9a15dcacb354491be9032..e73871931d3ee7fcf8fb08522ea24dcd3f6c8b69 100644 (file)
@@ -111,7 +111,7 @@ struct HttpServerState: public ServerState
    virtual bool Open();
    virtual bool IsOpen();
    virtual bool Close();
-   virtual bool InitHashes(FileFd &File, HashStringList const &ExpectedHashes);
+   virtual bool InitHashes(HashStringList const &ExpectedHashes);
    virtual Hashes * GetHashes();
    virtual bool Die(FileFd &File);
    virtual bool Flush(FileFd * const File);
index 81903b239c1a46ace6f10d0e1f24585ff0c2fefb..c6b75d9ad151a5b377a3b2d82b203a36c49f380d 100644 (file)
@@ -72,18 +72,18 @@ HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp)
       else
         me->https->Server->StartPos = 0;
 
-      me->https->File->Truncate(me->https->Server->StartPos);
-      me->https->File->Seek(me->https->Server->StartPos);
-
       me->Res->LastModified = me->https->Server->Date;
       me->Res->Size = me->https->Server->Size;
       me->Res->ResumePoint = me->https->Server->StartPos;
 
       // we expect valid data, so tell our caller we get the file now
-      if (me->https->Server->Result >= 200 && me->https->Server->Result < 300 &&
-           me->https->Server->JunkSize == 0 &&
-           me->Res->Size != 0 && me->Res->Size > me->Res->ResumePoint)
-        me->https->URIStart(*me->Res);
+      if (me->https->Server->Result >= 200 && me->https->Server->Result < 300)
+      {
+        if (me->https->Server->JunkSize == 0 && me->Res->Size != 0 && me->Res->Size > me->Res->ResumePoint)
+           me->https->URIStart(*me->Res);
+        if (me->https->Server->AddPartialFileToHashes(*(me->https->File)) == false)
+           return 0;
+      }
    }
    else if (me->https->Server->HeaderLine(line) == false)
       return 0;
@@ -116,16 +116,31 @@ HttpsMethod::write_data(void *buffer, size_t size, size_t nmemb, void *userp)
       }
    }
 
+   if (me->Server->GetHashes()->Add((unsigned char const * const)buffer, buffer_size) == false)
+      return 0;
+
    return buffer_size;
 }
 
 // HttpsServerState::HttpsServerState - Constructor                    /*{{{*/
-HttpsServerState::HttpsServerState(URI Srv,HttpsMethod * Owner) : ServerState(Srv, Owner)
+HttpsServerState::HttpsServerState(URI Srv,HttpsMethod * Owner) : ServerState(Srv, Owner), Hash(NULL)
 {
    TimeOut = _config->FindI("Acquire::https::Timeout",TimeOut);
    Reset();
 }
                                                                        /*}}}*/
+bool HttpsServerState::InitHashes(HashStringList const &ExpectedHashes)        /*{{{*/
+{
+   delete Hash;
+   Hash = new Hashes(ExpectedHashes);
+   return true;
+}
+                                                                       /*}}}*/
+APT_PURE Hashes * HttpsServerState::GetHashes()                                /*{{{*/
+{
+   return Hash;
+}
+                                                                       /*}}}*/
 
 void HttpsMethod::SetupProxy()                                         /*{{{*/
 {
@@ -365,6 +380,8 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    // go for it - if the file exists, append on it
    File = new FileFd(Itm->DestFile, FileFd::WriteAny);
    Server = CreateServerState(Itm->Uri);
+   if (Server->InitHashes(Itm->ExpectedHashes) == false)
+      return false;
 
    // keep apt updated
    Res.Filename = Itm->DestFile;
@@ -443,10 +460,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
       Res.LastModified = resultStat.st_mtime;
 
    // take hashes
-   Hashes Hash(Itm->ExpectedHashes);
-   FileFd Fd(Res.Filename, FileFd::ReadOnly);
-   Hash.AddFD(Fd);
-   Res.TakeHashes(Hash);
+   Res.TakeHashes(*(Server->GetHashes()));
 
    // keep apt updated
    URIDone(Res);
index dc0ff332213d839d0bee006953153809c912e397..6e32e8d3d0318302d7abf17ac4a65e7c84fb2ecf 100644 (file)
@@ -29,6 +29,8 @@ class FileFd;
 
 class HttpsServerState : public ServerState
 {
+   Hashes * Hash;
+
    protected:
    virtual bool ReadHeaderLines(std::string &/*Data*/) { return false; }
    virtual bool LoadNextResponse(bool const /*ToFile*/, FileFd * const /*File*/) { return false; }
@@ -42,8 +44,8 @@ class HttpsServerState : public ServerState
    virtual bool Open() { return false; }
    virtual bool IsOpen() { return false; }
    virtual bool Close() { return false; }
-   virtual bool InitHashes(FileFd &/*File*/, HashStringList const &/*ExpectedHashes*/) { return false; }
-   virtual Hashes * GetHashes() { return NULL; }
+   virtual bool InitHashes(HashStringList const &ExpectedHashes);
+   virtual Hashes * GetHashes();
    virtual bool Die(FileFd &/*File*/) { return false; }
    virtual bool Flush(FileFd * const /*File*/) { return false; }
    virtual bool Go(bool /*ToFile*/, FileFd * const /*File*/) { return false; }
index e403f10713c6c1c0acc69409237d48a36fa653cf..2116926b063a3e7abe503549dd8fc4b06ff36693 100644 (file)
@@ -235,6 +235,12 @@ ServerState::ServerState(URI Srv, ServerMethod *Owner) : ServerName(Srv), TimeOu
    Reset();
 }
                                                                        /*}}}*/
+bool ServerState::AddPartialFileToHashes(FileFd &File)                 /*{{{*/
+{
+   File.Truncate(StartPos);
+   return GetHashes()->AddFD(File, StartPos);
+}
+                                                                       /*}}}*/
 
 bool ServerMethod::Configuration(string Message)                       /*{{{*/
 {
@@ -357,7 +363,7 @@ ServerMethod::DealWithHeaders(FetchResult &Res)
    FailFd = File->Fd();
    FailTime = Server->Date;
 
-   if (Server->InitHashes(*File, Queue->ExpectedHashes) == false)
+   if (Server->InitHashes(Queue->ExpectedHashes) == false || Server->AddPartialFileToHashes(*File) == false)
    {
       _error->Errno("read",_("Problem hashing file"));
       return ERROR_NOT_FROM_SERVER;
index 45622dd34c6557df0170459a284eda60814f43ea..1b1f754a388312f032e35544db11eeb4eee2b629 100644 (file)
@@ -72,6 +72,7 @@ struct ServerState
    };
    /** \brief Get the headers before the data */
    RunHeadersResult RunHeaders(FileFd * const File, const std::string &Uri);
+   bool AddPartialFileToHashes(FileFd &File);
 
    bool Comp(URI Other) const {return Other.Host == ServerName.Host && Other.Port == ServerName.Port;};
    virtual void Reset() {Major = 0; Minor = 0; Result = 0; Code[0] = '\0'; Size = 0; JunkSize = 0;
@@ -85,7 +86,7 @@ struct ServerState
    virtual bool Open() = 0;
    virtual bool IsOpen() = 0;
    virtual bool Close() = 0;
-   virtual bool InitHashes(FileFd &File, HashStringList const &ExpectedHashes) = 0;
+   virtual bool InitHashes(HashStringList const &ExpectedHashes) = 0;
    virtual Hashes * GetHashes() = 0;
    virtual bool Die(FileFd &File) = 0;
    virtual bool Flush(FileFd * const File) = 0;
index b2c9effe6832e88f5d292ceaa69ec6f0cb76673d..07c5e09c548841de5c0ed3576a9c1430d2bd1122 100755 (executable)
@@ -26,14 +26,16 @@ assertprogress() {
 TESTFILE=testfile.big
 testsuccess dd if=/dev/zero of=./aptarchive/$TESTFILE bs=800k count=1 
 
+OPT='-o APT::Status-Fd=3 -o Debug::pkgAcquire::Worker=1 -o Debug::Acquire::http=1 -o Debug::Acquire::https=1'
+
 msgtest 'download progress works via' 'http'
 exec 3> apt-progress.log
-testsuccess --nomsg apthelper download-file "http://localhost:8080/$TESTFILE" http-$TESTFILE -o APT::Status-Fd=3 -o Acquire::http::Dl-Limit=800
+testsuccess --nomsg apthelper download-file "http://localhost:8080/$TESTFILE" http-$TESTFILE $OPT -o Acquire::http::Dl-Limit=800
 assertprogress apt-progress.log
 
 msgtest 'download progress works via' 'https'
 exec 3> apt-progress.log
-testsuccess --nomsg apthelper download-file "https://localhost:4433/$TESTFILE" https-$TESTFILE -o APT::Status-Fd=3 -o Acquire::https::Dl-Limit=800
+testsuccess --nomsg apthelper download-file "https://localhost:4433/$TESTFILE" https-$TESTFILE $OPT -o Acquire::https::Dl-Limit=800
 assertprogress apt-progress.log
 
 # cleanup