git.saurik.com Git - apt.git/commitdiff
use the same redirection handling for http and https
author David Kalnischkies <david@kalnischkies.de>
Tue, 2 Aug 2016 12:49:58 +0000 (14:49 +0200)
committer David Kalnischkies <david@kalnischkies.de>
Wed, 10 Aug 2016 21:19:44 +0000 (23:19 +0200)
cURL, which backs our https implementation, can handle redirects on its
own, but by dealing with them ourselves we gain finer control over which
redirections will be performed (we don't like https → http) and by whom,
so that redirections to other hosts correctly spawn a new https method
dealing with those instead of letting the current one handle them.
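The rule this buys us boils down to a small scheme check (implemented in
methods/server.cc below). As a standalone illustration only - the function
name and plain-string schemes here are not apt's actual API:

    #include <string>

    // Distilled redirect policy enforced by this commit: staying on the
    // same access method is fine, upgrading to https is fine, everything
    // else (notably https -> http) is refused with an error.
    static bool RedirectAllowed(std::string const &from, std::string const &to)
    {
       if (from == to)
          return true;   // e.g. http -> http on some other host
       if ((from == "http" || from == "https+http") && to == "https")
          return true;   // upgrades to https are welcome
       return false;     // https -> http and the rest: forbidden
    }

RedirectAllowed("https", "http") being false is exactly what the updated
test-apt-https-no-redirect asserts at the end of this commit.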

methods/http.cc
methods/http.h
methods/https.cc
methods/server.cc
methods/server.h
test/integration/test-apt-https-no-redirect
test/integration/test-partial-file-support

index cf5eae06d3b9a62dee672cd74826a0382a24eebe..b7a3aa4a12fd3d4964b30ca33f40eed995d8b42d 100644 (file)
@@ -797,3 +797,32 @@ void HttpMethod::RotateDNS()                                               /*{{{*/
    ::RotateDNS();
 }
                                                                        /*}}}*/
+ServerMethod::DealWithHeadersResult HttpMethod::DealWithHeaders(FetchResult &Res)/*{{{*/
+{
+   auto ret = ServerMethod::DealWithHeaders(Res);
+   if (ret != ServerMethod::FILE_IS_OPEN)
+      return ret;
+
+   // Open the file
+   delete File;
+   File = new FileFd(Queue->DestFile,FileFd::WriteAny);
+   if (_error->PendingError() == true)
+      return ERROR_NOT_FROM_SERVER;
+
+   FailFile = Queue->DestFile;
+   FailFile.c_str();   // Make sure we don't do a malloc in the signal handler
+   FailFd = File->Fd();
+   FailTime = Server->Date;
+
+   if (Server->InitHashes(Queue->ExpectedHashes) == false || Server->AddPartialFileToHashes(*File) == false)
+   {
+      _error->Errno("read",_("Problem hashing file"));
+      return ERROR_NOT_FROM_SERVER;
+   }
+   if (Server->StartPos > 0)
+      Res.ResumePoint = Server->StartPos;
+
+   SetNonBlock(File->Fd(),true);
+   return FILE_IS_OPEN;
+}
+                                                                       /*}}}*/
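With this hunk, opening the destination file (plus the hashing and
signal-handler bookkeeping) becomes http-specific: the https method lets
libcurl write the file and keeps the base-class behaviour. A heavily
abbreviated sketch of the resulting split, using only names that appear
in this diff (the class bodies are assumptions, not the real headers):

    class ServerMethod
    {
       public:
       enum DealWithHeadersResult { FILE_IS_OPEN, IMS_HIT, ERROR_WITH_CONTENT_PAGE,
          ERROR_UNRECOVERABLE, ERROR_NOT_FROM_SERVER, TRY_AGAIN_OR_REDIRECT };
       // shared logic: IMS hits, the redirect policy, status-code errors
       virtual DealWithHeadersResult DealWithHeaders(FetchResult &Res);
    };

    class HttpMethod : public ServerMethod
    {
       public:
       // runs the shared checks, then opens File and wires up
       // FailFile/FailFd/FailTime and the hashes for its own download loop
       virtual DealWithHeadersResult DealWithHeaders(FetchResult &Res) APT_OVERRIDE;
    };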
index 2ac3b8c9b366d0b2e313b05d3e8147c38ddd3a07..e6b11d642d79171dd1774fb05420c2e2a42ed074 100644 (file)
@@ -130,6 +130,7 @@ class HttpMethod : public ServerMethod
 
    virtual std::unique_ptr<ServerState> CreateServerState(URI const &uri) APT_OVERRIDE;
   virtual void RotateDNS() APT_OVERRIDE;
+   virtual DealWithHeadersResult DealWithHeaders(FetchResult &Res) APT_OVERRIDE;
 
    protected:
   std::string AutoDetectProxyCmd;
index 7c0c3241df95c8bc1f3b7dfabe346b182fa32af3..a5a8a7cd1ef183f7a4e00e04725d2f3519f042bf 100644 (file)
@@ -60,6 +60,7 @@ HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp)
 
    if (line.empty() == true)
   {
+      me->https->Server->JunkSize = 0;
       if (me->https->Server->Result != 416 && me->https->Server->StartPos != 0)
         ;
       else if (me->https->Server->Result == 416)
@@ -99,11 +100,13 @@ HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp)
       // we expect valid data, so tell our caller we get the file now
       if (me->https->Server->Result >= 200 && me->https->Server->Result < 300)
       {
-        if (me->https->Server->JunkSize == 0 && me->Res->Size != 0 && me->Res->Size > me->Res->ResumePoint)
+        if (me->Res->Size != 0 && me->Res->Size > me->Res->ResumePoint)
            me->https->URIStart(*me->Res);
         if (me->https->Server->AddPartialFileToHashes(*(me->https->File)) == false)
            return 0;
       }
+      else
+        me->https->Server->JunkSize = std::numeric_limits<decltype(me->https->Server->JunkSize)>::max();
    }
    else if (me->https->Server->HeaderLine(line) == false)
       return 0;
@@ -266,6 +269,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    // options
    curl_easy_setopt(curl, CURLOPT_NOPROGRESS, true);
    curl_easy_setopt(curl, CURLOPT_FILETIME, true);
+   curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 0);
    // only allow curl to handle https, not the other stuff it supports
    curl_easy_setopt(curl, CURLOPT_PROTOCOLS, CURLPROTO_HTTPS);
    curl_easy_setopt(curl, CURLOPT_REDIR_PROTOCOLS, CURLPROTO_HTTPS);
@@ -372,10 +376,6 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    curl_easy_setopt(curl, CURLOPT_LOW_SPEED_LIMIT, DL_MIN_SPEED);
    curl_easy_setopt(curl, CURLOPT_LOW_SPEED_TIME, timeout);
 
-   // set redirect options and default to 10 redirects
-   curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, AllowRedirect);
-   curl_easy_setopt(curl, CURLOPT_MAXREDIRS, 10);
-
    // debug
    if (Debug == true)
       curl_easy_setopt(curl, CURLOPT_VERBOSE, true);
@@ -428,6 +428,8 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    // met, CURLINFO_CONDITION_UNMET will be set to 1
    long curl_condition_unmet = 0;
    curl_easy_getinfo(curl, CURLINFO_CONDITION_UNMET, &curl_condition_unmet);
+   if (curl_condition_unmet == 1)
+      Server->Result = 304;
 
    File->Close();
   curl_slist_free_all(headers);
@@ -460,70 +462,54 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
       return false;
    }
 
-   // server says file not modified
-   if (Server->Result == 304 || curl_condition_unmet == 1)
+   switch (DealWithHeaders(Res))
    {
-      RemoveFile("https", File->Name());
-      Res.IMSHit = true;
-      Res.LastModified = Itm->LastModified;
-      Res.Size = 0;
-      URIDone(Res);
-      return true;
-   }
-   Res.IMSHit = false;
+      case ServerMethod::IMS_HIT:
+        URIDone(Res);
+        break;
 
-   if (Server->Result != 200 && // OK
-        Server->Result != 206 && // Partial
-        Server->Result != 416) // invalid Range
-   {
-      char err[255];
-      snprintf(err, sizeof(err) - 1, "HttpError%i", Server->Result);
-      SetFailReason(err);
-      _error->Error("%i %s", Server->Result, Server->Code);
-      // unlink, no need keep 401/404 page content in partial/
-      RemoveFile("https", File->Name());
-      return false;
-   }
+      case ServerMethod::ERROR_WITH_CONTENT_PAGE:
+        // unlink, no need keep 401/404 page content in partial/
+        RemoveFile(Binary.c_str(), File->Name());
+      case ServerMethod::ERROR_UNRECOVERABLE:
+      case ServerMethod::ERROR_NOT_FROM_SERVER:
+        return false;
 
-   // invalid range-request
-   if (Server->Result == 416)
-   {
-      RemoveFile("https", File->Name());
-      delete File;
-      Redirect(Itm->Uri);
-      return true;
-   }
+      case ServerMethod::TRY_AGAIN_OR_REDIRECT:
+        Redirect(NextURI);
+        break;
 
-   struct stat resultStat;
-   if (unlikely(stat(File->Name().c_str(), &resultStat) != 0))
-   {
-      _error->Errno("stat", "Unable to access file %s", File->Name().c_str());
-      return false;
-   }
-   Res.Size = resultStat.st_size;
+      case ServerMethod::FILE_IS_OPEN:
+        struct stat resultStat;
+        if (unlikely(stat(File->Name().c_str(), &resultStat) != 0))
+        {
+           _error->Errno("stat", "Unable to access file %s", File->Name().c_str());
+           return false;
+        }
+        Res.Size = resultStat.st_size;
 
-   // Timestamp
-   curl_easy_getinfo(curl, CURLINFO_FILETIME, &Res.LastModified);
-   if (Res.LastModified != -1)
-   {
-      struct timeval times[2];
-      times[0].tv_sec = Res.LastModified;
-      times[1].tv_sec = Res.LastModified;
-      times[0].tv_usec = times[1].tv_usec = 0;
-      utimes(File->Name().c_str(), times);
-   }
-   else
-      Res.LastModified = resultStat.st_mtime;
+        // Timestamp
+        curl_easy_getinfo(curl, CURLINFO_FILETIME, &Res.LastModified);
+        if (Res.LastModified != -1)
+        {
+           struct timeval times[2];
+           times[0].tv_sec = Res.LastModified;
+           times[1].tv_sec = Res.LastModified;
+           times[0].tv_usec = times[1].tv_usec = 0;
+           utimes(File->Name().c_str(), times);
+        }
+        else
+           Res.LastModified = resultStat.st_mtime;
 
-   // take hashes
-   Res.TakeHashes(*(Server->GetHashes()));
+        // take hashes
+        Res.TakeHashes(*(Server->GetHashes()));
 
-   // keep apt updated
-   URIDone(Res);
+        // keep apt updated
+        URIDone(Res);
+        break;
+   }
 
-   // cleanup
    delete File;
-
    return true;
 }
                                                                        /*}}}*/
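The curl_condition_unmet handling above deserves a note: libcurl may
satisfy a time condition by simply not transferring the body while still
reporting 200, so mapping an unmet condition onto a synthetic 304 is what
lets the shared DealWithHeaders treat it as an ordinary IMS hit. Roughly,
the libcurl mechanism works like this (standalone sketch against plain
libcurl, error handling omitted):

    #include <curl/curl.h>
    #include <ctime>

    int main()
    {
       curl_global_init(CURL_GLOBAL_DEFAULT);
       CURL *curl = curl_easy_init();
       curl_easy_setopt(curl, CURLOPT_URL, "https://example.org/index.html");
       // transfer the body only if it changed since this timestamp
       curl_easy_setopt(curl, CURLOPT_TIMECONDITION, (long)CURL_TIMECOND_IFMODSINCE);
       curl_easy_setopt(curl, CURLOPT_TIMEVALUE, (long)(time(nullptr) - 3600));
       curl_easy_perform(curl);
       // the condition has to be queried explicitly afterwards
       long unmet = 0;
       curl_easy_getinfo(curl, CURLINFO_CONDITION_UNMET, &unmet);
       if (unmet == 1)
          ; // nothing was transferred: treat as 304 Not Modified
       curl_easy_cleanup(curl);
       curl_global_cleanup();
       return 0;
    }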
index 672a3d16dd72a55aa796a7b91c1f984380cef74e..66551b8931c33eaf3dab77973fe5a88dee9eb928 100644 (file)
@@ -170,7 +170,7 @@ bool ServerState::HeaderLine(string Line)
       HaveContent = true;
 
       unsigned long long * DownloadSizePtr = &DownloadSize;
-      if (Result == 416)
+      if (Result == 416 || (Result >= 300 && Result < 400))
         DownloadSizePtr = &JunkSize;
 
       *DownloadSizePtr = strtoull(Val.c_str(), NULL, 10);
@@ -272,6 +272,7 @@ ServerMethod::DealWithHeaders(FetchResult &Res)
       RemoveFile("server", Queue->DestFile);
       Res.IMSHit = true;
       Res.LastModified = Queue->LastModified;
       RemoveFile("server", Queue->DestFile);
       Res.IMSHit = true;
       Res.LastModified = Queue->LastModified;
+      Res.Size = 0;
       return IMS_HIT;
    }
 
@@ -288,7 +289,8 @@ ServerMethod::DealWithHeaders(FetchResult &Res)
            && Server->Result != 304    // Not Modified
            && Server->Result != 306))  // (Not part of HTTP/1.1, reserved)
    {
-      if (Server->Location.empty() == true);
+      if (Server->Location.empty() == true)
+        ;
       else if (Server->Location[0] == '/' && Queue->Uri.empty() == false)
       {
         URI Uri = Queue->Uri;
@@ -329,8 +331,10 @@ ServerMethod::DealWithHeaders(FetchResult &Res)
         if (tmpURI.Access == Uri.Access)
            return TRY_AGAIN_OR_REDIRECT;
         // as well as http to https
-        else if (Uri.Access == "http" && tmpURI.Access == "https")
+        else if ((Uri.Access == "http" || Uri.Access == "https+http") && tmpURI.Access == "https")
            return TRY_AGAIN_OR_REDIRECT;
+        else
+           _error->Error("Redirection from %s to '%s' is forbidden", Uri.Access.c_str(), NextURI.c_str());
       }
       /* else pass through for error message */
    }
@@ -358,11 +362,10 @@ ServerMethod::DealWithHeaders(FetchResult &Res)
            // the file is completely downloaded, but was not moved
            if (Server->HaveContent == true)
            {
-              // Send to error page to dev/null
-              FileFd DevNull("/dev/null",FileFd::WriteExists);
-              Server->RunData(&DevNull);
+              // nuke the sent error page
+              Server->RunDataToDevNull();
+              Server->HaveContent = false;
            }
-           Server->HaveContent = false;
            Server->StartPos = Server->TotalFileSize;
            Server->Result = 200;
         }
@@ -378,10 +381,13 @@ ServerMethod::DealWithHeaders(FetchResult &Res)
       failure */
    if (Server->Result < 200 || Server->Result >= 300)
    {
-      std::string err;
-      strprintf(err, "HttpError%u", Server->Result);
-      SetFailReason(err);
-      _error->Error("%u %s", Server->Result, Server->Code);
+      if (_error->PendingError() == false)
+      {
+        std::string err;
+        strprintf(err, "HttpError%u", Server->Result);
+        SetFailReason(err);
+        _error->Error("%u %s", Server->Result, Server->Code);
+      }
       if (Server->HaveContent == true)
         return ERROR_WITH_CONTENT_PAGE;
       return ERROR_UNRECOVERABLE;
@@ -390,27 +396,6 @@ ServerMethod::DealWithHeaders(FetchResult &Res)
    // This is some sort of 2xx 'data follows' reply
    Res.LastModified = Server->Date;
    Res.Size = Server->TotalFileSize;
-   
-   // Open the file
-   delete File;
-   File = new FileFd(Queue->DestFile,FileFd::WriteAny);
-   if (_error->PendingError() == true)
-      return ERROR_NOT_FROM_SERVER;
-
-   FailFile = Queue->DestFile;
-   FailFile.c_str();   // Make sure we don't do a malloc in the signal handler
-   FailFd = File->Fd();
-   FailTime = Server->Date;
-
-   if (Server->InitHashes(Queue->ExpectedHashes) == false || Server->AddPartialFileToHashes(*File) == false)
-   {
-      _error->Errno("read",_("Problem hashing file"));
-      return ERROR_NOT_FROM_SERVER;
-   }
-   if (Server->StartPos > 0)
-      Res.ResumePoint = Server->StartPos;
-
-   SetNonBlock(File->Fd(),true);
    return FILE_IS_OPEN;
 }
                                                                        /*}}}*/
@@ -729,12 +714,7 @@ int ServerMethod::Loop()
         case ERROR_WITH_CONTENT_PAGE:
         {
            Fail();
-           
-           // Send to content to dev/null
-           File = new FileFd("/dev/null",FileFd::WriteExists);
-           Server->RunData(File);
-           delete File;
-           File = 0;
+           Server->RunDataToDevNull();
            break;
         }
 
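Both call sites that used to stream an unwanted body into /dev/null by
hand now share the new ServerState::RunDataToDevNull() helper. Its body
is not part of this diff; judging from the code it replaces, it
presumably amounts to something like:

    // Assumed implementation, inferred from the removed call sites above;
    // the real body lives elsewhere in this commit and may differ in detail.
    bool ServerState::RunDataToDevNull()
    {
       FileFd DevNull("/dev/null", FileFd::WriteExists);
       return RunData(&DevNull);
    }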
index b23b0e50ae737969a250afce0ffaa74535eff09d..f2868c96a533992da004efe996ffb8d05ac3b9e3 100644 (file)
@@ -141,7 +141,7 @@ class ServerMethod : public aptMethod
       TRY_AGAIN_OR_REDIRECT
    };
    /** \brief Handle the retrieved header data */
-   DealWithHeadersResult DealWithHeaders(FetchResult &Res);
+   virtual DealWithHeadersResult DealWithHeaders(FetchResult &Res);
 
    // In the event of a fatal signal this file will be closed and timestamped.
   static std::string FailFile;
index b437ac1e6a198cac2b3f448e5eaae17740363f4d..69e2ba57f3ba497e036f81b418d432b81c9da7a5 100755 (executable)
@@ -11,7 +11,9 @@ insertpackage 'stable' 'apt' 'all' '1'
 setupaptarchive --no-update
 
 echo 'alright' > aptarchive/working
-changetohttpswebserver  -o "aptwebserver::redirect::replace::/redirectme/=http://localhost:${APTHTTPPORT}/"
+changetohttpswebserver
+webserverconfig 'aptwebserver::redirect::replace::/redirectme/' "http://localhost:${APTHTTPPORT}/"
+webserverconfig 'aptwebserver::redirect::replace::/redirectme2/' "https://localhost:${APTHTTPSPORT}/"
 
 msgtest 'download of a file works via' 'http'
 testsuccess --nomsg downloadfile "http://localhost:${APTHTTPPORT}/working" httpfile
@@ -25,4 +27,4 @@ msgtest 'download of a file does not work if' 'https redirected to http'
 testfailure --nomsg downloadfile "https://localhost:${APTHTTPSPORT}/redirectme/working" redirectfile
 
 msgtest 'libcurl has forbidden access in last request to' 'http resource'
-testsuccess --nomsg grep -q -E -- 'Protocol "?http"? not supported or disabled in libcurl' rootdir/tmp/testfailure.output
+testsuccess --nomsg grep -q -E -- "Redirection from https to 'http://.*' is forbidden" rootdir/tmp/testfailure.output
index e2d2743b3cfefae7507a67cec072c872a205f4eb..1c5d120d87aa8dc23de671ed95bf196d3360b2ce 100755 (executable)
@@ -146,3 +146,6 @@ serverconfigs "http://localhost:${APTHTTPPORT}"
 changetohttpswebserver
 
 serverconfigs "https://localhost:${APTHTTPSPORT}"
+
+webserverconfig 'aptwebserver::redirect::replace::/redirectme/' "https://localhost:${APTHTTPSPORT}/"
+serverconfigs "http://localhost:${APTHTTPPORT}/redirectme"