X-Git-Url: https://git.saurik.com/apt.git/blobdiff_plain/b00a6e80892dcbf844ee70a449288e87290a821d..1351329e376a8dbe7780693693d0f3db0ec749fa:/methods/https.cc

diff --git a/methods/https.cc b/methods/https.cc
index fc649d6c2..c1a49ba60 100644
--- a/methods/https.cc
+++ b/methods/https.cc
@@ -10,11 +10,14 @@
    ##################################################################### */
 									/*}}}*/
 // Include Files							/*{{{*/
+#include <config.h>
+
 #include <apt-pkg/fileutl.h>
 #include <apt-pkg/acquire-method.h>
 #include <apt-pkg/error.h>
 #include <apt-pkg/hashes.h>
 #include <apt-pkg/netrc.h>
+#include <apt-pkg/configuration.h>
 
 #include <sys/stat.h>
 #include <sys/time.h>
@@ -25,12 +28,11 @@
 #include
 #include
 #include
-#include
 #include
 
 #include "config.h"
 #include "https.h"
-
+#include <apt-pkg/strutl.h>
 /*}}}*/
 
 using namespace std;
 
@@ -51,7 +53,7 @@ HttpsMethod::progress_callback(void *clientp, double dltotal, double dlnow,
 {
    HttpsMethod *me = (HttpsMethod *)clientp;
    if(dltotal > 0 && me->Res.Size == 0) {
-      me->Res.Size = (unsigned long)dltotal;
+      me->Res.Size = (unsigned long long)dltotal;
       me->URIStart(me->Res);
    }
    return 0;
@@ -98,7 +100,6 @@ void HttpsMethod::SetupProxy()			/*{{{*/
    depth. */
 bool HttpsMethod::Fetch(FetchItem *Itm)
 {
-   stringstream ss;
    struct stat SBuf;
    struct curl_slist *headers=NULL;
    char curl_errorstr[CURL_ERROR_SIZE];
@@ -197,6 +198,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
       if (_config->FindB("Acquire::https::No-Store",
 		_config->FindB("Acquire::http::No-Store",false)) == true)
 	 headers = curl_slist_append(headers,"Cache-Control: no-store");
+      stringstream ss;
       ioprintf(ss, "Cache-Control: max-age=%u", _config->FindI("Acquire::https::Max-Age",
 		_config->FindI("Acquire::http::Max-Age",0)));
       headers = curl_slist_append(headers, ss.str().c_str());
@@ -217,7 +219,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    curl_easy_setopt(curl, CURLOPT_USERAGENT,
 	_config->Find("Acquire::https::User-Agent",
 		_config->Find("Acquire::http::User-Agent",
-			"Debian APT-CURL/1.0 ("VERSION")").c_str()).c_str());
+			"Debian APT-CURL/1.0 (" PACKAGE_VERSION ")").c_str()).c_str());
 
    // set timeout
    int const timeout = _config->FindI("Acquire::https::Timeout",
@@ -240,15 +242,28 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    // error handling
    curl_easy_setopt(curl, CURLOPT_ERRORBUFFER, curl_errorstr);
 
+   // If we ask for uncompressed files servers might respond with content-
+   // negotiation which lets us end up with compressed files we do not support,
+   // see 657029, 657560 and co, so if we have no extension on the request
+   // ask for text only. As a sidenote: If there is nothing to negotiate servers
+   // seem to be nice and ignore it.
+   if (_config->FindB("Acquire::https::SendAccept", _config->FindB("Acquire::http::SendAccept", true)) == true)
+   {
+      size_t const filepos = Itm->Uri.find_last_of('/');
+      string const file = Itm->Uri.substr(filepos + 1);
+      if (flExtension(file) == file)
+	 headers = curl_slist_append(headers, "Accept: text/*");
+   }
+
    // if we have the file send an if-range query with a range header
    if (stat(Itm->DestFile.c_str(),&SBuf) >= 0 && SBuf.st_size > 0)
    {
       char Buf[1000];
-      sprintf(Buf,"Range: bytes=%li-\r\nIf-Range: %s\r\n",
-	      (long)SBuf.st_size - 1,
-	      TimeRFC1123(SBuf.st_mtime).c_str());
+      sprintf(Buf, "Range: bytes=%li-", (long) SBuf.st_size - 1);
+      headers = curl_slist_append(headers, Buf);
+      sprintf(Buf, "If-Range: %s", TimeRFC1123(SBuf.st_mtime).c_str());
       headers = curl_slist_append(headers, Buf);
-   }
+   }
    else if(Itm->LastModified > 0)
    {
       curl_easy_setopt(curl, CURLOPT_TIMECONDITION, CURL_TIMECOND_IFMODSINCE);
@@ -270,14 +285,17 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    long curl_servdate;
    curl_easy_getinfo(curl, CURLINFO_FILETIME, &curl_servdate);
 
+   File->Close();
+
    // cleanup
    if(success != 0)
    {
       _error->Error("%s", curl_errorstr);
+      // unlink, no need to keep 401/404 page content in partial/
+      unlink(File->Name().c_str());
       Fail();
       return true;
    }
-   File->Close();
 
    // Timestamp
    struct utimbuf UBuf;
@@ -309,7 +327,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    // take hashes
    Hashes Hash;
    FileFd Fd(Res.Filename, FileFd::ReadOnly);
-   Hash.AddFD(Fd.Fd(), Fd.Size());
+   Hash.AddFD(Fd);
    Res.TakeHashes(Hash);
 
    // keep apt updated
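
A note on the SendAccept hunk: the guard relies on apt's flExtension() helper (from apt-pkg/fileutl.h), which returns its argument unchanged when the file name has no extension, so flExtension(file) == file holds exactly for extension-less requests such as Packages or Release. Those are the requests where server-side content negotiation could hand back a compressed variant apt did not ask for (the bugs referenced in the comment, 657029 and 657560). A rough stand-alone sketch of the same test using only std::string; the helper name WantsTextOnly is illustrative, not from the patch:

#include <string>

// True when the last path segment of the URI has no file extension, i.e.
// the cases where the patch pins "Accept: text/*" to suppress content
// negotiation.
static bool WantsTextOnly(std::string const &uri)
{
   // find_last_of() returns npos for a URI without '/', and npos + 1
   // wraps to 0, so the whole string is used then -- the same trick the
   // patch plays with Itm->Uri.
   std::string const file = uri.substr(uri.find_last_of('/') + 1);
   return file.find('.') == std::string::npos;
}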
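
On the Range/If-Range rework: libcurl's CURLOPT_HTTPHEADER expects one header per curl_slist entry, so the old single sprintf() that packed both headers into one buffer with embedded "\r\n" separators is split into two curl_slist_append() calls. A minimal sketch of the pattern, assuming libcurl; the function name and parameters are illustrative, and the caller is expected to check (as the patch does) that the partial file exists and is non-empty:

#include <curl/curl.h>
#include <cstdio>

static struct curl_slist *AddResumeHeaders(struct curl_slist *headers,
                                           long on_disk_size,
                                           char const *mtime_rfc1123)
{
   char buf[300];
   // resume from the last byte we already have ...
   std::snprintf(buf, sizeof(buf), "Range: bytes=%li-", on_disk_size - 1);
   headers = curl_slist_append(headers, buf);
   // ... but only if the server's copy still matches our timestamp
   std::snprintf(buf, sizeof(buf), "If-Range: %s", mtime_rfc1123);
   headers = curl_slist_append(headers, buf);
   return headers; // hand to curl_easy_setopt(curl, CURLOPT_HTTPHEADER, headers)
}

Starting the range at size - 1 rather than size mirrors the patch: presumably it keeps the range satisfiable even when the file on disk is already complete, while If-Range makes the server fall back to a full 200 response if the timestamp no longer matches.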
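
The final hunk swaps Hash.AddFD(Fd.Fd(), Fd.Size()) for the FileFd overload Hash.AddFD(Fd), letting the hash code drive the read itself. In that spirit, a minimal sketch of hashing a finished download with apt-pkg, assuming the apt-pkg headers are available; HashDownloadedFile is an illustrative name:

#include <apt-pkg/fileutl.h>
#include <apt-pkg/hashes.h>
#include <string>

// Feed a file to every hash apt tracks, as the patched code does just
// before Res.TakeHashes(Hash).
static bool HashDownloadedFile(std::string const &path, Hashes &Hash)
{
   FileFd Fd(path, FileFd::ReadOnly);
   if (Fd.IsOpen() == false)
      return false;
   return Hash.AddFD(Fd);
}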