X-Git-Url: https://git.saurik.com/apt.git/blobdiff_plain/36b8ebbb4a5db4909c3cf739d8ea472f70703662..6f4501f96f9ea256ff580129ba3835e0d56c398a:/methods/http.cc

diff --git a/methods/http.cc b/methods/http.cc
index 13f9cbe06..2721b1224 100644
--- a/methods/http.cc
+++ b/methods/http.cc
@@ -25,8 +25,11 @@
    ##################################################################### */
    /*}}}*/
 // Include Files							/*{{{*/
+#include 
+
 #include 
 #include 
+#include 
 #include 
 #include 
 #include 
@@ -41,8 +44,6 @@
 #include 
 #include 
 #include 
-#include 
-
 
 // Internet stuff
 #include 
@@ -51,6 +52,8 @@
 #include "connect.h"
 #include "rfc2553emu.h"
 #include "http.h"
+
+#include 
 									/*}}}*/
 using namespace std;
 
@@ -63,15 +66,15 @@
 bool AllowRedirect = false;
 bool Debug = false;
 URI Proxy;
 
-unsigned long CircleBuf::BwReadLimit=0;
-unsigned long CircleBuf::BwTickReadData=0;
+unsigned long long CircleBuf::BwReadLimit=0;
+unsigned long long CircleBuf::BwTickReadData=0;
 struct timeval CircleBuf::BwReadTick={0,0};
 const unsigned int CircleBuf::BW_HZ=10;
 
 // CircleBuf::CircleBuf - Circular input buffer				/*{{{*/
 // ---------------------------------------------------------------------
 /* */
-CircleBuf::CircleBuf(unsigned long Size) : Size(Size), Hash(0)
+CircleBuf::CircleBuf(unsigned long long Size) : Size(Size), Hash(0)
 {
    Buf = new unsigned char[Size];
    Reset();
@@ -87,7 +90,7 @@ void CircleBuf::Reset()
    InP = 0;
    OutP = 0;
    StrPos = 0;
-   MaxGet = (unsigned int)-1;
+   MaxGet = (unsigned long long)-1;
    OutQueue = string();
    if (Hash != 0)
    {
@@ -102,7 +105,7 @@
    is non-blocking.. */
 bool CircleBuf::Read(int Fd)
 {
-   unsigned long BwReadMax;
+   unsigned long long BwReadMax;
 
    while (1)
    {
@@ -117,7 +120,7 @@ bool CircleBuf::Read(int Fd)
       struct timeval now;
       gettimeofday(&now,0);
 
-      unsigned long d = (now.tv_sec-CircleBuf::BwReadTick.tv_sec)*1000000 +
+      unsigned long long d = (now.tv_sec-CircleBuf::BwReadTick.tv_sec)*1000000 +
         now.tv_usec-CircleBuf::BwReadTick.tv_usec;
       if(d > 1000000/BW_HZ) {
         CircleBuf::BwReadTick = now;
@@ -131,7 +134,7 @@ bool CircleBuf::Read(int Fd)
      }
 
      // Write the buffer segment
-     int Res;
+     ssize_t Res;
      if(CircleBuf::BwReadLimit) {
        Res = read(Fd,Buf + (InP%Size),
                   BwReadMax > LeftRead() ? LeftRead() : BwReadMax);
@@ -180,7 +183,7 @@ void CircleBuf::FillOut()
        return;
 
      // Write the buffer segment
-     unsigned long Sz = LeftRead();
+     unsigned long long Sz = LeftRead();
      if (OutQueue.length() - StrPos < Sz)
        Sz = OutQueue.length() - StrPos;
      memcpy(Buf + (InP%Size),OutQueue.c_str() + StrPos,Sz);
@@ -214,7 +217,7 @@ bool CircleBuf::Write(int Fd)
        return true;
 
      // Write the buffer segment
-     int Res;
+     ssize_t Res;
      Res = write(Fd,Buf + (OutP%Size),LeftWrite());
 
      if (Res == 0)
@@ -240,7 +243,7 @@ bool CircleBuf::Write(int Fd)
 bool CircleBuf::WriteTillEl(string &Data,bool Single)
 {
    // We cheat and assume it is unneeded to have more than one buffer load
-   for (unsigned long I = OutP; I < InP; I++)
+   for (unsigned long long I = OutP; I < InP; I++)
    {
       if (Buf[I%Size] != '\n')
         continue;
@@ -258,7 +261,7 @@ bool CircleBuf::WriteTillEl(string &Data,bool Single)
      Data = "";
      while (OutP < I)
      {
-       unsigned long Sz = LeftWrite();
+       unsigned long long Sz = LeftWrite();
        if (Sz == 0)
          return false;
        if (I - OutP < Sz)
@@ -286,6 +289,11 @@ void CircleBuf::Stats()
    clog << "Got " << InP << " in " << Diff << " at " << InP/Diff << endl;*/
 }
 									/*}}}*/
+CircleBuf::~CircleBuf()
+{
+   delete [] Buf;
+   delete Hash;
+}
 
 // ServerState::ServerState - Constructor				/*{{{*/
 // ---------------------------------------------------------------------
@@ -402,10 +410,10 @@ ServerState::RunHeadersResult ServerState::RunHeaders()
      if (Debug == true)
        clog << Data;
 
-      for (string::const_iterator I = Data.begin(); I < Data.end(); I++)
+      for (string::const_iterator I = Data.begin(); I < Data.end(); ++I)
      {
        string::const_iterator J = I;
-       for (; J != Data.end() && *J != '\n' && *J != '\r';J++);
+       for (; J != Data.end() && *J != '\n' && *J != '\r'; ++J);
        if (HeaderLine(string(I,J)) == false)
          return RUN_HEADERS_PARSE_ERROR;
        I = J;
@@ -453,7 +461,7 @@ bool ServerState::RunData()
           return false;
 
        // See if we are done
-       unsigned long Len = strtol(Data.c_str(),0,16);
+       unsigned long long Len = strtoull(Data.c_str(),0,16);
        if (Len == 0)
        {
          In.Limit(-1);
@@ -596,7 +604,7 @@ bool ServerState::HeaderLine(string Line)
      if (StartPos != 0)
        return true;
 
-      if (sscanf(Val.c_str(),"%lu",&Size) != 1)
+      if (sscanf(Val.c_str(),"%llu",&Size) != 1)
        return _error->Error(_("The HTTP server sent an invalid Content-Length header"));
      return true;
   }
@@ -611,9 +619,9 @@ bool ServerState::HeaderLine(string Line)
   {
      HaveContent = true;
 
-      if (sscanf(Val.c_str(),"bytes %lu-%*u/%lu",&StartPos,&Size) != 2)
+      if (sscanf(Val.c_str(),"bytes %llu-%*u/%llu",&StartPos,&Size) != 2)
        return _error->Error(_("The HTTP server sent an invalid Content-Range header"));
-      if ((unsigned)StartPos > Size)
+      if ((unsigned long long)StartPos > Size)
        return _error->Error(_("This HTTP server has broken range support"));
      return true;
   }
@@ -708,7 +716,19 @@ void HttpMethod::SendReq(FetchItem *Itm,CircleBuf &Out)
      }
   }
-
+
+   // If we ask for uncompressed files servers might respond with content-
+   // negotation which lets us end up with compressed files we do not support,
+   // see 657029, 657560 and co, so if we have no extension on the request
+   // ask for text only. As a sidenote: If there is nothing to negotate servers
+   // seem to be nice and ignore it.
+   if (_config->FindB("Acquire::http::SendAccept", true) == true)
+   {
+      size_t const filepos = Itm->Uri.find_last_of('/');
+      string const file = Itm->Uri.substr(filepos + 1);
+      if (flExtension(file) == file)
+         strcat(Buf,"Accept: text/*\r\n");
+   }
+
    string Req = Buf;
 
    // Check for a partial file
@@ -716,7 +736,7 @@ void HttpMethod::SendReq(FetchItem *Itm,CircleBuf &Out)
   if (stat(Itm->DestFile.c_str(),&SBuf) >= 0 && SBuf.st_size > 0)
   {
      // In this case we send an if-range query with a range header
-      sprintf(Buf,"Range: bytes=%li-\r\nIf-Range: %s\r\n",(long)SBuf.st_size - 1,
+      sprintf(Buf,"Range: bytes=%lli-\r\nIf-Range: %s\r\n",(long long)SBuf.st_size - 1,
              TimeRFC1123(SBuf.st_mtime).c_str());
      Req += Buf;
   }
@@ -999,31 +1019,21 @@ HttpMethod::DealWithHeaders(FetchResult &Res,ServerState *Srv)
      FailFile.c_str();   // Make sure we dont do a malloc in the signal handler
      FailFd = File->Fd();
      FailTime = Srv->Date;
-
-      // Set the expected size
-      if (Srv->StartPos >= 0)
-      {
-	 Res.ResumePoint = Srv->StartPos;
-	 if (ftruncate(File->Fd(),Srv->StartPos) < 0)
-	    _error->Errno("ftruncate", _("Failed to truncate file"));
-      }
-
-      // Set the start point
-      lseek(File->Fd(),0,SEEK_END);
 
      delete Srv->In.Hash;
      Srv->In.Hash = new Hashes;
-
-      // Fill the Hash if the file is non-empty (resume)
-      if (Srv->StartPos > 0)
+
+      // Set the expected size and read file for the hashes
+      if (Srv->StartPos >= 0)
      {
-	 lseek(File->Fd(),0,SEEK_SET);
-	 if (Srv->In.Hash->AddFD(File->Fd(),Srv->StartPos) == false)
+	 Res.ResumePoint = Srv->StartPos;
+	 File->Truncate(Srv->StartPos);
+
+	 if (Srv->In.Hash->AddFD(*File,Srv->StartPos) == false)
	 {
	    _error->Errno("read",_("Problem hashing file"));
	    return ERROR_NOT_FROM_SERVER;
	 }
-	 lseek(File->Fd(),0,SEEK_END);
      }
 
      SetNonBlock(File->Fd(),true);
@@ -1325,7 +1335,7 @@ int HttpMethod::Loop()
	       StopRedirects = true;
	    else
	    {
-	       for (StringVectorIterator I = R.begin(); I != R.end(); I++)
+	       for (StringVectorIterator I = R.begin(); I != R.end(); ++I)
		  if (Queue->Uri == *I)
		  {
		     R[0] = "STOP";