##################################################################### */
/*}}}*/
// Include Files /*{{{*/
+#include <config.h>
+
#include <apt-pkg/fileutl.h>
#include <apt-pkg/acquire-method.h>
+#include <apt-pkg/configuration.h>
#include <apt-pkg/error.h>
#include <apt-pkg/hashes.h>
#include <apt-pkg/netrc.h>
#include <string.h>
#include <iostream>
#include <map>
-#include <apti18n.h>
-
// Internet stuff
#include <netdb.h>
#include "connect.h"
#include "rfc2553emu.h"
#include "http.h"
+
+#include <apti18n.h>
/*}}}*/
using namespace std;
bool Debug = false;
URI Proxy;
-unsigned long CircleBuf::BwReadLimit=0;
-unsigned long CircleBuf::BwTickReadData=0;
+unsigned long long CircleBuf::BwReadLimit=0;
+unsigned long long CircleBuf::BwTickReadData=0;
struct timeval CircleBuf::BwReadTick={0,0};
const unsigned int CircleBuf::BW_HZ=10;
-
+
// CircleBuf::CircleBuf - Circular input buffer /*{{{*/
// ---------------------------------------------------------------------
/* */
-CircleBuf::CircleBuf(unsigned long Size) : Size(Size), Hash(0)
+CircleBuf::CircleBuf(unsigned long long Size) : Size(Size), Hash(0)
{
Buf = new unsigned char[Size];
Reset();
InP = 0;
OutP = 0;
StrPos = 0;
- MaxGet = (unsigned int)-1;
+ MaxGet = (unsigned long long)-1;
OutQueue = string();
if (Hash != 0)
{
is non-blocking.. */
bool CircleBuf::Read(int Fd)
{
- unsigned long BwReadMax;
+ unsigned long long BwReadMax;
while (1)
{
struct timeval now;
gettimeofday(&now,0);
- unsigned long d = (now.tv_sec-CircleBuf::BwReadTick.tv_sec)*1000000 +
+ unsigned long long d = (now.tv_sec-CircleBuf::BwReadTick.tv_sec)*1000000 +
now.tv_usec-CircleBuf::BwReadTick.tv_usec;
if(d > 1000000/BW_HZ) {
CircleBuf::BwReadTick = now;
}
// Write the buffer segment
- int Res;
+ ssize_t Res;
if(CircleBuf::BwReadLimit) {
Res = read(Fd,Buf + (InP%Size),
BwReadMax > LeftRead() ? LeftRead() : BwReadMax);
return;
// Write the buffer segment
- unsigned long Sz = LeftRead();
+ unsigned long long Sz = LeftRead();
if (OutQueue.length() - StrPos < Sz)
Sz = OutQueue.length() - StrPos;
memcpy(Buf + (InP%Size),OutQueue.c_str() + StrPos,Sz);
return true;
// Write the buffer segment
- int Res;
+ ssize_t Res;
Res = write(Fd,Buf + (OutP%Size),LeftWrite());
if (Res == 0)
bool CircleBuf::WriteTillEl(string &Data,bool Single)
{
// We cheat and assume it is unneeded to have more than one buffer load
- for (unsigned long I = OutP; I < InP; I++)
+ for (unsigned long long I = OutP; I < InP; I++)
{
if (Buf[I%Size] != '\n')
continue;
Data = "";
while (OutP < I)
{
- unsigned long Sz = LeftWrite();
+ unsigned long long Sz = LeftWrite();
if (Sz == 0)
return false;
if (I - OutP < Sz)
clog << "Got " << InP << " in " << Diff << " at " << InP/Diff << endl;*/
}
/*}}}*/
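+// CircleBuf::~CircleBuf - Destructor
+// ---------------------------------------------------------------------
+/* Release the buffer and the optional Hash object owned by the CircleBuf. */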
+CircleBuf::~CircleBuf()
+{
+ delete [] Buf;
+ delete Hash;
+}
// ServerState::ServerState - Constructor /*{{{*/
// ---------------------------------------------------------------------
// ---------------------------------------------------------------------
-/* Returns 0 if things are OK, 1 if an IO error occurred and 2 if a header
-   parse error occurred */
+/* Returns RUN_HEADERS_OK if things are OK, RUN_HEADERS_IO_ERROR if an IO
+   error occurred and RUN_HEADERS_PARSE_ERROR if a header parse error occurred */
-int ServerState::RunHeaders()
+ServerState::RunHeadersResult ServerState::RunHeaders()
{
State = Header;
if (Debug == true)
clog << Data;
- for (string::const_iterator I = Data.begin(); I < Data.end(); I++)
+ for (string::const_iterator I = Data.begin(); I < Data.end(); ++I)
{
string::const_iterator J = I;
- for (; J != Data.end() && *J != '\n' && *J != '\r';J++);
+ for (; J != Data.end() && *J != '\n' && *J != '\r'; ++J);
if (HeaderLine(string(I,J)) == false)
- return 2;
+ return RUN_HEADERS_PARSE_ERROR;
I = J;
}
if (Encoding == Closes && HaveContent == true)
Persistent = false;
- return 0;
+ return RUN_HEADERS_OK;
}
while (Owner->Go(false,this) == true);
- return 1;
+ return RUN_HEADERS_IO_ERROR;
}
/*}}}*/
// ServerState::RunData - Transfer the data from the socket /*{{{*/
return false;
// See if we are done
- unsigned long Len = strtol(Data.c_str(),0,16);
+ unsigned long long Len = strtoull(Data.c_str(),0,16);
if (Len == 0)
{
In.Limit(-1);
// Evil servers return no version
if (Line[4] == '/')
{
- if (sscanf(Line.c_str(),"HTTP/%u.%u %u%[^\n]",&Major,&Minor,
- &Result,Code) != 4)
+ int const elements = sscanf(Line.c_str(),"HTTP/%u.%u %u%[^\n]",&Major,&Minor,&Result,Code);
+ if (elements == 3)
+ {
+ Code[0] = '\0';
+ if (Debug == true)
+ clog << "HTTP server doesn't give Reason-Phrase for " << Result << std::endl;
+ }
+ else if (elements != 4)
return _error->Error(_("The HTTP server sent an invalid reply header"));
}
else
if (StartPos != 0)
return true;
- if (sscanf(Val.c_str(),"%lu",&Size) != 1)
+ if (sscanf(Val.c_str(),"%llu",&Size) != 1)
return _error->Error(_("The HTTP server sent an invalid Content-Length header"));
return true;
}
{
HaveContent = true;
- if (sscanf(Val.c_str(),"bytes %lu-%*u/%lu",&StartPos,&Size) != 2)
+ if (sscanf(Val.c_str(),"bytes %llu-%*u/%llu",&StartPos,&Size) != 2)
return _error->Error(_("The HTTP server sent an invalid Content-Range header"));
- if ((unsigned)StartPos > Size)
+ if ((unsigned long long)StartPos > Size)
return _error->Error(_("This HTTP server has broken range support"));
return true;
}
if (stringcasecmp(Tag,"Last-Modified:") == 0)
{
- if (StrToTime(Val,Date) == false)
+ if (RFC1123StrToTime(Val.c_str(), Date) == false)
return _error->Error(_("Unknown date format"));
return true;
}
and a no-store directive for archives. */
sprintf(Buf,"GET %s HTTP/1.1\r\nHost: %s\r\n",
Itm->Uri.c_str(),ProperHost.c_str());
- // only generate a cache control header if we actually want to
- // use a cache
- if (_config->FindB("Acquire::http::No-Cache",false) == false)
+ }
+ // generate a cache control header (if needed)
+ if (_config->FindB("Acquire::http::No-Cache",false) == true)
+ {
+ strcat(Buf,"Cache-Control: no-cache\r\nPragma: no-cache\r\n");
+ }
+ else
+ {
+ if (Itm->IndexFile == true)
{
- if (Itm->IndexFile == true)
- sprintf(Buf+strlen(Buf),"Cache-Control: max-age=%u\r\n",
- _config->FindI("Acquire::http::Max-Age",0));
- else
- {
- if (_config->FindB("Acquire::http::No-Store",false) == true)
- strcat(Buf,"Cache-Control: no-store\r\n");
- }
+ sprintf(Buf+strlen(Buf),"Cache-Control: max-age=%u\r\n",
+ _config->FindI("Acquire::http::Max-Age",0));
+ }
+ else
+ {
+ if (_config->FindB("Acquire::http::No-Store",false) == true)
+ strcat(Buf,"Cache-Control: no-store\r\n");
}
}
- // generate a no-cache header if needed
- if (_config->FindB("Acquire::http::No-Cache",false) == true)
- strcat(Buf,"Cache-Control: no-cache\r\nPragma: no-cache\r\n");
string Req = Buf;
if (stat(Itm->DestFile.c_str(),&SBuf) >= 0 && SBuf.st_size > 0)
{
// In this case we send an if-range query with a range header
- sprintf(Buf,"Range: bytes=%li-\r\nIf-Range: %s\r\n",(long)SBuf.st_size - 1,
+ sprintf(Buf,"Range: bytes=%lli-\r\nIf-Range: %s\r\n",(long long)SBuf.st_size - 1,
TimeRFC1123(SBuf.st_mtime).c_str());
Req += Buf;
}
Base64Encode(Uri.User + ":" + Uri.Password) + "\r\n";
}
Req += "User-Agent: " + _config->Find("Acquire::http::User-Agent",
- "Ubuntu APT-HTTP/1.3 ("VERSION")") + "\r\n\r\n";
+ "Debian APT-HTTP/1.3 ("VERSION")") + "\r\n\r\n";
if (Debug == true)
cerr << Req << endl;
if (Srv->In.WriteSpace() == true && ToFile == true && FileFD != -1)
FD_SET(FileFD,&wfds);
-
+
// Add stdin
- FD_SET(STDIN_FILENO,&rfds);
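+   // watch stdin only if Acquire::http::DependOnSTDIN is enabled (the default)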
+ if (_config->FindB("Acquire::http::DependOnSTDIN", true) == true)
+ FD_SET(STDIN_FILENO,&rfds);
// Figure out the max fd
int MaxFd = FileFD;
// HttpMethod::DealWithHeaders - Handle the retrieved header data /*{{{*/
// ---------------------------------------------------------------------
/* We look at the header data we got back from the server and decide what
- to do. Returns
- 0 - File is open,
- 1 - IMS hit
- 3 - Unrecoverable error
- 4 - Error with error content page
- 5 - Unrecoverable non-server error (close the connection)
- 6 - Try again with a new or changed URI
+ to do. Returns DealWithHeadersResult (see http.h for details).
*/
-int HttpMethod::DealWithHeaders(FetchResult &Res,ServerState *Srv)
+HttpMethod::DealWithHeadersResult
+HttpMethod::DealWithHeaders(FetchResult &Res,ServerState *Srv)
{
// Not Modified
if (Srv->Result == 304)
unlink(Queue->DestFile.c_str());
Res.IMSHit = true;
Res.LastModified = Queue->LastModified;
- return 1;
+ return IMS_HIT;
}
/* Redirect
&& Srv->Result != 304 // Not Modified
&& Srv->Result != 306)) // (Not part of HTTP/1.1, reserved)
{
- if (!Srv->Location.empty())
+   if (Srv->Location.empty() == true)
+      ; // no Location header given, fall through to the error handling below
+ else if (Srv->Location[0] == '/' && Queue->Uri.empty() == false)
{
- NextURI = Srv->Location;
- return 6;
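+      // relative redirect: rebuild an absolute URI from the request's host and port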
+ URI Uri = Queue->Uri;
+ if (Uri.Host.empty() == false)
+ {
+ if (Uri.Port != 0)
+ strprintf(NextURI, "http://%s:%u", Uri.Host.c_str(), Uri.Port);
+ else
+ NextURI = "http://" + Uri.Host;
+ }
+ else
+ NextURI.clear();
+ NextURI.append(DeQuoteString(Srv->Location));
+ return TRY_AGAIN_OR_REDIRECT;
+ }
+ else
+ {
+ NextURI = DeQuoteString(Srv->Location);
+ return TRY_AGAIN_OR_REDIRECT;
}
/* else pass through for error message */
}
SetFailReason(err);
_error->Error("%u %s",Srv->Result,Srv->Code);
if (Srv->HaveContent == true)
- return 4;
- return 3;
+ return ERROR_WITH_CONTENT_PAGE;
+ return ERROR_UNRECOVERABLE;
}
// This is some sort of 2xx 'data follows' reply
delete File;
File = new FileFd(Queue->DestFile,FileFd::WriteAny);
if (_error->PendingError() == true)
- return 5;
+ return ERROR_NOT_FROM_SERVER;
FailFile = Queue->DestFile;
FailFile.c_str(); // Make sure we dont do a malloc in the signal handler
FailFd = File->Fd();
FailTime = Srv->Date;
-
- // Set the expected size
- if (Srv->StartPos >= 0)
- {
- Res.ResumePoint = Srv->StartPos;
- if (ftruncate(File->Fd(),Srv->StartPos) < 0)
- _error->Errno("ftruncate", _("Failed to truncate file"));
- }
-
- // Set the start point
- lseek(File->Fd(),0,SEEK_END);
delete Srv->In.Hash;
Srv->In.Hash = new Hashes;
-
- // Fill the Hash if the file is non-empty (resume)
- if (Srv->StartPos > 0)
+
+ // Set the expected size and read file for the hashes
+ if (Srv->StartPos >= 0)
{
- lseek(File->Fd(),0,SEEK_SET);
- if (Srv->In.Hash->AddFD(File->Fd(),Srv->StartPos) == false)
+ Res.ResumePoint = Srv->StartPos;
+ File->Truncate(Srv->StartPos);
+
+ if (Srv->In.Hash->AddFD(*File,Srv->StartPos) == false)
{
_error->Errno("read",_("Problem hashing file"));
- return 5;
+ return ERROR_NOT_FROM_SERVER;
}
- lseek(File->Fd(),0,SEEK_END);
}
SetNonBlock(File->Fd(),true);
- return 0;
+ return FILE_IS_OPEN;
}
/*}}}*/
// HttpMethod::SigTerm - Handle a fatal signal /*{{{*/
depth. */
bool HttpMethod::Fetch(FetchItem *)
{
- if (Server == 0)
+ if (Server == 0)
return true;
// Queue the requests
PipelineDepth = _config->FindI("Acquire::http::Pipeline-Depth",
PipelineDepth);
Debug = _config->FindB("Debug::Acquire::http",false);
-
+ AutoDetectProxyCmd = _config->Find("Acquire::http::ProxyAutoDetect");
+
+ // Get the proxy to use
+ AutoDetectProxy();
+
return true;
}
/*}}}*/
do a WaitFd above.. Otherwise the FD is closed. */
int Result = Run(true);
if (Result != -1 && (Result != 0 || Queue == 0))
- return 100;
+ {
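+	 // keep the historic exit code 100 on failure or when APT drives us via
+	 // stdin; a standalone run (DependOnSTDIN=false) may exit cleanly instead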
+ if(FailReason.empty() == false ||
+ _config->FindB("Acquire::http::DependOnSTDIN", true) == true)
+ return 100;
+ else
+ return 0;
+ }
if (Queue == 0)
continue;
// Fetch the next URL header data from the server.
switch (Server->RunHeaders())
{
- case 0:
+ case ServerState::RUN_HEADERS_OK:
break;
// The header data is bad
- case 2:
+ case ServerState::RUN_HEADERS_PARSE_ERROR:
{
_error->Error(_("Bad header data"));
Fail(true);
// The server closed a connection during the header get..
default:
- case 1:
+ case ServerState::RUN_HEADERS_IO_ERROR:
{
FailCounter++;
_error->Discard();
switch (DealWithHeaders(Res,Server))
{
// Ok, the file is Open
- case 0:
+ case FILE_IS_OPEN:
{
URIStart(Res);
}
// IMS hit
- case 1:
+ case IMS_HIT:
{
URIDone(Res);
break;
}
// Hard server error, not found or something
- case 3:
+ case ERROR_UNRECOVERABLE:
{
Fail();
break;
}
// Hard internal error, kill the connection and fail
- case 5:
+ case ERROR_NOT_FROM_SERVER:
{
delete File;
File = 0;
}
// We need to flush the data, the header is like a 404 w/ error text
- case 4:
+ case ERROR_WITH_CONTENT_PAGE:
{
Fail();
}
// Try again with a new URL
- case 6:
+ case TRY_AGAIN_OR_REDIRECT:
{
// Clear rest of response if there is content
if (Server->HaveContent)
StopRedirects = true;
else
{
- for (StringVectorIterator I = R.begin(); I != R.end(); I++)
+ for (StringVectorIterator I = R.begin(); I != R.end(); ++I)
if (Queue->Uri == *I)
{
R[0] = "STOP";
return 0;
}
/*}}}*/
+// HttpMethod::AutoDetectProxy - auto detect proxy /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool HttpMethod::AutoDetectProxy()
+{
+ if (AutoDetectProxyCmd.empty())
+ return true;
+
+ if (Debug)
+ clog << "Using auto proxy detect command: " << AutoDetectProxyCmd << endl;
+
+ int Pipes[2] = {-1,-1};
+ if (pipe(Pipes) != 0)
+ return _error->Errno("pipe", "Failed to create Pipe");
+
+ pid_t Process = ExecFork();
+ if (Process == 0)
+ {
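+      // child: connect the pipe's write end to stdout and exec the detect command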
+ close(Pipes[0]);
+ dup2(Pipes[1],STDOUT_FILENO);
+ SetCloseExec(STDOUT_FILENO,false);
+
+ const char *Args[2];
+ Args[0] = AutoDetectProxyCmd.c_str();
+ Args[1] = 0;
+ execv(Args[0],(char **)Args);
+      cerr << "Failed to exec proxy autodetect command " << Args[0] << endl;
+ _exit(100);
+ }
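+   // parent: read whatever the detect command printed (expected: a proxy URL)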
+ char buf[512];
+ int InFd = Pipes[0];
+ close(Pipes[1]);
+   ssize_t res = read(InFd, buf, sizeof(buf)-1);
+ ExecWait(Process, "ProxyAutoDetect", true);
+
+ if (res < 0)
+ return _error->Errno("read", "Failed to read");
+ if (res == 0)
+ return _error->Warning("ProxyAutoDetect returned no data");
+
+ // add trailing \0
+ buf[res] = 0;
+
+ if (Debug)
+ clog << "auto detect command returned: '" << buf << "'" << endl;
+
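+   // only accept an http:// proxy; strip surrounding whitespace before storing it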
+ if (strstr(buf, "http://") == buf)
+ _config->Set("Acquire::http::proxy", _strstrip(buf));
+
+ return true;
+}
+ /*}}}*/