X-Git-Url: https://git.saurik.com/apt.git/blobdiff_plain/1825554677c23fbf3e6ae36264ac093fd15554db..72ed5f14b558984bd9c5731f82345b10cb2df2ca:/apt-inst/contrib/extracttar.cc

diff --git a/apt-inst/contrib/extracttar.cc b/apt-inst/contrib/extracttar.cc
index f8bd109ce..60360053e 100644
--- a/apt-inst/contrib/extracttar.cc
+++ b/apt-inst/contrib/extracttar.cc
@@ -1,12 +1,12 @@
 // -*- mode: cpp; mode: fold -*-
 // Description						/*{{{*/
-// $Id: extracttar.cc,v 1.3 2001/05/27 23:47:09 jgg Exp $
+// $Id: extracttar.cc,v 1.8.2.1 2004/01/16 18:58:50 mdz Exp $
 /* ######################################################################
 
    Extract a Tar - Tar Extractor
 
    Some performance measurements showed that zlib performed quite poorly
-   in comparision to a forked gzip process. This tar extractor makes use
+   in comparison to a forked gzip process. This tar extractor makes use
    of the fact that dup'd file descriptors have the same seek pointer
    and that gzip will not read past the end of a compressed stream,
    even if there is more data. We use the dup property to track extraction
@@ -16,23 +16,28 @@
    ##################################################################### */
 							/*}}}*/
 // Include Files					/*{{{*/
-#ifdef __GNUG__
-#pragma implementation "apt-pkg/extracttar.h"
-#endif
-#include
+#include
+#include
+#include
 #include
 #include
 #include
-#include
+#include
-#include
+#include
+#include
+#include
 #include
 #include
 #include
-#include
+#include
+
+#include
 							/*}}}*/
+using namespace std;
+
 // The on disk header for a tar file.
 struct ExtractTar::TarHeader
 {
@@ -55,12 +60,10 @@ struct ExtractTar::TarHeader
 // ExtractTar::ExtractTar - Constructor			/*{{{*/
 // ---------------------------------------------------------------------
 /* */
-ExtractTar::ExtractTar(FileFd &Fd,unsigned long Max) : File(Fd),
-                       MaxInSize(Max)
-
+ExtractTar::ExtractTar(FileFd &Fd,unsigned long long Max,string DecompressionProgram)
+   : File(Fd), MaxInSize(Max), DecompressProg(DecompressionProgram)
 {
    GZPid = -1;
-   InFd = -1;
    Eof = false;
 }
 							/*}}}*/
@@ -69,35 +72,18 @@ ExtractTar::ExtractTar(FileFd &Fd,unsigned long Max) : File(Fd),
 /* */
 ExtractTar::~ExtractTar()
 {
-   Done(false);
+   // Error close
+   Done();
 }
 							/*}}}*/
 // ExtractTar::Done - Reap the gzip sub process		/*{{{*/
-// ---------------------------------------------------------------------
-/* If the force flag is given then error messages are suppressed - this
-   means we hit the end of the tar file but there was still gzip data. */
-bool ExtractTar::Done(bool Force)
+bool ExtractTar::Done(bool)
 {
-   InFd.Close();
-   if (GZPid <= 0)
-      return true;
-
-   /* If there is a pending error then we are cleaning up gzip and are
-      not interested in it's failures */
-   if (_error->PendingError() == true)
-      Force = true;
-
-   // Make sure we clean it up!
-   kill(GZPid,SIGINT);
-   if (ExecWait(GZPid,_config->Find("dir::bin::gzip","/bin/gzip").c_str(),
-       Force) == false)
-   {
-      GZPid = -1;
-      return Force;
-   }
-
-   GZPid = -1;
-   return true;
+   return Done();
+}
+bool ExtractTar::Done()
+{
+   return InFd.Close();
 }
 							/*}}}*/
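Note on the new interface above: the constructor now carries the decompressor name and Done() only has to close InFd, because the FileFd opened via OpenDescriptor in the new StartGzip() (below) takes care of the decompression helper. A minimal sketch of the calling side, assuming only the signatures visible in this patch; ExtractMember and the way the compressor name is chosen are illustrative, not code from apt:

    #include <apt-pkg/extracttar.h>
    #include <apt-pkg/dirstream.h>
    #include <apt-pkg/fileutl.h>

    // Illustrative helper: extract one archive member, using whatever
    // pkgDirStream implementation the caller already has.
    bool ExtractMember(FileFd &Deb, unsigned long long MemberSize,
                       pkgDirStream &Stream)
    {
       // "gzip" stands in for the name derived from the member extension
       // (data.tar.gz -> "gzip", data.tar.xz -> "xz", ...).
       ExtractTar Tar(Deb, MemberSize, "gzip");
       return Tar.Go(Stream);
    }
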
 // ExtractTar::StartGzip - Startup gzip			/*{{{*/
@@ -107,41 +93,23 @@ bool ExtractTar::Done(bool Force)
    gzip will efficiently ignore the extra bits. */
 bool ExtractTar::StartGzip()
 {
-   int Pipes[2];
-   if (pipe(Pipes) != 0)
-      return _error->Errno("pipe","Failed to create pipes");
-
-   // Fork off the process
-   GZPid = ExecFork();
-
-   // Spawn the subprocess
-   if (GZPid == 0)
+   if (DecompressProg.empty())
    {
-      // Setup the FDs
-      dup2(Pipes[1],STDOUT_FILENO);
-      dup2(File.Fd(),STDIN_FILENO);
-      int Fd = open("/dev/null",O_RDWR);
-      if (Fd == -1)
-         _exit(101);
-      dup2(Fd,STDERR_FILENO);
-      close(Fd);
-      SetCloseExec(STDOUT_FILENO,false);
-      SetCloseExec(STDIN_FILENO,false);
-      SetCloseExec(STDERR_FILENO,false);
-
-      const char *Args[3];
-      Args[0] = _config->Find("dir::bin::gzip","/bin/gzip").c_str();
-      Args[1] = "-d";
-      Args[2] = 0;
-      execv(Args[0],(char **)Args);
-      cerr << "Failed to exec gzip " << Args[0] << endl;
-      _exit(100);
+      InFd.OpenDescriptor(File.Fd(), FileFd::ReadOnly, FileFd::None, false);
+      return true;
+   }
+
+   std::vector const compressors = APT::Configuration::getCompressors();
+   std::vector::const_iterator compressor = compressors.begin();
+   for (; compressor != compressors.end(); ++compressor) {
+      if (compressor->Name == DecompressProg) {
+         return InFd.OpenDescriptor(File.Fd(), FileFd::ReadOnly, *compressor, false);
+      }
    }
-   // Fix up our FDs
-   InFd.Fd(Pipes[0]);
-   close(Pipes[1]);
-   return true;
+   return _error->Error(_("Cannot find a configured compressor for '%s'"),
+                        DecompressProg.c_str());
+
 }
 							/*}}}*/
 // ExtractTar::Go - Perform extraction			/*{{{*/
@@ -155,8 +123,8 @@ bool ExtractTar::Go(pkgDirStream &Stream)
       return false;
 
    // Loop over all blocks
-   string LastLongLink;
-   string LastLongName;
+   string LastLongLink, ItemLink;
+   string LastLongName, ItemName;
    while (1)
    {
      bool BadRecord = false;
@@ -171,7 +139,7 @@ bool ExtractTar::Go(pkgDirStream &Stream)
      TarHeader *Tar = (TarHeader *)Block;
      unsigned long CheckSum;
      if (StrToNum(Tar->Checksum,CheckSum,sizeof(Tar->Checksum),8) == false)
-        return _error->Error("Corrupted archive");
+        return _error->Error(_("Corrupted archive"));
 
      /* Compute the checksum field. The actual checksum is blanked out with
         spaces so it is not included in the computation */
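On the checksum comment above, for readers unfamiliar with the format: the tar checksum is the byte-wise sum of the 512-byte header with the 8-byte checksum field itself counted as ASCII spaces. A stand-alone illustration, not the loop used in extracttar.cc:

    #include <stddef.h>

    // Sum a 512-byte tar header, treating the checksum field (bytes 148-155)
    // as blanks.  An all-zero block therefore sums to 8 * ' ' == 256, which is
    // what the "block of nulls" end-of-archive test below compares against.
    static unsigned long TarHeaderChecksum(unsigned char const *Block)
    {
       unsigned long Sum = 0;
       for (size_t I = 0; I != 512; ++I)
          Sum += (I >= 148 && I < 156) ? ' ' : Block[I];
       return Sum;
    }
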
@@ -183,42 +151,42 @@ bool ExtractTar::Go(pkgDirStream &Stream)
      /* Check for a block of nulls - in this case we kill gzip, GNU tar
         does this.. */
      if (NewSum == ' '*sizeof(Tar->Checksum))
-        return Done(true);
+        return Done();
      if (NewSum != CheckSum)
-        return _error->Error("Tar Checksum failed, archive corrupted");
+        return _error->Error(_("Tar checksum failed, archive corrupted"));
      // Decode all of the fields
      pkgDirStream::Item Itm;
-     unsigned long UID;
-     unsigned long GID;
      if (StrToNum(Tar->Mode,Itm.Mode,sizeof(Tar->Mode),8) == false ||
-         StrToNum(Tar->UserID,UID,sizeof(Tar->UserID),8) == false ||
-         StrToNum(Tar->GroupID,GID,sizeof(Tar->GroupID),8) == false ||
-         StrToNum(Tar->Size,Itm.Size,sizeof(Tar->Size),8) == false ||
-         StrToNum(Tar->MTime,Itm.MTime,sizeof(Tar->MTime),8) == false ||
+         (Base256ToNum(Tar->UserID,Itm.UID,8) == false &&
+          StrToNum(Tar->UserID,Itm.UID,sizeof(Tar->UserID),8) == false) ||
+         (Base256ToNum(Tar->GroupID,Itm.GID,8) == false &&
+          StrToNum(Tar->GroupID,Itm.GID,sizeof(Tar->GroupID),8) == false) ||
+         (Base256ToNum(Tar->Size,Itm.Size,12) == false &&
+          StrToNum(Tar->Size,Itm.Size,sizeof(Tar->Size),8) == false) ||
+         (Base256ToNum(Tar->MTime,Itm.MTime,12) == false &&
+          StrToNum(Tar->MTime,Itm.MTime,sizeof(Tar->MTime),8) == false) ||
          StrToNum(Tar->Major,Itm.Major,sizeof(Tar->Major),8) == false ||
          StrToNum(Tar->Minor,Itm.Minor,sizeof(Tar->Minor),8) == false)
-        return _error->Error("Corrupted archive");
-
-     // Grab the filename
+        return _error->Error(_("Corrupted archive"));
+
+     // Grab the filename and link target: use last long name if one was
+     // set, otherwise use the header value as-is, but remember that it may
+     // fill the entire 100-byte block and needs to be zero-terminated.
+     // See Debian Bug #689582.
      if (LastLongName.empty() == false)
         Itm.Name = (char *)LastLongName.c_str();
      else
-     {
-        Tar->Name[sizeof(Tar->Name)] = 0;
-        Itm.Name = Tar->Name;
-     }
+        Itm.Name = (char *)ItemName.assign(Tar->Name, sizeof(Tar->Name)).c_str();
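A note on the Base256ToNum/StrToNum pairs added above: classic tar stores numeric fields as octal ASCII, which cannot represent values that overflow the fixed-width field, so GNU tar falls back to a binary "base-256" encoding marked by setting the high bit of the field's first byte. A rough sketch of that decoding, for illustration only (this is not apt's Base256ToNum; negative values and overflow checks are ignored for brevity):

    #include <stdint.h>
    #include <stddef.h>

    // Returns false if the field is a plain octal string; otherwise decodes
    // the big-endian base-256 value.
    static bool DecodeBase256(char const *Field, size_t Len, uint64_t &Out)
    {
       if ((Field[0] & 0x80) == 0)
          return false;                  // marker bit not set: octal field
       Out = Field[0] & 0x7F;            // strip the marker bit
       for (size_t I = 1; I != Len; ++I)
          Out = (Out << 8) | (unsigned char)Field[I];
       return true;
    }
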
      if (Itm.Name[0] == '.' && Itm.Name[1] == '/' && Itm.Name[2] != 0)
         Itm.Name += 2;
-
-     // Grab the link target
-     Tar->Name[sizeof(Tar->LinkName)] = 0;
-     Itm.LinkTarget = Tar->LinkName;
      if (LastLongLink.empty() == false)
         Itm.LinkTarget = (char *)LastLongLink.c_str();
-
+     else
+        Itm.LinkTarget = (char *)ItemLink.assign(Tar->LinkName, sizeof(Tar->LinkName)).c_str();
+
      // Convert the type over
      switch (Tar->LinkFlag)
      {
@@ -253,7 +221,7 @@ bool ExtractTar::Go(pkgDirStream &Stream)
 
      case GNU_LongLink:
      {
-        unsigned long Length = Itm.Size;
+        unsigned long long Length = Itm.Size;
         unsigned char Block[512];
         while (Length > 0)
         {
@@ -272,7 +240,7 @@ bool ExtractTar::Go(pkgDirStream &Stream)
 
      case GNU_LongName:
      {
-        unsigned long Length = Itm.Size;
+        unsigned long long Length = Itm.Size;
         unsigned char Block[512];
         while (Length > 0)
         {
@@ -291,7 +259,7 @@ bool ExtractTar::Go(pkgDirStream &Stream)
 
      default:
         BadRecord = true;
-        _error->Warning("Unkown TAR header type %u, member %s",(unsigned)Tar->LinkFlag,Tar->Name);
+        _error->Warning(_("Unknown TAR header type %u, member %s"),(unsigned)Tar->LinkFlag,Tar->Name);
         break;
      }
 
@@ -301,11 +269,11 @@ bool ExtractTar::Go(pkgDirStream &Stream)
         return false;
 
      // Copy the file over the FD
-     unsigned long Size = Itm.Size;
+     unsigned long long Size = Itm.Size;
      while (Size != 0)
      {
         unsigned char Junk[32*1024];
-        unsigned long Read = MIN(Size,sizeof(Junk));
+        unsigned long Read = min(Size, (unsigned long long)sizeof(Junk));
         if (InFd.Read(Junk,((Read+511)/512)*512) == false)
           return false;
@@ -330,7 +298,7 @@ bool ExtractTar::Go(pkgDirStream &Stream)
      }
 
      // And finish up
-     if (Itm.Size != 0 && BadRecord == false)
+     if (BadRecord == false)
        if (Stream.FinishedFile(Itm,Fd) == false)
          return false;
@@ -338,6 +306,6 @@ bool ExtractTar::Go(pkgDirStream &Stream)
      LastLongLink.erase();
   }
 
-   return Done(false);
+   return Done();
 }
 							/*}}}*/
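One last note on the copy loop in the hunks above: tar stores member data in 512-byte records, so the extractor always reads whole records from the stream even though only Itm.Size bytes are real data. The rounding expression used there, ((Read+511)/512)*512, as a tiny self-contained example:

    #include <assert.h>

    // Round a byte count up to the next multiple of the 512-byte tar record size.
    static unsigned long long RoundToRecord(unsigned long long Bytes)
    {
       return ((Bytes + 511) / 512) * 512;
    }

    int main()
    {
       assert(RoundToRecord(0)   == 0);
       assert(RoundToRecord(1)   == 512);
       assert(RoundToRecord(512) == 512);
       assert(RoundToRecord(513) == 1024);
       return 0;
    }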