X-Git-Url: https://git.saurik.com/apt.git/blobdiff_plain/a7b33d6543ac4dcf7fc40b38301f7cc9e22db3d2..b55e706a8794d810fb7c5a7c175c04ea207b1ce7:/ftparchive/writer.cc diff --git a/ftparchive/writer.cc b/ftparchive/writer.cc index 5547c6aa5..7ecfe78ed 100644 --- a/ftparchive/writer.cc +++ b/ftparchive/writer.cc @@ -11,16 +11,17 @@ ##################################################################### */ /*}}}*/ // Include Files /*{{{*/ -#include "writer.h" - -#include +#include + #include #include #include +#include #include -#include -#include +#include #include +#include +#include #include #include @@ -28,11 +29,15 @@ #include #include #include +#include #include - + +#include "writer.h" #include "cachedb.h" #include "apt-ftparchive.h" #include "multicompress.h" + +#include /*}}}*/ using namespace std; FTWScanner *FTWScanner::Owner; @@ -54,14 +59,15 @@ inline void SetTFRewriteData(struct TFRewriteData &tfrd, // FTWScanner::FTWScanner - Constructor /*{{{*/ // --------------------------------------------------------------------- /* */ -FTWScanner::FTWScanner() +FTWScanner::FTWScanner(string const &Arch): Arch(Arch) { ErrorPrinted = false; NoLinkAct = !_config->FindB("APT::FTPArchive::DeLinkAct",true); - RealPath = 0; - long PMax = pathconf(".",_PC_PATH_MAX); - if (PMax > 0) - RealPath = new char[PMax]; + + DoMD5 = _config->FindB("APT::FTPArchive::MD5",true); + DoSHA1 = _config->FindB("APT::FTPArchive::SHA1",true); + DoSHA256 = _config->FindB("APT::FTPArchive::SHA256",true); + DoSHA512 = _config->FindB("APT::FTPArchive::SHA512",true); } /*}}}*/ // FTWScanner::Scanner - FTW Scanner /*{{{*/ @@ -92,6 +98,8 @@ int FTWScanner::ScannerFTW(const char *File,const struct stat *sb,int Flag) int FTWScanner::ScannerFile(const char *File, bool const &ReadLink) { const char *LastComponent = strrchr(File, '/'); + char *RealPath = NULL; + if (LastComponent == NULL) LastComponent = File; else @@ -111,10 +119,13 @@ int FTWScanner::ScannerFile(const char *File, bool const &ReadLink) given are not links themselves. */ char Jnk[2]; Owner->OriginalPath = File; - if (ReadLink && Owner->RealPath != 0 && + if (ReadLink && readlink(File,Jnk,sizeof(Jnk)) != -1 && - realpath(File,Owner->RealPath) != 0) - Owner->DoPackage(Owner->RealPath); + (RealPath = realpath(File,NULL)) != 0) + { + Owner->DoPackage(RealPath); + free(RealPath); + } else Owner->DoPackage(File); @@ -150,13 +161,15 @@ int FTWScanner::ScannerFile(const char *File, bool const &ReadLink) /* */ bool FTWScanner::RecursiveScan(string const &Dir) { + char *RealPath = NULL; /* If noprefix is set then jam the scan root in, so we don't generate link followed paths out of control */ if (InternalPrefix.empty() == true) { - if (realpath(Dir.c_str(),RealPath) == 0) + if ((RealPath = realpath(Dir.c_str(),NULL)) == 0) return _error->Errno("realpath",_("Failed to resolve %s"),Dir.c_str()); - InternalPrefix = RealPath; + InternalPrefix = RealPath; + free(RealPath); } // Do recursive directory searching @@ -180,13 +193,15 @@ bool FTWScanner::RecursiveScan(string const &Dir) of files from another file. 
*/ bool FTWScanner::LoadFileList(string const &Dir, string const &File) { + char *RealPath = NULL; /* If noprefix is set then jam the scan root in, so we don't generate link followed paths out of control */ if (InternalPrefix.empty() == true) { - if (realpath(Dir.c_str(),RealPath) == 0) + if ((RealPath = realpath(Dir.c_str(),NULL)) == 0) return _error->Errno("realpath",_("Failed to resolve %s"),Dir.c_str()); InternalPrefix = RealPath; + free(RealPath); } Owner = this; @@ -235,8 +250,8 @@ bool FTWScanner::LoadFileList(string const &Dir, string const &File) // --------------------------------------------------------------------- /* */ bool FTWScanner::Delink(string &FileName,const char *OriginalPath, - unsigned long &DeLinkBytes, - off_t const &FileSize) + unsigned long long &DeLinkBytes, + unsigned long long const &FileSize) { // See if this isn't an internaly prefix'd file name. if (InternalPrefix.empty() == false && @@ -269,7 +284,8 @@ bool FTWScanner::Delink(string &FileName,const char *OriginalPath, if (link(FileName.c_str(),OriginalPath) != 0) { // Panic! Restore the symlink - symlink(OldLink,OriginalPath); + if (symlink(OldLink,OriginalPath) != 0) + _error->Errno("symlink", "failed to restore symlink"); return _error->Errno("link",_("*** Failed to link %s to %s"), FileName.c_str(), OriginalPath); @@ -294,18 +310,18 @@ bool FTWScanner::Delink(string &FileName,const char *OriginalPath, // --------------------------------------------------------------------- /* */ PackagesWriter::PackagesWriter(string const &DB,string const &Overrides,string const &ExtOverrides, - string const &aArch) : - Db(DB),Stats(Db.Stats), Arch(aArch) + string const &Arch) : + FTWScanner(Arch), Db(DB), Stats(Db.Stats), TransWriter(NULL) { Output = stdout; - SetExts(".deb .udeb .foo .bar .baz"); - AddPattern("*.deb"); + SetExts(".deb .udeb"); DeLinkLimit = 0; - + // Process the command line options - DoMD5 = _config->FindB("APT::FTPArchive::MD5",true); - DoSHA1 = _config->FindB("APT::FTPArchive::SHA1",true); - DoSHA256 = _config->FindB("APT::FTPArchive::SHA256",true); + DoMD5 = _config->FindB("APT::FTPArchive::Packages::MD5",DoMD5); + DoSHA1 = _config->FindB("APT::FTPArchive::Packages::SHA1",DoSHA1); + DoSHA256 = _config->FindB("APT::FTPArchive::Packages::SHA256",DoSHA256); + DoSHA512 = _config->FindB("APT::FTPArchive::Packages::SHA512",DoSHA512); DoAlwaysStat = _config->FindB("APT::FTPArchive::AlwaysStat", false); DoContents = _config->FindB("APT::FTPArchive::Contents",true); NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false); @@ -313,7 +329,7 @@ PackagesWriter::PackagesWriter(string const &DB,string const &Overrides,string c if (Db.Loaded() == false) DoContents = false; - + // Read the override file if (Overrides.empty() == false && Over.ReadOverride(Overrides) == false) return; @@ -335,17 +351,16 @@ bool FTWScanner::SetExts(string const &Vals) string::size_type Start = 0; while (Start <= Vals.length()-1) { - string::size_type Space = Vals.find(' ',Start); - string::size_type Length; - if (Space == string::npos) + string::size_type const Space = Vals.find(' ',Start); + string::size_type const Length = ((Space == string::npos) ? 
Vals.length() : Space) - Start; + if ( Arch.empty() == false ) { - Length = Vals.length()-Start; + AddPattern(string("*_") + Arch + Vals.substr(Start, Length)); + AddPattern(string("*_all") + Vals.substr(Start, Length)); } else - { - Length = Space-Start; - } - AddPattern(string("*") + Vals.substr(Start, Length)); + AddPattern(string("*") + Vals.substr(Start, Length)); + Start += Length + 1; } @@ -361,13 +376,13 @@ bool FTWScanner::SetExts(string const &Vals) bool PackagesWriter::DoPackage(string FileName) { // Pull all the data we need form the DB - if (Db.GetFileInfo(FileName, true, DoContents, true, DoMD5, DoSHA1, DoSHA256, DoAlwaysStat) + if (Db.GetFileInfo(FileName, true, DoContents, true, DoMD5, DoSHA1, DoSHA256, DoSHA512, DoAlwaysStat) == false) { return false; } - off_t FileSize = Db.GetFileSize(); + unsigned long long FileSize = Db.GetFileSize(); if (Delink(FileName,OriginalPath,Stats.DeLinkBytes,FileSize) == false) return false; @@ -403,7 +418,7 @@ bool PackagesWriter::DoPackage(string FileName) } char Size[40]; - sprintf(Size,"%lu", (unsigned long) FileSize); + sprintf(Size,"%llu", (unsigned long long) FileSize); // Strip the DirStrip prefix from the FileName and add the PathPrefix string NewFileName; @@ -431,9 +446,14 @@ bool PackagesWriter::DoPackage(string FileName) unsigned int End = 0; SetTFRewriteData(Changes[End++], "Size", Size); - SetTFRewriteData(Changes[End++], "MD5sum", Db.MD5Res.c_str()); - SetTFRewriteData(Changes[End++], "SHA1", Db.SHA1Res.c_str()); - SetTFRewriteData(Changes[End++], "SHA256", Db.SHA256Res.c_str()); + if (DoMD5 == true) + SetTFRewriteData(Changes[End++], "MD5sum", Db.MD5Res.c_str()); + if (DoSHA1 == true) + SetTFRewriteData(Changes[End++], "SHA1", Db.SHA1Res.c_str()); + if (DoSHA256 == true) + SetTFRewriteData(Changes[End++], "SHA256", Db.SHA256Res.c_str()); + if (DoSHA512 == true) + SetTFRewriteData(Changes[End++], "SHA512", Db.SHA512Res.c_str()); SetTFRewriteData(Changes[End++], "Filename", NewFileName.c_str()); SetTFRewriteData(Changes[End++], "Priority", OverItem->Priority.c_str()); SetTFRewriteData(Changes[End++], "Status", 0); @@ -445,6 +465,8 @@ bool PackagesWriter::DoPackage(string FileName) descmd5.Add(desc.c_str()); DescriptionMd5 = descmd5.Result().Value(); SetTFRewriteData(Changes[End++], "Description-md5", DescriptionMd5.c_str()); + if (TransWriter != NULL) + TransWriter->DoPackage(Package, desc, DescriptionMd5); } // Rewrite the maintainer field if necessary @@ -477,7 +499,7 @@ bool PackagesWriter::DoPackage(string FileName) } for (map::const_iterator I = OverItem->FieldOverride.begin(); - I != OverItem->FieldOverride.end(); I++) + I != OverItem->FieldOverride.end(); ++I) SetTFRewriteData(Changes[End++],I->first.c_str(),I->second.c_str()); SetTFRewriteData(Changes[End++], 0, 0); @@ -491,11 +513,61 @@ bool PackagesWriter::DoPackage(string FileName) } /*}}}*/ +// TranslationWriter::TranslationWriter - Constructor /*{{{*/ +// --------------------------------------------------------------------- +/* Create a Translation-Master file for this Packages file */ +TranslationWriter::TranslationWriter(string const &File, string const &TransCompress, + mode_t const &Permissions) : Output(NULL), + RefCounter(0) +{ + if (File.empty() == true) + return; + + Comp = new MultiCompress(File, TransCompress, Permissions); + Output = Comp->Input; +} + /*}}}*/ +// TranslationWriter::DoPackage - Process a single package /*{{{*/ +// --------------------------------------------------------------------- +/* Create a Translation-Master file for this Packages 
file */ +bool TranslationWriter::DoPackage(string const &Pkg, string const &Desc, + string const &MD5) +{ + if (Output == NULL) + return true; + + // Different archs can include different versions and therefore + // different descriptions - so we need to check for both name and md5. + string const Record = Pkg + ":" + MD5; + + if (Included.find(Record) != Included.end()) + return true; + + fprintf(Output, "Package: %s\nDescription-md5: %s\nDescription-en: %s\n", + Pkg.c_str(), MD5.c_str(), Desc.c_str()); + + Included.insert(Record); + return true; +} + /*}}}*/ +// TranslationWriter::~TranslationWriter - Destructor /*{{{*/ +// --------------------------------------------------------------------- +/* */ +TranslationWriter::~TranslationWriter() +{ + if (Comp == NULL) + return; + + delete Comp; +} + /*}}}*/ + // SourcesWriter::SourcesWriter - Constructor /*{{{*/ // --------------------------------------------------------------------- /* */ -SourcesWriter::SourcesWriter(string const &BOverrides,string const &SOverrides, - string const &ExtOverrides) +SourcesWriter::SourcesWriter(string const &DB, string const &BOverrides,string const &SOverrides, + string const &ExtOverrides) : + Db(DB), Stats(Db.Stats) { Output = stdout; AddPattern("*.dsc"); @@ -504,7 +576,12 @@ SourcesWriter::SourcesWriter(string const &BOverrides,string const &SOverrides, BufSize = 0; // Process the command line options + DoMD5 = _config->FindB("APT::FTPArchive::Sources::MD5",DoMD5); + DoSHA1 = _config->FindB("APT::FTPArchive::Sources::SHA1",DoSHA1); + DoSHA256 = _config->FindB("APT::FTPArchive::Sources::SHA256",DoSHA256); + DoSHA512 = _config->FindB("APT::FTPArchive::Sources::SHA512",DoSHA512); NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false); + DoAlwaysStat = _config->FindB("APT::FTPArchive::AlwaysStat", false); // Read the override file if (BOverrides.empty() == false && BOver.ReadOverride(BOverrides) == false) @@ -527,65 +604,62 @@ SourcesWriter::SourcesWriter(string const &BOverrides,string const &SOverrides, // --------------------------------------------------------------------- /* */ bool SourcesWriter::DoPackage(string FileName) -{ +{ // Open the archive - FileFd F(FileName,FileFd::ReadOnly); - if (_error->PendingError() == true) + FileFd F; + if (OpenMaybeClearSignedFile(FileName, F) == false) return false; - - // Stat the file for later - struct stat St; - if (fstat(F.Fd(),&St) != 0) - return _error->Errno("fstat","Failed to stat %s",FileName.c_str()); - if (St.st_size > 128*1024) + unsigned long long const FSize = F.FileSize(); + //FIXME: do we really need to enforce a maximum size of the dsc file? + if (FSize > 128*1024) return _error->Error("DSC file '%s' is too large!",FileName.c_str()); - - if (BufSize < (unsigned)St.st_size+1) + + if (BufSize < FSize + 2) { - BufSize = St.st_size+1; - Buffer = (char *)realloc(Buffer,St.st_size+1); + BufSize = FSize + 2; + Buffer = (char *)realloc(Buffer , BufSize); } - - if (F.Read(Buffer,St.st_size) == false) + + if (F.Read(Buffer, FSize) == false) return false; + // Stat the file for later (F might be clearsigned, so not F.FileSize()) + struct stat St; + if (stat(FileName.c_str(), &St) != 0) + return _error->Errno("fstat","Failed to stat %s",FileName.c_str()); + // Hash the file char *Start = Buffer; - char *BlkEnd = Buffer + St.st_size; - MD5Summation MD5; - MD5.Add((unsigned char *)Start,BlkEnd - Start); - - // Add an extra \n to the end, just in case - *BlkEnd++ = '\n'; - - /* Remove the PGP trailer. 
Some .dsc's have this without a blank line - before */ - const char *Key = "-----BEGIN PGP SIGNATURE-----"; - for (char *MsgEnd = Start; MsgEnd < BlkEnd - strlen(Key) -1; MsgEnd++) + char *BlkEnd = Buffer + FSize; + + Hashes DscHashes; + if (FSize == (unsigned long long) St.st_size) { - if (*MsgEnd == '\n' && strncmp(MsgEnd+1,Key,strlen(Key)) == 0) - { - MsgEnd[1] = '\n'; - break; - } + if (DoMD5 == true) + DscHashes.MD5.Add((unsigned char *)Start,BlkEnd - Start); + if (DoSHA1 == true) + DscHashes.SHA1.Add((unsigned char *)Start,BlkEnd - Start); + if (DoSHA256 == true) + DscHashes.SHA256.Add((unsigned char *)Start,BlkEnd - Start); + if (DoSHA512 == true) + DscHashes.SHA512.Add((unsigned char *)Start,BlkEnd - Start); } - - /* Read records until we locate the Source record. This neatly skips the - GPG header (which is RFC822 formed) without any trouble. */ - pkgTagSection Tags; - do + else { - unsigned Pos; - if (Tags.Scan(Start,BlkEnd - Start) == false) - return _error->Error("Could not find a record in the DSC '%s'",FileName.c_str()); - if (Tags.Find("Source",Pos) == true) - break; - Start += Tags.size(); + FileFd DscFile(FileName, FileFd::ReadOnly); + DscHashes.AddFD(DscFile, St.st_size, DoMD5, DoSHA1, DoSHA256, DoSHA512); } - while (1); + + // Add extra \n to the end, just in case (as in clearsigned they are missing) + *BlkEnd++ = '\n'; + *BlkEnd++ = '\n'; + + pkgTagSection Tags; + if (Tags.Scan(Start,BlkEnd - Start) == false || Tags.Exists("Source") == false) + return _error->Error("Could not find a record in the DSC '%s'",FileName.c_str()); Tags.Trim(); - + // Lookup the overide information, finding first the best priority. string BestPrio; string Bins = Tags.FindS("Binary"); @@ -646,12 +720,28 @@ bool SourcesWriter::DoPackage(string FileName) } // Add the dsc to the files hash list - char Files[1000]; - snprintf(Files,sizeof(Files),"\n %s %lu %s\n %s", - string(MD5.Result()).c_str(),St.st_size, - flNotDir(FileName).c_str(), - Tags.FindS("Files").c_str()); - + string const strippedName = flNotDir(FileName); + std::ostringstream ostreamFiles; + if (DoMD5 == true && Tags.Exists("Files")) + ostreamFiles << "\n " << string(DscHashes.MD5.Result()) << " " << St.st_size << " " + << strippedName << "\n " << Tags.FindS("Files"); + string const Files = ostreamFiles.str(); + + std::ostringstream ostreamSha1; + if (DoSHA1 == true && Tags.Exists("Checksums-Sha1")) + ostreamSha1 << "\n " << string(DscHashes.SHA1.Result()) << " " << St.st_size << " " + << strippedName << "\n " << Tags.FindS("Checksums-Sha1"); + + std::ostringstream ostreamSha256; + if (DoSHA256 == true && Tags.Exists("Checksums-Sha256")) + ostreamSha256 << "\n " << string(DscHashes.SHA256.Result()) << " " << St.st_size << " " + << strippedName << "\n " << Tags.FindS("Checksums-Sha256"); + + std::ostringstream ostreamSha512; + if (DoSHA512 == true && Tags.Exists("Checksums-Sha512")) + ostreamSha512 << "\n " << string(DscHashes.SHA512.Result()) << " " << St.st_size << " " + << strippedName << "\n " << Tags.FindS("Checksums-Sha512"); + // Strip the DirStrip prefix from the FileName and add the PathPrefix string NewFileName; if (DirStrip.empty() == false && @@ -666,9 +756,10 @@ bool SourcesWriter::DoPackage(string FileName) string Directory = flNotFile(OriginalPath); string Package = Tags.FindS("Source"); - // Perform the delinking operation over all of the files + // Perform operation over all of the files string ParseJnk; - const char *C = Files; + const char *C = Files.c_str(); + char *RealPath = NULL; for (;isspace(*C); C++); while 
(*C != 0) { @@ -677,13 +768,41 @@ bool SourcesWriter::DoPackage(string FileName) ParseQuoteWord(C,ParseJnk) == false || ParseQuoteWord(C,ParseJnk) == false) return _error->Error("Error parsing file record"); - - char Jnk[2]; + string OriginalPath = Directory + ParseJnk; - if (RealPath != 0 && readlink(OriginalPath.c_str(),Jnk,sizeof(Jnk)) != -1 && - realpath(OriginalPath.c_str(),RealPath) != 0) + + // Add missing hashes to source files + if ((DoSHA1 == true && !Tags.Exists("Checksums-Sha1")) || + (DoSHA256 == true && !Tags.Exists("Checksums-Sha256")) || + (DoSHA512 == true && !Tags.Exists("Checksums-Sha512"))) + { + if (Db.GetFileInfo(OriginalPath, false, false, false, DoMD5, DoSHA1, DoSHA256, DoSHA512, DoAlwaysStat) + == false) + { + return _error->Error("Error getting file info"); + } + + if (DoSHA1 == true && !Tags.Exists("Checksums-Sha1")) + ostreamSha1 << "\n " << string(Db.SHA1Res) << " " + << Db.GetFileSize() << " " << ParseJnk; + + if (DoSHA256 == true && !Tags.Exists("Checksums-Sha256")) + ostreamSha256 << "\n " << string(Db.SHA256Res) << " " + << Db.GetFileSize() << " " << ParseJnk; + + if (DoSHA512 == true && !Tags.Exists("Checksums-Sha512")) + ostreamSha512 << "\n " << string(Db.SHA512Res) << " " + << Db.GetFileSize() << " " << ParseJnk; + } + + // Perform the delinking operation + char Jnk[2]; + + if (readlink(OriginalPath.c_str(),Jnk,sizeof(Jnk)) != -1 && + (RealPath = realpath(OriginalPath.c_str(),NULL)) != 0) { string RP = RealPath; + free(RealPath); if (Delink(RP,OriginalPath.c_str(),Stats.DeLinkBytes,St.st_size) == false) return false; } @@ -693,13 +812,24 @@ bool SourcesWriter::DoPackage(string FileName) if (Directory.length() > 2) Directory.erase(Directory.end()-1); + string const ChecksumsSha1 = ostreamSha1.str(); + string const ChecksumsSha256 = ostreamSha256.str(); + string const ChecksumsSha512 = ostreamSha512.str(); + // This lists all the changes to the fields we are going to make. 
- // (5 hardcoded + maintainer + end marker) - TFRewriteData Changes[5+1+SOverItem->FieldOverride.size()+1]; + // (5 hardcoded + checksums + maintainer + end marker) + TFRewriteData Changes[5+2+1+SOverItem->FieldOverride.size()+1]; unsigned int End = 0; SetTFRewriteData(Changes[End++],"Source",Package.c_str(),"Package"); - SetTFRewriteData(Changes[End++],"Files",Files); + if (Files.empty() == false) + SetTFRewriteData(Changes[End++],"Files",Files.c_str()); + if (ChecksumsSha1.empty() == false) + SetTFRewriteData(Changes[End++],"Checksums-Sha1",ChecksumsSha1.c_str()); + if (ChecksumsSha256.empty() == false) + SetTFRewriteData(Changes[End++],"Checksums-Sha256",ChecksumsSha256.c_str()); + if (ChecksumsSha512.empty() == false) + SetTFRewriteData(Changes[End++],"Checksums-Sha512",ChecksumsSha512.c_str()); if (Directory != "./") SetTFRewriteData(Changes[End++],"Directory",Directory.c_str()); SetTFRewriteData(Changes[End++],"Priority",BestPrio.c_str()); @@ -721,7 +851,7 @@ bool SourcesWriter::DoPackage(string FileName) SetTFRewriteData(Changes[End++], "Maintainer", NewMaint.c_str()); for (map::const_iterator I = SOverItem->FieldOverride.begin(); - I != SOverItem->FieldOverride.end(); I++) + I != SOverItem->FieldOverride.end(); ++I) SetTFRewriteData(Changes[End++],I->first.c_str(),I->second.c_str()); SetTFRewriteData(Changes[End++], 0, 0); @@ -733,18 +863,18 @@ bool SourcesWriter::DoPackage(string FileName) Stats.Packages++; - return true; + return Db.Finish(); } /*}}}*/ // ContentsWriter::ContentsWriter - Constructor /*{{{*/ // --------------------------------------------------------------------- /* */ -ContentsWriter::ContentsWriter(string const &DB) : - Db(DB), Stats(Db.Stats) +ContentsWriter::ContentsWriter(string const &DB, string const &Arch) : + FTWScanner(Arch), Db(DB), Stats(Db.Stats) { - AddPattern("*.deb"); + SetExts(".deb"); Output = stdout; } /*}}}*/ @@ -778,22 +908,16 @@ bool ContentsWriter::ReadFromPkgs(string const &PkgFile,string const &PkgCompres MultiCompress Pkgs(PkgFile,PkgCompress,0,false); if (_error->PendingError() == true) return false; - + // Open the package file - int CompFd = -1; - pid_t Proc = -1; - if (Pkgs.OpenOld(CompFd,Proc) == false) + FileFd Fd; + if (Pkgs.OpenOld(Fd) == false) return false; - - // No auto-close FD - FileFd Fd(CompFd,false); + pkgTagFile Tags(&Fd); if (_error->PendingError() == true) - { - Pkgs.CloseOld(CompFd,Proc); return false; - } - + // Parse. 
pkgTagSection Section; while (Tags.Step(Section) == true) @@ -815,11 +939,10 @@ bool ContentsWriter::ReadFromPkgs(string const &PkgFile,string const &PkgCompres _error->DumpErrors(); } } - + // Tidy the compressor - if (Pkgs.CloseOld(CompFd,Proc) == false) - return false; - + Fd.Close(); + return true; } @@ -830,19 +953,31 @@ bool ContentsWriter::ReadFromPkgs(string const &PkgFile,string const &PkgCompres /* */ ReleaseWriter::ReleaseWriter(string const &DB) { - AddPattern("Packages"); - AddPattern("Packages.gz"); - AddPattern("Packages.bz2"); - AddPattern("Packages.lzma"); - AddPattern("Sources"); - AddPattern("Sources.gz"); - AddPattern("Sources.bz2"); - AddPattern("Sources.lzma"); - AddPattern("Release"); - AddPattern("md5sum.txt"); + if (_config->FindB("APT::FTPArchive::Release::Default-Patterns", true) == true) + { + AddPattern("Packages"); + AddPattern("Packages.gz"); + AddPattern("Packages.bz2"); + AddPattern("Packages.lzma"); + AddPattern("Packages.xz"); + AddPattern("Translation-*"); + AddPattern("Sources"); + AddPattern("Sources.gz"); + AddPattern("Sources.bz2"); + AddPattern("Sources.lzma"); + AddPattern("Sources.xz"); + AddPattern("Release"); + AddPattern("Contents-*"); + AddPattern("Index"); + AddPattern("md5sum.txt"); + } + AddPatterns(_config->FindVector("APT::FTPArchive::Release::Patterns")); Output = stdout; time_t const now = time(NULL); + + setlocale(LC_TIME, "C"); + char datestr[128]; if (strftime(datestr, sizeof(datestr), "%a, %d %b %Y %H:%M:%S UTC", gmtime(&now)) == 0) @@ -850,6 +985,17 @@ ReleaseWriter::ReleaseWriter(string const &DB) datestr[0] = '\0'; } + time_t const validuntil = now + _config->FindI("APT::FTPArchive::Release::ValidTime", 0); + char validstr[128]; + if (now == validuntil || + strftime(validstr, sizeof(validstr), "%a, %d %b %Y %H:%M:%S UTC", + gmtime(&validuntil)) == 0) + { + validstr[0] = '\0'; + } + + setlocale(LC_TIME, ""); + map Fields; Fields["Origin"] = ""; Fields["Label"] = ""; @@ -857,6 +1003,7 @@ ReleaseWriter::ReleaseWriter(string const &DB) Fields["Version"] = ""; Fields["Codename"] = ""; Fields["Date"] = datestr; + Fields["Valid-Until"] = validstr; Fields["Architectures"] = ""; Fields["Components"] = ""; Fields["Description"] = ""; @@ -872,6 +1019,10 @@ ReleaseWriter::ReleaseWriter(string const &DB) fprintf(Output, "%s: %s\n", (*I).first.c_str(), Value.c_str()); } + + DoMD5 = _config->FindB("APT::FTPArchive::Release::MD5",DoMD5); + DoSHA1 = _config->FindB("APT::FTPArchive::Release::SHA1",DoSHA1); + DoSHA256 = _config->FindB("APT::FTPArchive::Release::SHA256",DoSHA256); } /*}}}*/ // ReleaseWriter::DoPackage - Process a single package /*{{{*/ @@ -904,22 +1055,18 @@ bool ReleaseWriter::DoPackage(string FileName) CheckSums[NewFileName].size = fd.Size(); - MD5Summation MD5; - MD5.AddFD(fd.Fd(), fd.Size()); - CheckSums[NewFileName].MD5 = MD5.Result(); - - fd.Seek(0); - SHA1Summation SHA1; - SHA1.AddFD(fd.Fd(), fd.Size()); - CheckSums[NewFileName].SHA1 = SHA1.Result(); - - fd.Seek(0); - SHA256Summation SHA256; - SHA256.AddFD(fd.Fd(), fd.Size()); - CheckSums[NewFileName].SHA256 = SHA256.Result(); - + Hashes hs; + hs.AddFD(fd, 0, DoMD5, DoSHA1, DoSHA256, DoSHA512); + if (DoMD5 == true) + CheckSums[NewFileName].MD5 = hs.MD5.Result(); + if (DoSHA1 == true) + CheckSums[NewFileName].SHA1 = hs.SHA1.Result(); + if (DoSHA256 == true) + CheckSums[NewFileName].SHA256 = hs.SHA256.Result(); + if (DoSHA512 == true) + CheckSums[NewFileName].SHA512 = hs.SHA512.Result(); fd.Close(); - + return true; } @@ -928,37 +1075,52 @@ bool ReleaseWriter::DoPackage(string 
FileName) // --------------------------------------------------------------------- void ReleaseWriter::Finish() { - fprintf(Output, "MD5Sum:\n"); - for(map::const_iterator I = CheckSums.begin(); - I != CheckSums.end(); - ++I) + if (DoMD5 == true) { - fprintf(Output, " %s %16ld %s\n", - (*I).second.MD5.c_str(), - (*I).second.size, - (*I).first.c_str()); + fprintf(Output, "MD5Sum:\n"); + for(map::const_iterator I = CheckSums.begin(); + I != CheckSums.end(); ++I) + { + fprintf(Output, " %s %16llu %s\n", + (*I).second.MD5.c_str(), + (*I).second.size, + (*I).first.c_str()); + } } - - fprintf(Output, "SHA1:\n"); - for(map::const_iterator I = CheckSums.begin(); - I != CheckSums.end(); - ++I) + if (DoSHA1 == true) { - fprintf(Output, " %s %16ld %s\n", - (*I).second.SHA1.c_str(), - (*I).second.size, - (*I).first.c_str()); + fprintf(Output, "SHA1:\n"); + for(map::const_iterator I = CheckSums.begin(); + I != CheckSums.end(); ++I) + { + fprintf(Output, " %s %16llu %s\n", + (*I).second.SHA1.c_str(), + (*I).second.size, + (*I).first.c_str()); + } + } + if (DoSHA256 == true) + { + fprintf(Output, "SHA256:\n"); + for(map::const_iterator I = CheckSums.begin(); + I != CheckSums.end(); ++I) + { + fprintf(Output, " %s %16llu %s\n", + (*I).second.SHA256.c_str(), + (*I).second.size, + (*I).first.c_str()); + } } - fprintf(Output, "SHA256:\n"); + fprintf(Output, "SHA512:\n"); for(map::const_iterator I = CheckSums.begin(); I != CheckSums.end(); ++I) { - fprintf(Output, " %s %16ld %s\n", - (*I).second.SHA256.c_str(), + fprintf(Output, " %s %16llu %s\n", + (*I).second.SHA512.c_str(), (*I).second.size, (*I).first.c_str()); } -} +}
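
A pattern that recurs across the hunks above is the switch away from the pre-sized RealPath buffer (allocated once via pathconf(".",_PC_PATH_MAX) in the old FTWScanner constructor) to calling realpath(3) with a NULL second argument and freeing the result, as now done in ScannerFile, RecursiveScan, LoadFileList and SourcesWriter::DoPackage. The stand-alone sketch below shows just that idiom; the file name and output format are illustrative and not part of the patch.

// realpath-sketch.cc - the allocation idiom the patch adopts: let
// realpath(3) allocate the result buffer instead of sizing one via
// pathconf(_PC_PATH_MAX). Relies on POSIX.1-2008 (or glibc) semantics.
#include <stdio.h>
#include <stdlib.h>

int main(int argc, char *argv[])
{
   if (argc < 2)
      return 1;

   // With a NULL resolved-path argument realpath() malloc()s a buffer of
   // the right size; the caller owns it and must free() it afterwards.
   char *RealPath = realpath(argv[1], NULL);
   if (RealPath == NULL)
   {
      perror("realpath");
      return 1;
   }
   printf("%s -> %s\n", argv[1], RealPath);
   free(RealPath);
   return 0;
}

This drops the per-scanner buffer member and the pathconf() probe at the cost of one allocation per resolved link, and avoids undefined behaviour when the probe fails and the old code passed a NULL buffer to realpath().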
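
The other recurring change is that each writer now reads per-writer hash switches (APT::FTPArchive::Packages::*, ::Sources::*, ::Release::*) on top of the global defaults set in the FTWScanner constructor, feeds a single Hashes object through AddFD(), and emits only the enabled digests. Below is a minimal sketch of that flow, assuming the apt-pkg headers and library from the post-patch tree; the program name, build line and output layout are illustrative rather than part of the patch.

// hashdemo.cc - compute only the configured digests in one pass, mirroring
// the ReleaseWriter::DoPackage/Finish changes above.
// Assumed build line: g++ hashdemo.cc -lapt-pkg -o hashdemo
#include <apt-pkg/configuration.h>
#include <apt-pkg/error.h>
#include <apt-pkg/fileutl.h>
#include <apt-pkg/hashes.h>
#include <stdio.h>
#include <string>

int main(int argc, char *argv[])
{
   if (argc < 2)
      return 1;

   // Per-writer switches falling back to the global APT::FTPArchive values,
   // just as the PackagesWriter/SourcesWriter/ReleaseWriter constructors do.
   bool const DoMD5 = _config->FindB("APT::FTPArchive::Release::MD5", true);
   bool const DoSHA1 = _config->FindB("APT::FTPArchive::Release::SHA1", true);
   bool const DoSHA256 = _config->FindB("APT::FTPArchive::Release::SHA256", true);
   bool const DoSHA512 = true;   // Finish() above always writes the SHA512 block

   FileFd fd(argv[1], FileFd::ReadOnly);
   if (_error->PendingError() == true)
   {
      _error->DumpErrors();
      return 1;
   }

   // One pass over the descriptor feeds every enabled summation context.
   Hashes hs;
   hs.AddFD(fd, 0, DoMD5, DoSHA1, DoSHA256, DoSHA512);
   unsigned long long const Size = fd.Size();

   if (DoMD5 == true)
      printf("MD5Sum:\n %s %16llu %s\n", std::string(hs.MD5.Result()).c_str(), Size, argv[1]);
   if (DoSHA1 == true)
      printf("SHA1:\n %s %16llu %s\n", std::string(hs.SHA1.Result()).c_str(), Size, argv[1]);
   if (DoSHA256 == true)
      printf("SHA256:\n %s %16llu %s\n", std::string(hs.SHA256.Result()).c_str(), Size, argv[1]);
   if (DoSHA512 == true)
      printf("SHA512:\n %s %16llu %s\n", std::string(hs.SHA512.Result()).c_str(), Size, argv[1]);
   return 0;
}

Run against an index file this prints the same per-algorithm blocks that Finish() writes, skipping any algorithm disabled in the configuration; the single AddFD() pass replaces the old sequence of MD5Summation/SHA1Summation/SHA256Summation objects with a Seek(0) between each.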