##################################################################### */
/*}}}*/
// Include Files /*{{{*/
-#ifdef __GNUG__
-#pragma implementation "writer.h"
-#endif
+#include <config.h>
-#include "writer.h"
-
-#include <apti18n.h>
#include <apt-pkg/strutl.h>
#include <apt-pkg/error.h>
#include <apt-pkg/configuration.h>
+#include <apt-pkg/aptconfiguration.h>
#include <apt-pkg/md5.h>
-#include <apt-pkg/sha1.h>
-#include <apt-pkg/sha256.h>
+#include <apt-pkg/hashes.h>
#include <apt-pkg/deblistparser.h>
+#include <apt-pkg/fileutl.h>
+#include <apt-pkg/gpgv.h>
#include <sys/types.h>
#include <unistd.h>
#include <ftw.h>
#include <fnmatch.h>
#include <iostream>
-
+#include <sstream>
+#include <memory>
+
+#include "writer.h"
#include "cachedb.h"
#include "apt-ftparchive.h"
#include "multicompress.h"
+
+#include <apti18n.h>
/*}}}*/
using namespace std;
FTWScanner *FTWScanner::Owner;
// FTWScanner::FTWScanner - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
-FTWScanner::FTWScanner()
+FTWScanner::FTWScanner(string const &Arch): Arch(Arch)
{
ErrorPrinted = false;
NoLinkAct = !_config->FindB("APT::FTPArchive::DeLinkAct",true);
- RealPath = 0;
- long PMax = pathconf(".",_PC_PATH_MAX);
- if (PMax > 0)
- RealPath = new char[PMax];
+
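+   // Hash generation defaults shared by every writer; the Packages, Sources
+   // and Release writers refine these below with their per-index options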
+ DoMD5 = _config->FindB("APT::FTPArchive::MD5",true);
+ DoSHA1 = _config->FindB("APT::FTPArchive::SHA1",true);
+ DoSHA256 = _config->FindB("APT::FTPArchive::SHA256",true);
+ DoSHA512 = _config->FindB("APT::FTPArchive::SHA512",true);
}
/*}}}*/
// FTWScanner::Scanner - FTW Scanner /*{{{*/
// FTWScanner::ScannerFile - File Scanner /*{{{*/
// ---------------------------------------------------------------------
/* */
-int FTWScanner::ScannerFile(const char *File, bool ReadLink)
+int FTWScanner::ScannerFile(const char *File, bool const &ReadLink)
{
const char *LastComponent = strrchr(File, '/');
+ char *RealPath = NULL;
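+   // realpath(..., NULL) lets libc allocate the resolved path (freed after
+   // use), replacing the fixed-size buffer the constructor used to allocate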
+
if (LastComponent == NULL)
LastComponent = File;
else
LastComponent++;
- vector<string>::iterator I;
+ vector<string>::const_iterator I;
for(I = Owner->Patterns.begin(); I != Owner->Patterns.end(); ++I)
{
if (fnmatch((*I).c_str(), LastComponent, 0) == 0)
given are not links themselves. */
char Jnk[2];
Owner->OriginalPath = File;
- if (ReadLink && Owner->RealPath != 0 &&
+ if (ReadLink &&
readlink(File,Jnk,sizeof(Jnk)) != -1 &&
- realpath(File,Owner->RealPath) != 0)
- Owner->DoPackage(Owner->RealPath);
+ (RealPath = realpath(File,NULL)) != 0)
+ {
+ Owner->DoPackage(RealPath);
+ free(RealPath);
+ }
else
Owner->DoPackage(File);
{
Owner->NewLine(1);
- bool Type = _error->PopMessage(Err);
+ bool const Type = _error->PopMessage(Err);
if (Type == true)
cerr << _("E: ") << Err << endl;
else
// FTWScanner::RecursiveScan - Just scan a directory tree /*{{{*/
// ---------------------------------------------------------------------
/* */
-bool FTWScanner::RecursiveScan(string Dir)
+bool FTWScanner::RecursiveScan(string const &Dir)
{
+ char *RealPath = NULL;
/* If noprefix is set then jam the scan root in, so we don't generate
link followed paths out of control */
if (InternalPrefix.empty() == true)
{
- if (realpath(Dir.c_str(),RealPath) == 0)
+ if ((RealPath = realpath(Dir.c_str(),NULL)) == 0)
return _error->Errno("realpath",_("Failed to resolve %s"),Dir.c_str());
- InternalPrefix = RealPath;
+ InternalPrefix = RealPath;
+ free(RealPath);
}
// Do recursive directory searching
Owner = this;
- int Res = ftw(Dir.c_str(),ScannerFTW,30);
+ int const Res = ftw(Dir.c_str(),ScannerFTW,30);
// Error treewalking?
if (Res != 0)
// ---------------------------------------------------------------------
/* This is an alternative to using FTW to locate files, it reads the list
of files from another file. */
-bool FTWScanner::LoadFileList(string Dir,string File)
+bool FTWScanner::LoadFileList(string const &Dir, string const &File)
{
+ char *RealPath = NULL;
/* If noprefix is set then jam the scan root in, so we don't generate
link followed paths out of control */
if (InternalPrefix.empty() == true)
{
- if (realpath(Dir.c_str(),RealPath) == 0)
+ if ((RealPath = realpath(Dir.c_str(),NULL)) == 0)
return _error->Errno("realpath",_("Failed to resolve %s"),Dir.c_str());
InternalPrefix = RealPath;
+ free(RealPath);
}
Owner = this;
// ---------------------------------------------------------------------
/* */
bool FTWScanner::Delink(string &FileName,const char *OriginalPath,
- unsigned long &DeLinkBytes,
- off_t FileSize)
+ unsigned long long &DeLinkBytes,
+ unsigned long long const &FileSize)
{
// See if this isn't an internaly prefix'd file name.
if (InternalPrefix.empty() == false &&
if (link(FileName.c_str(),OriginalPath) != 0)
{
// Panic! Restore the symlink
- symlink(OldLink,OriginalPath);
+ if (symlink(OldLink,OriginalPath) != 0)
+ _error->Errno("symlink", "failed to restore symlink");
return _error->Errno("link",_("*** Failed to link %s to %s"),
FileName.c_str(),
OriginalPath);
// PackagesWriter::PackagesWriter - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
-PackagesWriter::PackagesWriter(string DB,string Overrides,string ExtOverrides,
- string aArch) :
- Db(DB),Stats(Db.Stats), Arch(aArch)
+PackagesWriter::PackagesWriter(string const &DB,string const &Overrides,string const &ExtOverrides,
+ string const &Arch) :
+ FTWScanner(Arch), Db(DB), Stats(Db.Stats), TransWriter(NULL)
{
Output = stdout;
- SetExts(".deb .udeb .foo .bar .baz");
- AddPattern("*.deb");
+ SetExts(".deb .udeb");
DeLinkLimit = 0;
-
+
// Process the command line options
- DoMD5 = _config->FindB("APT::FTPArchive::MD5",true);
- DoSHA1 = _config->FindB("APT::FTPArchive::SHA1",true);
- DoSHA256 = _config->FindB("APT::FTPArchive::SHA256",true);
+ DoMD5 = _config->FindB("APT::FTPArchive::Packages::MD5",DoMD5);
+ DoSHA1 = _config->FindB("APT::FTPArchive::Packages::SHA1",DoSHA1);
+ DoSHA256 = _config->FindB("APT::FTPArchive::Packages::SHA256",DoSHA256);
+ DoSHA512 = _config->FindB("APT::FTPArchive::Packages::SHA512",DoSHA512);
+ DoAlwaysStat = _config->FindB("APT::FTPArchive::AlwaysStat", false);
DoContents = _config->FindB("APT::FTPArchive::Contents",true);
NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false);
+ LongDescription = _config->FindB("APT::FTPArchive::LongDescription",true);
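+   // When LongDescription is disabled the full text is emitted through the
+   // attached TransWriter into a separate Translation file (see DoPackage)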
if (Db.Loaded() == false)
DoContents = false;
-
+
// Read the override file
if (Overrides.empty() == false && Over.ReadOverride(Overrides) == false)
return;
// FTWScanner::SetExts - Set extensions to support /*{{{*/
// ---------------------------------------------------------------------
/* */
-bool FTWScanner::SetExts(string Vals)
+bool FTWScanner::SetExts(string const &Vals)
{
ClearPatterns();
string::size_type Start = 0;
while (Start <= Vals.length()-1)
{
- string::size_type Space = Vals.find(' ',Start);
- string::size_type Length;
- if (Space == string::npos)
+ string::size_type const Space = Vals.find(' ',Start);
+ string::size_type const Length = ((Space == string::npos) ? Vals.length() : Space) - Start;
+      if (Arch.empty() == false)
{
- Length = Vals.length()-Start;
+ AddPattern(string("*_") + Arch + Vals.substr(Start, Length));
+ AddPattern(string("*_all") + Vals.substr(Start, Length));
}
else
- {
- Length = Space-Start;
- }
- AddPattern(string("*") + Vals.substr(Start, Length));
+ AddPattern(string("*") + Vals.substr(Start, Length));
+
Start += Length + 1;
}
bool PackagesWriter::DoPackage(string FileName)
{
// Pull all the data we need form the DB
- if (Db.GetFileInfo(FileName, true, DoContents, true, DoMD5, DoSHA1, DoSHA256)
+ if (Db.GetFileInfo(FileName, true, DoContents, true, DoMD5, DoSHA1, DoSHA256, DoSHA512, DoAlwaysStat)
== false)
{
return false;
}
- off_t FileSize = Db.GetFileSize();
+ unsigned long long FileSize = Db.GetFileSize();
if (Delink(FileName,OriginalPath,Stats.DeLinkBytes,FileSize) == false)
return false;
}
char Size[40];
- sprintf(Size,"%lu", (unsigned long) FileSize);
+ sprintf(Size,"%llu", (unsigned long long) FileSize);
// Strip the DirStrip prefix from the FileName and add the PathPrefix
string NewFileName;
NewFileName = FileName;
if (PathPrefix.empty() == false)
NewFileName = flCombine(PathPrefix,NewFileName);
-
+
+   /* Configuration says we don't want to include the long Description
+      in the package file - instead we ship it in a separate file */
+ string desc;
+ if (LongDescription == false) {
+ desc = Tags.FindS("Description").append("\n");
+ OverItem->FieldOverride["Description"] = desc.substr(0, desc.find('\n')).c_str();
+ }
+
// This lists all the changes to the fields we are going to make.
// (7 hardcoded + maintainer + suggests + end marker)
- TFRewriteData Changes[6+2+OverItem->FieldOverride.size()+1];
+ TFRewriteData Changes[6+2+OverItem->FieldOverride.size()+1+1];
unsigned int End = 0;
SetTFRewriteData(Changes[End++], "Size", Size);
- SetTFRewriteData(Changes[End++], "MD5sum", Db.MD5Res.c_str());
- SetTFRewriteData(Changes[End++], "SHA1", Db.SHA1Res.c_str());
- SetTFRewriteData(Changes[End++], "SHA256", Db.SHA256Res.c_str());
+ if (DoMD5 == true)
+ SetTFRewriteData(Changes[End++], "MD5sum", Db.MD5Res.c_str());
+ if (DoSHA1 == true)
+ SetTFRewriteData(Changes[End++], "SHA1", Db.SHA1Res.c_str());
+ if (DoSHA256 == true)
+ SetTFRewriteData(Changes[End++], "SHA256", Db.SHA256Res.c_str());
+ if (DoSHA512 == true)
+ SetTFRewriteData(Changes[End++], "SHA512", Db.SHA512Res.c_str());
SetTFRewriteData(Changes[End++], "Filename", NewFileName.c_str());
SetTFRewriteData(Changes[End++], "Priority", OverItem->Priority.c_str());
SetTFRewriteData(Changes[End++], "Status", 0);
SetTFRewriteData(Changes[End++], "Optional", 0);
+ string DescriptionMd5;
+ if (LongDescription == false) {
+ MD5Summation descmd5;
+ descmd5.Add(desc.c_str());
+ DescriptionMd5 = descmd5.Result().Value();
+ SetTFRewriteData(Changes[End++], "Description-md5", DescriptionMd5.c_str());
+ if (TransWriter != NULL)
+ TransWriter->DoPackage(Package, desc, DescriptionMd5);
+ }
+
// Rewrite the maintainer field if necessary
bool MaintFailed;
string NewMaint = OverItem->SwapMaint(Tags.FindS("Maintainer"),MaintFailed);
SetTFRewriteData(Changes[End++], "Maintainer", NewMaint.c_str());
/* Get rid of the Optional tag. This is an ugly, ugly, ugly hack that
- dpkg-scanpackages does.. Well sort of. dpkg-scanpackages just does renaming
+ dpkg-scanpackages does. Well sort of. dpkg-scanpackages just does renaming
but dpkg does this append bit. So we do the append bit, at least that way the
status file and package file will remain similar. There are other transforms
but optional is the only legacy one still in use for some lazy reason. */
SetTFRewriteData(Changes[End++], "Suggests", OptionalStr.c_str());
}
- for (map<string,string>::iterator I = OverItem->FieldOverride.begin();
- I != OverItem->FieldOverride.end(); I++)
+ for (map<string,string>::const_iterator I = OverItem->FieldOverride.begin();
+ I != OverItem->FieldOverride.end(); ++I)
SetTFRewriteData(Changes[End++],I->first.c_str(),I->second.c_str());
SetTFRewriteData(Changes[End++], 0, 0);
}
/*}}}*/
+// TranslationWriter::TranslationWriter - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* Create a Translation-Master file for this Packages file */
+TranslationWriter::TranslationWriter(string const &File, string const &TransCompress,
+ mode_t const &Permissions) : Output(NULL),
+ RefCounter(0)
+{
+   Comp = NULL;
+   if (File.empty() == true)
+      return;
+
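+   // MultiCompress hands us a FILE* (Comp->Input); everything written to
+   // Output below ends up in the (optionally compressed) Translation file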
+ Comp = new MultiCompress(File, TransCompress, Permissions);
+ Output = Comp->Input;
+}
+ /*}}}*/
+// TranslationWriter::DoPackage - Process a single package /*{{{*/
+// ---------------------------------------------------------------------
+/* Write the description of a single package to the Translation-Master file */
+bool TranslationWriter::DoPackage(string const &Pkg, string const &Desc,
+ string const &MD5)
+{
+ if (Output == NULL)
+ return true;
+
+ // Different archs can include different versions and therefore
+ // different descriptions - so we need to check for both name and md5.
+ string const Record = Pkg + ":" + MD5;
+
+ if (Included.find(Record) != Included.end())
+ return true;
+
+ fprintf(Output, "Package: %s\nDescription-md5: %s\nDescription-en: %s\n",
+ Pkg.c_str(), MD5.c_str(), Desc.c_str());
+
+ Included.insert(Record);
+ return true;
+}
+ /*}}}*/
+// TranslationWriter::~TranslationWriter - Destructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+TranslationWriter::~TranslationWriter()
+{
+ if (Comp == NULL)
+ return;
+
+ delete Comp;
+}
+ /*}}}*/
+
// SourcesWriter::SourcesWriter - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
-SourcesWriter::SourcesWriter(string BOverrides,string SOverrides,
- string ExtOverrides)
+SourcesWriter::SourcesWriter(string const &DB, string const &BOverrides,string const &SOverrides,
+ string const &ExtOverrides) :
+ Db(DB), Stats(Db.Stats)
{
Output = stdout;
AddPattern("*.dsc");
BufSize = 0;
// Process the command line options
+ DoMD5 = _config->FindB("APT::FTPArchive::Sources::MD5",DoMD5);
+ DoSHA1 = _config->FindB("APT::FTPArchive::Sources::SHA1",DoSHA1);
+ DoSHA256 = _config->FindB("APT::FTPArchive::Sources::SHA256",DoSHA256);
+ DoSHA512 = _config->FindB("APT::FTPArchive::Sources::SHA512",DoSHA512);
NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false);
+ DoAlwaysStat = _config->FindB("APT::FTPArchive::AlwaysStat", false);
// Read the override file
if (BOverrides.empty() == false && BOver.ReadOverride(BOverrides) == false)
// ---------------------------------------------------------------------
/* */
bool SourcesWriter::DoPackage(string FileName)
-{
+{
// Open the archive
- FileFd F(FileName,FileFd::ReadOnly);
- if (_error->PendingError() == true)
+ FileFd F;
+ if (OpenMaybeClearSignedFile(FileName, F) == false)
return false;
-
- // Stat the file for later
- struct stat St;
- if (fstat(F.Fd(),&St) != 0)
- return _error->Errno("fstat","Failed to stat %s",FileName.c_str());
- if (St.st_size > 128*1024)
+ unsigned long long const FSize = F.FileSize();
+ //FIXME: do we really need to enforce a maximum size of the dsc file?
+ if (FSize > 128*1024)
return _error->Error("DSC file '%s' is too large!",FileName.c_str());
-
- if (BufSize < (unsigned)St.st_size+1)
+
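+   // the +2 reserves room for the two newlines appended after the read below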
+ if (BufSize < FSize + 2)
{
- BufSize = St.st_size+1;
- Buffer = (char *)realloc(Buffer,St.st_size+1);
+ BufSize = FSize + 2;
+      Buffer = (char *)realloc(Buffer, BufSize);
}
-
- if (F.Read(Buffer,St.st_size) == false)
+
+ if (F.Read(Buffer, FSize) == false)
return false;
+ // Stat the file for later (F might be clearsigned, so not F.FileSize())
+ struct stat St;
+ if (stat(FileName.c_str(), &St) != 0)
+ return _error->Errno("fstat","Failed to stat %s",FileName.c_str());
+
// Hash the file
char *Start = Buffer;
- char *BlkEnd = Buffer + St.st_size;
- MD5Summation MD5;
- MD5.Add((unsigned char *)Start,BlkEnd - Start);
-
- // Add an extra \n to the end, just in case
- *BlkEnd++ = '\n';
-
- /* Remove the PGP trailer. Some .dsc's have this without a blank line
- before */
- const char *Key = "-----BEGIN PGP SIGNATURE-----";
- for (char *MsgEnd = Start; MsgEnd < BlkEnd - strlen(Key) -1; MsgEnd++)
+ char *BlkEnd = Buffer + FSize;
+
+ Hashes DscHashes;
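+   // If the bytes read match the on-disk size the .dsc was not clearsigned and
+   // the buffer can be hashed as-is; otherwise hash the original file from disk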
+ if (FSize == (unsigned long long) St.st_size)
{
- if (*MsgEnd == '\n' && strncmp(MsgEnd+1,Key,strlen(Key)) == 0)
- {
- MsgEnd[1] = '\n';
- break;
- }
+ if (DoMD5 == true)
+ DscHashes.MD5.Add((unsigned char *)Start,BlkEnd - Start);
+ if (DoSHA1 == true)
+ DscHashes.SHA1.Add((unsigned char *)Start,BlkEnd - Start);
+ if (DoSHA256 == true)
+ DscHashes.SHA256.Add((unsigned char *)Start,BlkEnd - Start);
+ if (DoSHA512 == true)
+ DscHashes.SHA512.Add((unsigned char *)Start,BlkEnd - Start);
}
-
- /* Read records until we locate the Source record. This neatly skips the
- GPG header (which is RFC822 formed) without any trouble. */
- pkgTagSection Tags;
- do
+ else
{
- unsigned Pos;
- if (Tags.Scan(Start,BlkEnd - Start) == false)
- return _error->Error("Could not find a record in the DSC '%s'",FileName.c_str());
- if (Tags.Find("Source",Pos) == true)
- break;
- Start += Tags.size();
+ FileFd DscFile(FileName, FileFd::ReadOnly);
+ DscHashes.AddFD(DscFile, St.st_size, DoMD5, DoSHA1, DoSHA256, DoSHA512);
}
- while (1);
+
+   // Add two extra \n to the end, just in case (clearsigned files may be missing them)
+ *BlkEnd++ = '\n';
+ *BlkEnd++ = '\n';
+
+ pkgTagSection Tags;
+ if (Tags.Scan(Start,BlkEnd - Start) == false || Tags.Exists("Source") == false)
+ return _error->Error("Could not find a record in the DSC '%s'",FileName.c_str());
Tags.Trim();
-
+
// Lookup the overide information, finding first the best priority.
string BestPrio;
string Bins = Tags.FindS("Binary");
}
// Add the dsc to the files hash list
- char Files[1000];
- snprintf(Files,sizeof(Files),"\n %s %lu %s\n %s",
- string(MD5.Result()).c_str(),St.st_size,
- flNotDir(FileName).c_str(),
- Tags.FindS("Files").c_str());
-
+ string const strippedName = flNotDir(FileName);
+ std::ostringstream ostreamFiles;
+ if (DoMD5 == true && Tags.Exists("Files"))
+ ostreamFiles << "\n " << string(DscHashes.MD5.Result()) << " " << St.st_size << " "
+ << strippedName << "\n " << Tags.FindS("Files");
+ string const Files = ostreamFiles.str();
+
+ std::ostringstream ostreamSha1;
+ if (DoSHA1 == true && Tags.Exists("Checksums-Sha1"))
+ ostreamSha1 << "\n " << string(DscHashes.SHA1.Result()) << " " << St.st_size << " "
+ << strippedName << "\n " << Tags.FindS("Checksums-Sha1");
+
+ std::ostringstream ostreamSha256;
+ if (DoSHA256 == true && Tags.Exists("Checksums-Sha256"))
+ ostreamSha256 << "\n " << string(DscHashes.SHA256.Result()) << " " << St.st_size << " "
+ << strippedName << "\n " << Tags.FindS("Checksums-Sha256");
+
+ std::ostringstream ostreamSha512;
+ if (DoSHA512 == true && Tags.Exists("Checksums-Sha512"))
+ ostreamSha512 << "\n " << string(DscHashes.SHA512.Result()) << " " << St.st_size << " "
+ << strippedName << "\n " << Tags.FindS("Checksums-Sha512");
+
// Strip the DirStrip prefix from the FileName and add the PathPrefix
string NewFileName;
if (DirStrip.empty() == false &&
string Directory = flNotFile(OriginalPath);
string Package = Tags.FindS("Source");
- // Perform the delinking operation over all of the files
+   // Add missing checksums and perform delinking over all of the files
string ParseJnk;
- const char *C = Files;
+ const char *C = Files.c_str();
+ char *RealPath = NULL;
for (;isspace(*C); C++);
while (*C != 0)
{
ParseQuoteWord(C,ParseJnk) == false ||
ParseQuoteWord(C,ParseJnk) == false)
return _error->Error("Error parsing file record");
-
- char Jnk[2];
+
string OriginalPath = Directory + ParseJnk;
- if (RealPath != 0 && readlink(OriginalPath.c_str(),Jnk,sizeof(Jnk)) != -1 &&
- realpath(OriginalPath.c_str(),RealPath) != 0)
+
+ // Add missing hashes to source files
+ if ((DoSHA1 == true && !Tags.Exists("Checksums-Sha1")) ||
+ (DoSHA256 == true && !Tags.Exists("Checksums-Sha256")) ||
+ (DoSHA512 == true && !Tags.Exists("Checksums-Sha512")))
+ {
+ if (Db.GetFileInfo(OriginalPath, false, false, false, DoMD5, DoSHA1, DoSHA256, DoSHA512, DoAlwaysStat)
+ == false)
+ {
+ return _error->Error("Error getting file info");
+ }
+
+ if (DoSHA1 == true && !Tags.Exists("Checksums-Sha1"))
+ ostreamSha1 << "\n " << string(Db.SHA1Res) << " "
+ << Db.GetFileSize() << " " << ParseJnk;
+
+ if (DoSHA256 == true && !Tags.Exists("Checksums-Sha256"))
+ ostreamSha256 << "\n " << string(Db.SHA256Res) << " "
+ << Db.GetFileSize() << " " << ParseJnk;
+
+ if (DoSHA512 == true && !Tags.Exists("Checksums-Sha512"))
+ ostreamSha512 << "\n " << string(Db.SHA512Res) << " "
+ << Db.GetFileSize() << " " << ParseJnk;
+ }
+
+ // Perform the delinking operation
+ char Jnk[2];
+
+ if (readlink(OriginalPath.c_str(),Jnk,sizeof(Jnk)) != -1 &&
+ (RealPath = realpath(OriginalPath.c_str(),NULL)) != 0)
{
string RP = RealPath;
+ free(RealPath);
if (Delink(RP,OriginalPath.c_str(),Stats.DeLinkBytes,St.st_size) == false)
return false;
}
if (Directory.length() > 2)
Directory.erase(Directory.end()-1);
+ string const ChecksumsSha1 = ostreamSha1.str();
+ string const ChecksumsSha256 = ostreamSha256.str();
+ string const ChecksumsSha512 = ostreamSha512.str();
+
// This lists all the changes to the fields we are going to make.
- // (5 hardcoded + maintainer + end marker)
- TFRewriteData Changes[5+1+SOverItem->FieldOverride.size()+1];
+ // (5 hardcoded + checksums + maintainer + end marker)
+ TFRewriteData Changes[5+2+1+SOverItem->FieldOverride.size()+1];
unsigned int End = 0;
SetTFRewriteData(Changes[End++],"Source",Package.c_str(),"Package");
- SetTFRewriteData(Changes[End++],"Files",Files);
+ if (Files.empty() == false)
+ SetTFRewriteData(Changes[End++],"Files",Files.c_str());
+ if (ChecksumsSha1.empty() == false)
+ SetTFRewriteData(Changes[End++],"Checksums-Sha1",ChecksumsSha1.c_str());
+ if (ChecksumsSha256.empty() == false)
+ SetTFRewriteData(Changes[End++],"Checksums-Sha256",ChecksumsSha256.c_str());
+ if (ChecksumsSha512.empty() == false)
+ SetTFRewriteData(Changes[End++],"Checksums-Sha512",ChecksumsSha512.c_str());
if (Directory != "./")
SetTFRewriteData(Changes[End++],"Directory",Directory.c_str());
SetTFRewriteData(Changes[End++],"Priority",BestPrio.c_str());
if (NewMaint.empty() == false)
SetTFRewriteData(Changes[End++], "Maintainer", NewMaint.c_str());
- for (map<string,string>::iterator I = SOverItem->FieldOverride.begin();
- I != SOverItem->FieldOverride.end(); I++)
+ for (map<string,string>::const_iterator I = SOverItem->FieldOverride.begin();
+ I != SOverItem->FieldOverride.end(); ++I)
SetTFRewriteData(Changes[End++],I->first.c_str(),I->second.c_str());
SetTFRewriteData(Changes[End++], 0, 0);
Stats.Packages++;
- return true;
+ return Db.Finish();
}
/*}}}*/
// ContentsWriter::ContentsWriter - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
-ContentsWriter::ContentsWriter(string DB) :
- Db(DB), Stats(Db.Stats)
+ContentsWriter::ContentsWriter(string const &DB, string const &Arch) :
+ FTWScanner(Arch), Db(DB), Stats(Db.Stats)
{
- AddPattern("*.deb");
+ SetExts(".deb");
Output = stdout;
}
/*}}}*/
// ---------------------------------------------------------------------
/* If Package is the empty string the control record will be parsed to
determine what the package name is. */
-bool ContentsWriter::DoPackage(string FileName,string Package)
+bool ContentsWriter::DoPackage(string FileName, string Package)
{
- if (!Db.GetFileInfo(FileName, Package.empty(), true, false, false, false, false))
+ if (!Db.GetFileInfo(FileName, Package.empty(), true, false, false, false, false, false))
{
return false;
}
// ContentsWriter::ReadFromPkgs - Read from a packages file /*{{{*/
// ---------------------------------------------------------------------
/* */
-bool ContentsWriter::ReadFromPkgs(string PkgFile,string PkgCompress)
+bool ContentsWriter::ReadFromPkgs(string const &PkgFile,string const &PkgCompress)
{
MultiCompress Pkgs(PkgFile,PkgCompress,0,false);
if (_error->PendingError() == true)
return false;
-
+
// Open the package file
- int CompFd = -1;
- pid_t Proc = -1;
- if (Pkgs.OpenOld(CompFd,Proc) == false)
+ FileFd Fd;
+ if (Pkgs.OpenOld(Fd) == false)
return false;
-
- // No auto-close FD
- FileFd Fd(CompFd,false);
+
pkgTagFile Tags(&Fd);
if (_error->PendingError() == true)
- {
- Pkgs.CloseOld(CompFd,Proc);
return false;
- }
-
+
// Parse.
pkgTagSection Section;
while (Tags.Step(Section) == true)
_error->DumpErrors();
}
}
-
+
// Tidy the compressor
- if (Pkgs.CloseOld(CompFd,Proc) == false)
- return false;
-
+ Fd.Close();
+
return true;
}
// ReleaseWriter::ReleaseWriter - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
-ReleaseWriter::ReleaseWriter(string DB)
+ReleaseWriter::ReleaseWriter(string const &DB)
{
- AddPattern("Packages");
- AddPattern("Packages.gz");
- AddPattern("Packages.bz2");
- AddPattern("Sources");
- AddPattern("Sources.gz");
- AddPattern("Sources.bz2");
- AddPattern("Release");
- AddPattern("md5sum.txt");
+ if (_config->FindB("APT::FTPArchive::Release::Default-Patterns", true) == true)
+ {
+ AddPattern("Packages");
+ AddPattern("Packages.gz");
+ AddPattern("Packages.bz2");
+ AddPattern("Packages.lzma");
+ AddPattern("Packages.xz");
+ AddPattern("Translation-*");
+ AddPattern("Sources");
+ AddPattern("Sources.gz");
+ AddPattern("Sources.bz2");
+ AddPattern("Sources.lzma");
+ AddPattern("Sources.xz");
+ AddPattern("Release");
+ AddPattern("Contents-*");
+ AddPattern("Index");
+ AddPattern("md5sum.txt");
+ }
+ AddPatterns(_config->FindVector("APT::FTPArchive::Release::Patterns"));
Output = stdout;
- time_t now = time(NULL);
+ time_t const now = time(NULL);
+
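+   // the C locale guarantees English day and month names in the Date field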
+ setlocale(LC_TIME, "C");
+
char datestr[128];
if (strftime(datestr, sizeof(datestr), "%a, %d %b %Y %H:%M:%S UTC",
gmtime(&now)) == 0)
datestr[0] = '\0';
}
+ time_t const validuntil = now + _config->FindI("APT::FTPArchive::Release::ValidTime", 0);
+ char validstr[128];
+ if (now == validuntil ||
+ strftime(validstr, sizeof(validstr), "%a, %d %b %Y %H:%M:%S UTC",
+ gmtime(&validuntil)) == 0)
+ {
+ validstr[0] = '\0';
+ }
+
+ setlocale(LC_TIME, "");
+
map<string,string> Fields;
Fields["Origin"] = "";
Fields["Label"] = "";
Fields["Version"] = "";
Fields["Codename"] = "";
Fields["Date"] = datestr;
+ Fields["Valid-Until"] = validstr;
Fields["Architectures"] = "";
Fields["Components"] = "";
Fields["Description"] = "";
fprintf(Output, "%s: %s\n", (*I).first.c_str(), Value.c_str());
}
+
+ DoMD5 = _config->FindB("APT::FTPArchive::Release::MD5",DoMD5);
+ DoSHA1 = _config->FindB("APT::FTPArchive::Release::SHA1",DoSHA1);
+ DoSHA256 = _config->FindB("APT::FTPArchive::Release::SHA256",DoSHA256);
}
/*}}}*/
// ReleaseWriter::DoPackage - Process a single package /*{{{*/
CheckSums[NewFileName].size = fd.Size();
- MD5Summation MD5;
- MD5.AddFD(fd.Fd(), fd.Size());
- CheckSums[NewFileName].MD5 = MD5.Result();
-
- fd.Seek(0);
- SHA1Summation SHA1;
- SHA1.AddFD(fd.Fd(), fd.Size());
- CheckSums[NewFileName].SHA1 = SHA1.Result();
-
- fd.Seek(0);
- SHA256Summation SHA256;
- SHA256.AddFD(fd.Fd(), fd.Size());
- CheckSums[NewFileName].SHA256 = SHA256.Result();
-
+ Hashes hs;
+ hs.AddFD(fd, 0, DoMD5, DoSHA1, DoSHA256, DoSHA512);
+ if (DoMD5 == true)
+ CheckSums[NewFileName].MD5 = hs.MD5.Result();
+ if (DoSHA1 == true)
+ CheckSums[NewFileName].SHA1 = hs.SHA1.Result();
+ if (DoSHA256 == true)
+ CheckSums[NewFileName].SHA256 = hs.SHA256.Result();
+ if (DoSHA512 == true)
+ CheckSums[NewFileName].SHA512 = hs.SHA512.Result();
fd.Close();
-
+
return true;
}
// ---------------------------------------------------------------------
void ReleaseWriter::Finish()
{
- fprintf(Output, "MD5Sum:\n");
- for(map<string,struct CheckSum>::iterator I = CheckSums.begin();
- I != CheckSums.end();
- ++I)
+ if (DoMD5 == true)
{
- fprintf(Output, " %s %16ld %s\n",
- (*I).second.MD5.c_str(),
- (*I).second.size,
- (*I).first.c_str());
+ fprintf(Output, "MD5Sum:\n");
+ for(map<string,struct CheckSum>::const_iterator I = CheckSums.begin();
+ I != CheckSums.end(); ++I)
+ {
+ fprintf(Output, " %s %16llu %s\n",
+ (*I).second.MD5.c_str(),
+ (*I).second.size,
+ (*I).first.c_str());
+ }
}
-
- fprintf(Output, "SHA1:\n");
- for(map<string,struct CheckSum>::iterator I = CheckSums.begin();
- I != CheckSums.end();
- ++I)
+ if (DoSHA1 == true)
{
- fprintf(Output, " %s %16ld %s\n",
- (*I).second.SHA1.c_str(),
- (*I).second.size,
- (*I).first.c_str());
+ fprintf(Output, "SHA1:\n");
+ for(map<string,struct CheckSum>::const_iterator I = CheckSums.begin();
+ I != CheckSums.end(); ++I)
+ {
+ fprintf(Output, " %s %16llu %s\n",
+ (*I).second.SHA1.c_str(),
+ (*I).second.size,
+ (*I).first.c_str());
+ }
+ }
+ if (DoSHA256 == true)
+ {
+ fprintf(Output, "SHA256:\n");
+ for(map<string,struct CheckSum>::const_iterator I = CheckSums.begin();
+ I != CheckSums.end(); ++I)
+ {
+ fprintf(Output, " %s %16llu %s\n",
+ (*I).second.SHA256.c_str(),
+ (*I).second.size,
+ (*I).first.c_str());
+ }
}
- fprintf(Output, "SHA256:\n");
- for(map<string,struct CheckSum>::iterator I = CheckSums.begin();
+ fprintf(Output, "SHA512:\n");
+ for(map<string,struct CheckSum>::const_iterator I = CheckSums.begin();
I != CheckSums.end();
++I)
{
- fprintf(Output, " %s %16ld %s\n",
- (*I).second.SHA256.c_str(),
+ fprintf(Output, " %s %16llu %s\n",
+ (*I).second.SHA512.c_str(),
(*I).second.size,
(*I).first.c_str());
}
-}
+}