diff --git a/ftparchive/writer.cc b/ftparchive/writer.cc
index edc0fddea6b9f0bbc421bbacf8b0964d057527c7..db617e92ac9bfd43bd0f9aa3cf98a85023f34acf 100644
--- a/ftparchive/writer.cc
+++ b/ftparchive/writer.cc
 // Include Files                                                       /*{{{*/
 #include <config.h>
 
-#include <apt-pkg/strutl.h>
-#include <apt-pkg/error.h>
 #include <apt-pkg/configuration.h>
-#include <apt-pkg/aptconfiguration.h>
-#include <apt-pkg/md5.h>
-#include <apt-pkg/hashes.h>
 #include <apt-pkg/deblistparser.h>
+#include <apt-pkg/error.h>
 #include <apt-pkg/fileutl.h>
 #include <apt-pkg/gpgv.h>
+#include <apt-pkg/hashes.h>
+#include <apt-pkg/md5.h>
+#include <apt-pkg/strutl.h>
+#include <apt-pkg/debfile.h>
+#include <apt-pkg/pkgcache.h>
+#include <apt-pkg/sha1.h>
+#include <apt-pkg/sha2.h>
+#include <apt-pkg/tagfile.h>
 
+#include <ctype.h>
+#include <fnmatch.h>
+#include <ftw.h>
+#include <locale.h>
+#include <string.h>
+#include <sys/stat.h>
 #include <sys/types.h>
 #include <unistd.h>
 #include <ctime>
-#include <ftw.h>
-#include <fnmatch.h>
 #include <iostream>
 #include <sstream>
 #include <memory>
+#include <utility>
 
+#include "apt-ftparchive.h"
 #include "writer.h"
 #include "cachedb.h"
-#include "apt-ftparchive.h"
 #include "multicompress.h"
 
 #include <apti18n.h>
@@ -45,29 +54,42 @@ FTWScanner *FTWScanner::Owner;
 // SetTFRewriteData - Helper for setting rewrite lists                 /*{{{*/
 // ---------------------------------------------------------------------
 /* */
-inline void SetTFRewriteData(struct TFRewriteData &tfrd,
-                            const char *tag,
+static inline TFRewriteData SetTFRewriteData(const char *tag,
                             const char *rewrite,
                             const char *newtag = 0)
 {
-    tfrd.Tag = tag;
-    tfrd.Rewrite = rewrite;
-    tfrd.NewTag = newtag;
+   TFRewriteData tfrd;
+   tfrd.Tag = tag;
+   tfrd.Rewrite = rewrite;
+   tfrd.NewTag = newtag;
+   return tfrd;
+}
+                                                                       /*}}}*/
+// ConfigToDoHashes - which hashes to generate                         /*{{{*/
+static void SingleConfigToDoHashes(unsigned int &DoHashes, std::string const &Conf, unsigned int const Flag)
+{
+   if (_config->FindB(Conf, true) == true)
+      DoHashes |= Flag;
+   else
+      DoHashes &= ~Flag;
+}
+static void ConfigToDoHashes(unsigned int &DoHashes, std::string const &Conf)
+{
+   SingleConfigToDoHashes(DoHashes, Conf + "::MD5", Hashes::MD5SUM);
+   SingleConfigToDoHashes(DoHashes, Conf + "::SHA1", Hashes::SHA1SUM);
+   SingleConfigToDoHashes(DoHashes, Conf + "::SHA256", Hashes::SHA256SUM);
+   SingleConfigToDoHashes(DoHashes, Conf + "::SHA512", Hashes::SHA512SUM);
 }
                                                                        /*}}}*/
 
 // FTWScanner::FTWScanner - Constructor                                        /*{{{*/
 // ---------------------------------------------------------------------
 /* */
-FTWScanner::FTWScanner(string const &Arch): Arch(Arch)
+FTWScanner::FTWScanner(string const &Arch): Arch(Arch), DoHashes(~0)
 {
    ErrorPrinted = false;
    NoLinkAct = !_config->FindB("APT::FTPArchive::DeLinkAct",true);
-
-   DoMD5 = _config->FindB("APT::FTPArchive::MD5",true);
-   DoSHA1 = _config->FindB("APT::FTPArchive::SHA1",true);
-   DoSHA256 = _config->FindB("APT::FTPArchive::SHA256",true);
-   DoSHA512 = _config->FindB("APT::FTPArchive::SHA512",true);
+   ConfigToDoHashes(DoHashes, "APT::FTPArchive");
 }
                                                                        /*}}}*/
 // FTWScanner::Scanner - FTW Scanner                                   /*{{{*/
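Review note on the hunk above: the four DoMD5/DoSHA1/DoSHA256/DoSHA512 booleans collapse into one DoHashes bit field. FTWScanner now starts with every bit set (DoHashes(~0)) and ConfigToDoHashes() clears exactly the bits whose APT::FTPArchive::<hash> option is set to false; an unset option defaults to true here (the per-writer constructors later call the same helper with their own prefix). A minimal standalone sketch of that flag arithmetic, with stand-in bit values mirroring Hashes::MD5SUM and friends from apt-pkg/hashes.h (the real code uses those enum constants, not these local ones):

    #include <iostream>

    // Stand-in bit values; the real constants live in apt-pkg/hashes.h.
    enum SupportedHashes { MD5SUM = (1 << 0), SHA1SUM = (1 << 1),
                           SHA256SUM = (1 << 2), SHA512SUM = (1 << 3) };

    static void SetOrClear(unsigned int &DoHashes, unsigned int const Flag, bool const Enabled)
    {
       if (Enabled)              // option unset or "true": keep this hash
          DoHashes |= Flag;
       else                      // option explicitly "false": drop it
          DoHashes &= ~Flag;
    }

    int main()
    {
       unsigned int DoHashes = ~0u;            // as in the FTWScanner ctor: everything enabled
       SetOrClear(DoHashes, MD5SUM, false);    // e.g. APT::FTPArchive::MD5 "false";
       // The writers test a flag exactly like this:
       std::cout << "MD5 "    << (((DoHashes & MD5SUM) == MD5SUM) ? "on" : "off") << ", "
                 << "SHA256 " << (((DoHashes & SHA256SUM) == SHA256SUM) ? "on" : "off") << "\n";
       return 0;
    }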
@@ -318,10 +340,7 @@ PackagesWriter::PackagesWriter(string const &DB,string const &Overrides,string c
    DeLinkLimit = 0;
 
    // Process the command line options
-   DoMD5 = _config->FindB("APT::FTPArchive::Packages::MD5",DoMD5);
-   DoSHA1 = _config->FindB("APT::FTPArchive::Packages::SHA1",DoSHA1);
-   DoSHA256 = _config->FindB("APT::FTPArchive::Packages::SHA256",DoSHA256);
-   DoSHA512 = _config->FindB("APT::FTPArchive::Packages::SHA512",DoSHA512);
+   ConfigToDoHashes(DoHashes, "APT::FTPArchive::Packages");
    DoAlwaysStat = _config->FindB("APT::FTPArchive::AlwaysStat", false);
    DoContents = _config->FindB("APT::FTPArchive::Contents",true);
    NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false);
@@ -376,10 +395,14 @@ bool FTWScanner::SetExts(string const &Vals)
 bool PackagesWriter::DoPackage(string FileName)
 {      
    // Pull all the data we need from the DB
-   if (Db.GetFileInfo(FileName, true, DoContents, true, DoMD5, DoSHA1, DoSHA256, DoSHA512, DoAlwaysStat)
-                 == false)
+   if (Db.GetFileInfo(FileName,
+           true, /* DoControl */
+           DoContents,
+           true, /* GenContentsOnly */
+           false, /* DoSource */
+           DoHashes, DoAlwaysStat) == false)
    {
-      return false;
+     return false;
    }
 
    unsigned long long FileSize = Db.GetFileSize();
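Even with the hash booleans folded into one bit field, GetFileInfo() still takes several positional bools, which is why the inline /* ... */ argument comments were added at the call sites. Pieced together from those call sites (parameter names taken from the comments, so treat this as a reconstruction rather than a quote of cachedb.h), the new CacheDB::GetFileInfo() shape is roughly:

    // Reconstructed signature, for reading the calls in this diff; see cachedb.h for the real one.
    bool GetFileInfo(std::string const &FileName,
                     bool DoControl,          // parse the .deb control member
                     bool DoContents,         // collect the file list for Contents-*
                     bool GenContentsOnly,    // only the contents, skip the rest
                     bool DoSource,           // load the .dsc into Db.Dsc.Data/Length
                     unsigned int DoHashes,   // Hashes::MD5SUM | SHA1SUM | ... bit field
                     bool checkMtime = false);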
@@ -441,30 +464,27 @@ bool PackagesWriter::DoPackage(string FileName)
    }
 
    // This lists all the changes to the fields we are going to make.
-   // (7 hardcoded + maintainer + suggests + end marker)
-   TFRewriteData Changes[6+2+OverItem->FieldOverride.size()+1+1];
-
-   unsigned int End = 0;
-   SetTFRewriteData(Changes[End++], "Size", Size);
-   if (DoMD5 == true)
-      SetTFRewriteData(Changes[End++], "MD5sum", Db.MD5Res.c_str());
-   if (DoSHA1 == true)
-      SetTFRewriteData(Changes[End++], "SHA1", Db.SHA1Res.c_str());
-   if (DoSHA256 == true)
-      SetTFRewriteData(Changes[End++], "SHA256", Db.SHA256Res.c_str());
-   if (DoSHA512 == true)
-      SetTFRewriteData(Changes[End++], "SHA512", Db.SHA512Res.c_str());
-   SetTFRewriteData(Changes[End++], "Filename", NewFileName.c_str());
-   SetTFRewriteData(Changes[End++], "Priority", OverItem->Priority.c_str());
-   SetTFRewriteData(Changes[End++], "Status", 0);
-   SetTFRewriteData(Changes[End++], "Optional", 0);
+   std::vector<TFRewriteData> Changes;
+
+   Changes.push_back(SetTFRewriteData("Size", Size));
+   for (HashStringList::const_iterator hs = Db.HashesList.begin(); hs != Db.HashesList.end(); ++hs)
+   {
+      if (hs->HashType() == "MD5Sum")
+        Changes.push_back(SetTFRewriteData("MD5sum", hs->HashValue().c_str()));
+      else
+        Changes.push_back(SetTFRewriteData(hs->HashType().c_str(), hs->HashValue().c_str()));
+   }
+   Changes.push_back(SetTFRewriteData("Filename", NewFileName.c_str()));
+   Changes.push_back(SetTFRewriteData("Priority", OverItem->Priority.c_str()));
+   Changes.push_back(SetTFRewriteData("Status", 0));
+   Changes.push_back(SetTFRewriteData("Optional", 0));
 
    string DescriptionMd5;
    if (LongDescription == false) {
       MD5Summation descmd5;
       descmd5.Add(desc.c_str());
       DescriptionMd5 = descmd5.Result().Value();
-      SetTFRewriteData(Changes[End++], "Description-md5", DescriptionMd5.c_str());
+      Changes.push_back(SetTFRewriteData("Description-md5", DescriptionMd5.c_str()));
       if (TransWriter != NULL)
         TransWriter->DoPackage(Package, desc, DescriptionMd5);
    }
@@ -479,12 +499,12 @@ bool PackagesWriter::DoPackage(string FileName)
         NewLine(1);
         ioprintf(c1out, _("  %s maintainer is %s not %s\n"),
               Package.c_str(), Tags.FindS("Maintainer").c_str(), OverItem->OldMaint.c_str());
-      }      
+      }
    }
-   
+
    if (NewMaint.empty() == false)
-      SetTFRewriteData(Changes[End++], "Maintainer", NewMaint.c_str());
-   
+      Changes.push_back(SetTFRewriteData("Maintainer", NewMaint.c_str()));
+
    /* Get rid of the Optional tag. This is an ugly, ugly, ugly hack that
       dpkg-scanpackages does. Well sort of. dpkg-scanpackages just does renaming
       but dpkg does this append bit. So we do the append bit, at least that way the
@@ -495,17 +515,17 @@ bool PackagesWriter::DoPackage(string FileName)
    {
       if (Tags.FindS("Suggests").empty() == false)
         OptionalStr = Tags.FindS("Suggests") + ", " + OptionalStr;
-      SetTFRewriteData(Changes[End++], "Suggests", OptionalStr.c_str());
+      Changes.push_back(SetTFRewriteData("Suggests", OptionalStr.c_str()));
    }
 
-   for (map<string,string>::const_iterator I = OverItem->FieldOverride.begin(); 
+   for (map<string,string>::const_iterator I = OverItem->FieldOverride.begin();
         I != OverItem->FieldOverride.end(); ++I)
-      SetTFRewriteData(Changes[End++],I->first.c_str(),I->second.c_str());
+      Changes.push_back(SetTFRewriteData(I->first.c_str(),I->second.c_str()));
 
-   SetTFRewriteData(Changes[End++], 0, 0);
+   Changes.push_back(SetTFRewriteData( 0, 0));
 
    // Rewrite and store the fields.
-   if (TFRewrite(Output,Tags,TFRewritePackageOrder,Changes) == false)
+   if (TFRewrite(Output,Tags,TFRewritePackageOrder,Changes.data()) == false)
       return false;
    fprintf(Output,"\n");
 
@@ -576,10 +596,7 @@ SourcesWriter::SourcesWriter(string const &DB, string const &BOverrides,string c
    BufSize = 0;
    
    // Process the command line options
-   DoMD5 = _config->FindB("APT::FTPArchive::Sources::MD5",DoMD5);
-   DoSHA1 = _config->FindB("APT::FTPArchive::Sources::SHA1",DoSHA1);
-   DoSHA256 = _config->FindB("APT::FTPArchive::Sources::SHA256",DoSHA256);
-   DoSHA512 = _config->FindB("APT::FTPArchive::Sources::SHA512",DoSHA512);
+   ConfigToDoHashes(DoHashes, "APT::FTPArchive::Sources");
    NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false);
    DoAlwaysStat = _config->FindB("APT::FTPArchive::AlwaysStat", false);
 
@@ -601,63 +618,48 @@ SourcesWriter::SourcesWriter(string const &DB, string const &BOverrides,string c
 }
                                                                        /*}}}*/
 // SourcesWriter::DoPackage - Process a single package                 /*{{{*/
-// ---------------------------------------------------------------------
-/* */
+static std::ostream& addDscHash(std::ostream &out, unsigned int const DoHashes,
+      Hashes::SupportedHashes const DoIt, pkgTagSection &Tags, char const * const FieldName,
+      HashString const * const Hash, unsigned long long Size, std::string FileName)
+{
+   if ((DoHashes & DoIt) != DoIt || Tags.Exists(FieldName) == false || Hash == NULL)
+      return out;
+   out << "\n " << Hash->HashValue() << " " << Size << " " << FileName
+      << "\n " << Tags.FindS(FieldName);
+   return out;
+}
 bool SourcesWriter::DoPackage(string FileName)
 {
-   // Open the archive
-   FileFd F;
-   if (OpenMaybeClearSignedFile(FileName, F) == false)
-      return false;
-
-   unsigned long long const FSize = F.FileSize();
-   //FIXME: do we really need to enforce a maximum size of the dsc file?
-   if (FSize > 128*1024)
-      return _error->Error("DSC file '%s' is too large!",FileName.c_str());
-
-   if (BufSize < FSize + 2)
+   // Pull all the data we need from the DB
+   if (Db.GetFileInfo(FileName,
+           false, /* DoControl */
+           false, /* DoContents */
+           false, /* GenContentsOnly */
+           true, /* DoSource */
+           DoHashes, DoAlwaysStat) == false)
    {
-      BufSize = FSize + 2;
-      Buffer = (char *)realloc(Buffer , BufSize);
-   }
-
-   if (F.Read(Buffer, FSize) == false)
       return false;
+   }
 
-   // Stat the file for later (F might be clearsigned, so not F.FileSize())
-   struct stat St;
-   if (stat(FileName.c_str(), &St) != 0)
-      return _error->Errno("fstat","Failed to stat %s",FileName.c_str());
+   // We need to perform a "write" here (this is what Finish() does),
+   // because the Db.GetFileInfo() calls in the loop below will move
+   // the "db cursor".
+   Db.Finish();
 
-   // Hash the file
-   char *Start = Buffer;
-   char *BlkEnd = Buffer + FSize;
-
-   Hashes DscHashes;
-   if (FSize == (unsigned long long) St.st_size)
-   {
-      if (DoMD5 == true)
-        DscHashes.MD5.Add((unsigned char *)Start,BlkEnd - Start);
-      if (DoSHA1 == true)
-        DscHashes.SHA1.Add((unsigned char *)Start,BlkEnd - Start);
-      if (DoSHA256 == true)
-        DscHashes.SHA256.Add((unsigned char *)Start,BlkEnd - Start);
-      if (DoSHA512 == true)
-        DscHashes.SHA512.Add((unsigned char *)Start,BlkEnd - Start);
-   }
-   else
-   {
-      FileFd DscFile(FileName, FileFd::ReadOnly);
-      DscHashes.AddFD(DscFile, St.st_size, DoMD5, DoSHA1, DoSHA256, DoSHA512);
-   }
+   // read stuff
+   char *Start = Db.Dsc.Data;
+   char *BlkEnd = Db.Dsc.Data + Db.Dsc.Length;
 
    // Add extra \n to the end, just in case (as in clearsigned they are missing)
    *BlkEnd++ = '\n';
    *BlkEnd++ = '\n';
 
    pkgTagSection Tags;
-   if (Tags.Scan(Start,BlkEnd - Start) == false || Tags.Exists("Source") == false)
+   if (Tags.Scan(Start,BlkEnd - Start) == false)
       return _error->Error("Could not find a record in the DSC '%s'",FileName.c_str());
+   
+   if (Tags.Exists("Source") == false)
+      return _error->Error("Could not find a Source entry in the DSC '%s'",FileName.c_str());
    Tags.Trim();
 
    // Look up the override information, finding first the best priority.
@@ -705,6 +707,10 @@ bool SourcesWriter::DoPackage(string FileName)
       OverItem = auto_ptr<Override::Item>(new Override::Item);
    }
    
+   struct stat St;
+   if (stat(FileName.c_str(), &St) != 0)
+      return _error->Errno("fstat","Failed to stat %s",FileName.c_str());
+
    auto_ptr<Override::Item> SOverItem(SOver.GetItem(Tags.FindS("Source")));
    // const auto_ptr<Override::Item> autoSOverItem(SOverItem);
    if (SOverItem.get() == 0)
@@ -718,29 +724,19 @@ bool SourcesWriter::DoPackage(string FileName)
         *SOverItem = *OverItem;
       }
    }
-   
+
    // Add the dsc to the files hash list
    string const strippedName = flNotDir(FileName);
    std::ostringstream ostreamFiles;
-   if (DoMD5 == true && Tags.Exists("Files"))
-      ostreamFiles << "\n " << string(DscHashes.MD5.Result()) << " " << St.st_size << " "
-                  << strippedName << "\n " << Tags.FindS("Files");
+   addDscHash(ostreamFiles, DoHashes, Hashes::MD5SUM, Tags, "Files", Db.HashesList.find("MD5Sum"), St.st_size, strippedName);
    string const Files = ostreamFiles.str();
 
    std::ostringstream ostreamSha1;
-   if (DoSHA1 == true && Tags.Exists("Checksums-Sha1"))
-      ostreamSha1 << "\n " << string(DscHashes.SHA1.Result()) << " " << St.st_size << " "
-                  << strippedName << "\n " << Tags.FindS("Checksums-Sha1");
-
+   addDscHash(ostreamSha1, DoHashes, Hashes::SHA1SUM, Tags, "Checksums-Sha1", Db.HashesList.find("SHA1"), St.st_size, strippedName);
    std::ostringstream ostreamSha256;
-   if (DoSHA256 == true && Tags.Exists("Checksums-Sha256"))
-      ostreamSha256 << "\n " << string(DscHashes.SHA256.Result()) << " " << St.st_size << " "
-                  << strippedName << "\n " << Tags.FindS("Checksums-Sha256");
-
+   addDscHash(ostreamSha256, DoHashes, Hashes::SHA256SUM, Tags, "Checksums-Sha256", Db.HashesList.find("SHA256"), St.st_size, strippedName);
    std::ostringstream ostreamSha512;
-   if (DoSHA512 == true && Tags.Exists("Checksums-Sha512"))
-      ostreamSha512 << "\n " << string(DscHashes.SHA512.Result()) << " " << St.st_size << " "
-                  << strippedName << "\n " << Tags.FindS("Checksums-Sha512");
+   addDscHash(ostreamSha512, DoHashes, Hashes::SHA512SUM, Tags, "Checksums-Sha512", Db.HashesList.find("SHA512"), St.st_size, strippedName);
 
    // Strip the DirStrip prefix from the FileName and add the PathPrefix
    string NewFileName;
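For orientation, the addDscHash() calls above prepend the .dsc's own checksum line and then re-attach the per-file lines already present in the .dsc, so the resulting Sources field keeps the usual continuation-line layout, roughly (digests and sizes below are placeholders):

    Checksums-Sha256:
     <64 hex digits>   1743 hello_2.9-1.dsc
     <64 hex digits> 725232 hello_2.9.orig.tar.gz
     <64 hex digits>   6132 hello_2.9-1.debian.tar.xz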
@@ -772,27 +768,54 @@ bool SourcesWriter::DoPackage(string FileName)
       string OriginalPath = Directory + ParseJnk;
 
       // Add missing hashes to source files
-      if ((DoSHA1 == true && !Tags.Exists("Checksums-Sha1")) ||
-          (DoSHA256 == true && !Tags.Exists("Checksums-Sha256")) ||
-          (DoSHA512 == true && !Tags.Exists("Checksums-Sha512")))
+      if (((DoHashes & Hashes::SHA1SUM) == Hashes::SHA1SUM && !Tags.Exists("Checksums-Sha1")) ||
+          ((DoHashes & Hashes::SHA256SUM) == Hashes::SHA256SUM && !Tags.Exists("Checksums-Sha256")) ||
+          ((DoHashes & Hashes::SHA512SUM) == Hashes::SHA512SUM && !Tags.Exists("Checksums-Sha512")))
       {
-         if (Db.GetFileInfo(OriginalPath, false, false, false, DoMD5, DoSHA1, DoSHA256, DoSHA512, DoAlwaysStat)
-               == false)
+         if (Db.GetFileInfo(OriginalPath,
+                            false, /* DoControl */
+                            false, /* DoContents */
+                            false, /* GenContentsOnly */
+                            false, /* DoSource */
+                            DoHashes,
+                            DoAlwaysStat) == false)
          {
             return _error->Error("Error getting file info");
          }
 
-         if (DoSHA1 == true && !Tags.Exists("Checksums-Sha1"))
-            ostreamSha1 << "\n " << string(Db.SHA1Res) << " "
-               << Db.GetFileSize() << " " << ParseJnk;
-
-         if (DoSHA256 == true && !Tags.Exists("Checksums-Sha256"))
-            ostreamSha256 << "\n " << string(Db.SHA256Res) << " "
-               << Db.GetFileSize() << " " << ParseJnk;
+         for (HashStringList::const_iterator hs = Db.HashesList.begin(); hs != Db.HashesList.end(); ++hs)
+        {
+           if (hs->HashType() == "MD5Sum")
+              continue;
+           char const * fieldname;
+           std::ostream * out;
+           if (hs->HashType() == "SHA1")
+           {
+              fieldname = "Checksums-Sha1";
+              out = &ostreamSha1;
+           }
+           else if (hs->HashType() == "SHA256")
+           {
+              fieldname = "Checksums-Sha256";
+              out = &ostreamSha256;
+           }
+           else if (hs->HashType() == "SHA512")
+           {
+              fieldname = "Checksums-Sha512";
+              out = &ostreamSha512;
+           }
+           else
+           {
+              _error->Warning("Ignoring unknown Checksumtype %s in SourcesWriter::DoPackages", hs->HashType().c_str());
+              continue;
+           }
+           if (Tags.Exists(fieldname) == true)
+              continue;
+           (*out) << "\n " << hs->HashValue() << " " << Db.GetFileSize() << " " << ParseJnk;
+        }
 
-         if (DoSHA512 == true && !Tags.Exists("Checksums-Sha512"))
-            ostreamSha512 << "\n " << string(Db.SHA512Res) << " "
-               << Db.GetFileSize() << " " << ParseJnk;
+        // write back the GetFileInfo() stats data 
+        Db.Finish();
       }
 
       // Perform the delinking operation
@@ -818,22 +841,21 @@ bool SourcesWriter::DoPackage(string FileName)
 
    // This lists all the changes to the fields we are going to make.
    // (5 hardcoded + checksums + maintainer + end marker)
-   TFRewriteData Changes[5+2+1+SOverItem->FieldOverride.size()+1];
+   std::vector<TFRewriteData> Changes;
 
-   unsigned int End = 0;
-   SetTFRewriteData(Changes[End++],"Source",Package.c_str(),"Package");
+   Changes.push_back(SetTFRewriteData("Source",Package.c_str(),"Package"));
    if (Files.empty() == false)
-      SetTFRewriteData(Changes[End++],"Files",Files.c_str());
+      Changes.push_back(SetTFRewriteData("Files",Files.c_str()));
    if (ChecksumsSha1.empty() == false)
-      SetTFRewriteData(Changes[End++],"Checksums-Sha1",ChecksumsSha1.c_str());
+      Changes.push_back(SetTFRewriteData("Checksums-Sha1",ChecksumsSha1.c_str()));
    if (ChecksumsSha256.empty() == false)
-      SetTFRewriteData(Changes[End++],"Checksums-Sha256",ChecksumsSha256.c_str());
+      Changes.push_back(SetTFRewriteData("Checksums-Sha256",ChecksumsSha256.c_str()));
    if (ChecksumsSha512.empty() == false)
-      SetTFRewriteData(Changes[End++],"Checksums-Sha512",ChecksumsSha512.c_str());
+      Changes.push_back(SetTFRewriteData("Checksums-Sha512",ChecksumsSha512.c_str()));
    if (Directory != "./")
-      SetTFRewriteData(Changes[End++],"Directory",Directory.c_str());
-   SetTFRewriteData(Changes[End++],"Priority",BestPrio.c_str());
-   SetTFRewriteData(Changes[End++],"Status",0);
+      Changes.push_back(SetTFRewriteData("Directory",Directory.c_str()));
+   Changes.push_back(SetTFRewriteData("Priority",BestPrio.c_str()));
+   Changes.push_back(SetTFRewriteData("Status",0));
 
    // Rewrite the maintainer field if necessary
    bool MaintFailed;
@@ -848,22 +870,22 @@ bool SourcesWriter::DoPackage(string FileName)
       }      
    }
    if (NewMaint.empty() == false)
-      SetTFRewriteData(Changes[End++], "Maintainer", NewMaint.c_str());
+      Changes.push_back(SetTFRewriteData("Maintainer", NewMaint.c_str()));
    
    for (map<string,string>::const_iterator I = SOverItem->FieldOverride.begin(); 
         I != SOverItem->FieldOverride.end(); ++I)
-      SetTFRewriteData(Changes[End++],I->first.c_str(),I->second.c_str());
+      Changes.push_back(SetTFRewriteData(I->first.c_str(),I->second.c_str()));
 
-   SetTFRewriteData(Changes[End++], 0, 0);
+   Changes.push_back(SetTFRewriteData(0, 0));
       
    // Rewrite and store the fields.
-   if (TFRewrite(Output,Tags,TFRewriteSourceOrder,Changes) == false)
+   if (TFRewrite(Output,Tags,TFRewriteSourceOrder,Changes.data()) == false)
       return false;
    fprintf(Output,"\n");
 
    Stats.Packages++;
    
-   return Db.Finish();
+   return true;
 }
                                                                        /*}}}*/
 
@@ -884,7 +906,13 @@ ContentsWriter::ContentsWriter(string const &DB, string const &Arch) :
    determine what the package name is. */
 bool ContentsWriter::DoPackage(string FileName, string Package)
 {
-   if (!Db.GetFileInfo(FileName, Package.empty(), true, false, false, false, false, false))
+   if (!Db.GetFileInfo(FileName,
+           Package.empty(), /* DoControl */
+           true, /* DoContents */
+           false, /* GenContentsOnly */
+           false, /* DoSource */
+           0, /* DoHashes */
+           false /* checkMtime */))
    {
       return false;
    }
@@ -1020,9 +1048,7 @@ ReleaseWriter::ReleaseWriter(string const &/*DB*/)
       fprintf(Output, "%s: %s\n", (*I).first.c_str(), Value.c_str());
    }
 
-   DoMD5 = _config->FindB("APT::FTPArchive::Release::MD5",DoMD5);
-   DoSHA1 = _config->FindB("APT::FTPArchive::Release::SHA1",DoSHA1);
-   DoSHA256 = _config->FindB("APT::FTPArchive::Release::SHA256",DoSHA256);
+   ConfigToDoHashes(DoHashes, "APT::FTPArchive::Release");
 }
                                                                        /*}}}*/
 // ReleaseWriter::DoPackage - Process a single package                 /*{{{*/
@@ -1056,15 +1082,8 @@ bool ReleaseWriter::DoPackage(string FileName)
    CheckSums[NewFileName].size = fd.Size();
 
    Hashes hs;
-   hs.AddFD(fd, 0, DoMD5, DoSHA1, DoSHA256, DoSHA512);
-   if (DoMD5 == true)
-      CheckSums[NewFileName].MD5 = hs.MD5.Result();
-   if (DoSHA1 == true)
-      CheckSums[NewFileName].SHA1 = hs.SHA1.Result();
-   if (DoSHA256 == true)
-      CheckSums[NewFileName].SHA256 = hs.SHA256.Result();
-   if (DoSHA512 == true)
-      CheckSums[NewFileName].SHA512 = hs.SHA512.Result();
+   hs.AddFD(fd, 0, DoHashes);
+   CheckSums[NewFileName].Hashes = hs.GetHashStringList();
    fd.Close();
 
    return true;
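The per-file hashing is now a single pass: AddFD() is told which digests to compute via the same DoHashes bit field, and GetHashStringList() hands back every result at once. A hedged usage sketch of that API outside this writer (the path and the digest subset are invented for illustration; the needed headers are the ones already included at the top of this file):

    FileFd fd("/srv/repo/dists/unstable/main/binary-amd64/Packages", FileFd::ReadOnly);
    Hashes hs;
    hs.AddFD(fd, 0, Hashes::SHA256SUM | Hashes::SHA512SUM);   // 0 for the size argument, as in the call above
    HashStringList const hsl = hs.GetHashStringList();
    if (HashString const * const sha256 = hsl.find("SHA256"))
       std::cout << "SHA256 " << sha256->HashValue() << std::endl;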
@@ -1073,54 +1092,29 @@ bool ReleaseWriter::DoPackage(string FileName)
                                                                        /*}}}*/
 // ReleaseWriter::Finish - Output the checksums                                /*{{{*/
 // ---------------------------------------------------------------------
-void ReleaseWriter::Finish()
+static void printChecksumTypeRecord(FILE * const Output, char const * const Type, map<string, ReleaseWriter::CheckSum> const &CheckSums)
 {
-   if (DoMD5 == true)
-   {
-      fprintf(Output, "MD5Sum:\n");
-      for(map<string,struct CheckSum>::const_iterator I = CheckSums.begin();
-         I != CheckSums.end(); ++I)
-      {
-        fprintf(Output, " %s %16llu %s\n",
-                (*I).second.MD5.c_str(),
-                (*I).second.size,
-                (*I).first.c_str());
-      }
-   }
-   if (DoSHA1 == true)
-   {
-      fprintf(Output, "SHA1:\n");
-      for(map<string,struct CheckSum>::const_iterator I = CheckSums.begin();
-         I != CheckSums.end(); ++I)
-      {
-        fprintf(Output, " %s %16llu %s\n",
-                (*I).second.SHA1.c_str(),
-                (*I).second.size,
-                (*I).first.c_str());
-      }
-   }
-   if (DoSHA256 == true)
-   {
-      fprintf(Output, "SHA256:\n");
-      for(map<string,struct CheckSum>::const_iterator I = CheckSums.begin();
+      fprintf(Output, "%s:\n", Type);
+      for(map<string,ReleaseWriter::CheckSum>::const_iterator I = CheckSums.begin();
          I != CheckSums.end(); ++I)
       {
+        HashString const * const hs = I->second.Hashes.find(Type);
+        if (hs == NULL)
+           continue;
         fprintf(Output, " %s %16llu %s\n",
-                (*I).second.SHA256.c_str(),
+                hs->HashValue().c_str(),
                 (*I).second.size,
                 (*I).first.c_str());
       }
-   }
-
-   fprintf(Output, "SHA512:\n");
-   for(map<string,struct CheckSum>::const_iterator I = CheckSums.begin();
-       I != CheckSums.end();
-       ++I)
-   {
-      fprintf(Output, " %s %16llu %s\n",
-              (*I).second.SHA512.c_str(),
-              (*I).second.size,
-              (*I).first.c_str());
-   }
-
+}
+void ReleaseWriter::Finish()
+{
+   if ((DoHashes & Hashes::MD5SUM) == Hashes::MD5SUM)
+      printChecksumTypeRecord(Output, "MD5Sum", CheckSums);
+   if ((DoHashes & Hashes::SHA1SUM) == Hashes::SHA1SUM)
+      printChecksumTypeRecord(Output, "SHA1", CheckSums);
+   if ((DoHashes & Hashes::SHA256SUM) == Hashes::SHA256SUM)
+      printChecksumTypeRecord(Output, "SHA256", CheckSums);
+   if ((DoHashes & Hashes::SHA512SUM) == Hashes::SHA512SUM)
+      printChecksumTypeRecord(Output, "SHA512", CheckSums);
 }
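End-result check: given the " %s %16llu %s\n" format used above, each checksum type enabled in DoHashes becomes one block of the familiar Release shape (digests and sizes below are placeholders):

    MD5Sum:
     <32 hex digits>            28683 main/binary-amd64/Packages
     <32 hex digits>              817 main/binary-amd64/Release
    SHA256:
     <64 hex digits>            28683 main/binary-amd64/Packages
     <64 hex digits>              817 main/binary-amd64/Release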