don't change owner/perms/times through file:// symlinks
diff --git a/ftparchive/writer.cc b/ftparchive/writer.cc
index b2ebdca8a30190c6fda77cdc0fd7bcbf90a0fd3c..c34a04d1a387d225184bfced5d79a2ec3bdfd33b 100644
    ##################################################################### */
                                                                        /*}}}*/
 // Include Files                                                       /*{{{*/
-#include "writer.h"
-    
-#include <apti18n.h>
-#include <apt-pkg/strutl.h>
-#include <apt-pkg/error.h>
+#include <config.h>
+
 #include <apt-pkg/configuration.h>
+#include <apt-pkg/deblistparser.h>
+#include <apt-pkg/error.h>
+#include <apt-pkg/fileutl.h>
+#include <apt-pkg/gpgv.h>
+#include <apt-pkg/hashes.h>
 #include <apt-pkg/md5.h>
+#include <apt-pkg/strutl.h>
+#include <apt-pkg/debfile.h>
+#include <apt-pkg/pkgcache.h>
 #include <apt-pkg/sha1.h>
-#include <apt-pkg/sha256.h>
-#include <apt-pkg/deblistparser.h>
+#include <apt-pkg/sha2.h>
+#include <apt-pkg/tagfile.h>
 
+#include <ctype.h>
+#include <fnmatch.h>
+#include <ftw.h>
+#include <locale.h>
+#include <string.h>
+#include <sys/stat.h>
 #include <sys/types.h>
 #include <unistd.h>
 #include <ctime>
-#include <ftw.h>
-#include <fnmatch.h>
 #include <iostream>
+#include <iomanip>
+#include <sstream>
 #include <memory>
-    
-#include "cachedb.h"
+#include <utility>
+#include <algorithm>
+
 #include "apt-ftparchive.h"
+#include "writer.h"
+#include "cachedb.h"
 #include "multicompress.h"
+#include "byhash.h"
+
+#include <apti18n.h>
                                                                        /*}}}*/
 using namespace std;
 FTWScanner *FTWScanner::Owner;
 
-// SetTFRewriteData - Helper for setting rewrite lists                 /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-inline void SetTFRewriteData(struct TFRewriteData &tfrd,
-                            const char *tag,
-                            const char *rewrite,
-                            const char *newtag = 0)
+// ConfigToDoHashes - which hashes to generate                         /*{{{*/
+static void SingleConfigToDoHashes(unsigned int &DoHashes, std::string const &Conf, unsigned int const Flag)
+{
+   if (_config->FindB(Conf, (DoHashes & Flag) == Flag) == true)
+      DoHashes |= Flag;
+   else
+      DoHashes &= ~Flag;
+}
+static void ConfigToDoHashes(unsigned int &DoHashes, std::string const &Conf)
 {
-    tfrd.Tag = tag;
-    tfrd.Rewrite = rewrite;
-    tfrd.NewTag = newtag;
+   SingleConfigToDoHashes(DoHashes, Conf + "::MD5", Hashes::MD5SUM);
+   SingleConfigToDoHashes(DoHashes, Conf + "::SHA1", Hashes::SHA1SUM);
+   SingleConfigToDoHashes(DoHashes, Conf + "::SHA256", Hashes::SHA256SUM);
+   SingleConfigToDoHashes(DoHashes, Conf + "::SHA512", Hashes::SHA512SUM);
 }
                                                                        /*}}}*/
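
A standalone sketch (not part of the patch) of how this toggling behaves: each option's default is the bit's current state, so a key that is not set explicitly inherits whatever the wider scope, or the constructor's ~0 default, already decided. The FindB/Toggle/Scope helpers below are stand-ins for apt's _config and the two functions above; only the key names are taken from the diff.

```cpp
#include <cstdio>
#include <map>
#include <string>

enum { MD5SUM = 1 << 0, SHA1SUM = 1 << 1, SHA256SUM = 1 << 2, SHA512SUM = 1 << 3 };

static std::map<std::string, bool> Explicit;              // explicitly set options only

static bool FindB(std::string const &Key, bool Default)   // stands in for _config->FindB()
{
   std::map<std::string, bool>::const_iterator const I = Explicit.find(Key);
   return I == Explicit.end() ? Default : I->second;
}

static void Toggle(unsigned int &DoHashes, std::string const &Key, unsigned int const Flag)
{
   if (FindB(Key, (DoHashes & Flag) == Flag))              // default: keep the current bit
      DoHashes |= Flag;
   else
      DoHashes &= ~Flag;
}

static void Scope(unsigned int &DoHashes, std::string const &Conf)
{
   Toggle(DoHashes, Conf + "::MD5", MD5SUM);
   Toggle(DoHashes, Conf + "::SHA1", SHA1SUM);
   Toggle(DoHashes, Conf + "::SHA256", SHA256SUM);
   Toggle(DoHashes, Conf + "::SHA512", SHA512SUM);
}

int main()
{
   Explicit["APT::FTPArchive::SHA512"] = false;            // globally disable SHA512
   Explicit["APT::FTPArchive::Packages::MD5"] = false;     // drop MD5 for Packages only

   unsigned int DoHashes = ~0u;                            // FTWScanner constructor default
   Scope(DoHashes, "APT::FTPArchive");                     // done in FTWScanner()
   Scope(DoHashes, "APT::FTPArchive::Packages");           // done in PackagesWriter()

   std::printf("MD5=%d SHA1=%d SHA256=%d SHA512=%d\n",
               !!(DoHashes & MD5SUM), !!(DoHashes & SHA1SUM),
               !!(DoHashes & SHA256SUM), !!(DoHashes & SHA512SUM));
   // prints: MD5=0 SHA1=1 SHA256=1 SHA512=0
}
```
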
 
 // FTWScanner::FTWScanner - Constructor                                        /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-FTWScanner::FTWScanner()
+FTWScanner::FTWScanner(FileFd * const GivenOutput, string const &Arch, bool const IncludeArchAll)
+   : Arch(Arch), IncludeArchAll(IncludeArchAll), DoHashes(~0)
 {
+   if (GivenOutput == NULL)
+   {
+      Output = new FileFd;
+      OwnsOutput = true;
+      Output->OpenDescriptor(STDOUT_FILENO, FileFd::WriteOnly, false);
+   }
+   else
+   {
+      Output = GivenOutput;
+      OwnsOutput = false;
+   }
    ErrorPrinted = false;
    NoLinkAct = !_config->FindB("APT::FTPArchive::DeLinkAct",true);
-   RealPath = 0;
-   long PMax = pathconf(".",_PC_PATH_MAX);
-   if (PMax > 0)
-      RealPath = new char[PMax];
+   ConfigToDoHashes(DoHashes, "APT::FTPArchive");
 }
                                                                        /*}}}*/
+FTWScanner::~FTWScanner()
+{
+   if (Output != NULL && OwnsOutput)
+      delete Output;
+}
 // FTWScanner::Scanner - FTW Scanner                                   /*{{{*/
 // ---------------------------------------------------------------------
-/* This is the FTW scanner, it processes each directory element in the 
+/* This is the FTW scanner, it processes each directory element in the
    directory tree. */
-int FTWScanner::ScannerFTW(const char *File,const struct stat *sb,int Flag)
+int FTWScanner::ScannerFTW(const char *File,const struct stat * /*sb*/,int Flag)
 {
    if (Flag == FTW_DNR)
    {
@@ -89,15 +121,17 @@ int FTWScanner::ScannerFTW(const char *File,const struct stat *sb,int Flag)
 // FTWScanner::ScannerFile - File Scanner                              /*{{{*/
 // ---------------------------------------------------------------------
 /* */
-int FTWScanner::ScannerFile(const char *File, bool ReadLink)
+int FTWScanner::ScannerFile(const char *File, bool const &ReadLink)
 {
    const char *LastComponent = strrchr(File, '/');
+   char *RealPath = NULL;
+
    if (LastComponent == NULL)
       LastComponent = File;
    else
       LastComponent++;
 
-   vector<string>::iterator I;
+   vector<string>::const_iterator I;
    for(I = Owner->Patterns.begin(); I != Owner->Patterns.end(); ++I)
    {
       if (fnmatch((*I).c_str(), LastComponent, 0) == 0)
@@ -111,10 +145,13 @@ int FTWScanner::ScannerFile(const char *File, bool ReadLink)
       given are not links themselves. */
    char Jnk[2];
    Owner->OriginalPath = File;
-   if (ReadLink && Owner->RealPath != 0 &&
+   if (ReadLink &&
        readlink(File,Jnk,sizeof(Jnk)) != -1 &&
-       realpath(File,Owner->RealPath) != 0)
-      Owner->DoPackage(Owner->RealPath);
+       (RealPath = realpath(File,NULL)) != 0)
+   {
+      Owner->DoPackage(RealPath);
+      free(RealPath);
+   }
    else
       Owner->DoPackage(File);
    
@@ -127,7 +164,7 @@ int FTWScanner::ScannerFile(const char *File, bool ReadLink)
       {
         Owner->NewLine(1);
         
-        bool Type = _error->PopMessage(Err);
+        bool const Type = _error->PopMessage(Err);
         if (Type == true)
            cerr << _("E: ") << Err << endl;
         else
@@ -148,20 +185,22 @@ int FTWScanner::ScannerFile(const char *File, bool ReadLink)
 // FTWScanner::RecursiveScan - Just scan a directory tree              /*{{{*/
 // ---------------------------------------------------------------------
 /* */
-bool FTWScanner::RecursiveScan(string Dir)
+bool FTWScanner::RecursiveScan(string const &Dir)
 {
+   char *RealPath = NULL;
    /* If noprefix is set then jam the scan root in, so we don't generate
       link followed paths out of control */
    if (InternalPrefix.empty() == true)
    {
-      if (realpath(Dir.c_str(),RealPath) == 0)
+      if ((RealPath = realpath(Dir.c_str(),NULL)) == 0)
         return _error->Errno("realpath",_("Failed to resolve %s"),Dir.c_str());
-      InternalPrefix = RealPath;      
+      InternalPrefix = RealPath;
+      free(RealPath);
    }
    
    // Do recursive directory searching
    Owner = this;
-   int Res = ftw(Dir.c_str(),ScannerFTW,30);
+   int const Res = ftw(Dir.c_str(),ScannerFTW,30);
    
    // Error treewalking?
    if (Res != 0)
@@ -178,15 +217,17 @@ bool FTWScanner::RecursiveScan(string Dir)
 // ---------------------------------------------------------------------
 /* This is an alternative to using FTW to locate files, it reads the list
    of files from another file. */
-bool FTWScanner::LoadFileList(string Dir,string File)
+bool FTWScanner::LoadFileList(string const &Dir, string const &File)
 {
+   char *RealPath = NULL;
    /* If noprefix is set then jam the scan root in, so we don't generate
       link followed paths out of control */
    if (InternalPrefix.empty() == true)
    {
-      if (realpath(Dir.c_str(),RealPath) == 0)
+      if ((RealPath = realpath(Dir.c_str(),NULL)) == 0)
         return _error->Errno("realpath",_("Failed to resolve %s"),Dir.c_str());
       InternalPrefix = RealPath;      
+      free(RealPath);
    }
    
    Owner = this;
@@ -235,8 +276,8 @@ bool FTWScanner::LoadFileList(string Dir,string File)
 // ---------------------------------------------------------------------
 /* */
 bool FTWScanner::Delink(string &FileName,const char *OriginalPath,
-                       unsigned long &DeLinkBytes,
-                       off_t FileSize)
+                       unsigned long long &DeLinkBytes,
+                       unsigned long long const &FileSize)
 {
    // See if this isn't an internaly prefix'd file name.
    if (InternalPrefix.empty() == false &&
@@ -262,14 +303,13 @@ bool FTWScanner::Delink(string &FileName,const char *OriginalPath,
               _error->Errno("readlink",_("Failed to readlink %s"),OriginalPath);
            else
            {
-              if (unlink(OriginalPath) != 0)
-                 _error->Errno("unlink",_("Failed to unlink %s"),OriginalPath);
-              else
+              if (RemoveFile("FTWScanner::Delink", OriginalPath))
               {
                  if (link(FileName.c_str(),OriginalPath) != 0)
                  {
                     // Panic! Restore the symlink
-                    symlink(OldLink,OriginalPath);
+                    if (symlink(OldLink,OriginalPath) != 0)
+                        _error->Errno("symlink", "failed to restore symlink");
                     return _error->Errno("link",_("*** Failed to link %s to %s"),
                                          FileName.c_str(),
                                          OriginalPath);
@@ -286,6 +326,32 @@ bool FTWScanner::Delink(string &FileName,const char *OriginalPath,
       FileName = OriginalPath;
    }
    
+   return true;
+}
+                                                                       /*}}}*/
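
For orientation, the delink operation this hunk hardens reduces to the POSIX sequence below: resolve the symlink, remove it, hard-link the real file into its place, and put the symlink back if that fails. This is a minimal sketch only; the patch additionally routes the removal through RemoveFile(), honours DeLinkLimit and accounts the saved bytes in DeLinkBytes.

```cpp
#include <cstdio>
#include <limits.h>
#include <unistd.h>

// Replace SymlinkPath (a symlink) with a hard link to ResolvedPath, putting
// the symlink back if the hard link cannot be created.
static bool DelinkSketch(const char *SymlinkPath, const char *ResolvedPath)
{
   char OldLink[PATH_MAX + 1];
   ssize_t const Len = readlink(SymlinkPath, OldLink, PATH_MAX);
   if (Len < 0)
      return false;                          // not a symlink (or unreadable)
   OldLink[Len] = '\0';

   if (unlink(SymlinkPath) != 0)             // the patch routes this through RemoveFile()
      return false;

   if (link(ResolvedPath, SymlinkPath) != 0)
   {
      // Panic! Restore the symlink and, as in the patch, check the result.
      if (symlink(OldLink, SymlinkPath) != 0)
         std::fprintf(stderr, "failed to restore symlink %s\n", SymlinkPath);
      return false;
   }
   return true;
}

int main(int argc, char **argv)
{
   if (argc != 3)
   {
      std::fprintf(stderr, "usage: %s <symlink> <resolved-target>\n", argv[0]);
      return 2;
   }
   return DelinkSketch(argv[1], argv[2]) ? 0 : 1;
}
```
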
+// FTWScanner::SetExts - Set extensions to support                      /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool FTWScanner::SetExts(string const &Vals)
+{
+   ClearPatterns();
+   string::size_type Start = 0;
+   while (Start <= Vals.length()-1)
+   {
+      string::size_type const Space = Vals.find(' ',Start);
+      string::size_type const Length = ((Space == string::npos) ? Vals.length() : Space) - Start;
+      if ( Arch.empty() == false )
+      {
+        AddPattern(string("*_") + Arch + Vals.substr(Start, Length));
+        if (IncludeArchAll == true && Arch != "all")
+           AddPattern(string("*_all") + Vals.substr(Start, Length));
+      }
+      else
+        AddPattern(string("*") + Vals.substr(Start, Length));
+
+      Start += Length + 1;
+   }
+
    return true;
 }
                                                                        /*}}}*/
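
The effect of this relocated, architecture-aware SetExts() (the old architecture-agnostic version is removed further down in this diff) is easiest to see by expanding an example by hand; the Arch, IncludeArchAll and extension values below are illustrative:

```cpp
#include <iostream>
#include <string>
#include <vector>

// Same expansion as SetExts() above, collected into a vector for inspection.
static std::vector<std::string> ExpandExts(std::string const &Vals,
                                           std::string const &Arch,
                                           bool const IncludeArchAll)
{
   std::vector<std::string> Patterns;
   std::string::size_type Start = 0;
   while (Start <= Vals.length() - 1)
   {
      std::string::size_type const Space = Vals.find(' ', Start);
      std::string::size_type const Length =
         ((Space == std::string::npos) ? Vals.length() : Space) - Start;
      std::string const Ext = Vals.substr(Start, Length);
      if (Arch.empty() == false)
      {
         Patterns.push_back("*_" + Arch + Ext);
         if (IncludeArchAll == true && Arch != "all")
            Patterns.push_back("*_all" + Ext);
      }
      else
         Patterns.push_back("*" + Ext);
      Start += Length + 1;
   }
   return Patterns;
}

int main()
{
   for (std::string const &P : ExpandExts(".deb .udeb", "amd64", true))
      std::cout << P << "\n";
   // prints: *_amd64.deb  *_all.deb  *_amd64.udeb  *_all.udeb (one per line)
}
```

With an empty Arch the loop falls back to plain `*.deb` / `*.udeb`, matching the old behaviour.
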
@@ -293,26 +359,24 @@ bool FTWScanner::Delink(string &FileName,const char *OriginalPath,
 // PackagesWriter::PackagesWriter - Constructor                                /*{{{*/
 // ---------------------------------------------------------------------
 /* */
-PackagesWriter::PackagesWriter(string DB,string Overrides,string ExtOverrides,
-                              string aArch) :
-   Db(DB),Stats(Db.Stats), Arch(aArch)
+PackagesWriter::PackagesWriter(FileFd * const GivenOutput, TranslationWriter * const transWriter,
+      string const &DB,string const &Overrides,string const &ExtOverrides,
+      string const &Arch, bool const IncludeArchAll) :
+   FTWScanner(GivenOutput, Arch, IncludeArchAll), Db(DB), Stats(Db.Stats), TransWriter(transWriter)
 {
-   Output = stdout;
-   SetExts(".deb .udeb .foo .bar .baz");
-   AddPattern("*.deb");
+   SetExts(".deb .udeb");
    DeLinkLimit = 0;
-   
+
    // Process the command line options
-   DoMD5 = _config->FindB("APT::FTPArchive::MD5",true);
-   DoSHA1 = _config->FindB("APT::FTPArchive::SHA1",true);
-   DoSHA256 = _config->FindB("APT::FTPArchive::SHA256",true);
+   ConfigToDoHashes(DoHashes, "APT::FTPArchive::Packages");
+   DoAlwaysStat = _config->FindB("APT::FTPArchive::AlwaysStat", false);
    DoContents = _config->FindB("APT::FTPArchive::Contents",true);
    NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false);
    LongDescription = _config->FindB("APT::FTPArchive::LongDescription",true);
 
    if (Db.Loaded() == false)
       DoContents = false;
-      
+
    // Read the override file
    if (Overrides.empty() == false && Over.ReadOverride(Overrides) == false)
       return;
@@ -325,33 +389,6 @@ PackagesWriter::PackagesWriter(string DB,string Overrides,string ExtOverrides,
    _error->DumpErrors();
 }
                                                                         /*}}}*/
-// FTWScanner::SetExts - Set extensions to support                      /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-bool FTWScanner::SetExts(string Vals)
-{
-   ClearPatterns();
-   string::size_type Start = 0;
-   while (Start <= Vals.length()-1)
-   {
-      string::size_type Space = Vals.find(' ',Start);
-      string::size_type Length;
-      if (Space == string::npos)
-      {
-         Length = Vals.length()-Start;
-      }
-      else
-      {
-         Length = Space-Start;
-      }
-      AddPattern(string("*") + Vals.substr(Start, Length));
-      Start += Length + 1;
-   }
-
-   return true;
-}
-
-                                                                       /*}}}*/
 // PackagesWriter::DoPackage - Process a single package                        /*{{{*/
 // ---------------------------------------------------------------------
 /* This method takes a package and gets its control information and 
@@ -360,13 +397,17 @@ bool FTWScanner::SetExts(string Vals)
 bool PackagesWriter::DoPackage(string FileName)
 {      
    // Pull all the data we need form the DB
-   if (Db.GetFileInfo(FileName, true, DoContents, true, DoMD5, DoSHA1, DoSHA256) 
-                 == false)
+   if (Db.GetFileInfo(FileName,
+           true, /* DoControl */
+           DoContents,
+           true, /* GenContentsOnly */
+           false, /* DoSource */
+           DoHashes, DoAlwaysStat) == false)
    {
-      return false;
+     return false;
    }
 
-   off_t FileSize = Db.GetFileSize();
+   unsigned long long FileSize = Db.GetFileSize();
    if (Delink(FileName,OriginalPath,Stats.DeLinkBytes,FileSize) == false)
       return false;
    
@@ -382,7 +423,7 @@ bool PackagesWriter::DoPackage(string FileName)
       Architecture = Arch;
    else
       Architecture = Tags.FindS("Architecture");
-   auto_ptr<Override::Item> OverItem(Over.GetItem(Package,Architecture));
+   unique_ptr<Override::Item> OverItem(Over.GetItem(Package,Architecture));
    
    if (Package.empty() == true)
       return _error->Error(_("Archive had no package field"));
@@ -396,14 +437,11 @@ bool PackagesWriter::DoPackage(string FileName)
         ioprintf(c1out, _("  %s has no override entry\n"), Package.c_str());
       }
       
-      OverItem = auto_ptr<Override::Item>(new Override::Item);
+      OverItem = unique_ptr<Override::Item>(new Override::Item);
       OverItem->FieldOverride["Section"] = Tags.FindS("Section");
       OverItem->Priority = Tags.FindS("Priority");
    }
 
-   char Size[40];
-   sprintf(Size,"%lu", (unsigned long) FileSize);
-   
    // Strip the DirStrip prefix from the FileName and add the PathPrefix
    string NewFileName;
    if (DirStrip.empty() == false &&
@@ -425,25 +463,34 @@ bool PackagesWriter::DoPackage(string FileName)
    }
 
    // This lists all the changes to the fields we are going to make.
-   // (7 hardcoded + maintainer + suggests + end marker)
-   TFRewriteData Changes[6+2+OverItem->FieldOverride.size()+1+1];
-
-   unsigned int End = 0;
-   SetTFRewriteData(Changes[End++], "Size", Size);
-   SetTFRewriteData(Changes[End++], "MD5sum", Db.MD5Res.c_str());
-   SetTFRewriteData(Changes[End++], "SHA1", Db.SHA1Res.c_str());
-   SetTFRewriteData(Changes[End++], "SHA256", Db.SHA256Res.c_str());
-   SetTFRewriteData(Changes[End++], "Filename", NewFileName.c_str());
-   SetTFRewriteData(Changes[End++], "Priority", OverItem->Priority.c_str());
-   SetTFRewriteData(Changes[End++], "Status", 0);
-   SetTFRewriteData(Changes[End++], "Optional", 0);
+   std::vector<pkgTagSection::Tag> Changes;
+
+   std::string Size;
+   strprintf(Size, "%llu", (unsigned long long) FileSize);
+   Changes.push_back(pkgTagSection::Tag::Rewrite("Size", Size));
+
+   for (HashStringList::const_iterator hs = Db.HashesList.begin(); hs != Db.HashesList.end(); ++hs)
+   {
+      if (hs->HashType() == "MD5Sum")
+        Changes.push_back(pkgTagSection::Tag::Rewrite("MD5sum", hs->HashValue()));
+      else if (hs->HashType() == "Checksum-FileSize")
+        continue;
+      else
+        Changes.push_back(pkgTagSection::Tag::Rewrite(hs->HashType(), hs->HashValue()));
+   }
+   Changes.push_back(pkgTagSection::Tag::Rewrite("Filename", NewFileName));
+   Changes.push_back(pkgTagSection::Tag::Rewrite("Priority", OverItem->Priority));
+   Changes.push_back(pkgTagSection::Tag::Remove("Status"));
+   Changes.push_back(pkgTagSection::Tag::Remove("Optional"));
 
    string DescriptionMd5;
    if (LongDescription == false) {
       MD5Summation descmd5;
       descmd5.Add(desc.c_str());
       DescriptionMd5 = descmd5.Result().Value();
-      SetTFRewriteData(Changes[End++], "Description-md5", DescriptionMd5.c_str());
+      Changes.push_back(pkgTagSection::Tag::Rewrite("Description-md5", DescriptionMd5));
+      if (TransWriter != NULL)
+        TransWriter->DoPackage(Package, desc, DescriptionMd5);
    }
 
    // Rewrite the maintainer field if necessary
@@ -456,14 +503,14 @@ bool PackagesWriter::DoPackage(string FileName)
         NewLine(1);
         ioprintf(c1out, _("  %s maintainer is %s not %s\n"),
               Package.c_str(), Tags.FindS("Maintainer").c_str(), OverItem->OldMaint.c_str());
-      }      
+      }
    }
-   
+
    if (NewMaint.empty() == false)
-      SetTFRewriteData(Changes[End++], "Maintainer", NewMaint.c_str());
-   
+      Changes.push_back(pkgTagSection::Tag::Rewrite("Maintainer", NewMaint));
+
    /* Get rid of the Optional tag. This is an ugly, ugly, ugly hack that
-      dpkg-scanpackages does.. Well sort of. dpkg-scanpackages just does renaming
+      dpkg-scanpackages does. Well sort of. dpkg-scanpackages just does renaming
       but dpkg does this append bit. So we do the append bit, at least that way the
       status file and package file will remain similar. There are other transforms
       but optional is the only legacy one still in use for some lazy reason. */
@@ -472,38 +519,90 @@ bool PackagesWriter::DoPackage(string FileName)
    {
       if (Tags.FindS("Suggests").empty() == false)
         OptionalStr = Tags.FindS("Suggests") + ", " + OptionalStr;
-      SetTFRewriteData(Changes[End++], "Suggests", OptionalStr.c_str());
+      Changes.push_back(pkgTagSection::Tag::Rewrite("Suggests", OptionalStr));
    }
 
-   for (map<string,string>::iterator I = OverItem->FieldOverride.begin(); 
-        I != OverItem->FieldOverride.end(); I++) 
-      SetTFRewriteData(Changes[End++],I->first.c_str(),I->second.c_str());
-
-   SetTFRewriteData(Changes[End++], 0, 0);
+   for (map<string,string>::const_iterator I = OverItem->FieldOverride.begin();
+        I != OverItem->FieldOverride.end(); ++I)
+      Changes.push_back(pkgTagSection::Tag::Rewrite(I->first, I->second));
 
    // Rewrite and store the fields.
-   if (TFRewrite(Output,Tags,TFRewritePackageOrder,Changes) == false)
+   if (Tags.Write(*Output, TFRewritePackageOrder, Changes) == false ||
+        Output->Write("\n", 1) == false)
       return false;
-   fprintf(Output,"\n");
 
    return Db.Finish();
 }
                                                                        /*}}}*/
+PackagesWriter::~PackagesWriter()                                      /*{{{*/
+{
+}
+                                                                       /*}}}*/
+
+// TranslationWriter::TranslationWriter - Constructor                  /*{{{*/
+// ---------------------------------------------------------------------
+/* Create a Translation-Master file for this Packages file */
+TranslationWriter::TranslationWriter(string const &File, string const &TransCompress,
+                                       mode_t const &Permissions) : Comp(NULL), Output(NULL)
+{
+   if (File.empty() == true)
+      return;
+
+   Comp = new MultiCompress(File, TransCompress, Permissions);
+   Output = &Comp->Input;
+}
+                                                                       /*}}}*/
+// TranslationWriter::DoPackage - Process a single package             /*{{{*/
+// ---------------------------------------------------------------------
+/* Create a Translation-Master file for this Packages file */
+bool TranslationWriter::DoPackage(string const &Pkg, string const &Desc,
+                                 string const &MD5)
+{
+   if (Output == NULL)
+      return true;
+
+   // Different archs can include different versions and therefore
+   // different descriptions - so we need to check for both name and md5.
+   string const Record = Pkg + ":" + MD5;
+
+   if (Included.find(Record) != Included.end())
+      return true;
+
+   std::string out;
+   strprintf(out, "Package: %s\nDescription-md5: %s\nDescription-en: %s\n",
+          Pkg.c_str(), MD5.c_str(), Desc.c_str());
+   Output->Write(out.c_str(), out.length());
+
+   Included.insert(Record);
+   return true;
+}
+                                                                       /*}}}*/
+// TranslationWriter::~TranslationWriter - Destructor                  /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+TranslationWriter::~TranslationWriter()
+{
+   if (Comp != NULL)
+      delete Comp;
+}
+                                                                       /*}}}*/
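
A sketch of the stanza TranslationWriter::DoPackage() appends per unique package/description pair, using the same "Package:MD5" key for de-duplication; the package name, description and digest below are invented:

```cpp
#include <cstdio>
#include <set>
#include <string>

static std::set<std::string> Included;    // mirrors TranslationWriter::Included

// Desc is the full description block (short line plus indented long lines).
static void WriteTranslation(std::string const &Pkg, std::string const &Desc,
                             std::string const &MD5)
{
   std::string const Record = Pkg + ":" + MD5;     // arch-specific versions may differ
   if (Included.insert(Record).second == false)    // the patch checks find() before insert()
      return;
   std::printf("Package: %s\nDescription-md5: %s\nDescription-en: %s\n",
               Pkg.c_str(), MD5.c_str(), Desc.c_str());
}

int main()
{
   std::string const md5 = "0123456789abcdef0123456789abcdef";    // placeholder digest
   WriteTranslation("hello", "example greeting program", md5);
   WriteTranslation("hello", "example greeting program", md5);    // duplicate: skipped
}
```
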
 
 // SourcesWriter::SourcesWriter - Constructor                          /*{{{*/
 // ---------------------------------------------------------------------
 /* */
-SourcesWriter::SourcesWriter(string BOverrides,string SOverrides,
-                            string ExtOverrides)
+SourcesWriter::SourcesWriter(FileFd * const GivenOutput, string const &DB, string const &BOverrides,string const &SOverrides,
+                            string const &ExtOverrides) :
+   FTWScanner(GivenOutput), Db(DB), Stats(Db.Stats)
 {
-   Output = stdout;
    AddPattern("*.dsc");
    DeLinkLimit = 0;
    Buffer = 0;
    BufSize = 0;
    
    // Process the command line options
+   ConfigToDoHashes(DoHashes, "APT::FTPArchive::Sources");
    NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false);
+   DoAlwaysStat = _config->FindB("APT::FTPArchive::AlwaysStat", false);
 
    // Read the override file
    if (BOverrides.empty() == false && BOver.ReadOverride(BOverrides) == false)
@@ -523,73 +622,48 @@ SourcesWriter::SourcesWriter(string BOverrides,string SOverrides,
 }
                                                                        /*}}}*/
 // SourcesWriter::DoPackage - Process a single package                 /*{{{*/
-// ---------------------------------------------------------------------
-/* */
+static std::string getDscHash(unsigned int const DoHashes,
+      Hashes::SupportedHashes const DoIt, pkgTagSection &Tags, char const * const FieldName,
+      HashString const * const Hash, unsigned long long Size, std::string const &FileName)
+{
+   if ((DoHashes & DoIt) != DoIt || Tags.Exists(FieldName) == false || Hash == NULL)
+      return "";
+   std::ostringstream out;
+   out << "\n " << Hash->HashValue() << " " << std::to_string(Size) << " " << FileName
+      << "\n " << Tags.FindS(FieldName);
+   return out.str();
+}
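
To make the string getDscHash() assembles concrete: the helper prepends a line for the .dsc itself (hash, size, filename) and then re-attaches the field body the .dsc already carried, so a rewritten Files field comes out roughly as printed by this sketch (all hashes, sizes and names invented):

```cpp
#include <iostream>
#include <sstream>
#include <string>

int main()
{
   // Invented values; in the patch they come from Db.HashesList, stat()
   // and Tags.FindS(FieldName).
   std::string const DscHash = "0123456789abcdef0123456789abcdef";
   unsigned long long const DscSize = 1885;
   std::string const DscName = "hello_2.10-2.dsc";
   std::string const ExistingField =                    // what the .dsc already listed
      "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa 725946 hello_2.10.orig.tar.gz\n "
      "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb 12688 hello_2.10-2.debian.tar.xz";

   std::ostringstream out;                              // same assembly as getDscHash()
   out << "\n " << DscHash << " " << std::to_string(DscSize) << " " << DscName
       << "\n " << ExistingField;
   std::cout << "Files:" << out.str() << "\n";
}
```
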
 bool SourcesWriter::DoPackage(string FileName)
-{      
-   // Open the archive
-   FileFd F(FileName,FileFd::ReadOnly);
-   if (_error->PendingError() == true)
-      return false;
-   
-   // Stat the file for later
-   struct stat St;
-   if (fstat(F.Fd(),&St) != 0)
-      return _error->Errno("fstat","Failed to stat %s",FileName.c_str());
-
-   if (St.st_size > 128*1024)
-      return _error->Error("DSC file '%s' is too large!",FileName.c_str());
-         
-   if (BufSize < (unsigned)St.st_size+1)
+{
+   // Pull all the data we need form the DB
+   if (Db.GetFileInfo(FileName,
+           false, /* DoControl */
+           false, /* DoContents */
+           false, /* GenContentsOnly */
+           true, /* DoSource */
+           DoHashes, DoAlwaysStat) == false)
    {
-      BufSize = St.st_size+1;
-      Buffer = (char *)realloc(Buffer,St.st_size+1);
-   }
-   
-   if (F.Read(Buffer,St.st_size) == false)
       return false;
-
-   // Hash the file
-   char *Start = Buffer;
-   char *BlkEnd = Buffer + St.st_size;
-   MD5Summation MD5;
-   MD5.Add((unsigned char *)Start,BlkEnd - Start);
-      
-   // Add an extra \n to the end, just in case
-   *BlkEnd++ = '\n';
-   
-   /* Remove the PGP trailer. Some .dsc's have this without a blank line 
-      before */
-   const char *Key = "-----BEGIN PGP SIGNATURE-----";
-   for (char *MsgEnd = Start; MsgEnd < BlkEnd - strlen(Key) -1; MsgEnd++)
-   {
-      if (*MsgEnd == '\n' && strncmp(MsgEnd+1,Key,strlen(Key)) == 0)
-      {
-        MsgEnd[1] = '\n';
-        break;
-      }      
    }
-   
-   /* Read records until we locate the Source record. This neatly skips the
-      GPG header (which is RFC822 formed) without any trouble. */
+
+   // we need to perform a "write" here (this is what finish is doing)
+   // because the call to Db.GetFileInfo() in the loop will change
+   // the "db cursor"
+   Db.Finish();
+
    pkgTagSection Tags;
-   do
-   {
-      unsigned Pos;
-      if (Tags.Scan(Start,BlkEnd - Start) == false)
-        return _error->Error("Could not find a record in the DSC '%s'",FileName.c_str());
-      if (Tags.Find("Source",Pos) == true)
-        break;
-      Start += Tags.size();
-   }
-   while (1);
+   if (Tags.Scan(Db.Dsc.Data.c_str(), Db.Dsc.Data.length()) == false)
+      return _error->Error("Could not find a record in the DSC '%s'",FileName.c_str());
+
+   if (Tags.Exists("Source") == false)
+      return _error->Error("Could not find a Source entry in the DSC '%s'",FileName.c_str());
    Tags.Trim();
-      
+
    // Lookup the overide information, finding first the best priority.
    string BestPrio;
    string Bins = Tags.FindS("Binary");
    char Buffer[Bins.length() + 1];
-   auto_ptr<Override::Item> OverItem(0);
+   unique_ptr<Override::Item> OverItem(nullptr);
    if (Bins.empty() == false)
    {
       strcpy(Buffer,Bins.c_str());
@@ -602,7 +676,7 @@ bool SourcesWriter::DoPackage(string FileName)
       unsigned char BestPrioV = pkgCache::State::Extra;
       for (unsigned I = 0; BinList[I] != 0; I++)
       {
-        auto_ptr<Override::Item> Itm(BOver.GetItem(BinList[I]));
+        unique_ptr<Override::Item> Itm(BOver.GetItem(BinList[I]));
         if (Itm.get() == 0)
            continue;
 
@@ -614,7 +688,7 @@ bool SourcesWriter::DoPackage(string FileName)
         }       
 
         if (OverItem.get() == 0)
-           OverItem = Itm;
+           OverItem = std::move(Itm);
       }
    }
    
@@ -627,30 +701,34 @@ bool SourcesWriter::DoPackage(string FileName)
         ioprintf(c1out, _("  %s has no override entry\n"), Tags.FindS("Source").c_str());
       }
       
-      OverItem = auto_ptr<Override::Item>(new Override::Item);
+      OverItem.reset(new Override::Item);
    }
    
-   auto_ptr<Override::Item> SOverItem(SOver.GetItem(Tags.FindS("Source")));
-   // const auto_ptr<Override::Item> autoSOverItem(SOverItem);
+   struct stat St;
+   if (stat(FileName.c_str(), &St) != 0)
+      return _error->Errno("fstat","Failed to stat %s",FileName.c_str());
+
+   unique_ptr<Override::Item> SOverItem(SOver.GetItem(Tags.FindS("Source")));
+   // const unique_ptr<Override::Item> autoSOverItem(SOverItem);
    if (SOverItem.get() == 0)
    {
       ioprintf(c1out, _("  %s has no source override entry\n"), Tags.FindS("Source").c_str());
-      SOverItem = auto_ptr<Override::Item>(BOver.GetItem(Tags.FindS("Source")));
+      SOverItem = unique_ptr<Override::Item>(BOver.GetItem(Tags.FindS("Source")));
       if (SOverItem.get() == 0)
       {
         ioprintf(c1out, _("  %s has no binary override entry either\n"), Tags.FindS("Source").c_str());
-        SOverItem = auto_ptr<Override::Item>(new Override::Item);
+        SOverItem = unique_ptr<Override::Item>(new Override::Item);
         *SOverItem = *OverItem;
       }
    }
-   
+
    // Add the dsc to the files hash list
-   char Files[1000];
-   snprintf(Files,sizeof(Files),"\n %s %lu %s\n %s",
-           string(MD5.Result()).c_str(),St.st_size,
-           flNotDir(FileName).c_str(),
-           Tags.FindS("Files").c_str());
-   
+   string const strippedName = flNotDir(FileName);
+   std::string const Files = getDscHash(DoHashes, Hashes::MD5SUM, Tags, "Files", Db.HashesList.find("MD5Sum"), St.st_size, strippedName);
+   std::string ChecksumsSha1 = getDscHash(DoHashes, Hashes::SHA1SUM, Tags, "Checksums-Sha1", Db.HashesList.find("SHA1"), St.st_size, strippedName);
+   std::string ChecksumsSha256 = getDscHash(DoHashes, Hashes::SHA256SUM, Tags, "Checksums-Sha256", Db.HashesList.find("SHA256"), St.st_size, strippedName);
+   std::string ChecksumsSha512 = getDscHash(DoHashes, Hashes::SHA512SUM, Tags, "Checksums-Sha512", Db.HashesList.find("SHA512"), St.st_size, strippedName);
+
    // Strip the DirStrip prefix from the FileName and add the PathPrefix
    string NewFileName;
    if (DirStrip.empty() == false &&
@@ -665,24 +743,82 @@ bool SourcesWriter::DoPackage(string FileName)
    string Directory = flNotFile(OriginalPath);
    string Package = Tags.FindS("Source");
 
-   // Perform the delinking operation over all of the files
+   // Perform operation over all of the files
    string ParseJnk;
-   const char *C = Files;
+   const char *C = Files.c_str();
+   char *RealPath = NULL;
    for (;isspace(*C); C++);
    while (*C != 0)
-   {   
+   {
       // Parse each of the elements
       if (ParseQuoteWord(C,ParseJnk) == false ||
          ParseQuoteWord(C,ParseJnk) == false ||
          ParseQuoteWord(C,ParseJnk) == false)
         return _error->Error("Error parsing file record");
-      
-      char Jnk[2];
+
       string OriginalPath = Directory + ParseJnk;
-      if (RealPath != 0 && readlink(OriginalPath.c_str(),Jnk,sizeof(Jnk)) != -1 &&
-         realpath(OriginalPath.c_str(),RealPath) != 0)
+
+      // Add missing hashes to source files
+      if (((DoHashes & Hashes::SHA1SUM) == Hashes::SHA1SUM && !Tags.Exists("Checksums-Sha1")) ||
+          ((DoHashes & Hashes::SHA256SUM) == Hashes::SHA256SUM && !Tags.Exists("Checksums-Sha256")) ||
+          ((DoHashes & Hashes::SHA512SUM) == Hashes::SHA512SUM && !Tags.Exists("Checksums-Sha512")))
+      {
+         if (Db.GetFileInfo(OriginalPath,
+                            false, /* DoControl */
+                            false, /* DoContents */
+                            false, /* GenContentsOnly */
+                            false, /* DoSource */
+                            DoHashes,
+                            DoAlwaysStat) == false)
+         {
+            return _error->Error("Error getting file info");
+         }
+
+         for (HashStringList::const_iterator hs = Db.HashesList.begin(); hs != Db.HashesList.end(); ++hs)
+        {
+           if (hs->HashType() == "MD5Sum" || hs->HashType() == "Checksum-FileSize")
+              continue;
+           char const * fieldname;
+           std::string * out;
+           if (hs->HashType() == "SHA1")
+           {
+              fieldname = "Checksums-Sha1";
+              out = &ChecksumsSha1;
+           }
+           else if (hs->HashType() == "SHA256")
+           {
+              fieldname = "Checksums-Sha256";
+              out = &ChecksumsSha256;
+           }
+           else if (hs->HashType() == "SHA512")
+           {
+              fieldname = "Checksums-Sha512";
+              out = &ChecksumsSha512;
+           }
+           else
+           {
+              _error->Warning("Ignoring unknown Checksumtype %s in SourcesWriter::DoPackages", hs->HashType().c_str());
+              continue;
+           }
+           if (Tags.Exists(fieldname) == true)
+              continue;
+           std::ostringstream streamout;
+           streamout << "\n " << hs->HashValue() << " " << std::to_string(Db.GetFileSize()) << " " << ParseJnk;
+           out->append(streamout.str());
+        }
+
+        // write back the GetFileInfo() stats data
+        Db.Finish();
+      }
+
+      // Perform the delinking operation
+      char Jnk[2];
+
+      if (readlink(OriginalPath.c_str(),Jnk,sizeof(Jnk)) != -1 &&
+         (RealPath = realpath(OriginalPath.c_str(),NULL)) != 0)
       {
         string RP = RealPath;
+        free(RealPath);
         if (Delink(RP,OriginalPath.c_str(),Stats.DeLinkBytes,St.st_size) == false)
            return false;
       }
@@ -693,42 +829,47 @@ bool SourcesWriter::DoPackage(string FileName)
       Directory.erase(Directory.end()-1);
 
    // This lists all the changes to the fields we are going to make.
-   // (5 hardcoded + maintainer + end marker)
-   TFRewriteData Changes[5+1+SOverItem->FieldOverride.size()+1];
-
-   unsigned int End = 0;
-   SetTFRewriteData(Changes[End++],"Source",Package.c_str(),"Package");
-   SetTFRewriteData(Changes[End++],"Files",Files);
+   // (5 hardcoded + checksums + maintainer + end marker)
+   std::vector<pkgTagSection::Tag> Changes;
+
+   Changes.push_back(pkgTagSection::Tag::Remove("Source"));
+   Changes.push_back(pkgTagSection::Tag::Rewrite("Package", Package));
+   if (Files.empty() == false)
+      Changes.push_back(pkgTagSection::Tag::Rewrite("Files", Files));
+   if (ChecksumsSha1.empty() == false)
+      Changes.push_back(pkgTagSection::Tag::Rewrite("Checksums-Sha1", ChecksumsSha1));
+   if (ChecksumsSha256.empty() == false)
+      Changes.push_back(pkgTagSection::Tag::Rewrite("Checksums-Sha256", ChecksumsSha256));
+   if (ChecksumsSha512.empty() == false)
+      Changes.push_back(pkgTagSection::Tag::Rewrite("Checksums-Sha512", ChecksumsSha512));
    if (Directory != "./")
-      SetTFRewriteData(Changes[End++],"Directory",Directory.c_str());
-   SetTFRewriteData(Changes[End++],"Priority",BestPrio.c_str());
-   SetTFRewriteData(Changes[End++],"Status",0);
+      Changes.push_back(pkgTagSection::Tag::Rewrite("Directory", Directory));
+   Changes.push_back(pkgTagSection::Tag::Rewrite("Priority", BestPrio));
+   Changes.push_back(pkgTagSection::Tag::Remove("Status"));
 
    // Rewrite the maintainer field if necessary
    bool MaintFailed;
-   string NewMaint = OverItem->SwapMaint(Tags.FindS("Maintainer"),MaintFailed);
+   string NewMaint = OverItem->SwapMaint(Tags.FindS("Maintainer"), MaintFailed);
    if (MaintFailed == true)
    {
       if (NoOverride == false)
       {
-        NewLine(1);     
+        NewLine(1);
         ioprintf(c1out, _("  %s maintainer is %s not %s\n"), Package.c_str(),
               Tags.FindS("Maintainer").c_str(), OverItem->OldMaint.c_str());
-      }      
+      }
    }
    if (NewMaint.empty() == false)
-      SetTFRewriteData(Changes[End++], "Maintainer", NewMaint.c_str());
-   
-   for (map<string,string>::iterator I = SOverItem->FieldOverride.begin(); 
-        I != SOverItem->FieldOverride.end(); I++) 
-      SetTFRewriteData(Changes[End++],I->first.c_str(),I->second.c_str());
+      Changes.push_back(pkgTagSection::Tag::Rewrite("Maintainer", NewMaint.c_str()));
+
+   for (map<string,string>::const_iterator I = SOverItem->FieldOverride.begin();
+        I != SOverItem->FieldOverride.end(); ++I)
+      Changes.push_back(pkgTagSection::Tag::Rewrite(I->first, I->second));
 
-   SetTFRewriteData(Changes[End++], 0, 0);
-      
    // Rewrite and store the fields.
-   if (TFRewrite(Output,Tags,TFRewriteSourceOrder,Changes) == false)
+   if (Tags.Write(*Output, TFRewriteSourceOrder, Changes) == false ||
+        Output->Write("\n", 1) == false)
       return false;
-   fprintf(Output,"\n");
 
    Stats.Packages++;
    
@@ -739,21 +880,27 @@ bool SourcesWriter::DoPackage(string FileName)
 // ContentsWriter::ContentsWriter - Constructor                                /*{{{*/
 // ---------------------------------------------------------------------
 /* */
-ContentsWriter::ContentsWriter(string DB) : 
-                   Db(DB), Stats(Db.Stats)
+ContentsWriter::ContentsWriter(FileFd * const GivenOutput, string const &DB,
+      string const &Arch, bool const IncludeArchAll) :
+                   FTWScanner(GivenOutput, Arch, IncludeArchAll), Db(DB), Stats(Db.Stats)
 
 {
-   AddPattern("*.deb");
-   Output = stdout;
+   SetExts(".deb");
 }
                                                                        /*}}}*/
 // ContentsWriter::DoPackage - Process a single package                        /*{{{*/
 // ---------------------------------------------------------------------
 /* If Package is the empty string the control record will be parsed to
    determine what the package name is. */
-bool ContentsWriter::DoPackage(string FileName,string Package)
+bool ContentsWriter::DoPackage(string FileName, string Package)
 {
-   if (!Db.GetFileInfo(FileName, Package.empty(), true, false, false, false, false))
+   if (!Db.GetFileInfo(FileName,
+           Package.empty(), /* DoControl */
+           true, /* DoContents */
+           false, /* GenContentsOnly */
+           false, /* DoSource */
+           0, /* DoHashes */
+           false /* checkMtime */))
    {
       return false;
    }
@@ -772,27 +919,21 @@ bool ContentsWriter::DoPackage(string FileName,string Package)
 // ContentsWriter::ReadFromPkgs - Read from a packages file            /*{{{*/
 // ---------------------------------------------------------------------
 /* */
-bool ContentsWriter::ReadFromPkgs(string PkgFile,string PkgCompress)
+bool ContentsWriter::ReadFromPkgs(string const &PkgFile,string const &PkgCompress)
 {
    MultiCompress Pkgs(PkgFile,PkgCompress,0,false);
    if (_error->PendingError() == true)
       return false;
-   
+
    // Open the package file
-   int CompFd = -1;
-   pid_t Proc = -1;
-   if (Pkgs.OpenOld(CompFd,Proc) == false)
+   FileFd Fd;
+   if (Pkgs.OpenOld(Fd) == false)
       return false;
-   
-   // No auto-close FD
-   FileFd Fd(CompFd,false);   
+
    pkgTagFile Tags(&Fd);
    if (_error->PendingError() == true)
-   {
-      Pkgs.CloseOld(CompFd,Proc);
       return false;
-   }
-   
+
    // Parse.
    pkgTagSection Section;
    while (Tags.Step(Section) == true)
@@ -814,11 +955,10 @@ bool ContentsWriter::ReadFromPkgs(string PkgFile,string PkgCompress)
         _error->DumpErrors();
       }
    }
-   
+
    // Tidy the compressor
-   if (Pkgs.CloseOld(CompFd,Proc) == false)
-      return false;
-   
+   Fd.Close();
+
    return true;
 }
 
@@ -827,27 +967,41 @@ bool ContentsWriter::ReadFromPkgs(string PkgFile,string PkgCompress)
 // ReleaseWriter::ReleaseWriter - Constructor                          /*{{{*/
 // ---------------------------------------------------------------------
 /* */
-ReleaseWriter::ReleaseWriter(string DB)
+static std::string formatUTCDateTime(time_t const now)
+{
+   bool const NumericTimezone = _config->FindB("APT::FTPArchive::Release::NumericTimezone", true);
+   // TimeRFC1123 uses GMT to satisfy HTTP/1.1
+   std::string datetime = TimeRFC1123(now, NumericTimezone);
+   if (NumericTimezone == false)
+   {
+      auto const lastspace = datetime.rfind(' ');
+      if (likely(lastspace != std::string::npos))
+        datetime.replace(lastspace + 1, 3, "UTC");
+   }
+   return datetime;
+}
+ReleaseWriter::ReleaseWriter(FileFd * const GivenOutput, string const &/*DB*/) : FTWScanner(GivenOutput)
 {
-   AddPattern("Packages");
-   AddPattern("Packages.gz");
-   AddPattern("Packages.bz2");
-   AddPattern("Packages.lzma");
-   AddPattern("Sources");
-   AddPattern("Sources.gz");
-   AddPattern("Sources.bz2");
-   AddPattern("Sources.lzma");
-   AddPattern("Release");
-   AddPattern("md5sum.txt");
-
-   Output = stdout;
-   time_t now = time(NULL);
-   char datestr[128];
-   if (strftime(datestr, sizeof(datestr), "%a, %d %b %Y %H:%M:%S UTC",
-                gmtime(&now)) == 0)
-   {
-      datestr[0] = '\0';
+   if (_config->FindB("APT::FTPArchive::Release::Default-Patterns", true) == true)
+   {
+      AddPattern("Packages");
+      AddPattern("Packages.*");
+      AddPattern("Translation-*");
+      AddPattern("Sources");
+      AddPattern("Sources.*");
+      AddPattern("Release");
+      AddPattern("Contents-*");
+      AddPattern("Index");
+      AddPattern("icons-*.tar");
+      AddPattern("icons-*.tar.*");
+      AddPattern("Components-*.yml");
+      AddPattern("Components-*.yml.*");
+      AddPattern("md5sum.txt");
    }
+   AddPatterns(_config->FindVector("APT::FTPArchive::Release::Patterns"));
+
+   time_t const now = time(NULL);
+   time_t const validuntil = now + _config->FindI("APT::FTPArchive::Release::ValidTime", 0);
 
    map<string,string> Fields;
    Fields["Origin"] = "";
@@ -855,11 +1009,16 @@ ReleaseWriter::ReleaseWriter(string DB)
    Fields["Suite"] = "";
    Fields["Version"] = "";
    Fields["Codename"] = "";
-   Fields["Date"] = datestr;
+   Fields["Date"] = formatUTCDateTime(now);
+   if (validuntil != now)
+      Fields["Valid-Until"] = formatUTCDateTime(validuntil);
    Fields["Architectures"] = "";
    Fields["Components"] = "";
    Fields["Description"] = "";
-
+   Fields["Signed-By"] = "";
+   if (_config->FindB("APT::FTPArchive::DoByHash", false) == true)
+      Fields["Acquire-By-Hash"] = "true";
+   
    for(map<string,string>::const_iterator I = Fields.begin();
        I != Fields.end();
        ++I)
@@ -869,8 +1028,11 @@ ReleaseWriter::ReleaseWriter(string DB)
       if (Value == "")
          continue;
 
-      fprintf(Output, "%s: %s\n", (*I).first.c_str(), Value.c_str());
+      std::string const out = I->first + ": " + Value + "\n";
+      Output->Write(out.c_str(), out.length());
    }
+
+   ConfigToDoHashes(DoHashes, "APT::FTPArchive::Release");
 }
                                                                        /*}}}*/
 // ReleaseWriter::DoPackage - Process a single package                 /*{{{*/
@@ -903,61 +1065,109 @@ bool ReleaseWriter::DoPackage(string FileName)
 
    CheckSums[NewFileName].size = fd.Size();
 
-   MD5Summation MD5;
-   MD5.AddFD(fd.Fd(), fd.Size());
-   CheckSums[NewFileName].MD5 = MD5.Result();
-
-   fd.Seek(0);
-   SHA1Summation SHA1;
-   SHA1.AddFD(fd.Fd(), fd.Size());
-   CheckSums[NewFileName].SHA1 = SHA1.Result();
+   Hashes hs(DoHashes);
+   hs.AddFD(fd);
+   CheckSums[NewFileName].Hashes = hs.GetHashStringList();
+   fd.Close();
 
-   fd.Seek(0);
-   SHA256Summation SHA256;
-   SHA256.AddFD(fd.Fd(), fd.Size());
-   CheckSums[NewFileName].SHA256 = SHA256.Result();
+   // FIXME: wrong layer in the code(?)
+   // FIXME2: symlink instead of create a copy
+   if (_config->FindB("APT::FTPArchive::DoByHash", false) == true)
+   {
+      std::string Input = FileName;
+      HashStringList hsl = hs.GetHashStringList();
+      for(HashStringList::const_iterator h = hsl.begin();
+          h != hsl.end(); ++h)
+      {
+         if (!h->usable())
+            continue;
+         if (flNotDir(FileName) == "Release" || flNotDir(FileName) == "InRelease")
+            continue;
+
+         std::string ByHashOutputFile = GenByHashFilename(Input, *h);
+         std::string ByHashOutputDir = flNotFile(ByHashOutputFile);
+         if(!CreateDirectory(flNotFile(Input), ByHashOutputDir))
+            return _error->Warning("can not create dir %s", flNotFile(ByHashOutputFile).c_str());
+
+         // write new hashes
+         FileFd In(Input, FileFd::ReadOnly);
+         FileFd Out(ByHashOutputFile, FileFd::WriteEmpty);
+         if(!CopyFile(In, Out))
+            return _error->Warning("failed to copy %s %s", Input.c_str(), ByHashOutputFile.c_str());
+      }
+   }
 
-   fd.Close();
-   
    return true;
 }
 
                                                                        /*}}}*/
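
For orientation, the by-hash copies created above land in a per-algorithm directory next to the index, roughly <dir>/by-hash/<HashType>/<HashValue>; GenByHashFilename() (presumably declared in the newly included byhash.h) builds that path. A minimal path-building sketch with an invented input path and digest:

```cpp
#include <iostream>
#include <string>

static std::string flNotFileSketch(std::string const &Path)   // directory part, like apt's flNotFile()
{
   std::string::size_type const Slash = Path.rfind('/');
   return Slash == std::string::npos ? "./" : Path.substr(0, Slash + 1);
}

int main()
{
   std::string const Input = "dists/unstable/main/binary-amd64/Packages.gz";   // example path
   std::string const HashType = "SHA256";
   std::string const HashValue = "0123456789abcdef0123456789abcdef";           // invented digest

   std::string const ByHashOutputFile =
      flNotFileSketch(Input) + "by-hash/" + HashType + "/" + HashValue;
   std::cout << ByHashOutputFile << "\n";
   // dists/unstable/main/binary-amd64/by-hash/SHA256/0123456789abcdef0123456789abcdef
}
```
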
 // ReleaseWriter::Finish - Output the checksums                                /*{{{*/
 // ---------------------------------------------------------------------
-void ReleaseWriter::Finish()
+static void printChecksumTypeRecord(FileFd &Output, char const * const Type, map<string, ReleaseWriter::CheckSum> const &CheckSums)
 {
-   fprintf(Output, "MD5Sum:\n");
-   for(map<string,struct CheckSum>::iterator I = CheckSums.begin();
-       I != CheckSums.end();
-       ++I)
    {
-      fprintf(Output, " %s %16ld %s\n",
-              (*I).second.MD5.c_str(),
-              (*I).second.size,
-              (*I).first.c_str());
+      std::string out;
+      strprintf(out, "%s:\n", Type);
+      Output.Write(out.c_str(), out.length());
    }
-
-   fprintf(Output, "SHA1:\n");
-   for(map<string,struct CheckSum>::iterator I = CheckSums.begin();
-       I != CheckSums.end();
-       ++I)
+   for(map<string,ReleaseWriter::CheckSum>::const_iterator I = CheckSums.begin();
+        I != CheckSums.end(); ++I)
    {
-      fprintf(Output, " %s %16ld %s\n",
-              (*I).second.SHA1.c_str(),
-              (*I).second.size,
-              (*I).first.c_str());
+      HashString const * const hs = I->second.Hashes.find(Type);
+      if (hs == NULL)
+        continue;
+      std::string out;
+      strprintf(out, " %s %16llu %s\n",
+           hs->HashValue().c_str(),
+           (*I).second.size,
+           (*I).first.c_str());
+      Output.Write(out.c_str(), out.length());
    }
-
-   fprintf(Output, "SHA256:\n");
-   for(map<string,struct CheckSum>::iterator I = CheckSums.begin();
-       I != CheckSums.end();
-       ++I)
+}
+void ReleaseWriter::Finish()
+{
+   if ((DoHashes & Hashes::MD5SUM) == Hashes::MD5SUM)
+      printChecksumTypeRecord(*Output, "MD5Sum", CheckSums);
+   if ((DoHashes & Hashes::SHA1SUM) == Hashes::SHA1SUM)
+      printChecksumTypeRecord(*Output, "SHA1", CheckSums);
+   if ((DoHashes & Hashes::SHA256SUM) == Hashes::SHA256SUM)
+      printChecksumTypeRecord(*Output, "SHA256", CheckSums);
+   if ((DoHashes & Hashes::SHA512SUM) == Hashes::SHA512SUM)
+      printChecksumTypeRecord(*Output, "SHA512", CheckSums);
+
+   // go by-hash cleanup
+   map<string,ReleaseWriter::CheckSum>::const_iterator prev = CheckSums.begin();
+   if (_config->FindB("APT::FTPArchive::DoByHash", false) == true)
    {
-      fprintf(Output, " %s %16ld %s\n",
-              (*I).second.SHA256.c_str(),
-              (*I).second.size,
-              (*I).first.c_str());
+      for(map<string,ReleaseWriter::CheckSum>::const_iterator I = CheckSums.begin();
+        I != CheckSums.end(); ++I)
+      {
+         if (I->first == "Release" || I->first == "InRelease")
+            continue;
+
+         // keep iterating until we find a new subdir
+         if(flNotFile(I->first) == flNotFile(prev->first))
+            continue;
+
+         // clean that subdir up
+         int keepFiles = _config->FindI("APT::FTPArchive::By-Hash-Keep", 3);
+         // calculate how many compressors are used (the amount of files
+         // in that subdir generated for this run)
+         keepFiles *= std::distance(prev, I);
+         prev = I;
+
+         HashStringList hsl = prev->second.Hashes;
+         for(HashStringList::const_iterator h = hsl.begin();
+             h != hsl.end(); ++h)
+         {
+
+            if (!h->usable())
+               continue;
+
+            std::string RealFilename = DirStrip+"/"+prev->first;
+            std::string ByHashOutputFile = GenByHashFilename(RealFilename, *h);
+            DeleteAllButMostRecent(flNotFile(ByHashOutputFile), keepFiles);
+         }
+      }
    }
 }
-
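
The cleanup arithmetic at the end of Finish() is easier to follow with numbers: as the in-code comment describes it, APT::FTPArchive::By-Hash-Keep (default 3) generations are kept for each index file written into a subdirectory during this run, e.g. 3 * 4 = 12 files when a suite directory produced Packages plus three compressed variants. A standalone sketch of that per-subdirectory computation (directory names invented; the real loop hands the count to DeleteAllButMostRecent()):

```cpp
#include <iostream>
#include <iterator>
#include <map>
#include <string>

static std::string flNotFileSketch(std::string const &Path)   // directory part, like apt's flNotFile()
{
   std::string::size_type const Slash = Path.rfind('/');
   return Slash == std::string::npos ? "./" : Path.substr(0, Slash + 1);
}

int main()
{
   int const ByHashKeep = 3;                    // APT::FTPArchive::By-Hash-Keep default
   std::map<std::string, int> CheckSums = {     // stand-in for the keys of ReleaseWriter::CheckSums
      {"main/binary-amd64/Packages", 0},
      {"main/binary-amd64/Packages.bz2", 0},
      {"main/binary-amd64/Packages.gz", 0},
      {"main/binary-amd64/Packages.xz", 0},
      {"main/source/Sources.gz", 0},
   };

   auto prev = CheckSums.begin();
   for (auto I = CheckSums.begin(); I != CheckSums.end(); ++I)
   {
      if (flNotFileSketch(I->first) == flNotFileSketch(prev->first))
         continue;                              // still walking the same subdirectory
      // 3 generations for each of the 4 index files written into binary-amd64/
      int const keepFiles = ByHashKeep * std::distance(prev, I);
      std::cout << flNotFileSketch(prev->first) << "by-hash/<type>/: keep "
                << keepFiles << " files\n";
      prev = I;
   }
   // prints: main/binary-amd64/by-hash/<type>/: keep 12 files
}
```
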