diff --git a/ftparchive/writer.cc b/ftparchive/writer.cc
index 7cf7e6efc778577ccabc7cf60305a26bf570222a..018cf005203143ba9e8186dcf147ab3582a6bbd1 100644
--- a/ftparchive/writer.cc
+++ b/ftparchive/writer.cc
 #include <unistd.h>
 #include <ctime>
 #include <iostream>
+#include <iomanip>
 #include <sstream>
 #include <memory>
 #include <utility>
+#include <algorithm>
 
 #include "apt-ftparchive.h"
 #include "writer.h"
 #include "cachedb.h"
 #include "multicompress.h"
+#include "byhash.h"
 
 #include <apti18n.h>
                                                                        /*}}}*/
@@ -54,7 +57,7 @@ FTWScanner *FTWScanner::Owner;
 // ConfigToDoHashes - which hashes to generate                         /*{{{*/
 static void SingleConfigToDoHashes(unsigned int &DoHashes, std::string const &Conf, unsigned int const Flag)
 {
-   if (_config->FindB(Conf, true) == true)
+   if (_config->FindB(Conf, (DoHashes & Flag) == Flag) == true)
       DoHashes |= Flag;
    else
       DoHashes &= ~Flag;
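
Side note on the FindB() default above: it now mirrors the bit already present in DoHashes, so a nested scope such as APT::FTPArchive::Release::SHA1 inherits the parent's decision unless it is set explicitly. A minimal standalone sketch of that behaviour, using a hypothetical Options map in place of apt's _config:

#include <iostream>
#include <map>
#include <string>

// hypothetical flat key/bool store standing in for _config
static std::map<std::string, bool> Options;

static bool FindB(std::string const &Key, bool const Default)
{
   auto const It = Options.find(Key);
   return It == Options.end() ? Default : It->second;
}

static void SingleConfigToDoHashes(unsigned int &DoHashes, std::string const &Conf, unsigned int const Flag)
{
   // the default reflects the bit already present in DoHashes
   if (FindB(Conf, (DoHashes & Flag) == Flag) == true)
      DoHashes |= Flag;
   else
      DoHashes &= ~Flag;
}

int main()
{
   unsigned int const SHA1 = 1 << 1;         // stand-in for Hashes::SHA1SUM

   unsigned int Archive = ~0u;               // APT::FTPArchive scope
   Options["APT::FTPArchive::SHA1"] = false; // disabled globally
   SingleConfigToDoHashes(Archive, "APT::FTPArchive::SHA1", SHA1);

   unsigned int Release = Archive;           // Release scope starts from the parent's result
   SingleConfigToDoHashes(Release, "APT::FTPArchive::Release::SHA1", SHA1);

   // prints "off": the unset Release option inherits the parent's "disabled"
   std::cout << ((Release & SHA1) == SHA1 ? "on" : "off") << std::endl;
   return 0;
}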
@@ -69,22 +72,30 @@ static void ConfigToDoHashes(unsigned int &DoHashes, std::string const &Conf)
                                                                        /*}}}*/
 
 // FTWScanner::FTWScanner - Constructor                                        /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-FTWScanner::FTWScanner(FileFd * const GivenOutput, string const &Arch): Arch(Arch), DoHashes(~0)
+FTWScanner::FTWScanner(FileFd * const GivenOutput, string const &Arch, bool const IncludeArchAll)
+   : Arch(Arch), IncludeArchAll(IncludeArchAll), DoHashes(~0)
 {
    if (GivenOutput == NULL)
    {
       Output = new FileFd;
+      OwnsOutput = true;
       Output->OpenDescriptor(STDOUT_FILENO, FileFd::WriteOnly, false);
    }
    else
+   {
       Output = GivenOutput;
+      OwnsOutput = false;
+   }
    ErrorPrinted = false;
    NoLinkAct = !_config->FindB("APT::FTPArchive::DeLinkAct",true);
    ConfigToDoHashes(DoHashes, "APT::FTPArchive");
 }
                                                                        /*}}}*/
+FTWScanner::~FTWScanner()
+{
+   if (Output != NULL && OwnsOutput)
+      delete Output;
+}
 // FTWScanner::Scanner - FTW Scanner                                   /*{{{*/
 // ---------------------------------------------------------------------
 /* This is the FTW scanner, it processes each directory element in the
@@ -292,9 +303,7 @@ bool FTWScanner::Delink(string &FileName,const char *OriginalPath,
               _error->Errno("readlink",_("Failed to readlink %s"),OriginalPath);
            else
            {
-              if (unlink(OriginalPath) != 0)
-                 _error->Errno("unlink",_("Failed to unlink %s"),OriginalPath);
-              else
+              if (RemoveFile("FTWScanner::Delink", OriginalPath))
               {
                  if (link(FileName.c_str(),OriginalPath) != 0)
                  {
@@ -317,6 +326,32 @@ bool FTWScanner::Delink(string &FileName,const char *OriginalPath,
       FileName = OriginalPath;
    }
    
+   return true;
+}
+                                                                       /*}}}*/
+// FTWScanner::SetExts - Set extensions to support                      /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool FTWScanner::SetExts(string const &Vals)
+{
+   ClearPatterns();
+   string::size_type Start = 0;
+   while (Start <= Vals.length()-1)
+   {
+      string::size_type const Space = Vals.find(' ',Start);
+      string::size_type const Length = ((Space == string::npos) ? Vals.length() : Space) - Start;
+      if ( Arch.empty() == false )
+      {
+        AddPattern(string("*_") + Arch + Vals.substr(Start, Length));
+        if (IncludeArchAll == true && Arch != "all")
+           AddPattern(string("*_all") + Vals.substr(Start, Length));
+      }
+      else
+        AddPattern(string("*") + Vals.substr(Start, Length));
+
+      Start += Length + 1;
+   }
+
    return true;
 }
                                                                        /*}}}*/
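
For reference, the arch-aware expansion the relocated SetExts() performs can be sketched standalone as below; ExpandExts() is a local stand-in, not part of writer.cc. With Arch="amd64", IncludeArchAll=true and ".deb .udeb" it yields *_amd64.deb, *_all.deb, *_amd64.udeb and *_all.udeb (the loop condition here also sidesteps the unsigned underflow of Vals.length()-1 on an empty string):

#include <iostream>
#include <string>
#include <vector>

static std::vector<std::string> ExpandExts(std::string const &Vals, std::string const &Arch, bool const IncludeArchAll)
{
   std::vector<std::string> Patterns;
   std::string::size_type Start = 0;
   while (Start < Vals.length())
   {
      std::string::size_type const Space = Vals.find(' ', Start);
      std::string::size_type const Length = ((Space == std::string::npos) ? Vals.length() : Space) - Start;
      std::string const Ext = Vals.substr(Start, Length);
      if (Arch.empty() == false)
      {
         Patterns.push_back("*_" + Arch + Ext);
         if (IncludeArchAll == true && Arch != "all")
            Patterns.push_back("*_all" + Ext);
      }
      else
         Patterns.push_back("*" + Ext);
      Start += Length + 1;
   }
   return Patterns;
}

int main()
{
   for (auto const &P : ExpandExts(".deb .udeb", "amd64", true))
      std::cout << P << '\n';
   return 0;
}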
@@ -324,9 +359,10 @@ bool FTWScanner::Delink(string &FileName,const char *OriginalPath,
 // PackagesWriter::PackagesWriter - Constructor                                /*{{{*/
 // ---------------------------------------------------------------------
 /* */
-PackagesWriter::PackagesWriter(FileFd * const GivenOutput, string const &DB,string const &Overrides,string const &ExtOverrides,
-                              string const &Arch) :
-   FTWScanner(GivenOutput, Arch), Db(DB), Stats(Db.Stats), TransWriter(NULL)
+PackagesWriter::PackagesWriter(FileFd * const GivenOutput, TranslationWriter * const transWriter,
+      string const &DB,string const &Overrides,string const &ExtOverrides,
+      string const &Arch, bool const IncludeArchAll) :
+   FTWScanner(GivenOutput, Arch, IncludeArchAll), Db(DB), Stats(Db.Stats), TransWriter(transWriter)
 {
    SetExts(".deb .udeb");
    DeLinkLimit = 0;
@@ -353,32 +389,6 @@ PackagesWriter::PackagesWriter(FileFd * const GivenOutput, string const &DB,stri
    _error->DumpErrors();
 }
                                                                         /*}}}*/
-// FTWScanner::SetExts - Set extensions to support                      /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-bool FTWScanner::SetExts(string const &Vals)
-{
-   ClearPatterns();
-   string::size_type Start = 0;
-   while (Start <= Vals.length()-1)
-   {
-      string::size_type const Space = Vals.find(' ',Start);
-      string::size_type const Length = ((Space == string::npos) ? Vals.length() : Space) - Start;
-      if ( Arch.empty() == false )
-      {
-        AddPattern(string("*_") + Arch + Vals.substr(Start, Length));
-        AddPattern(string("*_all") + Vals.substr(Start, Length));
-      }
-      else
-        AddPattern(string("*") + Vals.substr(Start, Length));
-
-      Start += Length + 1;
-   }
-
-   return true;
-}
-
-                                                                       /*}}}*/
 // PackagesWriter::DoPackage - Process a single package                        /*{{{*/
 // ---------------------------------------------------------------------
 /* This method takes a package and gets its control information and 
@@ -413,7 +423,7 @@ bool PackagesWriter::DoPackage(string FileName)
       Architecture = Arch;
    else
       Architecture = Tags.FindS("Architecture");
-   auto_ptr<Override::Item> OverItem(Over.GetItem(Package,Architecture));
+   unique_ptr<Override::Item> OverItem(Over.GetItem(Package,Architecture));
    
    if (Package.empty() == true)
       return _error->Error(_("Archive had no package field"));
@@ -427,7 +437,7 @@ bool PackagesWriter::DoPackage(string FileName)
         ioprintf(c1out, _("  %s has no override entry\n"), Package.c_str());
       }
       
-      OverItem = auto_ptr<Override::Item>(new Override::Item);
+      OverItem = unique_ptr<Override::Item>(new Override::Item);
       OverItem->FieldOverride["Section"] = Tags.FindS("Section");
       OverItem->Priority = Tags.FindS("Priority");
    }
@@ -524,12 +534,16 @@ bool PackagesWriter::DoPackage(string FileName)
    return Db.Finish();
 }
                                                                        /*}}}*/
+PackagesWriter::~PackagesWriter()                                      /*{{{*/
+{
+}
+                                                                       /*}}}*/
 
 // TranslationWriter::TranslationWriter - Constructor                  /*{{{*/
 // ---------------------------------------------------------------------
 /* Create a Translation-Master file for this Packages file */
 TranslationWriter::TranslationWriter(string const &File, string const &TransCompress,
-                                       mode_t const &Permissions) : RefCounter(0)
+                                       mode_t const &Permissions) : Comp(NULL), Output(NULL)
 {
    if (File.empty() == true)
       return;
@@ -568,10 +582,8 @@ bool TranslationWriter::DoPackage(string const &Pkg, string const &Desc,
 /* */
 TranslationWriter::~TranslationWriter()
 {
-   if (Comp == NULL)
-      return;
-
-   delete Comp;
+   if (Comp != NULL)
+      delete Comp;
 }
                                                                        /*}}}*/
 
@@ -612,12 +624,12 @@ SourcesWriter::SourcesWriter(FileFd * const GivenOutput, string const &DB, strin
 // SourcesWriter::DoPackage - Process a single package                 /*{{{*/
 static std::string getDscHash(unsigned int const DoHashes,
       Hashes::SupportedHashes const DoIt, pkgTagSection &Tags, char const * const FieldName,
-      HashString const * const Hash, unsigned long long Size, std::string FileName)
+      HashString const * const Hash, unsigned long long Size, std::string const &FileName)
 {
    if ((DoHashes & DoIt) != DoIt || Tags.Exists(FieldName) == false || Hash == NULL)
       return "";
    std::ostringstream out;
-   out << "\n " << Hash->HashValue() << " " << Size << " " << FileName
+   out << "\n " << Hash->HashValue() << " " << std::to_string(Size) << " " << FileName
       << "\n " << Tags.FindS(FieldName);
    return out.str();
 }
@@ -651,7 +663,7 @@ bool SourcesWriter::DoPackage(string FileName)
    string BestPrio;
    string Bins = Tags.FindS("Binary");
    char Buffer[Bins.length() + 1];
-   auto_ptr<Override::Item> OverItem(0);
+   unique_ptr<Override::Item> OverItem(nullptr);
    if (Bins.empty() == false)
    {
       strcpy(Buffer,Bins.c_str());
@@ -664,7 +676,7 @@ bool SourcesWriter::DoPackage(string FileName)
       unsigned char BestPrioV = pkgCache::State::Extra;
       for (unsigned I = 0; BinList[I] != 0; I++)
       {
-        auto_ptr<Override::Item> Itm(BOver.GetItem(BinList[I]));
+        unique_ptr<Override::Item> Itm(BOver.GetItem(BinList[I]));
         if (Itm.get() == 0)
            continue;
 
@@ -676,7 +688,7 @@ bool SourcesWriter::DoPackage(string FileName)
         }       
 
         if (OverItem.get() == 0)
-           OverItem = Itm;
+           OverItem = std::move(Itm);
       }
    }
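
The std::move() above is not cosmetic: unlike auto_ptr, unique_ptr has no copy assignment, so transferring ownership from the loop-local Itm to OverItem must be explicit. A tiny illustration with a placeholder Item type (not apt's Override::Item):

#include <memory>
#include <utility>

struct Item { int Priority = 0; };   // placeholder type

int main()
{
   std::unique_ptr<Item> Itm(new Item);
   std::unique_ptr<Item> OverItem;

   // OverItem = Itm;            // ill-formed: unique_ptr's copy assignment is deleted
   OverItem = std::move(Itm);    // move assignment transfers ownership; Itm is now empty

   return (OverItem != nullptr && Itm == nullptr) ? 0 : 1;
}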
    
@@ -689,23 +701,23 @@ bool SourcesWriter::DoPackage(string FileName)
         ioprintf(c1out, _("  %s has no override entry\n"), Tags.FindS("Source").c_str());
       }
       
-      OverItem = auto_ptr<Override::Item>(new Override::Item);
+      OverItem.reset(new Override::Item);
    }
    
    struct stat St;
    if (stat(FileName.c_str(), &St) != 0)
       return _error->Errno("fstat","Failed to stat %s",FileName.c_str());
 
-   auto_ptr<Override::Item> SOverItem(SOver.GetItem(Tags.FindS("Source")));
-   // const auto_ptr<Override::Item> autoSOverItem(SOverItem);
+   unique_ptr<Override::Item> SOverItem(SOver.GetItem(Tags.FindS("Source")));
+   // const unique_ptr<Override::Item> autoSOverItem(SOverItem);
    if (SOverItem.get() == 0)
    {
       ioprintf(c1out, _("  %s has no source override entry\n"), Tags.FindS("Source").c_str());
-      SOverItem = auto_ptr<Override::Item>(BOver.GetItem(Tags.FindS("Source")));
+      SOverItem = unique_ptr<Override::Item>(BOver.GetItem(Tags.FindS("Source")));
       if (SOverItem.get() == 0)
       {
         ioprintf(c1out, _("  %s has no binary override entry either\n"), Tags.FindS("Source").c_str());
-        SOverItem = auto_ptr<Override::Item>(new Override::Item);
+        SOverItem = unique_ptr<Override::Item>(new Override::Item);
         *SOverItem = *OverItem;
       }
    }
@@ -791,7 +803,7 @@ bool SourcesWriter::DoPackage(string FileName)
            if (Tags.Exists(fieldname) == true)
               continue;
            std::ostringstream streamout;
-           streamout << "\n " << hs->HashValue() << " " << Db.GetFileSize() << " " << ParseJnk;
+           streamout << "\n " << hs->HashValue() << " " << std::to_string(Db.GetFileSize()) << " " << ParseJnk;
            out->append(streamout.str());
         }
 
@@ -868,8 +880,9 @@ bool SourcesWriter::DoPackage(string FileName)
 // ContentsWriter::ContentsWriter - Constructor                                /*{{{*/
 // ---------------------------------------------------------------------
 /* */
-ContentsWriter::ContentsWriter(FileFd * const GivenOutput, string const &DB, string const &Arch) :
-                   FTWScanner(GivenOutput, Arch), Db(DB), Stats(Db.Stats)
+ContentsWriter::ContentsWriter(FileFd * const GivenOutput, string const &DB,
+      string const &Arch, bool const IncludeArchAll) :
+                   FTWScanner(GivenOutput, Arch, IncludeArchAll), Db(DB), Stats(Db.Stats)
 
 {
    SetExts(".deb");
@@ -954,49 +967,42 @@ bool ContentsWriter::ReadFromPkgs(string const &PkgFile,string const &PkgCompres
 // ReleaseWriter::ReleaseWriter - Constructor                          /*{{{*/
 // ---------------------------------------------------------------------
 /* */
+static std::string formatUTCDateTime(time_t const now)
+{
+   bool const NumericTimezone = _config->FindB("APT::FTPArchive::Release::NumericTimezone", true);
+   // TimeRFC1123 uses GMT to satisfy HTTP/1.1
+   std::string datetime = TimeRFC1123(now, NumericTimezone);
+   if (NumericTimezone == false)
+   {
+      auto const lastspace = datetime.rfind(' ');
+      if (likely(lastspace != std::string::npos))
+        datetime.replace(lastspace + 1, 3, "UTC");
+   }
+   return datetime;
+}
 ReleaseWriter::ReleaseWriter(FileFd * const GivenOutput, string const &/*DB*/) : FTWScanner(GivenOutput)
 {
    if (_config->FindB("APT::FTPArchive::Release::Default-Patterns", true) == true)
    {
       AddPattern("Packages");
-      AddPattern("Packages.gz");
-      AddPattern("Packages.bz2");
-      AddPattern("Packages.lzma");
-      AddPattern("Packages.xz");
+      AddPattern("Packages.*");
       AddPattern("Translation-*");
       AddPattern("Sources");
-      AddPattern("Sources.gz");
-      AddPattern("Sources.bz2");
-      AddPattern("Sources.lzma");
-      AddPattern("Sources.xz");
+      AddPattern("Sources.*");
       AddPattern("Release");
       AddPattern("Contents-*");
       AddPattern("Index");
+      AddPattern("Index.*");
+      AddPattern("icons-*.tar");
+      AddPattern("icons-*.tar.*");
+      AddPattern("Components-*.yml");
+      AddPattern("Components-*.yml.*");
       AddPattern("md5sum.txt");
    }
    AddPatterns(_config->FindVector("APT::FTPArchive::Release::Patterns"));
 
    time_t const now = time(NULL);
-
-   setlocale(LC_TIME, "C");
-
-   char datestr[128];
-   if (strftime(datestr, sizeof(datestr), "%a, %d %b %Y %H:%M:%S UTC",
-                gmtime(&now)) == 0)
-   {
-      datestr[0] = '\0';
-   }
-
    time_t const validuntil = now + _config->FindI("APT::FTPArchive::Release::ValidTime", 0);
-   char validstr[128];
-   if (now == validuntil ||
-       strftime(validstr, sizeof(validstr), "%a, %d %b %Y %H:%M:%S UTC",
-                gmtime(&validuntil)) == 0)
-   {
-      validstr[0] = '\0';
-   }
-
-   setlocale(LC_TIME, "");
 
    map<string,string> Fields;
    Fields["Origin"] = "";
@@ -1004,12 +1010,16 @@ ReleaseWriter::ReleaseWriter(FileFd * const GivenOutput, string const &/*DB*/) :
    Fields["Suite"] = "";
    Fields["Version"] = "";
    Fields["Codename"] = "";
-   Fields["Date"] = datestr;
-   Fields["Valid-Until"] = validstr;
+   Fields["Date"] = formatUTCDateTime(now);
+   if (validuntil != now)
+      Fields["Valid-Until"] = formatUTCDateTime(validuntil);
    Fields["Architectures"] = "";
    Fields["Components"] = "";
    Fields["Description"] = "";
-
+   Fields["Signed-By"] = "";
+   if (_config->FindB("APT::FTPArchive::DoByHash", false) == true)
+      Fields["Acquire-By-Hash"] = "true";
+   
    for(map<string,string>::const_iterator I = Fields.begin();
        I != Fields.end();
        ++I)
@@ -1061,6 +1071,33 @@ bool ReleaseWriter::DoPackage(string FileName)
    CheckSums[NewFileName].Hashes = hs.GetHashStringList();
    fd.Close();
 
+   // FIXME: wrong layer in the code(?)
+   // FIXME2: symlink instead of create a copy
+   if (_config->FindB("APT::FTPArchive::DoByHash", false) == true)
+   {
+      std::string Input = FileName;
+      HashStringList hsl = hs.GetHashStringList();
+      for(HashStringList::const_iterator h = hsl.begin();
+          h != hsl.end(); ++h)
+      {
+         if (!h->usable())
+            continue;
+         if (flNotDir(FileName) == "Release" || flNotDir(FileName) == "InRelease")
+            continue;
+
+         std::string ByHashOutputFile = GenByHashFilename(Input, *h);
+         std::string ByHashOutputDir = flNotFile(ByHashOutputFile);
+         if(!CreateDirectory(flNotFile(Input), ByHashOutputDir))
+            return _error->Warning("can not create dir %s", flNotFile(ByHashOutputFile).c_str());
+
+         // write new hashes
+         FileFd In(Input, FileFd::ReadOnly);
+         FileFd Out(ByHashOutputFile, FileFd::WriteEmpty);
+         if(!CopyFile(In, Out))
+            return _error->Warning("failed to copy %s %s", Input.c_str(), ByHashOutputFile.c_str());
+      }
+   }
+
    return true;
 }
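
Assumed layout behind GenByHashFilename() (the helper itself lives in the new byhash.h/byhash.cc and is not shown in this diff): an index such as dists/unstable/main/binary-amd64/Packages.xz is copied to .../binary-amd64/by-hash/<HashType>/<HashValue>. A hedged standalone sketch of that path construction:

#include <iostream>
#include <string>

// assumption: mirrors flNotFile(), i.e. the directory part including the trailing '/'
static std::string DirOf(std::string const &Path)
{
   std::string::size_type const Slash = Path.rfind('/');
   return Slash == std::string::npos ? "./" : Path.substr(0, Slash + 1);
}

static std::string ByHashName(std::string const &Index, std::string const &HashType, std::string const &HashValue)
{
   return DirOf(Index) + "by-hash/" + HashType + "/" + HashValue;
}

int main()
{
   // "0123..." stands in for the full hex digest of the file's content
   std::cout << ByHashName("dists/unstable/main/binary-amd64/Packages.xz", "SHA256", "0123...") << '\n';
   // prints: dists/unstable/main/binary-amd64/by-hash/SHA256/0123...
   return 0;
}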
 
@@ -1098,4 +1135,40 @@ void ReleaseWriter::Finish()
       printChecksumTypeRecord(*Output, "SHA256", CheckSums);
    if ((DoHashes & Hashes::SHA512SUM) == Hashes::SHA512SUM)
       printChecksumTypeRecord(*Output, "SHA512", CheckSums);
+
+   // go by-hash cleanup
+   map<string,ReleaseWriter::CheckSum>::const_iterator prev = CheckSums.begin();
+   if (_config->FindB("APT::FTPArchive::DoByHash", false) == true)
+   {
+      for(map<string,ReleaseWriter::CheckSum>::const_iterator I = CheckSums.begin();
+        I != CheckSums.end(); ++I)
+      {
+         if (I->first == "Release" || I->first == "InRelease")
+            continue;
+
+         // keep iterating until we find a new subdir
+         if(flNotFile(I->first) == flNotFile(prev->first))
+            continue;
+
+         // clean that subdir up
+         int keepFiles = _config->FindI("APT::FTPArchive::By-Hash-Keep", 3);
+         // calculate how many compressors are used (the amount of files
+         // in that subdir generated for this run)
+         keepFiles *= std::distance(prev, I);
+         prev = I;
+
+         HashStringList hsl = prev->second.Hashes;
+         for(HashStringList::const_iterator h = hsl.begin();
+             h != hsl.end(); ++h)
+         {
+
+            if (!h->usable())
+               continue;
+
+            std::string RealFilename = DirStrip+"/"+prev->first;
+            std::string ByHashOutputFile = GenByHashFilename(RealFilename, *h);
+            DeleteAllButMostRecent(flNotFile(ByHashOutputFile), keepFiles);
+         }
+      }
+   }
 }
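
Worked example of the keep-count arithmetic in the cleanup loop: with the default APT::FTPArchive::By-Hash-Keep of 3 and three index files generated for a subdirectory in this run, std::distance(prev, I) is 3, so up to 9 by-hash files survive per hash directory. Standalone sketch with hypothetical CheckSums keys:

#include <iostream>
#include <iterator>
#include <map>
#include <string>

int main()
{
   // hypothetical CheckSums keys for one run, ordered by path as in the real map
   std::map<std::string, int> CheckSums = {
      {"main/binary-amd64/Packages", 0},
      {"main/binary-amd64/Packages.gz", 0},
      {"main/binary-amd64/Packages.xz", 0},
      {"main/source/Sources", 0},
   };

   auto prev = CheckSums.begin();
   auto I = CheckSums.find("main/source/Sources");  // first entry of the next subdir
   int keepFiles = 3;                               // APT::FTPArchive::By-Hash-Keep default
   keepFiles *= std::distance(prev, I);             // 3 files for binary-amd64 -> keep 9
   std::cout << keepFiles << std::endl;             // 9
   return 0;
}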