#include <unistd.h>
#include <ctime>
#include <iostream>
+#include <iomanip>
#include <sstream>
#include <memory>
#include <utility>
+#include <algorithm>
+#include <iterator>
#include "apt-ftparchive.h"
#include "writer.h"
#include "cachedb.h"
#include "multicompress.h"
+#include "byhash.h"
#include <apti18n.h>
/*}}}*/
// ConfigToDoHashes - which hashes to generate /*{{{*/
static void SingleConfigToDoHashes(unsigned int &DoHashes, std::string const &Conf, unsigned int const Flag)
{
- if (_config->FindB(Conf, true) == true)
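+ // default to the hash's current state so the flag only changes when the option is set explicitly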
+ if (_config->FindB(Conf, (DoHashes & Flag) == Flag) == true)
DoHashes |= Flag;
else
DoHashes &= ~Flag;
/*}}}*/
// FTWScanner::FTWScanner - Constructor /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-FTWScanner::FTWScanner(FileFd * const GivenOutput, string const &Arch): Arch(Arch), DoHashes(~0)
+FTWScanner::FTWScanner(FileFd * const GivenOutput, string const &Arch, bool const IncludeArchAll)
+ : Arch(Arch), IncludeArchAll(IncludeArchAll), DoHashes(~0)
{
if (GivenOutput == NULL)
{
Output = new FileFd;
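+ // remember that we allocated this FileFd ourselves so the destructor can release it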
+ OwnsOutput = true;
Output->OpenDescriptor(STDOUT_FILENO, FileFd::WriteOnly, false);
}
else
+ {
Output = GivenOutput;
+ OwnsOutput = false;
+ }
ErrorPrinted = false;
NoLinkAct = !_config->FindB("APT::FTPArchive::DeLinkAct",true);
ConfigToDoHashes(DoHashes, "APT::FTPArchive");
}
/*}}}*/
+FTWScanner::~FTWScanner()
+{
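+ // release Output only if the constructor created it; a caller-provided FileFd is left alone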
+ if (Output != NULL && OwnsOutput)
+ delete Output;
+}
// FTWScanner::Scanner - FTW Scanner /*{{{*/
// ---------------------------------------------------------------------
/* This is the FTW scanner, it processes each directory element in the
_error->Errno("readlink",_("Failed to readlink %s"),OriginalPath);
else
{
- if (unlink(OriginalPath) != 0)
- _error->Errno("unlink",_("Failed to unlink %s"),OriginalPath);
- else
+ if (RemoveFile("FTWScanner::Delink", OriginalPath))
{
if (link(FileName.c_str(),OriginalPath) != 0)
{
FileName = OriginalPath;
}
+ return true;
+}
+ /*}}}*/
+// FTWScanner::SetExts - Set extensions to support /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool FTWScanner::SetExts(string const &Vals)
+{
+ ClearPatterns();
+ string::size_type Start = 0;
+ while (Start < Vals.length())
+ {
+ string::size_type const Space = Vals.find(' ',Start);
+ string::size_type const Length = ((Space == string::npos) ? Vals.length() : Space) - Start;
+ if ( Arch.empty() == false )
+ {
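+ // e.g. Arch "amd64" with extension ".deb" yields "*_amd64.deb" and, when
+ // arch:all packages are included, "*_all.deb" as well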
+ AddPattern(string("*_") + Arch + Vals.substr(Start, Length));
+ if (IncludeArchAll == true && Arch != "all")
+ AddPattern(string("*_all") + Vals.substr(Start, Length));
+ }
+ else
+ AddPattern(string("*") + Vals.substr(Start, Length));
+
+ Start += Length + 1;
+ }
+
return true;
}
/*}}}*/
// PackagesWriter::PackagesWriter - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
-PackagesWriter::PackagesWriter(FileFd * const GivenOutput, string const &DB,string const &Overrides,string const &ExtOverrides,
- string const &Arch) :
- FTWScanner(GivenOutput, Arch), Db(DB), Stats(Db.Stats), TransWriter(NULL)
+PackagesWriter::PackagesWriter(FileFd * const GivenOutput, TranslationWriter * const transWriter,
+ string const &DB,string const &Overrides,string const &ExtOverrides,
+ string const &Arch, bool const IncludeArchAll) :
+ FTWScanner(GivenOutput, Arch, IncludeArchAll), Db(DB), Stats(Db.Stats), TransWriter(transWriter)
{
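+ // the shared TranslationWriter is owned by the caller; we only keep the borrowed pointer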
SetExts(".deb .udeb");
DeLinkLimit = 0;
_error->DumpErrors();
}
/*}}}*/
-// FTWScanner::SetExts - Set extensions to support /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-bool FTWScanner::SetExts(string const &Vals)
-{
- ClearPatterns();
- string::size_type Start = 0;
- while (Start <= Vals.length()-1)
- {
- string::size_type const Space = Vals.find(' ',Start);
- string::size_type const Length = ((Space == string::npos) ? Vals.length() : Space) - Start;
- if ( Arch.empty() == false )
- {
- AddPattern(string("*_") + Arch + Vals.substr(Start, Length));
- AddPattern(string("*_all") + Vals.substr(Start, Length));
- }
- else
- AddPattern(string("*") + Vals.substr(Start, Length));
-
- Start += Length + 1;
- }
-
- return true;
-}
-
- /*}}}*/
// PackagesWriter::DoPackage - Process a single package /*{{{*/
// ---------------------------------------------------------------------
/* This method takes a package and gets its control information and
Architecture = Arch;
else
Architecture = Tags.FindS("Architecture");
- auto_ptr<Override::Item> OverItem(Over.GetItem(Package,Architecture));
+ unique_ptr<Override::Item> OverItem(Over.GetItem(Package,Architecture));
if (Package.empty() == true)
return _error->Error(_("Archive had no package field"));
ioprintf(c1out, _(" %s has no override entry\n"), Package.c_str());
}
- OverItem = auto_ptr<Override::Item>(new Override::Item);
+ OverItem = unique_ptr<Override::Item>(new Override::Item);
OverItem->FieldOverride["Section"] = Tags.FindS("Section");
OverItem->Priority = Tags.FindS("Priority");
}
return Db.Finish();
}
/*}}}*/
+PackagesWriter::~PackagesWriter() /*{{{*/
+{
+}
+ /*}}}*/
// TranslationWriter::TranslationWriter - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* Create a Translation-Master file for this Packages file */
TranslationWriter::TranslationWriter(string const &File, string const &TransCompress,
- mode_t const &Permissions) : RefCounter(0)
+ mode_t const &Permissions) : Comp(NULL), Output(NULL)
{
if (File.empty() == true)
return;
/* */
TranslationWriter::~TranslationWriter()
{
- if (Comp == NULL)
- return;
-
- delete Comp;
+ if (Comp != NULL)
+ delete Comp;
}
/*}}}*/
// SourcesWriter::DoPackage - Process a single package /*{{{*/
static std::string getDscHash(unsigned int const DoHashes,
Hashes::SupportedHashes const DoIt, pkgTagSection &Tags, char const * const FieldName,
- HashString const * const Hash, unsigned long long Size, std::string FileName)
+ HashString const * const Hash, unsigned long long Size, std::string const &FileName)
{
if ((DoHashes & DoIt) != DoIt || Tags.Exists(FieldName) == false || Hash == NULL)
return "";
std::ostringstream out;
- out << "\n " << Hash->HashValue() << " " << Size << " " << FileName
+ out << "\n " << Hash->HashValue() << " " << std::to_string(Size) << " " << FileName
<< "\n " << Tags.FindS(FieldName);
return out.str();
}
string BestPrio;
string Bins = Tags.FindS("Binary");
char Buffer[Bins.length() + 1];
- auto_ptr<Override::Item> OverItem(0);
+ unique_ptr<Override::Item> OverItem(nullptr);
if (Bins.empty() == false)
{
strcpy(Buffer,Bins.c_str());
unsigned char BestPrioV = pkgCache::State::Extra;
for (unsigned I = 0; BinList[I] != 0; I++)
{
- auto_ptr<Override::Item> Itm(BOver.GetItem(BinList[I]));
+ unique_ptr<Override::Item> Itm(BOver.GetItem(BinList[I]));
if (Itm.get() == 0)
continue;
}
if (OverItem.get() == 0)
- OverItem = Itm;
+ OverItem = std::move(Itm);
}
}
ioprintf(c1out, _(" %s has no override entry\n"), Tags.FindS("Source").c_str());
}
- OverItem = auto_ptr<Override::Item>(new Override::Item);
+ OverItem.reset(new Override::Item);
}
struct stat St;
if (stat(FileName.c_str(), &St) != 0)
return _error->Errno("fstat","Failed to stat %s",FileName.c_str());
- auto_ptr<Override::Item> SOverItem(SOver.GetItem(Tags.FindS("Source")));
- // const auto_ptr<Override::Item> autoSOverItem(SOverItem);
+ unique_ptr<Override::Item> SOverItem(SOver.GetItem(Tags.FindS("Source")));
+ // const unique_ptr<Override::Item> autoSOverItem(SOverItem);
if (SOverItem.get() == 0)
{
ioprintf(c1out, _(" %s has no source override entry\n"), Tags.FindS("Source").c_str());
- SOverItem = auto_ptr<Override::Item>(BOver.GetItem(Tags.FindS("Source")));
+ SOverItem = unique_ptr<Override::Item>(BOver.GetItem(Tags.FindS("Source")));
if (SOverItem.get() == 0)
{
ioprintf(c1out, _(" %s has no binary override entry either\n"), Tags.FindS("Source").c_str());
- SOverItem = auto_ptr<Override::Item>(new Override::Item);
+ SOverItem = unique_ptr<Override::Item>(new Override::Item);
*SOverItem = *OverItem;
}
}
if (Tags.Exists(fieldname) == true)
continue;
std::ostringstream streamout;
- streamout << "\n " << hs->HashValue() << " " << Db.GetFileSize() << " " << ParseJnk;
+ streamout << "\n " << hs->HashValue() << " " << std::to_string(Db.GetFileSize()) << " " << ParseJnk;
out->append(streamout.str());
}
// ContentsWriter::ContentsWriter - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
-ContentsWriter::ContentsWriter(FileFd * const GivenOutput, string const &DB, string const &Arch) :
- FTWScanner(GivenOutput, Arch), Db(DB), Stats(Db.Stats)
+ContentsWriter::ContentsWriter(FileFd * const GivenOutput, string const &DB,
+ string const &Arch, bool const IncludeArchAll) :
+ FTWScanner(GivenOutput, Arch, IncludeArchAll), Db(DB), Stats(Db.Stats)
{
SetExts(".deb");
// ReleaseWriter::ReleaseWriter - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
+static std::string formatUTCDateTime(time_t const now)
+{
+ bool const NumericTimezone = _config->FindB("APT::FTPArchive::Release::NumericTimezone", true);
+ // TimeRFC1123 uses GMT to satisfy HTTP/1.1
+ std::string datetime = TimeRFC1123(now, NumericTimezone);
+ if (NumericTimezone == false)
+ {
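+ // TimeRFC1123 ends the string with "GMT"; swap in "UTC" to keep the spelling apt-ftparchive has always written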
+ auto const lastspace = datetime.rfind(' ');
+ if (likely(lastspace != std::string::npos))
+ datetime.replace(lastspace + 1, 3, "UTC");
+ }
+ return datetime;
+}
ReleaseWriter::ReleaseWriter(FileFd * const GivenOutput, string const &/*DB*/) : FTWScanner(GivenOutput)
{
if (_config->FindB("APT::FTPArchive::Release::Default-Patterns", true) == true)
{
AddPattern("Packages");
- AddPattern("Packages.gz");
- AddPattern("Packages.bz2");
- AddPattern("Packages.lzma");
- AddPattern("Packages.xz");
+ AddPattern("Packages.*");
AddPattern("Translation-*");
AddPattern("Sources");
- AddPattern("Sources.gz");
- AddPattern("Sources.bz2");
- AddPattern("Sources.lzma");
- AddPattern("Sources.xz");
+ AddPattern("Sources.*");
AddPattern("Release");
AddPattern("Contents-*");
AddPattern("Index");
+ AddPattern("Index.*");
+ AddPattern("icons-*.tar");
+ AddPattern("icons-*.tar.*");
+ AddPattern("Components-*.yml");
+ AddPattern("Components-*.yml.*");
AddPattern("md5sum.txt");
}
AddPatterns(_config->FindVector("APT::FTPArchive::Release::Patterns"));
time_t const now = time(NULL);
-
- setlocale(LC_TIME, "C");
-
- char datestr[128];
- if (strftime(datestr, sizeof(datestr), "%a, %d %b %Y %H:%M:%S UTC",
- gmtime(&now)) == 0)
- {
- datestr[0] = '\0';
- }
-
time_t const validuntil = now + _config->FindI("APT::FTPArchive::Release::ValidTime", 0);
- char validstr[128];
- if (now == validuntil ||
- strftime(validstr, sizeof(validstr), "%a, %d %b %Y %H:%M:%S UTC",
- gmtime(&validuntil)) == 0)
- {
- validstr[0] = '\0';
- }
-
- setlocale(LC_TIME, "");
+ map<string,bool> BoolFields;
map<string,string> Fields;
Fields["Origin"] = "";
Fields["Label"] = "";
Fields["Suite"] = "";
Fields["Version"] = "";
Fields["Codename"] = "";
- Fields["Date"] = datestr;
- Fields["Valid-Until"] = validstr;
+ Fields["Date"] = formatUTCDateTime(now);
+ if (validuntil != now)
+ Fields["Valid-Until"] = formatUTCDateTime(validuntil);
Fields["Architectures"] = "";
Fields["Components"] = "";
Fields["Description"] = "";
+ Fields["Signed-By"] = "";
+ BoolFields["Acquire-By-Hash"] = _config->FindB("APT::FTPArchive::DoByHash", false);
+ BoolFields["NotAutomatic"] = false;
+ BoolFields["ButAutomaticUpgrades"] = false;
- for(map<string,string>::const_iterator I = Fields.begin();
- I != Fields.end();
- ++I)
+ // Read the configured values for the string fields; they are written out below, after the bool fields are merged in
+ for (auto &&I : Fields)
{
- string Config = string("APT::FTPArchive::Release::") + (*I).first;
- string Value = _config->Find(Config, (*I).second.c_str());
- if (Value == "")
- continue;
+ string Config = string("APT::FTPArchive::Release::") + I.first;
+ I.second = _config->Find(Config, I.second);
+ }
+
+ // Read configuration for bool fields, and add them to Fields if true
+ for (auto &&I : BoolFields)
+ {
+ string Config = string("APT::FTPArchive::Release::") + I.first;
+ I.second = _config->FindB(Config, I.second);
+ if (I.second)
+ Fields[I.first] = "yes";
+ }
- std::string const out = I->first + ": " + Value + "\n";
+ // All configuration read and stored in Fields; output
+ for (auto &&I : Fields)
+ {
+ if (I.second.empty())
+ continue;
+ std::string const out = I.first + ": " + I.second + "\n";
Output->Write(out.c_str(), out.length());
}
CheckSums[NewFileName].Hashes = hs.GetHashStringList();
fd.Close();
+ // FIXME: wrong layer in the code(?)
+ // FIXME2: symlink instead of create a copy
+ if (_config->FindB("APT::FTPArchive::DoByHash", false) == true)
+ {
+ std::string Input = FileName;
+ HashStringList hsl = hs.GetHashStringList();
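+ // store a copy of the index under by-hash/<HashType>/<hash> for every hash we computed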
+ for(HashStringList::const_iterator h = hsl.begin();
+ h != hsl.end(); ++h)
+ {
+ if (!h->usable())
+ continue;
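+ // the Release files themselves are the entry points and are never fetched by hash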
+ if (flNotDir(FileName) == "Release" || flNotDir(FileName) == "InRelease")
+ continue;
+
+ std::string ByHashOutputFile = GenByHashFilename(Input, *h);
+ std::string ByHashOutputDir = flNotFile(ByHashOutputFile);
+ if(!CreateDirectory(flNotFile(Input), ByHashOutputDir))
+    return _error->Warning("could not create directory %s", ByHashOutputDir.c_str());
+
+ // write new hashes
+ FileFd In(Input, FileFd::ReadOnly);
+ FileFd Out(ByHashOutputFile, FileFd::WriteEmpty);
+ if(!CopyFile(In, Out))
+ return _error->Warning("failed to copy %s %s", Input.c_str(), ByHashOutputFile.c_str());
+ }
+ }
+
return true;
}
printChecksumTypeRecord(*Output, "SHA256", CheckSums);
if ((DoHashes & Hashes::SHA512SUM) == Hashes::SHA512SUM)
printChecksumTypeRecord(*Output, "SHA512", CheckSums);
+
+ // now do the by-hash cleanup
+ map<string,ReleaseWriter::CheckSum>::const_iterator prev = CheckSums.begin();
+ if (_config->FindB("APT::FTPArchive::DoByHash", false) == true)
+ {
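+ // CheckSums is sorted by filename, so all files of one directory are adjacent;
+ // whenever the directory changes, prune the old by-hash files of the one just passed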
+ for(map<string,ReleaseWriter::CheckSum>::const_iterator I = CheckSums.begin();
+ I != CheckSums.end(); ++I)
+ {
+ if (I->first == "Release" || I->first == "InRelease")
+ continue;
+
+ // keep iterating until we find a new subdir
+ if(flNotFile(I->first) == flNotFile(prev->first))
+ continue;
+
+ // clean that subdir up
+ int keepFiles = _config->FindI("APT::FTPArchive::By-Hash-Keep", 3);
+ // calculate how many compressors are used (the amount of files
+ // in that subdir generated for this run)
+ keepFiles *= std::distance(prev, I);
+ prev = I;
+
+ HashStringList hsl = prev->second.Hashes;
+ for(HashStringList::const_iterator h = hsl.begin();
+ h != hsl.end(); ++h)
+ {
+ if (!h->usable())
+ continue;
+
+ std::string RealFilename = DirStrip+"/"+prev->first;
+ std::string ByHashOutputFile = GenByHashFilename(RealFilename, *h);
+ DeleteAllButMostRecent(flNotFile(ByHashOutputFile), keepFiles);
+ }
+ }
+ }
}