summaryrefslogtreecommitdiffstats
path: root/ftparchive
diff options
context:
space:
mode:
Diffstat (limited to 'ftparchive')
-rw-r--r--ftparchive/CMakeLists.txt13
-rw-r--r--ftparchive/apt-ftparchive.cc1048
-rw-r--r--ftparchive/apt-ftparchive.h26
-rw-r--r--ftparchive/byhash.cc61
-rw-r--r--ftparchive/byhash.h25
-rw-r--r--ftparchive/cachedb.cc579
-rw-r--r--ftparchive/cachedb.h195
-rw-r--r--ftparchive/contents.cc409
-rw-r--r--ftparchive/contents.h92
-rw-r--r--ftparchive/multicompress.cc360
-rw-r--r--ftparchive/multicompress.h62
-rw-r--r--ftparchive/override.cc288
-rw-r--r--ftparchive/override.h48
-rw-r--r--ftparchive/sources.cc63
-rw-r--r--ftparchive/sources.h24
-rw-r--r--ftparchive/writer.cc1181
-rw-r--r--ftparchive/writer.h208
17 files changed, 4682 insertions, 0 deletions
diff --git a/ftparchive/CMakeLists.txt b/ftparchive/CMakeLists.txt
new file mode 100644
index 0000000..4af2504
--- /dev/null
+++ b/ftparchive/CMakeLists.txt
@@ -0,0 +1,13 @@
+# Berkeley DB headers are needed by cachedb.cc/contents.cc.
+# NOTE(review): this directory-wide include_directories duplicates the
+# target-scoped target_include_directories below — one of the two is
+# redundant; confirm which is intended before removing.
+include_directories(${BERKELEY_INCLUDE_DIRS})
+# Definition of the C++ files used to build the program - note that this
+# is expanded at CMake time, so you have to rerun cmake if you add or remove
+# a file (you can just run cmake . in the build directory)
+file(GLOB_RECURSE source "*.cc")
+add_executable(apt-ftparchive ${source})
+
+# Link the executables against the libraries
+target_include_directories(apt-ftparchive PRIVATE ${BERKELEY_INCLUDE_DIRS})
+target_link_libraries(apt-ftparchive apt-pkg apt-private ${BERKELEY_LIBRARIES})
+
+# Install the executables
+install(TARGETS apt-ftparchive RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
diff --git a/ftparchive/apt-ftparchive.cc b/ftparchive/apt-ftparchive.cc
new file mode 100644
index 0000000..0f65872
--- /dev/null
+++ b/ftparchive/apt-ftparchive.cc
@@ -0,0 +1,1048 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+/* ######################################################################
+
+ apt-ftparchive - Efficient work-alike for dpkg-scanpackages
+
+ Let contents be disabled from the conf
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#include <config.h>
+
+#include <apt-pkg/cmndline.h>
+#include <apt-pkg/configuration.h>
+#include <apt-pkg/error.h>
+#include <apt-pkg/fileutl.h>
+#include <apt-pkg/init.h>
+#include <apt-pkg/strutl.h>
+
+#include <apt-private/private-cmndline.h>
+#include <apt-private/private-main.h>
+#include <apt-private/private-output.h>
+
+#include <algorithm>
+#include <chrono>
+#include <climits>
+#include <functional>
+#include <iostream>
+#include <string>
+#include <vector>
+#include <locale.h>
+#include <stdio.h>
+#include <sys/stat.h>
+#include <sys/time.h>
+#include <time.h>
+#include <math.h>
+
+#include "apt-ftparchive.h"
+#include "cachedb.h"
+#include "multicompress.h"
+#include "override.h"
+#include "writer.h"
+
+#include <apti18n.h>
+ /*}}}*/
+
+using namespace std;
+unsigned Quiet = 0;
+
+// Convert steady_clock::now() into a POSIX timeval (whole seconds plus the
+// microsecond remainder). The steady clock is used so the elapsed-time
+// figures printed by the generators are immune to wall-clock adjustments.
+static struct timeval GetTimevalFromSteadyClock() /*{{{*/
+{
+ auto const Time = std::chrono::steady_clock::now().time_since_epoch();
+ auto const Time_sec = std::chrono::duration_cast<std::chrono::seconds>(Time);
+ auto const Time_usec = std::chrono::duration_cast<std::chrono::microseconds>(Time - Time_sec);
+ return { static_cast<time_t>(Time_sec.count()), static_cast<suseconds_t>(Time_usec.count()) };
+}
+ /*}}}*/
+// Seconds elapsed since StartTime (a value previously returned by
+// GetTimevalFromSteadyClock), rounded to the nearest whole second with
+// llround; the result is fed to TimeToStr() by the callers.
+static auto GetTimeDeltaSince(struct timeval StartTime) /*{{{*/
+{
+ auto const NewTime = GetTimevalFromSteadyClock();
+ std::chrono::duration<double> Delta =
+ std::chrono::seconds(NewTime.tv_sec - StartTime.tv_sec) +
+ std::chrono::microseconds(NewTime.tv_usec - StartTime.tv_usec);
+ return llround(Delta.count());
+}
+ /*}}}*/
+
+// struct PackageMap - List of all package files in the config file /*{{{*/
+// ---------------------------------------------------------------------
+/* One PackageMap describes a single output unit of a 'generate' run: a
+   Packages file, a Sources file and/or a Contents file, together with the
+   override files, cache databases and compression settings used to build
+   them. LoadTree() and LoadBinDir() fill a vector of these from the
+   generate configuration. */
+struct PackageMap
+{
+ // General Stuff
+ string BaseDir;
+ string InternalPrefix;
+ string FLFile;
+ string PkgExt;
+ string SrcExt;
+
+ // Stuff for the Package File
+ string PkgFile;
+ string BinCacheDB;
+ string SrcCacheDB;
+ string BinOverride;
+ string ExtraOverride;
+
+ // We generate for this given arch
+ string Arch;
+ bool IncludeArchAll;
+
+ // Stuff for the Source File
+ string SrcFile;
+ string SrcOverride;
+ string SrcExtraOverride;
+
+ // Translation master file
+ bool LongDesc;
+ TranslationWriter *TransWriter;
+
+ // Contents
+ string Contents;
+ string ContentsHead;
+
+ // Random things
+ string Tag;
+ string PkgCompress;
+ string CntCompress;
+ string SrcCompress;
+ string PathPrefix;
+ unsigned int DeLinkLimit;
+ mode_t Permissions;
+
+ // Progress flags: set once the corresponding output has been generated,
+ // so a single run never processes the same unit twice.
+ bool ContentsDone;
+ bool PkgDone;
+ bool SrcDone;
+ time_t ContentsMTime;
+
+ // Orders by Contents mtime — used to regenerate the stalest Contents first.
+ struct ContentsCompare
+ {
+ inline bool operator() (const PackageMap &x,const PackageMap &y)
+ {return x.ContentsMTime < y.ContentsMTime;};
+ };
+
+ // Orders by binary cache DB path — groups entries sharing a DB for IO locality.
+ struct DBCompare
+ {
+ inline bool operator() (const PackageMap &x,const PackageMap &y)
+ {return x.BinCacheDB < y.BinCacheDB;};
+ };
+
+ // Same as DBCompare but for the source cache DB.
+ struct SrcDBCompare
+ {
+ inline bool operator() (const PackageMap &x,const PackageMap &y)
+ {return x.SrcCacheDB < y.SrcCacheDB;};
+ };
+
+ void GetGeneral(Configuration &Setup,Configuration &Block);
+ bool GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats);
+ bool GenSources(Configuration &Setup,struct CacheDB::Stats &Stats);
+ bool GenContents(Configuration &Setup,
+ vector<PackageMap>::iterator Begin,
+ vector<PackageMap>::iterator End,
+ unsigned long &Left);
+
+ // Permissions(1) is only a placeholder; GetGeneral() overwrites it from
+ // Default::FileMode before the value is ever used.
+ PackageMap() : IncludeArchAll(true), LongDesc(true), TransWriter(NULL),
+ DeLinkLimit(0), Permissions(1), ContentsDone(false),
+ PkgDone(false), SrcDone(false), ContentsMTime(0) {};
+};
+ /*}}}*/
+
+// PackageMap::GetGeneral - Common per-section definitions /*{{{*/
+// ---------------------------------------------------------------------
+/* Reads the settings shared by all section types (compression schemes,
+   extension lists, file mode, delink limit) from the per-section Block,
+   falling back to the Default::* values in Setup. */
+void PackageMap::GetGeneral(Configuration &Setup,Configuration &Block)
+{
+ PathPrefix = Block.Find("PathPrefix");
+
+ // External-Links=false turns delinking on; the limit then comes from
+ // Default::DeLinkLimit (unbounded if unset).
+ if (Block.FindB("External-Links",true) == false)
+ DeLinkLimit = Setup.FindI("Default::DeLinkLimit", std::numeric_limits<unsigned int>::max());
+ else
+ DeLinkLimit = 0;
+
+ PkgCompress = Block.Find("Packages::Compress",
+ Setup.Find("Default::Packages::Compress",". gzip").c_str());
+ CntCompress = Block.Find("Contents::Compress",
+ Setup.Find("Default::Contents::Compress",". gzip").c_str());
+ SrcCompress = Block.Find("Sources::Compress",
+ Setup.Find("Default::Sources::Compress",". gzip").c_str());
+
+ SrcExt = Block.Find("Sources::Extensions",
+ Setup.Find("Default::Sources::Extensions",".dsc").c_str());
+ PkgExt = Block.Find("Packages::Extensions",
+ Setup.Find("Default::Packages::Extensions",".deb").c_str());
+
+ // NOTE(review): this unconditionally overwrites the Permissions value the
+ // callers (LoadTree/LoadBinDir) set from the per-section FileMode before
+ // calling here — confirm the per-section value is not meant to win.
+ Permissions = Setup.FindI("Default::FileMode",0644);
+
+ if (FLFile.empty() == false)
+ FLFile = flCombine(Setup.Find("Dir::FileListDir"),FLFile);
+
+ // A Contents value of a single space is the sentinel for "no Contents
+ // file"; normalise it to empty so GenContents() skips this entry.
+ if (Contents == " ")
+ Contents= string();
+}
+ /*}}}*/
+// PackageMap::GenPackages - Actually generate a Package file /*{{{*/
+// ---------------------------------------------------------------------
+/* This generates the Package File described by this object: it scans the
+   archive tree (or reads the file list), writes the index through the
+   MultiCompress pipeline, prints progress to c0out and folds the writer's
+   statistics into Stats. Returns true on success. */
+bool PackageMap::GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats)
+{
+ // Nothing to do for sections without a Packages output.
+ if (PkgFile.empty() == true)
+ return true;
+
+ string ArchiveDir = Setup.FindDir("Dir::ArchiveDir");
+ string OverrideDir = Setup.FindDir("Dir::OverrideDir");
+ string CacheDir = Setup.FindDir("Dir::CacheDir");
+
+ struct timeval StartTime = GetTimevalFromSteadyClock();
+
+ PkgDone = true;
+
+ // Create a package writer object. Comp.Input is the uncompressed stream
+ // that MultiCompress fans out to each configured compressor.
+ MultiCompress Comp(flCombine(ArchiveDir,PkgFile),
+ PkgCompress,Permissions);
+ PackagesWriter Packages(&Comp.Input, TransWriter, flCombine(CacheDir,BinCacheDB),
+ flCombine(OverrideDir,BinOverride),
+ flCombine(OverrideDir,ExtraOverride),
+ Arch, IncludeArchAll);
+ if (PkgExt.empty() == false && Packages.SetExts(PkgExt) == false)
+ return _error->Error(_("Package extension list is too long"));
+ if (_error->PendingError() == true)
+ return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
+
+ Packages.PathPrefix = PathPrefix;
+ Packages.DirStrip = ArchiveDir;
+ Packages.InternalPrefix = flCombine(ArchiveDir,InternalPrefix);
+
+ Packages.LongDescription = LongDesc;
+
+ // Carry the running delink byte count across sections.
+ Packages.Stats.DeLinkBytes = Stats.DeLinkBytes;
+ Packages.DeLinkLimit = DeLinkLimit;
+
+ if (_error->PendingError() == true)
+ return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
+
+ c0out << ' ' << BaseDir << ":" << flush;
+
+ // Do recursive directory searching
+ if (FLFile.empty() == true)
+ {
+ if (Packages.RecursiveScan(flCombine(ArchiveDir,BaseDir)) == false)
+ return false;
+ }
+ else
+ {
+ if (Packages.LoadFileList(ArchiveDir,FLFile) == false)
+ return false;
+ }
+
+ Packages.Output = 0; // Just in case
+
+ // Finish compressing
+ unsigned long long Size;
+ if (Comp.Finalize(Size) == false)
+ {
+ c0out << endl;
+ return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
+ }
+
+ // Size is non-zero only when the output actually changed on disk.
+ if (Size != 0)
+ c0out << " New "
+ << SizeToStr(Size) << "B ";
+ else
+ c0out << ' ';
+
+ c0out << Packages.Stats.Packages << " files " <<
+/* SizeToStr(Packages.Stats.MD5Bytes) << "B/" << */
+ SizeToStr(Packages.Stats.Bytes) << "B " <<
+ TimeToStr(GetTimeDeltaSince(StartTime)) << endl;
+
+ if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true)
+ c0out << " Misses in Cache: " << Packages.Stats.Misses<< endl;
+
+ Stats.Add(Packages.Stats);
+ Stats.DeLinkBytes = Packages.Stats.DeLinkBytes;
+
+ return !_error->PendingError();
+}
+
+ /*}}}*/
+// PackageMap::GenSources - Actually generate a Source file /*{{{*/
+// ---------------------------------------------------------------------
+/* This generates the Sources File described by this object. Mirrors
+   GenPackages() but drives a SourcesWriter over .dsc files, using the
+   binary override plus the source-specific override files. */
+bool PackageMap::GenSources(Configuration &Setup,struct CacheDB::Stats &Stats)
+{
+ // Nothing to do for sections without a Sources output.
+ if (SrcFile.empty() == true)
+ return true;
+
+ string ArchiveDir = Setup.FindDir("Dir::ArchiveDir");
+ string OverrideDir = Setup.FindDir("Dir::OverrideDir");
+ string CacheDir = Setup.FindDir("Dir::CacheDir");
+
+ struct timeval StartTime = GetTimevalFromSteadyClock();
+ SrcDone = true;
+
+ // Create a package writer object.
+ MultiCompress Comp(flCombine(ArchiveDir,SrcFile),
+ SrcCompress,Permissions);
+ SourcesWriter Sources(&Comp.Input, flCombine(CacheDir, SrcCacheDB),
+ flCombine(OverrideDir,BinOverride),
+ flCombine(OverrideDir,SrcOverride),
+ flCombine(OverrideDir,SrcExtraOverride));
+ if (SrcExt.empty() == false && Sources.SetExts(SrcExt) == false)
+ return _error->Error(_("Source extension list is too long"));
+ if (_error->PendingError() == true)
+ return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
+
+ Sources.PathPrefix = PathPrefix;
+ Sources.DirStrip = ArchiveDir;
+ Sources.InternalPrefix = flCombine(ArchiveDir,InternalPrefix);
+
+ // Carry the running delink byte count across sections.
+ Sources.DeLinkLimit = DeLinkLimit;
+ Sources.Stats.DeLinkBytes = Stats.DeLinkBytes;
+
+ if (_error->PendingError() == true)
+ return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
+
+ c0out << ' ' << BaseDir << ":" << flush;
+
+ // Do recursive directory searching
+ if (FLFile.empty() == true)
+ {
+ if (Sources.RecursiveScan(flCombine(ArchiveDir,BaseDir))== false)
+ return false;
+ }
+ else
+ {
+ if (Sources.LoadFileList(ArchiveDir,FLFile) == false)
+ return false;
+ }
+ Sources.Output = 0; // Just in case
+
+ // Finish compressing
+ unsigned long long Size;
+ if (Comp.Finalize(Size) == false)
+ {
+ c0out << endl;
+ return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
+ }
+
+ // Size is non-zero only when the output actually changed on disk.
+ if (Size != 0)
+ c0out << " New "
+ << SizeToStr(Size) << "B ";
+ else
+ c0out << ' ';
+
+ c0out << Sources.Stats.Packages << " pkgs in " <<
+ TimeToStr(GetTimeDeltaSince(StartTime)) << endl;
+
+ if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true)
+ c0out << " Misses in Cache: " << Sources.Stats.Misses << endl;
+
+ Stats.Add(Sources.Stats);
+ Stats.DeLinkBytes = Sources.Stats.DeLinkBytes;
+
+ return !_error->PendingError();
+}
+ /*}}}*/
+// PackageMap::GenContents - Actually generate a Contents file /*{{{*/
+// ---------------------------------------------------------------------
+/* This generates the contents file partially described by this object.
+ It searches the given iterator range for other package files that map
+ into this contents file and includes their data as well when building.
+ Left is a running byte budget (Default::MaxContentsChange): it is
+ decremented by the size of any newly written output and the function is
+ a no-op once it reaches zero. */
+bool PackageMap::GenContents(Configuration &Setup,
+ vector<PackageMap>::iterator Begin,
+ vector<PackageMap>::iterator End,
+ unsigned long &Left)
+{
+ if (Contents.empty() == true)
+ return true;
+
+ // Byte budget exhausted by an earlier Contents file.
+ if (Left == 0)
+ return true;
+
+ string ArchiveDir = Setup.FindDir("Dir::ArchiveDir");
+ string CacheDir = Setup.FindDir("Dir::CacheDir");
+ string OverrideDir = Setup.FindDir("Dir::OverrideDir");
+
+ struct timeval StartTime = GetTimevalFromSteadyClock();
+
+ // Create a package writer object. The local 'Contents' writer below
+ // shadows the member string, hence the explicit this->Contents here.
+ MultiCompress Comp(flCombine(ArchiveDir,this->Contents),
+ CntCompress,Permissions);
+ Comp.UpdateMTime = Setup.FindI("Default::ContentsAge",10)*24*60*60;
+ ContentsWriter Contents(&Comp.Input, "", Arch, IncludeArchAll);
+ if (PkgExt.empty() == false && Contents.SetExts(PkgExt) == false)
+ return _error->Error(_("Package extension list is too long"));
+ if (_error->PendingError() == true)
+ return false;
+
+ // NOTE(review): duplicate of the check directly above — harmless but
+ // one of the two can presumably be dropped.
+ if (_error->PendingError() == true)
+ return false;
+
+ // Write the header out.
+ if (ContentsHead.empty() == false)
+ {
+ FileFd Head(flCombine(OverrideDir,ContentsHead),FileFd::ReadOnly);
+ if (_error->PendingError() == true)
+ return false;
+
+ // Copy the header file verbatim into the compressor in 4K chunks.
+ unsigned long long Size = Head.Size();
+ unsigned char Buf[4096];
+ while (Size != 0)
+ {
+ unsigned long long ToRead = Size;
+ if (Size > sizeof(Buf))
+ ToRead = sizeof(Buf);
+
+ if (Head.Read(Buf,ToRead) == false)
+ return false;
+
+ if (Comp.Input.Write(Buf, ToRead) == false)
+ return _error->Errno("fwrite",_("Error writing header to contents file"));
+
+ Size -= ToRead;
+ }
+ }
+
+ /* Go over all the package file records and parse all the package
+ files associated with this contents file into one great big honking
+ memory structure, then dump the sorted version */
+ c0out << ' ' << this->Contents << ":" << flush;
+ for (vector<PackageMap>::iterator I = Begin; I != End; ++I)
+ {
+ // Only merge entries that target this same Contents output path.
+ if (I->Contents != this->Contents)
+ continue;
+
+ Contents.Prefix = ArchiveDir;
+ Contents.ReadyDB(flCombine(CacheDir,I->BinCacheDB));
+ Contents.ReadFromPkgs(flCombine(ArchiveDir,I->PkgFile),
+ I->PkgCompress);
+
+ // Mark the sibling so the caller does not regenerate it separately.
+ I->ContentsDone = true;
+ }
+
+ Contents.Finish();
+
+ // Finish compressing
+ unsigned long long Size;
+ if (Comp.Finalize(Size) == false || _error->PendingError() == true)
+ {
+ c0out << endl;
+ return _error->Error(_("Error processing contents %s"),
+ this->Contents.c_str());
+ }
+
+ // Charge newly written bytes against the remaining budget.
+ if (Size != 0)
+ {
+ c0out << " New " << SizeToStr(Size) << "B ";
+ if (Left > Size)
+ Left -= Size;
+ else
+ Left = 0;
+ }
+ else
+ c0out << ' ';
+
+ if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true)
+ c0out << " Misses in Cache: " << Contents.Stats.Misses<< endl;
+
+ c0out << Contents.Stats.Packages << " files " <<
+ SizeToStr(Contents.Stats.Bytes) << "B " <<
+ TimeToStr(GetTimeDeltaSince(StartTime)) << endl;
+
+ return true;
+}
+ /*}}}*/
+
+// LoadTree - Load a 'tree' section from the Generate Config /*{{{*/
+// ---------------------------------------------------------------------
+/* This populates the PkgList with all the possible permutations of the
+   section/arch lists. One PackageMap is pushed per (section, arch) pair;
+   the pseudo-arch "source" yields a Sources entry instead of a Packages
+   one. TranslationWriters created here are collected in TransList so the
+   caller can destroy them via UnloadTree(). */
+static void LoadTree(vector<PackageMap> &PkgList, std::vector<TranslationWriter*> &TransList, Configuration &Setup)
+{
+ // Load the defaults
+ string DDir = Setup.Find("TreeDefault::Directory",
+ "$(DIST)/$(SECTION)/binary-$(ARCH)/");
+ string DSDir = Setup.Find("TreeDefault::SrcDirectory",
+ "$(DIST)/$(SECTION)/source/");
+ string DPkg = Setup.Find("TreeDefault::Packages",
+ "$(DIST)/$(SECTION)/binary-$(ARCH)/Packages");
+ string DTrans = Setup.Find("TreeDefault::Translation",
+ "$(DIST)/$(SECTION)/i18n/Translation-en");
+ string DIPrfx = Setup.Find("TreeDefault::InternalPrefix",
+ "$(DIST)/$(SECTION)/");
+ string DContents = Setup.Find("TreeDefault::Contents",
+ "$(DIST)/$(SECTION)/Contents-$(ARCH)");
+ string DContentsH = Setup.Find("TreeDefault::Contents::Header","");
+ string DBCache = Setup.Find("TreeDefault::BinCacheDB",
+ "packages-$(ARCH).db");
+ string SrcDBCache = Setup.Find("TreeDefault::SrcCacheDB",
+ "sources-$(SECTION).db");
+ string DSources = Setup.Find("TreeDefault::Sources",
+ "$(DIST)/$(SECTION)/source/Sources");
+ string DFLFile = Setup.Find("TreeDefault::FileList", "");
+ string DSFLFile = Setup.Find("TreeDefault::SourceFileList", "");
+
+ mode_t const Permissions = Setup.FindI("Default::FileMode",0644);
+
+ bool const LongDescription = Setup.FindB("Default::LongDescription",
+ _config->FindB("APT::FTPArchive::LongDescription", true));
+ string const TranslationCompress = Setup.Find("Default::Translation::Compress",". gzip").c_str();
+ bool const ConfIncludeArchAllExists = _config->Exists("APT::FTPArchive::IncludeArchitectureAll");
+ bool const ConfIncludeArchAll = _config->FindB("APT::FTPArchive::IncludeArchitectureAll", true);
+
+ // Process 'tree' type sections
+ const Configuration::Item *Top = Setup.Tree("tree");
+ for (Top = (Top == 0?0:Top->Child); Top != 0;)
+ {
+ Configuration Block(Top);
+ string Dist = Top->Tag;
+
+ // Parse the sections
+ string Tmp = Block.Find("Sections");
+ const char *Sections = Tmp.c_str();
+ string Section;
+ while (ParseQuoteWord(Sections,Section) == true)
+ {
+ // Substitution table for $(DIST)/$(SECTION)/$(ARCH) in the
+ // configured path templates; the ARCH slot is filled per loop below.
+ struct SubstVar Vars[] = {{"$(DIST)",&Dist},
+ {"$(SECTION)",&Section},
+ {"$(ARCH)",nullptr},
+ {nullptr, nullptr}};
+ mode_t const Perms = Block.FindI("FileMode", Permissions);
+ bool const LongDesc = Block.FindB("LongDescription", LongDescription);
+ TranslationWriter *TransWriter = nullptr;
+
+ std::string Tmp2 = Block.Find("Architectures");
+ std::transform(Tmp2.begin(), Tmp2.end(), Tmp2.begin(), ::tolower);
+ std::vector<std::string> const Archs = VectorizeString(Tmp2, ' ');
+ // Include arch:all packages in per-arch indexes unless "all" is an
+ // explicit architecture of its own (or the config option overrides).
+ bool IncludeArchAll;
+ if (ConfIncludeArchAllExists == true)
+ IncludeArchAll = ConfIncludeArchAll;
+ else
+ IncludeArchAll = std::find(Archs.begin(), Archs.end(), "all") == Archs.end();
+ for (auto const& Arch: Archs)
+ {
+ if (Arch.empty()) continue;
+ Vars[2].Contents = &Arch;
+ PackageMap Itm;
+ Itm.Permissions = Perms;
+ Itm.BinOverride = SubstVar(Block.Find("BinOverride"),Vars);
+ Itm.InternalPrefix = SubstVar(Block.Find("InternalPrefix",DIPrfx.c_str()),Vars);
+
+ if (Arch == "source")
+ {
+ Itm.SrcOverride = SubstVar(Block.Find("SrcOverride"),Vars);
+ Itm.BaseDir = SubstVar(Block.Find("SrcDirectory",DSDir.c_str()),Vars);
+ Itm.SrcFile = SubstVar(Block.Find("Sources",DSources.c_str()),Vars);
+ Itm.Tag = SubstVar("$(DIST)/$(SECTION)/source",Vars);
+ Itm.FLFile = SubstVar(Block.Find("SourceFileList",DSFLFile.c_str()),Vars);
+ Itm.SrcExtraOverride = SubstVar(Block.Find("SrcExtraOverride"),Vars);
+ Itm.SrcCacheDB = SubstVar(Block.Find("SrcCacheDB",SrcDBCache.c_str()),Vars);
+ }
+ else
+ {
+ Itm.BinCacheDB = SubstVar(Block.Find("BinCacheDB",DBCache.c_str()),Vars);
+ Itm.BaseDir = SubstVar(Block.Find("Directory",DDir.c_str()),Vars);
+ Itm.PkgFile = SubstVar(Block.Find("Packages",DPkg.c_str()),Vars);
+ Itm.Tag = SubstVar("$(DIST)/$(SECTION)/$(ARCH)",Vars);
+ Itm.Arch = Arch;
+ Itm.IncludeArchAll = IncludeArchAll;
+ Itm.LongDesc = LongDesc;
+ // Descriptions are split out into a Translation file only when
+ // long descriptions are disabled; one shared writer per section.
+ if (TransWriter == NULL && DTrans.empty() == false && LongDesc == false && DTrans != "/dev/null")
+ {
+ string const TranslationFile = flCombine(Setup.FindDir("Dir::ArchiveDir"),
+ SubstVar(Block.Find("Translation", DTrans.c_str()), Vars));
+ string const TransCompress = Block.Find("Translation::Compress", TranslationCompress);
+ TransWriter = new TranslationWriter(TranslationFile, TransCompress, Perms);
+ TransList.push_back(TransWriter);
+ }
+ Itm.TransWriter = TransWriter;
+ Itm.Contents = SubstVar(Block.Find("Contents",DContents.c_str()),Vars);
+ Itm.ContentsHead = SubstVar(Block.Find("Contents::Header",DContentsH.c_str()),Vars);
+ Itm.FLFile = SubstVar(Block.Find("FileList",DFLFile.c_str()),Vars);
+ Itm.ExtraOverride = SubstVar(Block.Find("ExtraOverride"),Vars);
+ }
+
+ Itm.GetGeneral(Setup,Block);
+ PkgList.push_back(Itm);
+ }
+ }
+
+ Top = Top->Next;
+ }
+}
+ /*}}}*/
+// Destroy the TranslationWriters allocated by LoadTree(), in reverse
+// creation order; their destructors flush/close the Translation files.
+static void UnloadTree(std::vector<TranslationWriter*> const &Trans) /*{{{*/
+{
+ for (std::vector<TranslationWriter*>::const_reverse_iterator T = Trans.rbegin(); T != Trans.rend(); ++T)
+ delete *T;
+}
+ /*}}}*/
+// LoadBinDir - Load a 'bindirectory' section from the Generate Config /*{{{*/
+// ---------------------------------------------------------------------
+/* A 'bindirectory' section is the simple, non-templated variant of 'tree':
+   every output path is given literally and the section tag itself is the
+   directory to scan. One PackageMap per section is appended to PkgList. */
+static void LoadBinDir(vector<PackageMap> &PkgList,Configuration &Setup)
+{
+ mode_t const Permissions = Setup.FindI("Default::FileMode",0644);
+
+ // Process 'bindirectory' type sections
+ const Configuration::Item *Top = Setup.Tree("bindirectory");
+ for (Top = (Top == 0?0:Top->Child); Top != 0;)
+ {
+ Configuration Block(Top);
+
+ PackageMap Itm;
+ Itm.PkgFile = Block.Find("Packages");
+ Itm.SrcFile = Block.Find("Sources");
+ Itm.BinCacheDB = Block.Find("BinCacheDB");
+ Itm.SrcCacheDB = Block.Find("SrcCacheDB");
+ Itm.BinOverride = Block.Find("BinOverride");
+ Itm.ExtraOverride = Block.Find("ExtraOverride");
+ Itm.SrcExtraOverride = Block.Find("SrcExtraOverride");
+ Itm.SrcOverride = Block.Find("SrcOverride");
+ // The section tag doubles as the directory to scan.
+ Itm.BaseDir = Top->Tag;
+ Itm.FLFile = Block.Find("FileList");
+ Itm.InternalPrefix = Block.Find("InternalPrefix",Top->Tag.c_str());
+ Itm.Contents = Block.Find("Contents");
+ Itm.ContentsHead = Block.Find("Contents::Header");
+ Itm.Permissions = Block.FindI("FileMode", Permissions);
+
+ Itm.GetGeneral(Setup,Block);
+ PkgList.push_back(Itm);
+
+ Top = Top->Next;
+ }
+}
+ /*}}}*/
+
+// Print the translated usage/help text. Also used as the fallback when a
+// subcommand is invoked with too few arguments. Always returns true.
+static bool ShowHelp(CommandLine &) /*{{{*/
+{
+ std::cout <<
+ _("Usage: apt-ftparchive [options] command\n"
+ "Commands: packages binarypath [overridefile [pathprefix]]\n"
+ " sources srcpath [overridefile [pathprefix]]\n"
+ " contents path\n"
+ " release path\n"
+ " generate config [groups]\n"
+ " clean config\n"
+ "\n"
+ "apt-ftparchive generates index files for Debian archives. It supports\n"
+ "many styles of generation from fully automated to functional replacements\n"
+ "for dpkg-scanpackages and dpkg-scansources\n"
+ "\n"
+ "apt-ftparchive generates Package files from a tree of .debs. The\n"
+ "Package file contains the contents of all the control fields from\n"
+ "each package as well as the MD5 hash and filesize. An override file\n"
+ "is supported to force the value of Priority and Section.\n"
+ "\n"
+ "Similarly apt-ftparchive generates Sources files from a tree of .dscs.\n"
+ "The --source-override option can be used to specify a src override file\n"
+ "\n"
+ "The 'packages' and 'sources' command should be run in the root of the\n"
+ "tree. BinaryPath should point to the base of the recursive search and \n"
+ "override file should contain the override flags. Pathprefix is\n"
+ "appended to the filename fields if present. Example usage from the \n"
+ "Debian archive:\n"
+ " apt-ftparchive packages dists/potato/main/binary-i386/ > \\\n"
+ " dists/potato/main/binary-i386/Packages\n"
+ "\n"
+ "Options:\n"
+ " -h This help text\n"
+ " --md5 Control MD5 generation\n"
+ " -s=? Source override file\n"
+ " -q Quiet\n"
+ " -d=? Select the optional caching database\n"
+ " --no-delink Enable delinking debug mode\n"
+ " --contents Control contents file generation\n"
+ " -c=? Read this configuration file\n"
+ " -o=? Set an arbitrary configuration option") << endl;
+ return true;
+}
+ /*}}}*/
+// SimpleGenPackages - Generate a Packages file for a directory tree /*{{{*/
+// ---------------------------------------------------------------------
+/* This emulates dpkg-scanpackages's command line interface. 'mostly'
+   Argv: packages binarypath [overridefile [pathprefix]]. Output goes to
+   stdout (the writer gets a NULL output stream). */
+static bool SimpleGenPackages(CommandLine &CmdL)
+{
+ if (CmdL.FileSize() < 2)
+ return ShowHelp(CmdL);
+
+ // Optional second argument is the override file.
+ string Override;
+ if (CmdL.FileSize() >= 3)
+ Override = CmdL.FileList[2];
+
+ // Create a package writer object.
+ PackagesWriter Packages(NULL, NULL, _config->Find("APT::FTPArchive::DB"),
+ Override, "", _config->Find("APT::FTPArchive::Architecture"),
+ _config->FindB("APT::FTPArchive::IncludeArchitectureAll", true));
+ if (_error->PendingError() == true)
+ return false;
+
+ // Optional third argument is prepended to the Filename: fields.
+ if (CmdL.FileSize() >= 4)
+ Packages.PathPrefix = CmdL.FileList[3];
+
+ // Do recursive directory searching
+ if (Packages.RecursiveScan(CmdL.FileList[1]) == false)
+ return false;
+
+ // Give some stats if asked for
+ if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true)
+ c0out << " Misses in Cache: " << Packages.Stats.Misses<< endl;
+
+ return true;
+}
+ /*}}}*/
+// SimpleGenContents - Generate a Contents listing /*{{{*/
+// ---------------------------------------------------------------------
+/* Argv: contents path. Scans the tree and writes the sorted Contents
+   listing to stdout (NULL output stream). */
+static bool SimpleGenContents(CommandLine &CmdL)
+{
+ if (CmdL.FileSize() < 2)
+ return ShowHelp(CmdL);
+
+ // Create a package writer object.
+ ContentsWriter Contents(NULL, _config->Find("APT::FTPArchive::DB"), _config->Find("APT::FTPArchive::Architecture"));
+ if (_error->PendingError() == true)
+ return false;
+
+ // Do recursive directory searching
+ if (Contents.RecursiveScan(CmdL.FileList[1]) == false)
+ return false;
+
+ // Sort and emit the accumulated file list.
+ Contents.Finish();
+
+ return true;
+}
+ /*}}}*/
+// SimpleGenSources - Generate a Sources file for a directory tree /*{{{*/
+// ---------------------------------------------------------------------
+/* This emulates dpkg-scanpackages's command line interface. 'mostly'
+   Argv: sources srcpath [overridefile [pathprefix]]. */
+static bool SimpleGenSources(CommandLine &CmdL)
+{
+ if (CmdL.FileSize() < 2)
+ return ShowHelp(CmdL);
+
+ string Override;
+ if (CmdL.FileSize() >= 3)
+ Override = CmdL.FileList[2];
+
+ // Default source override is "<override>.src", overridable via
+ // APT::FTPArchive::SourceOverride (the -s option).
+ string SOverride;
+ if (Override.empty() == false)
+ SOverride = Override + ".src";
+
+ SOverride = _config->Find("APT::FTPArchive::SourceOverride",
+ SOverride.c_str());
+
+ // Create a package writer object.
+ SourcesWriter Sources(NULL, _config->Find("APT::FTPArchive::DB"),Override,SOverride);
+ if (_error->PendingError() == true)
+ return false;
+
+ if (CmdL.FileSize() >= 4)
+ Sources.PathPrefix = CmdL.FileList[3];
+
+ // Do recursive directory searching
+ if (Sources.RecursiveScan(CmdL.FileList[1]) == false)
+ return false;
+
+ // Give some stats if asked for
+ if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true)
+ c0out << " Misses in Cache: " << Sources.Stats.Misses<< endl;
+
+ return true;
+}
+ /*}}}*/
+// SimpleGenRelease - Generate a Release file for a directory tree /*{{{*/
+// ---------------------------------------------------------------------
+/* Argv: release path. Hashes every file under Dir and writes the Release
+   stanza to stdout; DirStrip removes the leading path from the names. */
+static bool SimpleGenRelease(CommandLine &CmdL)
+{
+ if (CmdL.FileSize() < 2)
+ return ShowHelp(CmdL);
+
+ string Dir = CmdL.FileList[1];
+
+ ReleaseWriter Release(NULL, "");
+ Release.DirStrip = Dir;
+
+ if (_error->PendingError() == true)
+ return false;
+
+ if (Release.RecursiveScan(Dir) == false)
+ return false;
+
+ // Emit the collected checksum fields.
+ Release.Finish();
+
+ return true;
+}
+
+ /*}}}*/
+// DoGeneratePackagesAndSources - Helper for Generate /*{{{*/
+// ---------------------------------------------------------------------
+/* Runs GenPackages/GenSources over PkgList. With no extra command-line
+   arguments every entry is processed; otherwise the remaining arguments
+   are regex patterns matched against each entry's BaseDir and Tag, and
+   only the hits are processed. Per-entry failures are dumped as warnings
+   rather than aborting the whole run. */
+static bool DoGeneratePackagesAndSources(Configuration &Setup,
+ vector<PackageMap> &PkgList,
+ struct CacheDB::Stats &SrcStats,
+ struct CacheDB::Stats &Stats,
+ CommandLine &CmdL)
+{
+ if (CmdL.FileSize() <= 2)
+ {
+ for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); ++I)
+ if (I->GenPackages(Setup,Stats) == false)
+ _error->DumpErrors();
+ for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); ++I)
+ if (I->GenSources(Setup,SrcStats) == false)
+ _error->DumpErrors();
+ }
+ else
+ {
+ // Make a choice list out of the package list..
+ // Two candidate strings per entry (BaseDir and Tag) plus a terminator.
+ RxChoiceList *List = new RxChoiceList[2*PkgList.size()+1];
+ RxChoiceList *End = List;
+ for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); ++I)
+ {
+ End->UserData = &(*I);
+ End->Str = I->BaseDir.c_str();
+ End++;
+
+ End->UserData = &(*I);
+ End->Str = I->Tag.c_str();
+ End++;
+ }
+ End->Str = 0;
+
+ // Regex it
+ if (RegexChoice(List,CmdL.FileList + 2,CmdL.FileList + CmdL.FileSize()) == 0)
+ {
+ delete [] List;
+ return _error->Error(_("No selections matched"));
+ }
+ _error->DumpErrors();
+
+ // Do the generation for Packages
+ for (End = List; End->Str != 0; ++End)
+ {
+ if (End->Hit == false)
+ continue;
+
+ // PkgDone guards against an entry matched via both its strings.
+ PackageMap * const I = static_cast<PackageMap *>(End->UserData);
+ if (I->PkgDone == true)
+ continue;
+ if (I->GenPackages(Setup,Stats) == false)
+ _error->DumpErrors();
+ }
+
+ // Do the generation for Sources
+ for (End = List; End->Str != 0; ++End)
+ {
+ if (End->Hit == false)
+ continue;
+
+ PackageMap * const I = static_cast<PackageMap *>(End->UserData);
+ if (I->SrcDone == true)
+ continue;
+ if (I->GenSources(Setup,SrcStats) == false)
+ _error->DumpErrors();
+ }
+
+ delete [] List;
+ }
+ return true;
+}
+
+ /*}}}*/
+// DoGenerateContents - Helper for Generate to generate the Contents /*{{{*/
+// ---------------------------------------------------------------------
+/* Regenerates the Contents files that are out of date, oldest first,
+   stopping once the Default::MaxContentsChange byte budget is spent. */
+static bool DoGenerateContents(Configuration &Setup,
+ vector<PackageMap> &PkgList,
+ CommandLine &CmdL)
+{
+ c1out << "Packages done, Starting contents." << endl;
+
+ // Sort the contents file list by date
+ // (entries whose output is missing get "now" so they sort last but are
+ // always considered stale below)
+ string ArchiveDir = Setup.FindDir("Dir::ArchiveDir");
+ for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); ++I)
+ {
+ struct stat A;
+ if (MultiCompress::GetStat(flCombine(ArchiveDir,I->Contents),
+ I->CntCompress,A) == false)
+ time(&I->ContentsMTime);
+ else
+ I->ContentsMTime = A.st_mtime;
+ }
+ stable_sort(PkgList.begin(),PkgList.end(),PackageMap::ContentsCompare());
+
+ /* Now for Contents.. The process here is to do a make-like dependency
+ check. Each contents file is verified to be newer than the package files
+ that describe the debs it indexes. Since the package files contain
+ hashes of the .debs this means they have not changed either so the
+ contents must be up to date. */
+ unsigned long MaxContentsChange = Setup.FindI("Default::MaxContentsChange",
+ std::numeric_limits<unsigned int>::max())*1024;
+ for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); ++I)
+ {
+ // This record is not relevant
+ if (I->ContentsDone == true ||
+ I->Contents.empty() == true)
+ continue;
+
+ // Do not do everything if the user specified sections.
+ if (CmdL.FileSize() > 2 && I->PkgDone == false)
+ continue;
+
+ struct stat A,B;
+ if (MultiCompress::GetStat(flCombine(ArchiveDir,I->Contents),I->CntCompress,A) == true)
+ {
+ if (MultiCompress::GetStat(flCombine(ArchiveDir,I->PkgFile),I->PkgCompress,B) == false)
+ {
+ _error->Warning(_("Some files are missing in the package file group `%s'"),I->PkgFile.c_str());
+ continue;
+ }
+
+ // Contents newer than its Packages file => still up to date.
+ if (A.st_mtime > B.st_mtime)
+ continue;
+ }
+
+ if (I->GenContents(Setup,PkgList.begin(),PkgList.end(),
+ MaxContentsChange) == false)
+ _error->DumpErrors();
+
+ // Hit the limit?
+ if (MaxContentsChange == 0)
+ {
+ c1out << "Hit contents update byte limit" << endl;
+ break;
+ }
+ }
+
+ return true;
+}
+
+ /*}}}*/
+// Generate - Full generate, using a config file /*{{{*/
+// ---------------------------------------------------------------------
+/* Argv: generate config [groups]. Loads the 'tree' and 'bindirectory'
+   sections from the config file, then runs Packages/Sources generation
+   (unless ContentsOnly) followed by Contents generation (unless disabled),
+   printing a byte/time summary at the end. */
+static bool Generate(CommandLine &CmdL)
+{
+ struct CacheDB::Stats SrcStats;
+ if (CmdL.FileSize() < 2)
+ return ShowHelp(CmdL);
+
+ struct timeval StartTime = GetTimevalFromSteadyClock();
+ struct CacheDB::Stats Stats;
+
+ // Read the configuration file.
+ Configuration Setup;
+ if (ReadConfigFile(Setup,CmdL.FileList[1],true) == false)
+ return false;
+
+ vector<PackageMap> PkgList;
+ std::vector<TranslationWriter*> TransList;
+ LoadTree(PkgList, TransList, Setup);
+ LoadBinDir(PkgList,Setup);
+
+ // Sort by cache DB to improve IO locality.
+ stable_sort(PkgList.begin(),PkgList.end(),PackageMap::DBCompare());
+ stable_sort(PkgList.begin(),PkgList.end(),PackageMap::SrcDBCompare());
+
+ // Generate packages
+ if (_config->FindB("APT::FTPArchive::ContentsOnly", false) == false)
+ {
+ if(DoGeneratePackagesAndSources(Setup, PkgList, SrcStats, Stats, CmdL) == false)
+ {
+ UnloadTree(TransList);
+ return false;
+ }
+ } else {
+ c1out << "Skipping Packages/Sources generation" << endl;
+ }
+
+ // do Contents if needed
+ if (_config->FindB("APT::FTPArchive::Contents", true) == true)
+ if (DoGenerateContents(Setup, PkgList, CmdL) == false)
+ {
+ UnloadTree(TransList);
+ return false;
+ }
+
+ c1out << "Done. " << SizeToStr(Stats.Bytes) << "B in " << Stats.Packages
+ << " archives. Took " << TimeToStr(GetTimeDeltaSince(StartTime)) << endl;
+
+ // Flush and free the shared TranslationWriters on every exit path.
+ UnloadTree(TransList);
+ return true;
+}
+
+ /*}}}*/
+// Clean - Clean out the databases /*{{{*/
+// ---------------------------------------------------------------------
+/* Argv: clean config. Removes stale records from every binary and source
+   cache database referenced by the config. The list is sorted by DB path
+   so each distinct (BinCacheDB, SrcCacheDB) pair is cleaned only once. */
+static bool Clean(CommandLine &CmdL)
+{
+ if (CmdL.FileSize() != 2)
+ return ShowHelp(CmdL);
+
+ // Read the configuration file.
+ Configuration Setup;
+ if (ReadConfigFile(Setup,CmdL.FileList[1],true) == false)
+ return false;
+ // we don't need translation creation here
+ Setup.Set("TreeDefault::Translation", "/dev/null");
+
+ vector<PackageMap> PkgList;
+ std::vector<TranslationWriter*> TransList;
+ LoadTree(PkgList, TransList, Setup);
+ LoadBinDir(PkgList,Setup);
+
+ // Sort by cache DB to improve IO locality.
+ stable_sort(PkgList.begin(),PkgList.end(),PackageMap::DBCompare());
+ stable_sort(PkgList.begin(),PkgList.end(),PackageMap::SrcDBCompare());
+
+ string CacheDir = Setup.FindDir("Dir::CacheDir");
+
+ for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); )
+ {
+ if(I->BinCacheDB != "")
+ c0out << I->BinCacheDB << endl;
+ if(I->SrcCacheDB != "")
+ c0out << I->SrcCacheDB << endl;
+ CacheDB DB(flCombine(CacheDir,I->BinCacheDB));
+ CacheDB DB_SRC(flCombine(CacheDir,I->SrcCacheDB));
+ if (DB.Clean() == false)
+ _error->DumpErrors();
+ if (DB_SRC.Clean() == false)
+ _error->DumpErrors();
+
+ // Skip forward past every entry sharing the same DB pair (the list
+ // is sorted, so equal pairs are adjacent).
+ I = std::find_if(I, PkgList.end(),
+ [&](PackageMap const &PM) { return PM.BinCacheDB != I->BinCacheDB || PM.SrcCacheDB != I->SrcCacheDB;
+ });
+ }
+
+ return true;
+}
+ /*}}}*/
+
+// Subcommand dispatch table consumed by ParseCommandLine/DispatchCommandLine;
+// the third field (help text) is unused for apt-ftparchive.
+static std::vector<aptDispatchWithHelp> GetCommands() /*{{{*/
+{
+ return {
+ {"packages",&SimpleGenPackages, nullptr},
+ {"contents",&SimpleGenContents, nullptr},
+ {"sources",&SimpleGenSources, nullptr},
+ {"release",&SimpleGenRelease, nullptr},
+ {"generate",&Generate, nullptr},
+ {"clean",&Clean, nullptr},
+ {nullptr, nullptr, nullptr}
+ };
+}
+ /*}}}*/
+// Entry point: parse options, mirror the "quiet" level into the global
+// Quiet, route c0out/c1out/c2out through clog, and dispatch the subcommand.
+int main(int argc, const char *argv[]) /*{{{*/
+{
+ // Parse the command line and initialize the package library
+ CommandLine CmdL;
+ auto const Cmds = ParseCommandLine(CmdL, APT_CMD::APT_FTPARCHIVE, &_config, NULL, argc, argv, ShowHelp, &GetCommands);
+
+ // CndSet only sets a default; a -q given on the command line wins.
+ _config->CndSet("quiet",0);
+ Quiet = _config->FindI("quiet",0);
+ InitOutput(clog.rdbuf());
+
+ return DispatchCommandLine(CmdL, Cmds);
+}
+ /*}}}*/
diff --git a/ftparchive/apt-ftparchive.h b/ftparchive/apt-ftparchive.h
new file mode 100644
index 0000000..e669555
--- /dev/null
+++ b/ftparchive/apt-ftparchive.h
@@ -0,0 +1,26 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+/* ######################################################################
+
+   Global output state for apt-ftparchive
+
+   Declares the shared verbosity-filtered output streams (c0out..c2out),
+   the null sink and the global quiet level used throughout the tool.
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef APT_FTPARCHIVE_H
+#define APT_FTPARCHIVE_H
+
+#include <fstream>
+
+using std::ostream;
+using std::ofstream;
+
+extern ostream c0out;
+extern ostream c1out;
+extern ostream c2out;
+extern ofstream devnull;
+extern unsigned Quiet;
+
+#endif
diff --git a/ftparchive/byhash.cc b/ftparchive/byhash.cc
new file mode 100644
index 0000000..b24f615
--- /dev/null
+++ b/ftparchive/byhash.cc
@@ -0,0 +1,61 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+/* ######################################################################
+
+ ByHash
+
+ ByHash helper functions
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#include <config.h>
+
+#include <algorithm>
+#include <string>
+
+#include <sys/stat.h>
+#include <unistd.h>
+
+#include "byhash.h"
+#include <apt-pkg/fileutl.h>
+#include <apt-pkg/hashes.h>
+
+// Delete all files in a directory except the most recent N ones
+void DeleteAllButMostRecent(std::string dir, int KeepFiles)
+{
+   // Orders files by mtime (oldest first), using nanosecond resolution to
+   // break ties within the same second.
+   struct Cmp {
+      bool operator() (const std::string& lhs, const std::string& rhs) {
+         struct stat buf_l, buf_r;
+         stat(lhs.c_str(), &buf_l);
+         stat(rhs.c_str(), &buf_r);
+         if (buf_l.st_mtim.tv_sec == buf_r.st_mtim.tv_sec)
+            return buf_l.st_mtim.tv_nsec < buf_r.st_mtim.tv_nsec;
+         return buf_l.st_mtim.tv_sec < buf_r.st_mtim.tv_sec;
+      }
+   };
+
+   if (!DirectoryExists(dir))
+      return;
+
+   auto files = GetListOfFilesInDir(dir, false);
+   // Nothing to delete when we already have at most KeepFiles files.
+   // Without this guard files.end()-KeepFiles would move the iterator
+   // before begin() (undefined behaviour) whenever the directory holds
+   // fewer files than KeepFiles, or KeepFiles is negative.
+   if (KeepFiles < 0 || files.size() <= static_cast<size_t>(KeepFiles))
+      return;
+   std::sort(files.begin(), files.end(), Cmp());
+
+   // delete the oldest entries, keeping the KeepFiles newest ones
+   for (auto I=files.begin(); I<files.end()-KeepFiles; ++I)
+      RemoveFile("DeleteAllButMostRecent", *I);
+}
+
+// Takes a input filename (e.g. binary-i386/Packages) and a hashstring
+// of the Input data and transforms it into a suitable by-hash filename
+std::string GenByHashFilename(std::string ByHashOutputFile, HashString const &h)
+{
+   std::string const ByHash = "/by-hash/" + h.HashType() + "/" + h.HashValue();
+   // Replace the last path component with the by-hash suffix.  If there is
+   // no directory part the whole name is replaced.  (The previous
+   // implementation called substr(trailing_slash+1) which threw
+   // std::out_of_range for an empty input and allocated a substring only
+   // to measure its length.)
+   size_t const trailing_slash = ByHashOutputFile.find_last_of('/');
+   if (trailing_slash == std::string::npos)
+      return ByHash;
+   return ByHashOutputFile.substr(0, trailing_slash) + ByHash;
+}
diff --git a/ftparchive/byhash.h b/ftparchive/byhash.h
new file mode 100644
index 0000000..9fbb479
--- /dev/null
+++ b/ftparchive/byhash.h
@@ -0,0 +1,25 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+/* ######################################################################
+
+ ByHash
+
+ ByHash helper functions
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef BYHASH_H
+#define BYHASH_H
+
+#include <string>
+
+class HashString;
+
+// Delete all files in "dir" except for the number specified in "KeepFiles"
+// that are the most recent ones
+void DeleteAllButMostRecent(std::string dir, int KeepFiles);
+
+// takes a regular input filename
+std::string GenByHashFilename(std::string Input, HashString const &h);
+
+#endif
diff --git a/ftparchive/cachedb.cc b/ftparchive/cachedb.cc
new file mode 100644
index 0000000..dedb01e
--- /dev/null
+++ b/ftparchive/cachedb.cc
@@ -0,0 +1,579 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+/* ######################################################################
+
+ CacheDB
+
+ Simple uniform interface to a cache database.
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#include <config.h>
+
+#include <apt-pkg/configuration.h>
+#include <apt-pkg/debfile.h>
+#include <apt-pkg/error.h>
+#include <apt-pkg/fileutl.h>
+#include <apt-pkg/gpgv.h>
+#include <apt-pkg/hashes.h>
+#include <apt-pkg/strutl.h>
+
+#include <ctype.h>
+#include <netinet/in.h> // htonl, etc
+#include <stddef.h>
+#include <strings.h>
+#include <sys/stat.h>
+
+#include "cachedb.h"
+
+#include <apti18n.h>
+ /*}}}*/
+
+// Open (or create) the given database immediately so the object is usable
+// right after construction; failures are reported through _error and are
+// visible via DBFailed().
+CacheDB::CacheDB(std::string const &DB)
+   : Dbp(0), Fd(NULL), DebFile(0)
+{
+   TmpKey[0]='\0';
+   ReadyDB(DB);
+}
+
+CacheDB::~CacheDB()
+{
+   ReadyDB();        // an empty name just closes the current database
+   delete DebFile;
+   CloseFile();
+}
+
+// CacheDB::ReadyDB - Ready the DB2 /*{{{*/
+// ---------------------------------------------------------------------
+/* This opens the DB2 file for caching package information.  Any previously
+   open database is closed first; an empty name therefore just closes. */
+bool CacheDB::ReadyDB(std::string const &DB)
+{
+   int err;
+
+   ReadOnly = _config->FindB("APT::FTPArchive::ReadOnlyDB",false);
+
+   // Close the old DB
+   if (Dbp != 0)
+      Dbp->close(Dbp,0);
+
+   /* Check if the DB was disabled while running and deal with a
+      corrupted DB */
+   if (DBFailed() == true)
+   {
+      _error->Warning(_("DB was corrupted, file renamed to %s.old"),DBFile.c_str());
+      rename(DBFile.c_str(),(DBFile+".old").c_str());
+   }
+
+   DBLoaded = false;
+   Dbp = 0;
+   DBFile = std::string();
+
+   if (DB.empty())
+      return true;
+
+   // db_create can fail (e.g. ENOMEM); don't call open on an invalid handle
+   if ((err = db_create(&Dbp, NULL, 0)) != 0)
+   {
+      Dbp = 0;
+      return _error->Error(_("Unable to open DB file %s: %s"),DB.c_str(), db_strerror(err));
+   }
+   if ((err = Dbp->open(Dbp, NULL, DB.c_str(), NULL, DB_BTREE,
+                        (ReadOnly?DB_RDONLY:DB_CREATE),
+                        0644)) != 0)
+   {
+      if (err == DB_OLD_VERSION)
+      {
+         // use DB here, not DBFile: DBFile was reset to "" above and the
+         // message previously printed an empty filename
+         _error->Warning(_("DB is old, attempting to upgrade %s"),DB.c_str());
+         err = Dbp->upgrade(Dbp, DB.c_str(), 0);
+         if (!err)
+            err = Dbp->open(Dbp, NULL, DB.c_str(), NULL, DB_HASH,
+                            (ReadOnly?DB_RDONLY:DB_CREATE), 0644);
+
+      }
+      // the database format has changed from DB_HASH to DB_BTREE in
+      // apt 0.6.44
+      if (err == EINVAL)
+      {
+         _error->Error(_("DB format is invalid. If you upgraded from an older version of apt, please remove and re-create the database."));
+      }
+      if (err)
+      {
+         Dbp = 0;
+         return _error->Error(_("Unable to open DB file %s: %s"),DB.c_str(), db_strerror(err));
+      }
+   }
+
+   DBFile = DB;
+   DBLoaded = true;
+   return true;
+}
+ /*}}}*/
+// CacheDB::OpenFile - Open the file /*{{{*/
+// ---------------------------------------------------------------------
+/* Opens FileName read-only, replacing any previously opened file. */
+bool CacheDB::OpenFile()
+{
+   // drop whatever file was open before
+   CloseFile();
+
+   Fd = new FileFd(FileName,FileFd::ReadOnly);
+   if (_error->PendingError() == false)
+      return true;
+
+   // opening failed: do not keep the broken descriptor around
+   CloseFile();
+   return false;
+}
+ /*}}}*/
+// CacheDB::CloseFile - Close the file /*{{{*/
+void CacheDB::CloseFile()
+{
+   // deleting a null pointer is a no-op, so no guard is needed
+   delete Fd;
+   Fd = NULL;
+}
+ /*}}}*/
+// CacheDB::OpenDebFile - Open a debfile /*{{{*/
+bool CacheDB::OpenDebFile()
+{
+   // drop any previously opened archive first
+   CloseDebFile();
+
+   // the debDebFile wraps the plain descriptor, so open that first
+   if (OpenFile() == false)
+      return false;
+   DebFile = new debDebFile(*Fd);
+   return _error->PendingError() == false;
+}
+ /*}}}*/
+// CacheDB::CloseDebFile - Close a debfile again /*{{{*/
+void CacheDB::CloseDebFile()
+{
+   CloseFile();
+   // deleting a null pointer is harmless
+   delete DebFile;
+   DebFile = NULL;
+}
+ /*}}}*/
+// CacheDB::GetFileStat - Get stats from the file /*{{{*/
+// ---------------------------------------------------------------------
+/* This gets the size from the database if it's there. If we need
+ * to look at the file, also get the mtime from the file. */
+bool CacheDB::GetFileStat(bool const &doStat)
+{
+   // the cached size is good enough unless the caller insists on a fresh stat
+   if ((CurStat.Flags & FlSize) == FlSize && doStat == false)
+      return true;
+
+   /* Get it from the file. */
+   if (OpenFile() == false)
+      return false;
+
+   // Stat the file
+   struct stat St;
+   if (fstat(Fd->Fd(),&St) != 0)
+   {
+      CloseFile();
+      return _error->Errno("fstat",
+                           _("Failed to stat %s"),FileName.c_str());
+   }
+   CurStat.FileSize = St.st_size;
+   // mtime is kept in network byte order like the rest of the stat record
+   CurStat.mtime = htonl(St.st_mtime);
+   CurStat.Flags |= FlSize;
+
+   return true;
+}
+ /*}}}*/
+// CacheDB::GetCurStatCompatOldFormat /*{{{*/
+// ---------------------------------------------------------------------
+/* Read the old (32bit FileSize) StateStore format from disk and widen it
+   into the current CurStat layout.  SHA512 did not exist in the old format
+   and keeps the zeroed value set by the caller's memset. */
+bool CacheDB::GetCurStatCompatOldFormat()
+{
+   InitQueryStats();
+   Data.data = &CurStatOldFormat;
+   Data.flags = DB_DBT_USERMEM;
+   Data.ulen = sizeof(CurStatOldFormat);
+   if (Get() == false)
+   {
+      // no record: nothing is cached for this file
+      CurStat.Flags = 0;
+   } else {
+      CurStat.Flags = CurStatOldFormat.Flags;
+      CurStat.mtime = CurStatOldFormat.mtime;
+      CurStat.FileSize = CurStatOldFormat.FileSize;
+      memcpy(CurStat.MD5, CurStatOldFormat.MD5, sizeof(CurStat.MD5));
+      memcpy(CurStat.SHA1, CurStatOldFormat.SHA1, sizeof(CurStat.SHA1));
+      memcpy(CurStat.SHA256, CurStatOldFormat.SHA256, sizeof(CurStat.SHA256));
+   }
+   return true;
+}
+ /*}}}*/
+// CacheDB::GetCurStatCompatNewFormat /*{{{*/
+// ---------------------------------------------------------------------
+/* Read the new (64bit FileSize) StateStore format from disk */
+bool CacheDB::GetCurStatCompatNewFormat()
+{
+   InitQueryStats();
+   Data.data = &CurStat;
+   Data.flags = DB_DBT_USERMEM;
+   Data.ulen = sizeof(CurStat);
+   if (Get() == false)
+   {
+      // no record: mark everything as not cached
+      CurStat.Flags = 0;
+   }
+   return true;
+}
+ /*}}}*/
+// CacheDB::GetCurStat - Set the CurStat variable. /*{{{*/
+// ---------------------------------------------------------------------
+/* Sets the CurStat variable. Either to 0 if no database is used
+ * or to the value in the database if one is used */
+bool CacheDB::GetCurStat()
+{
+   memset(&CurStat,0,sizeof(CurStat));
+
+   if (DBLoaded)
+   {
+      // do a first query to just get the size of the data on disk
+      InitQueryStats();
+      Data.data = &CurStat;
+      Data.flags = DB_DBT_USERMEM;
+      Data.ulen = 0;
+      Get();
+
+      if (Data.size == 0)
+      {
+         // nothing needs to be done, we just have no data for this deb
+      }
+      // check if the record is written in the old format (32bit filesize)
+      else if(Data.size == sizeof(CurStatOldFormat))
+      {
+         GetCurStatCompatOldFormat();
+      }
+      else if(Data.size == sizeof(CurStat))
+      {
+         GetCurStatCompatNewFormat();
+      } else {
+         // DBT.size is a u_int32_t, so "%u" is the matching conversion;
+         // the previous "%ul" printed the value plus a stray literal 'l'
+         return _error->Error("Cache record size mismatch (%u)", Data.size);
+      }
+
+      // the stat record fields are stored in network byte order
+      CurStat.Flags = ntohl(CurStat.Flags);
+      CurStat.FileSize = ntohl(CurStat.FileSize);
+   }
+   return true;
+}
+ /*}}}*/
+// CacheDB::GetFileInfo - Get all the info about the file /*{{{*/
+// ---------------------------------------------------------------------
+/* Single entry point: loads the cached record for FileName and then pulls
+   in whatever the caller asked for (control data, contents, source record,
+   hashes) -- each of which may be served from the cache or regenerated. */
+bool CacheDB::GetFileInfo(std::string const &FileName, bool const &DoControl, bool const &DoContents,
+                          bool const &GenContentsOnly, bool const DoSource, unsigned int const DoHashes,
+                          bool const &checkMtime)
+{
+   this->FileName = FileName;
+
+   if (GetCurStat() == false)
+      return false;
+   OldStat = CurStat;
+
+   if (GetFileStat(checkMtime) == false)
+      return false;
+
+   /* if mtime changed, update CurStat from disk */
+   // i.e. drop every cached flag except the freshly determined size so all
+   // information is regenerated from the changed file below
+   if (checkMtime == true && OldStat.mtime != CurStat.mtime)
+      CurStat.Flags = FlSize;
+
+   Stats.Bytes += CurStat.FileSize;
+   ++Stats.Packages;
+
+   if ((DoControl && LoadControl() == false)
+         || (DoContents && LoadContents(GenContentsOnly) == false)
+         || (DoSource && LoadSource() == false)
+         || (DoHashes != 0 && GetHashes(false, DoHashes) == false)
+      )
+   {
+      return false;
+   }
+
+   return true;
+}
+ /*}}}*/
+// Load (and cache) the .dsc data for the current file into the Dsc member.
+// On a cache miss the file is parsed and the result written back to the DB.
+bool CacheDB::LoadSource() /*{{{*/
+{
+   // Try to read the control information out of the DB.
+   if ((CurStat.Flags & FlSource) == FlSource)
+   {
+      // Lookup the control information
+      InitQuerySource();
+      if (Get() == true && Dsc.TakeDsc(Data.data, Data.size) == true)
+      {
+         return true;
+      }
+      // stale or unreadable record: fall through and regenerate it
+      CurStat.Flags &= ~FlSource;
+   }
+   if (OpenFile() == false)
+      return false;
+
+   Stats.Misses++;
+   if (Dsc.Read(FileName) == false)
+      return false;
+
+   if (Dsc.Length == 0)
+      return _error->Error(_("Failed to read .dsc"));
+
+   // Write back the control information
+   InitQuerySource();
+   if (Put(Dsc.Data.c_str(), Dsc.Length) == true)
+      CurStat.Flags |= FlSource;
+
+   return true;
+}
+ /*}}}*/
+// CacheDB::LoadControl - Load Control information /*{{{*/
+// ---------------------------------------------------------------------
+/* Return the deb's control record via the Control member, served from the
+   cache when possible, otherwise by opening the archive and writing the
+   parsed record back. */
+bool CacheDB::LoadControl()
+{
+   // Try to read the control information out of the DB.
+   if ((CurStat.Flags & FlControl) == FlControl)
+   {
+      // Lookup the control information
+      InitQueryControl();
+      if (Get() == true && Control.TakeControl(Data.data,Data.size) == true)
+         return true;
+      // stale record: regenerate it below
+      CurStat.Flags &= ~FlControl;
+   }
+
+   if(OpenDebFile() == false)
+      return false;
+
+   Stats.Misses++;
+   if (Control.Read(*DebFile) == false)
+      return false;
+
+   if (Control.Control == 0)
+      return _error->Error(_("Archive has no control record"));
+
+   // Write back the control information
+   InitQueryControl();
+   if (Put(Control.Control,Control.Length) == true)
+      CurStat.Flags |= FlControl;
+   return true;
+}
+ /*}}}*/
+// CacheDB::LoadContents - Load the File Listing /*{{{*/
+// ---------------------------------------------------------------------
+/* Fill the Contents member with the deb's file list.  With GenOnly set a
+   cache hit returns immediately without materialising the data. */
+bool CacheDB::LoadContents(bool const &GenOnly)
+{
+   // Try to read the control information out of the DB.
+   if ((CurStat.Flags & FlContents) == FlContents)
+   {
+      if (GenOnly == true)
+         return true;
+
+      // Lookup the contents information
+      InitQueryContent();
+      if (Get() == true)
+      {
+         if (Contents.TakeContents(Data.data,Data.size) == true)
+            return true;
+      }
+
+      // stale record: regenerate it below
+      CurStat.Flags &= ~FlContents;
+   }
+
+   if(OpenDebFile() == false)
+      return false;
+
+   Stats.Misses++;
+   if (Contents.Read(*DebFile) == false)
+      return false;
+
+   // Write back the control information
+   InitQueryContent();
+   if (Put(Contents.Data,Contents.CurSize) == true)
+      CurStat.Flags |= FlContents;
+   return true;
+}
+ /*}}}*/
+// CacheDB::GetHashes - Get the hashes /*{{{*/
+// Render a byte buffer as a lowercase hex string (two digits per byte).
+static std::string bytes2hex(uint8_t *bytes, size_t length) {
+   static const char hexdig[] = "0123456789abcdef";
+   std::string out;
+
+   out.reserve(length*2 + 1);
+   for (size_t i = 0; i < length; i++) {
+      out.push_back(hexdig[bytes[i] >> 4]);
+      out.push_back(hexdig[bytes[i] & 0xf]);
+   }
+   return out;
+}
+
+// Map a single hex digit to its numeric value; non-hex input yields 0.
+static inline unsigned char xdig2num(char const &dig) {
+   return isdigit(dig)                ? dig - '0'
+        : ('a' <= dig && dig <= 'f')  ? dig - 'a' + 10
+        : ('A' <= dig && dig <= 'F')  ? dig - 'A' + 10
+        : 0;
+}
+
+// Parse 2*length hex characters into length raw bytes.  If a non-hex
+// character is hit, the current byte is left as 0 and the input pointer is
+// NOT advanced -- so every remaining output byte becomes 0 as well.
+static void hex2bytes(uint8_t *bytes, const char *hex, int length) {
+   while (length-- > 0) {
+      *bytes = 0;
+      if (isxdigit(hex[0]) && isxdigit(hex[1])) {
+         *bytes = xdig2num(hex[0]) * 16 + xdig2num(hex[1]);
+         hex += 2;
+      }
+      bytes++;
+   }
+}
+// Compute any requested-but-uncached hashes of the current file, store the
+// raw digests in the CurStat record, and (unless GenOnly) fill HashesList
+// with the full set of cached hashes.
+bool CacheDB::GetHashes(bool const GenOnly, unsigned int const DoHashes)
+{
+   // work out which hash types are not yet present in the cache record
+   unsigned int notCachedHashes = 0;
+   if ((CurStat.Flags & FlMD5) != FlMD5)
+   {
+      notCachedHashes = notCachedHashes | Hashes::MD5SUM;
+   }
+   if ((CurStat.Flags & FlSHA1) != FlSHA1)
+   {
+      notCachedHashes = notCachedHashes | Hashes::SHA1SUM;
+   }
+   if ((CurStat.Flags & FlSHA256) != FlSHA256)
+   {
+      notCachedHashes = notCachedHashes | Hashes::SHA256SUM;
+   }
+   if ((CurStat.Flags & FlSHA512) != FlSHA512)
+   {
+      notCachedHashes = notCachedHashes | Hashes::SHA512SUM;
+   }
+   unsigned int FlHashes = DoHashes & notCachedHashes;
+   HashesList.clear();
+
+   if (FlHashes != 0)
+   {
+      // some requested hashes are missing: read the file once for all of them
+      if (OpenFile() == false)
+         return false;
+
+      Hashes hashes(FlHashes);
+      if (Fd->Seek(0) == false || hashes.AddFD(*Fd, CurStat.FileSize) == false)
+         return false;
+
+      HashStringList hl = hashes.GetHashStringList();
+      for (HashStringList::const_iterator hs = hl.begin(); hs != hl.end(); ++hs)
+      {
+         HashesList.push_back(*hs);
+         if (strcasecmp(hs->HashType().c_str(), "SHA512") == 0)
+         {
+            Stats.SHA512Bytes += CurStat.FileSize;
+            // store the raw digest bytes in the fixed-size cache record
+            hex2bytes(CurStat.SHA512, hs->HashValue().data(), sizeof(CurStat.SHA512));
+            CurStat.Flags |= FlSHA512;
+         }
+         else if (strcasecmp(hs->HashType().c_str(), "SHA256") == 0)
+         {
+            Stats.SHA256Bytes += CurStat.FileSize;
+            hex2bytes(CurStat.SHA256, hs->HashValue().data(), sizeof(CurStat.SHA256));
+            CurStat.Flags |= FlSHA256;
+         }
+         else if (strcasecmp(hs->HashType().c_str(), "SHA1") == 0)
+         {
+            Stats.SHA1Bytes += CurStat.FileSize;
+            hex2bytes(CurStat.SHA1, hs->HashValue().data(), sizeof(CurStat.SHA1));
+            CurStat.Flags |= FlSHA1;
+         }
+         else if (strcasecmp(hs->HashType().c_str(), "MD5Sum") == 0)
+         {
+            Stats.MD5Bytes += CurStat.FileSize;
+            hex2bytes(CurStat.MD5, hs->HashValue().data(), sizeof(CurStat.MD5));
+            CurStat.Flags |= FlMD5;
+         }
+         else if (strcasecmp(hs->HashType().c_str(), "Checksum-FileSize") == 0)
+         {
+            // we store it in a different field already
+         }
+         else
+            return _error->Error("Got unknown unrequested hashtype %s", hs->HashType().c_str());
+      }
+   }
+   if (GenOnly == true)
+      return true;
+
+   // reconstruct the full HashStringList from the cached raw digests
+   bool ret = true;
+#define PUSH_BACK_HASH(FLAG, TYPE, VALUE) \
+   if ((CurStat.Flags & FLAG) == FLAG) \
+      ret &= HashesList.push_back(HashString(TYPE, bytes2hex(VALUE, sizeof(VALUE))));
+   PUSH_BACK_HASH(FlMD5, "MD5Sum", CurStat.MD5);
+   PUSH_BACK_HASH(FlSHA1, "SHA1", CurStat.SHA1);
+   PUSH_BACK_HASH(FlSHA256, "SHA256", CurStat.SHA256);
+   PUSH_BACK_HASH(FlSHA512, "SHA512", CurStat.SHA512);
+   return ret;
+}
+ /*}}}*/
+// CacheDB::Finish - Write back the cache structure /*{{{*/
+// ---------------------------------------------------------------------
+/* Persist CurStat for the current file, skipping the write when nothing
+   changed since the record was loaded. */
+bool CacheDB::Finish()
+{
+   // Optimize away some writes.
+   if (CurStat.Flags == OldStat.Flags &&
+       CurStat.mtime == OldStat.mtime)
+      return true;
+
+   // Write the stat information (fields kept in network byte order on disk)
+   // NOTE(review): htonl() operates on 32 bits while FileSize is a uint64_t,
+   // so sizes beyond 4GiB appear truncated in the stored record -- confirm
+   // this matches the ntohl() in GetCurStat and is intended.
+   CurStat.Flags = htonl(CurStat.Flags);
+   CurStat.FileSize = htonl(CurStat.FileSize);
+   InitQueryStats();
+   Put(&CurStat,sizeof(CurStat));
+   CurStat.Flags = ntohl(CurStat.Flags);
+   CurStat.FileSize = ntohl(CurStat.FileSize);
+
+   return true;
+}
+ /*}}}*/
+// CacheDB::Clean - Clean the Database /*{{{*/
+// ---------------------------------------------------------------------
+/* Tidy the database by removing files that no longer exist at all. */
+bool CacheDB::Clean()
+{
+   if (DBLoaded == false)
+      return true;
+
+   /* I'm not sure what VERSION_MINOR should be here.. 2.4.14 certainly
+      needs the lower one and 2.7.7 needs the upper.. */
+   DBC *Cursor;
+   if ((errno = Dbp->cursor(Dbp, NULL, &Cursor, 0)) != 0)
+      return _error->Error(_("Unable to get a cursor"));
+
+   DBT Key;
+   DBT Data;
+   memset(&Key,0,sizeof(Key));
+   memset(&Data,0,sizeof(Data));
+   // Walk every record; keys have the form "<filename>:<type>" where <type>
+   // is one of the two-letter tags produced by _InitQuery().
+   while ((errno = Cursor->c_get(Cursor,&Key,&Data,DB_NEXT)) == 0)
+   {
+      const char *Colon = (char*)memrchr(Key.data, ':', Key.size);
+      if (Colon)
+      {
+         if (stringcmp(Colon + 1, (char *)Key.data+Key.size,"st") == 0 ||
+             stringcmp(Colon + 1, (char *)Key.data+Key.size,"cl") == 0 ||
+             stringcmp(Colon + 1, (char *)Key.data+Key.size,"cs") == 0 ||
+             stringcmp(Colon + 1, (char *)Key.data+Key.size,"cn") == 0)
+         {
+            std::string FileName = std::string((const char *)Key.data,Colon);
+            // keep records whose file still exists on disk
+            if (FileExists(FileName) == true) {
+               continue;
+            }
+         }
+      }
+      // unknown key format or vanished file: drop the record
+      Cursor->c_del(Cursor,0);
+   }
+   // reclaim the space freed by the deletions above
+   int res = Dbp->compact(Dbp, NULL, NULL, NULL, NULL, DB_FREE_SPACE, NULL);
+   if (res < 0)
+      _error->Warning("compact failed with result %i", res);
+
+   if(_config->FindB("Debug::APT::FTPArchive::Clean", false) == true)
+      Dbp->stat_print(Dbp, 0);
+
+
+   return true;
+}
+ /*}}}*/
diff --git a/ftparchive/cachedb.h b/ftparchive/cachedb.h
new file mode 100644
index 0000000..399e1f1
--- /dev/null
+++ b/ftparchive/cachedb.h
@@ -0,0 +1,195 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+/* ######################################################################
+
+ CacheDB
+
+ Simple uniform interface to a cache database.
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef CACHEDB_H
+#define CACHEDB_H
+
+#include <apt-pkg/debfile.h>
+#include <apt-pkg/hashes.h>
+
+#include <db.h>
+#include <string>
+#include <errno.h>
+#include <stdint.h>
+#include <stdio.h>
+#include <string.h>
+
+#include "contents.h"
+#include "sources.h"
+
+class FileFd;
+
+
+class CacheDB
+{
+   protected:
+
+   // Database state/access
+   DBT Key;
+   DBT Data;
+   char TmpKey[600];
+   DB *Dbp;
+   bool DBLoaded;             // true while the DB handle is usable
+   bool ReadOnly;             // set from APT::FTPArchive::ReadOnlyDB
+   std::string DBFile;
+
+   // Generate a key for the DB of a given type
+   // (keys have the form "<filename>:<two-letter type>")
+   void _InitQuery(const char *Type)
+   {
+      memset(&Key,0,sizeof(Key));
+      memset(&Data,0,sizeof(Data));
+      Key.data = TmpKey;
+      Key.size = snprintf(TmpKey,sizeof(TmpKey),"%s:%s",FileName.c_str(), Type);
+   }
+
+   void InitQueryStats() {
+      _InitQuery("st");
+   }
+   void InitQuerySource() {
+      _InitQuery("cs");
+   }
+   void InitQueryControl() {
+      _InitQuery("cl");
+   }
+   void InitQueryContent() {
+      _InitQuery("cn");
+   }
+
+   inline bool Get()
+   {
+      return Dbp->get(Dbp,0,&Key,&Data,0) == 0;
+   };
+   // Store Data under the current Key; a failed write marks the whole DB
+   // as failed (DBLoaded = false), which DBFailed() reports later.
+   inline bool Put(const void *In,unsigned long const &Length)
+   {
+      if (ReadOnly == true)
+         return true;
+      Data.size = Length;
+      Data.data = (void *)In;
+      if (DBLoaded == true && (errno = Dbp->put(Dbp,0,&Key,&Data,0)) != 0)
+      {
+         DBLoaded = false;
+         return false;
+      }
+      return true;
+   }
+   bool OpenFile();
+   void CloseFile();
+
+   bool OpenDebFile();
+   void CloseDebFile();
+
+   // GetCurStat needs some compat code, see lp #1274466
+   bool GetCurStatCompatOldFormat();
+   bool GetCurStatCompatNewFormat();
+   bool GetCurStat();
+
+   bool GetFileStat(bool const &doStat = false);
+   bool LoadControl();
+   bool LoadContents(bool const &GenOnly);
+   bool LoadSource();
+   bool GetHashes(bool const GenOnly, unsigned int const DoHashes);
+
+   // Stat info stored in the DB, Fixed types since it is written to disk.
+   enum FlagList {FlControl = (1<<0),FlMD5=(1<<1),FlContents=(1<<2),
+                  FlSize=(1<<3), FlSHA1=(1<<4), FlSHA256=(1<<5),
+                  FlSHA512=(1<<6), FlSource=(1<<7)
+   };
+
+   // the on-disk format changed (FileSize increased to 64bit) in
+   // commit 650faab0 which will lead to corruption with old caches
+   struct StatStoreOldFormat
+   {
+      uint32_t Flags;
+      uint32_t mtime;
+      uint32_t FileSize;
+      uint8_t MD5[16];
+      uint8_t SHA1[20];
+      uint8_t SHA256[32];
+   } CurStatOldFormat;
+
+   // WARNING: this struct is read/written to the DB so do not change the
+   // layout of the fields (see lp #1274466), only append to it
+   struct StatStore
+   {
+      uint32_t Flags;
+      uint32_t mtime;
+      uint64_t FileSize;
+      uint8_t MD5[16];
+      uint8_t SHA1[20];
+      uint8_t SHA256[32];
+      uint8_t SHA512[64];
+   } CurStat;
+   struct StatStore OldStat;    // snapshot taken before the current update
+
+   // 'set' state
+   std::string FileName;
+   FileFd *Fd;
+   debDebFile *DebFile;
+
+   public:
+
+   // Data collection helpers
+   debDebFile::MemControlExtract Control;
+   ContentsExtract Contents;
+   DscExtract Dsc;
+   HashStringList HashesList;
+
+   // Runtime statistics
+   struct Stats
+   {
+      double Bytes;
+      double MD5Bytes;
+      double SHA1Bytes;
+      double SHA256Bytes;
+      double SHA512Bytes;
+      unsigned long Packages;
+      unsigned long Misses;
+      unsigned long long DeLinkBytes;
+
+      inline void Add(const Stats &S) {
+         Bytes += S.Bytes;
+         MD5Bytes += S.MD5Bytes;
+         SHA1Bytes += S.SHA1Bytes;
+         SHA256Bytes += S.SHA256Bytes;
+         SHA512Bytes += S.SHA512Bytes;
+         Packages += S.Packages;
+         Misses += S.Misses;
+         DeLinkBytes += S.DeLinkBytes;
+      };
+      Stats() : Bytes(0), MD5Bytes(0), SHA1Bytes(0), SHA256Bytes(0),
+                SHA512Bytes(0),Packages(0), Misses(0), DeLinkBytes(0) {};
+   } Stats;
+
+   bool ReadyDB(std::string const &DB = "");
+   // a non-null handle with DBLoaded == false means a write failed
+   inline bool DBFailed() {return Dbp != 0 && DBLoaded == false;};
+   inline bool Loaded() {return DBLoaded == true;};
+
+   inline unsigned long long GetFileSize(void) {return CurStat.FileSize;}
+
+   bool SetFile(std::string const &FileName,struct stat St,FileFd *Fd);
+
+   // terrible old overloaded interface
+   bool GetFileInfo(std::string const &FileName,
+                    bool const &DoControl,
+                    bool const &DoContents,
+                    bool const &GenContentsOnly,
+                    bool const DoSource,
+                    unsigned int const DoHashes,
+                    bool const &checkMtime = false);
+
+   bool Finish();
+
+   bool Clean();
+
+   explicit CacheDB(std::string const &DB);
+   ~CacheDB();
+};
+
+#endif
diff --git a/ftparchive/contents.cc b/ftparchive/contents.cc
new file mode 100644
index 0000000..8a7adfd
--- /dev/null
+++ b/ftparchive/contents.cc
@@ -0,0 +1,409 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+/* ######################################################################
+
+ contents - Archive contents generator
+
+ The GenContents class is a back end for an archive contents generator.
+ It takes a list of per-deb file name and merges it into a memory
+ database of all previous output. This database is stored as a set
+ of binary trees linked across directories to form a tree of all files+dirs
+ given to it. The tree will also be sorted as it is built up thus
+ removing the massive sort time overhead.
+
+ By breaking all the pathnames into components and storing them
+ separately a space saving is realized by not duplicating the string
+ over and over again. Ultimately this saving is sacrificed to storage of
+ the tree structure itself but the tree structure yields a speed gain
+ in the sorting and processing. Ultimately it takes about 5 seconds to
+ do 141000 nodes and about 5 meg of ram.
+
+ The tree looks something like:
+
+ usr/
+ / \ / libslang
+ bin/ lib/ --> libc6
+ / \ \ libfoo
+ games/ sbin/
+
+ The ---> is the DirDown link
+
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#include <config.h>
+
+#include <apt-pkg/debfile.h>
+#include <apt-pkg/dirstream.h>
+#include <apt-pkg/error.h>
+#include <apt-pkg/fileutl.h>
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "contents.h"
+
+#include <apti18n.h>
+ /*}}}*/
+
+// GenContents::~GenContents - Free allocated memory /*{{{*/
+// ---------------------------------------------------------------------
+/* Since all our allocations are static big-block allocations all that is
+   needed is to free all of them. */
+GenContents::~GenContents()
+{
+   for (BigBlock *Cur = BlockList; Cur != 0; )
+   {
+      BigBlock *Next = Cur->Next;
+      free(Cur->Block);
+      delete Cur;
+      Cur = Next;
+   }
+   BlockList = 0;
+}
+ /*}}}*/
+// GenContents::Mystrdup - Custom strdup /*{{{*/
+// ---------------------------------------------------------------------
+/* This strdup also uses a large block allocator to eliminate glibc
+   overhead */
+char *GenContents::Mystrdup(const char *From)
+{
+   unsigned int Len = strlen(From) + 1;
+   if (StrLeft <= Len)
+   {
+      StrLeft = 4096*10;
+      // a string larger than the default pool size would overflow the
+      // fresh pool; size the pool to fit it in that case
+      if (StrLeft <= Len)
+         StrLeft = Len + 1;
+      StrPool = (char *)malloc(StrLeft);
+
+      // remember the block so the destructor can free it
+      BigBlock *Block = new BigBlock;
+      Block->Block = StrPool;
+      Block->Next = BlockList;
+      BlockList = Block;
+   }
+
+   memcpy(StrPool,From,Len);
+   StrLeft -= Len;
+
+   // hand out the copy and bump the pool cursor past it
+   char *Res = StrPool;
+   StrPool += Len;
+   return Res;
+}
+ /*}}}*/
+// GenContents::Node::operator new - Big block allocator /*{{{*/
+// ---------------------------------------------------------------------
+/* This eliminates glibc's malloc overhead by allocating large blocks and
+   having a continuous set of Nodes. This takes about 8 bytes off each nodes
+   space needs. Freeing is not supported. */
+void *GenContents::Node::operator new(size_t Amount,GenContents *Owner)
+{
+   if (Owner->NodeLeft == 0)
+   {
+      // refill the pool with room for 10000 nodes; Amount is sizeof(Node)
+      // since only Node allocations route through this operator
+      // NOTE(review): the malloc result is not checked -- an allocation
+      // failure here would crash; confirm this is acceptable
+      Owner->NodeLeft = 10000;
+      Owner->NodePool = static_cast<Node *>(malloc(Amount*Owner->NodeLeft));
+      BigBlock *Block = new BigBlock;
+      Block->Block = Owner->NodePool;
+      Block->Next = Owner->BlockList;
+      Owner->BlockList = Block;
+   }
+
+   Owner->NodeLeft--;
+   return Owner->NodePool++;
+}
+ /*}}}*/
+// GenContents::Grab - Grab a new node representing Name under Top /*{{{*/
+// ---------------------------------------------------------------------
+/* This grabs a new node representing the pathname component Name under
+   the node Top. The node is given the name Package. It is assumed that Name
+   is inside of top. If a duplicate already entered name is found then
+   a note is made on the Dup list and the previous in-tree node is returned. */
+GenContents::Node *GenContents::Grab(GenContents::Node *Top,const char *Name,
+                                     const char *Package)
+{
+   /* We drop down to the next dir level each call. This simplifies
+      the calling routine */
+   if (Top->DirDown == 0)
+   {
+      // first entry on this level becomes the root of its binary tree
+      Node *Item = new(this) Node;
+      Item->Path = Mystrdup(Name);
+      Item->Package = Package;
+      Top->DirDown = Item;
+      return Item;
+   }
+   Top = Top->DirDown;
+
+   int Res;
+   while (1)
+   {
+      Res = strcmp(Name,Top->Path);
+
+      // Collision!
+      if (Res == 0)
+      {
+         // See if this is the same package (multi-version dup)
+         if (Top->Package == Package ||
+             strcasecmp(Top->Package,Package) == 0)
+            return Top;
+
+         // Look for an already existing Dup
+         for (Node *I = Top->Dups; I != 0; I = I->Dups)
+            if (I->Package == Package ||
+                strcasecmp(I->Package,Package) == 0)
+               return Top;
+
+         // Add the dup in (shares the existing Path string)
+         Node *Item = new(this) Node;
+         Item->Path = Top->Path;
+         Item->Package = Package;
+         Item->Dups = Top->Dups;
+         Top->Dups = Item;
+         return Top;
+      }
+
+      // Continue to traverse the tree
+      if (Res < 0)
+      {
+         if (Top->BTreeLeft == 0)
+            break;
+         Top = Top->BTreeLeft;
+      }
+      else
+      {
+         if (Top->BTreeRight == 0)
+            break;
+         Top = Top->BTreeRight;
+      }
+   }
+
+   // The item was not found in the tree
+   Node *Item = new(this) Node;
+   Item->Path = Mystrdup(Name);
+   Item->Package = Package;
+
+   // Link it into the tree below the leaf where the search stopped
+   if (Res < 0)
+   {
+      Item->BTreeLeft = Top->BTreeLeft;
+      Top->BTreeLeft = Item;
+   }
+   else
+   {
+      Item->BTreeRight = Top->BTreeRight;
+      Top->BTreeRight = Item;
+   }
+
+   return Item;
+}
+ /*}}}*/
+// GenContents::Add - Add a path to the tree /*{{{*/
+// ---------------------------------------------------------------------
+/* This takes a full pathname and adds it into the tree. We split the
+   pathname into directory fragments adding each one as we go. Technically
+   in output from tar this should result in hitting previous items. */
+void GenContents::Add(const char *Dir,const char *Package)
+{
+   Node *Root = &this->Root;
+
+   // Drop leading slashes
+   while (*Dir == '/')
+      Dir++;
+
+   // Run over the string and grab out each bit up to and including a /
+   const char *Start = Dir;
+   const char *I = Dir;
+   while (*I != 0)
+   {
+      if (*I != '/' || I - Start <= 1)
+      {
+         I++;
+         continue;
+      }
+      I++;
+
+      // Copy the path fragment over, clamping overlong components so the
+      // fixed-size buffer cannot be overrun (previously an unbounded
+      // strncpy -- undefined behaviour for components of 1024+ bytes)
+      char Tmp[1024];
+      size_t Len = I - Start;
+      if (Len >= sizeof(Tmp))
+         Len = sizeof(Tmp) - 1;
+      memcpy(Tmp,Start,Len);
+      Tmp[Len] = 0;
+
+      // Grab a node for it
+      Root = Grab(Root,Tmp,Package);
+
+      Start = I;
+   }
+
+   // The final component if it does not have a trailing /
+   if (I - Start >= 1)
+      Grab(Root,Start,Package);
+}
+ /*}}}*/
+// GenContents::WriteSpace - Write a given number of white space chars /*{{{*/
+// ---------------------------------------------------------------------
+/* We mod 8 it and write tabs where possible. */
+void GenContents::WriteSpace(std::string &out, size_t Current, size_t Target)
+{
+   // always emit at least one separator character
+   if (Target <= Current)
+      Target = Current + 1;
+
+   // use tabs while the next 8-column tab stop stays short of the target
+   size_t Col = Current;
+   while ((Col/8 + 1)*8 < Target)
+   {
+      out.append("\t");
+      Col = (Col/8 + 1)*8;
+   }
+
+   // pad the remainder with plain spaces
+   while (Col < Target)
+   {
+      out.append(" ");
+      ++Col;
+   }
+}
+ /*}}}*/
+// GenContents::Print - Display the tree /*{{{*/
+// ---------------------------------------------------------------------
+/* This is the final result function. It takes the tree and recursively
+   calls itself and runs over each section of the tree printing out
+   the pathname and the hit packages. We use Buf to build the pathname
+   summed over all the directory parents of this node. */
+void GenContents::Print(FileFd &Out)
+{
+   // scratch buffer shared by the whole recursive walk below
+   char Buffer[1024];
+   Buffer[0] = 0;
+   DoPrint(Out,&Root,Buffer);
+}
+// In-order walk: left subtree, this node (plus its subdirectory), right
+// subtree.  Buf accumulates the concatenated path of all parent directories.
+void GenContents::DoPrint(FileFd &Out,GenContents::Node *Top, char *Buf)
+{
+   if (Top == 0)
+      return;
+
+   // Go left
+   DoPrint(Out,Top->BTreeLeft,Buf);
+
+   // Print the current dir location and then descend to lower dirs
+   char *OldEnd = Buf + strlen(Buf);
+   if (Top->Path != 0)
+   {
+      // NOTE(review): Buf is the fixed 1024-byte buffer from Print(); very
+      // deep paths would overflow this strcat -- confirm inputs are bounded
+      strcat(Buf,Top->Path);
+
+      // Do not show the item if it is a directory with dups
+      if (Top->Path[strlen(Top->Path)-1] != '/' /*|| Top->Dups == 0*/)
+      {
+         std::string out = Buf;
+         WriteSpace(out, out.length(), 60);
+         // emit the comma-separated list of packages shipping this path
+         for (Node *I = Top; I != 0; I = I->Dups)
+         {
+            if (I != Top)
+               out.append(",");
+            out.append(I->Package);
+         }
+         out.append("\n");
+         Out.Write(out.c_str(), out.length());
+      }
+   }
+
+   // Go along the directory link
+   DoPrint(Out,Top->DirDown,Buf);
+   // truncate Buf back to the parent's path before visiting siblings
+   *OldEnd = 0;
+
+   // Go right
+   DoPrint(Out,Top->BTreeRight,Buf);
+}
+ /*}}}*/
+// ContentsExtract Constructor /*{{{*/
+// Start with an empty buffer; storage is grown on demand in DoItem().
+ContentsExtract::ContentsExtract()
+   : Data(0), MaxSize(0), CurSize(0)
+{
+}
+ /*}}}*/
+// ContentsExtract Destructor /*{{{*/
+// Data was allocated with realloc(), so it is released with free().
+ContentsExtract::~ContentsExtract()
+{
+   free(Data);
+}
+ /*}}}*/
+// ContentsExtract::Read - Read the archive /*{{{*/
+// ---------------------------------------------------------------------
+/* Reset the collected file list and stream the archive members through
+   DoItem() below. */
+bool ContentsExtract::Read(debDebFile &Deb)
+{
+   Reset();
+   return Deb.ExtractArchive(*this);
+}
+ /*}}}*/
+// ContentsExtract::DoItem - Extract an item /*{{{*/
+// ---------------------------------------------------------------------
+/* This just tacks the name onto the end of our memory buffer, stored as a
+   series of 0-terminated strings. */
+bool ContentsExtract::DoItem(Item &Itm, int &/*Fd*/)
+{
+   unsigned long Len = strlen(Itm.Name);
+
+   // Strip leading ./'s
+   if (Itm.Name[0] == '.' && Itm.Name[1] == '/')
+   {
+      // == './'
+      if (Len == 2)
+         return true;
+
+      Len -= 2;
+      Itm.Name += 2;
+   }
+
+   // Allocate more storage for the string list
+   if (CurSize + Len + 2 >= MaxSize || Data == 0)
+   {
+      if (MaxSize == 0)
+         MaxSize = 512*1024/2;
+      // keep doubling until the new name actually fits; a single doubling
+      // (as before) could leave the buffer too small for very long names
+      // and overflow the strcpy below (mirrors TakeContents)
+      while (CurSize + Len + 2 >= MaxSize*2)
+         MaxSize *= 2;
+      char *NewData = (char *)realloc(Data,MaxSize*2);
+      if (NewData == 0)
+         return _error->Error(_("realloc - Failed to allocate memory"));
+      Data = NewData;
+      MaxSize *= 2;
+   }
+
+   // append the name including its terminating 0
+   strcpy(Data+CurSize,Itm.Name);
+   CurSize += Len + 1;
+   return true;
+}
+ /*}}}*/
+// ContentsExtract::TakeContents - Load the contents data /*{{{*/
+// ---------------------------------------------------------------------
+/* Replace the current buffer with a contents blob taken from the cache;
+   the blob is a series of 0-terminated path names. */
+bool ContentsExtract::TakeContents(const void *NewData,unsigned long long Length)
+{
+   if (Length == 0)
+   {
+      CurSize = 0;
+      return true;
+   }
+
+   // Allocate more storage for the string list
+   if (Length + 2 >= MaxSize || Data == 0)
+   {
+      if (MaxSize == 0)
+         MaxSize = 512*1024/2;
+      // double until the blob fits into MaxSize*2 bytes
+      while (MaxSize*2 <= Length)
+         MaxSize *= 2;
+
+      char *NewData = (char *)realloc(Data,MaxSize*2);
+      if (NewData == 0)
+         return _error->Error(_("realloc - Failed to allocate memory"));
+      Data = NewData;
+      MaxSize *= 2;
+   }
+   memcpy(Data,NewData,Length);
+   CurSize = Length;
+
+   // a valid blob always ends with the terminating 0 of its last name
+   return Data[CurSize-1] == 0;
+}
+ /*}}}*/
+// ContentsExtract::Add - Read the contents data into the sorter /*{{{*/
+// ---------------------------------------------------------------------
+/* Feed every 0-terminated path in the buffer to the contents tree,
+   attributing each to Package. */
+void ContentsExtract::Add(GenContents &Contents,std::string const &Package)
+{
+   const char *Start = Data;
+   // copy the package name into the tree's own string pool so the tree can
+   // keep referring to it after this call returns
+   char *Pkg = Contents.Mystrdup(Package.c_str());
+   for (const char *I = Data; I < Data + CurSize; I++)
+   {
+      if (*I == 0)
+      {
+         Contents.Add(Start,Pkg);
+         // ++I steps past the terminating 0; the loop's own I++ then skips
+         // one more character, which is fine since reading the next name
+         // starts from Start rather than I
+         Start = ++I;
+      }
+   }
+}
+ /*}}}*/
diff --git a/ftparchive/contents.h b/ftparchive/contents.h
new file mode 100644
index 0000000..7460705
--- /dev/null
+++ b/ftparchive/contents.h
@@ -0,0 +1,92 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+/* ######################################################################
+
+ contents - Contents of archive things.
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef CONTENTS_H
+#define CONTENTS_H
+
+#include <apt-pkg/dirstream.h>
+
+#include <string>
+#include <stddef.h>
+#include <stdio.h>
+
+class debDebFile;
+class FileFd;
+
// Sorted tree of path -> package associations used to emit a Contents
// file. All strings and nodes are carved out of pooled "big block"
// allocations that are freed wholesale by the destructor.
class GenContents
{
   struct Node
   {
      // Binary Tree links
      Node *BTreeLeft;
      Node *BTreeRight;
      Node *DirDown;          // presumably the subtree below this directory — confirm in Grab()
      Node *Dups;             // presumably other packages with the same path — confirm in Grab()
      const char *Path;       // pool-allocated, never freed individually
      const char *Package;    // pool-allocated, never freed individually

      // Placement new carves nodes out of the owner's pools; delete is
      // therefore a deliberate no-op.
      void *operator new(size_t Amount,GenContents *Owner);
      void operator delete(void *) {};

      Node() : BTreeLeft(0), BTreeRight(0), DirDown(0), Dups(0),
               Path(0), Package(0) {};
   };
   friend struct Node;

   // One pooled allocation, chained so ~GenContents can free them all
   struct BigBlock
   {
      void *Block;
      BigBlock *Next;
   };

   Node Root;

   // Big block allocation pools
   BigBlock *BlockList;        // all blocks ever allocated
   char *StrPool;              // current string pool chunk
   unsigned long StrLeft;      // bytes remaining in StrPool
   Node *NodePool;             // current node pool chunk
   unsigned long NodeLeft;     // nodes remaining in NodePool

   Node *Grab(Node *Top,const char *Name,const char *Package);
   void WriteSpace(std::string &out, size_t Current, size_t Target);
   void DoPrint(FileFd &Out,Node *Top, char *Buf);

   public:

   // Duplicate a string into the pool allocator (lifetime = this object)
   char *Mystrdup(const char *From);
   // Insert one path/package pair
   void Add(const char *Dir,const char *Package);
   // Write the sorted listing to Out
   void Print(FileFd &Out);

   GenContents() : BlockList(0), StrPool(0), StrLeft(0),
                   NodePool(0), NodeLeft(0) {};
   ~GenContents();
};
+
// pkgDirStream handler that collects the file list of a .deb into a
// growable NUL-separated string pool.
class ContentsExtract : public pkgDirStream
{
   public:

   // The Data Block: NUL-separated list of extracted path names
   char *Data;
   unsigned long long MaxSize;   // allocated size of Data
   unsigned long long CurSize;   // bytes of Data currently in use
   void AddData(const char *Text);

   // Extract the member list from a .deb archive
   bool Read(debDebFile &Deb);

   // pkgDirStream hook: appends each member's path to Data
   virtual bool DoItem(Item &Itm,int &Fd) APT_OVERRIDE;
   // Drop the collected paths but keep the allocation for reuse
   void Reset() {CurSize = 0;};
   // Replace the pool with externally supplied (e.g. cached) data
   bool TakeContents(const void *Data,unsigned long long Length);
   // Feed every collected path into the contents sorter
   void Add(GenContents &Contents,std::string const &Package);

   ContentsExtract();
   virtual ~ContentsExtract();
};
+
+#endif
diff --git a/ftparchive/multicompress.cc b/ftparchive/multicompress.cc
new file mode 100644
index 0000000..cdaa7a6
--- /dev/null
+++ b/ftparchive/multicompress.cc
@@ -0,0 +1,360 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+/* ######################################################################
+
+ MultiCompressor
+
+ This class is very complicated in order to optimize for the common
+ case of its use, writing a large set of compressed files that are
+ different from the old set. It spawns off compressors in parallel
+ to maximize compression throughput and has a separate task managing
+ the data going into the compressors.
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#include <config.h>
+
+#include <apt-pkg/aptconfiguration.h>
+#include <apt-pkg/error.h>
+#include <apt-pkg/fileutl.h>
+#include <apt-pkg/hashes.h>
+#include <apt-pkg/strutl.h>
+
+#include <ctype.h>
+#include <sys/stat.h>
+#include <sys/time.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+#include <algorithm>
+#include <vector>
+
+#include "multicompress.h"
+#include <apti18n.h>
+ /*}}}*/
+
+using namespace std;
+
// Consume the next whitespace-delimited word at I (advancing I past it)
// and look it up among the configured compressors. Returns
// Compressors.end() after emitting a warning if the name is unknown.
static std::vector<APT::Configuration::Compressor>::const_iterator findMatchingCompressor(std::string::const_iterator &I,
      std::string::const_iterator const &End, std::vector<APT::Configuration::Compressor> const &Compressors)
{
   // Grab a word (aka: a compressor name)
   for (; I != End && isspace(*I); ++I);
   string::const_iterator Start = I;
   for (; I != End && !isspace(*I); ++I);

   auto const Comp = std::find_if(Compressors.begin(), Compressors.end(),
         [&](APT::Configuration::Compressor const &C) { return stringcmp(Start, I, C.Name.c_str()) == 0;
         });
   if (Comp == Compressors.end())
      _error->Warning(_("Unknown compression algorithm '%s'"),string(Start,I).c_str());
   return Comp;
}
+
// MultiCompress::MultiCompress - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* Setup the file outputs, compression modes and fork the writer child */
MultiCompress::MultiCompress(string const &Output, string const &Compress,
      mode_t const &Permissions, bool const &Write) : Outputter{-1}, Permissions(Permissions)
{
   Outputs = 0;
   UpdateMTime = 0;

   auto const Compressors = APT::Configuration::getCompressors();
   // Parse the compression string, a space separated lists of compression types
   for (auto I = Compress.cbegin(); I != Compress.cend();)
   {
      auto const Comp = findMatchingCompressor(I, Compress.cend(), Compressors);
      if (Comp == Compressors.end())
         continue;

      // Create and link in a new output
      Files *NewOut = new Files;
      NewOut->Next = Outputs;
      Outputs = NewOut;
      NewOut->CompressProg = *Comp;
      NewOut->Output = Output + Comp->Extension;

      // Remember the previous mtime (0 = file absent) so Finalize() and
      // Child() can tell whether the output actually changed
      struct stat St;
      if (stat(NewOut->Output.c_str(),&St) == 0)
         NewOut->OldMTime = St.st_mtime;
      else
         NewOut->OldMTime = 0;
   }

   if (Write == false)
      return;

   /* Open all the temp files now so we can report any errors. File is
      made unreadable (mode 0600) to prevent people from touching it
      during creation. */
   for (Files *I = Outputs; I != 0; I = I->Next)
      I->TmpFile.Open(I->Output + ".new", FileFd::WriteOnly | FileFd::Create | FileFd::Empty, FileFd::Extension, 0600);
   if (_error->PendingError() == true)
      return;

   if (Outputs == 0)
   {
      _error->Error(_("Compressed output %s needs a compression set"),Output.c_str());
      return;
   }

   Start();
}
+ /*}}}*/
+// MultiCompress::~MultiCompress - Destructor /*{{{*/
+// ---------------------------------------------------------------------
+/* Just erase the file linked list. */
+MultiCompress::~MultiCompress()
+{
+ Die();
+
+ for (; Outputs != 0;)
+ {
+ Files *Tmp = Outputs->Next;
+ delete Outputs;
+ Outputs = Tmp;
+ }
+}
+ /*}}}*/
// MultiCompress::GetStat - Get stat information for compressed files /*{{{*/
// ---------------------------------------------------------------------
/* This checks each compressed file to make sure it exists and returns
   stat information for a random file from the collection. False means
   one or more of the files is missing. */
bool MultiCompress::GetStat(string const &Output,string const &Compress,struct stat &St)
{
   auto const Compressors = APT::Configuration::getCompressors();

   // Parse the compression string, a space separated lists of compression types
   bool DidStat = false;
   for (auto I = Compress.cbegin(); I != Compress.cend();)
   {
      auto const Comp = findMatchingCompressor(I, Compress.cend(), Compressors);
      if (Comp == Compressors.end())
         continue;

      // St is overwritten each iteration; the caller gets the last one
      string Name = Output + Comp->Extension;
      if (stat(Name.c_str(),&St) != 0)
         return false;
      DidStat = true;
   }
   // False also when no compressor name was recognized at all
   return DidStat;
}
+ /*}}}*/
// MultiCompress::Start - Start up the writer child /*{{{*/
// ---------------------------------------------------------------------
/* Fork a child and setup the communication pipe. The parent writes the
   uncompressed stream into Input; the child reads the other end and
   fans it out to all outputs (see Child()). */
bool MultiCompress::Start()
{
   // Create a data pipe
   int Pipe[2] = {-1,-1};
   if (pipe(Pipe) != 0)
      return _error->Errno("pipe",_("Failed to create IPC pipe to subprocess"));
   for (int I = 0; I != 2; I++)
      SetCloseExec(Pipe[I],true);

   // The child..
   Outputter = fork();
   if (Outputter == 0)
   {
      // Child keeps only the read end and exits with 100 on failure
      close(Pipe[1]);
      Child(Pipe[0]);
      if (_error->PendingError() == true)
      {
         _error->DumpErrors();
         _exit(100);
      }
      _exit(0);
   };

   // Parent keeps only the write end; FileFd takes ownership and will
   // close it (which signals EOF to the child)
   close(Pipe[0]);
   if (Input.OpenDescriptor(Pipe[1], FileFd::WriteOnly, true) == false)
      return false;

   if (Outputter == -1)
      return _error->Errno("fork",_("Failed to fork"));
   return true;
}
+ /*}}}*/
// MultiCompress::Die - Clean up the writer /*{{{*/
// ---------------------------------------------------------------------
/* Close the input pipe (EOF for the child) and reap the writer child.
   A no-op if the child was never started or already reaped. */
bool MultiCompress::Die()
{
   if (Input.IsOpen() == false)
      return true;

   // Closing our write end makes the child's read() return 0
   Input.Close();
   bool Res = ExecWait(Outputter,_("Compress child"),false);
   Outputter = -1;
   return Res;
}
+ /*}}}*/
// MultiCompress::Finalize - Finish up writing /*{{{*/
// ---------------------------------------------------------------------
/* This is only necessary for statistics reporting. Waits for the writer
   child, then sums the sizes of all outputs into OutSize; OutSize is
   left 0 if no file was actually replaced. */
bool MultiCompress::Finalize(unsigned long long &OutSize)
{
   OutSize = 0;
   if (Input.IsOpen() == false || Die() == false)
      return false;

   time_t Now;
   time(&Now);

   // Check the mtimes to see if the files were replaced.
   bool Changed = false;
   for (Files *I = Outputs; I != 0; I = I->Next)
   {
      struct stat St;
      if (stat(I->Output.c_str(),&St) != 0)
         return _error->Error(_("Internal error, failed to create %s"),
                              I->Output.c_str());

      if (I->OldMTime != St.st_mtime)
         Changed = true;
      else
      {
         // Update the mtime if necessary: touch unchanged files older
         // than UpdateMTime seconds (or with a future timestamp)
         if (UpdateMTime > 0 &&
             (Now - St.st_mtime > (signed)UpdateMTime || St.st_mtime > Now))
         {
            utimes(I->Output.c_str(), NULL);
            Changed = true;
         }
      }

      // Force the file permissions
      if (St.st_mode != Permissions)
         chmod(I->Output.c_str(),Permissions);

      OutSize += St.st_size;
   }

   if (Changed == false)
      OutSize = 0;

   return true;
}
+ /*}}}*/
+// MultiCompress::OpenOld - Open an old file /*{{{*/
+// ---------------------------------------------------------------------
+/* This opens one of the original output files, possibly decompressing it. */
+bool MultiCompress::OpenOld(FileFd &Fd)
+{
+ Files *Best = Outputs;
+ for (Files *I = Outputs; I != 0; I = I->Next)
+ if (Best->CompressProg.Cost > I->CompressProg.Cost)
+ Best = I;
+
+ // Open the file
+ return Fd.Open(Best->Output, FileFd::ReadOnly, FileFd::Extension);
+}
+ /*}}}*/
// MultiCompress::Child - The writer child /*{{{*/
// ---------------------------------------------------------------------
/* The child process takes input on FD and passes it to all the
   compressor outputs. On the way it computes the MD5 of the raw data.
   After this the raw data in the original files is compared to see if
   this data is new. If the data is new then the temp files are renamed,
   otherwise they are erased. */
bool MultiCompress::Child(int const &FD)
{
   /* Okay, now we just feed data from FD to all the other FDs. Also
      stash a hash of the data to use later. */
   SetNonBlock(FD,false);
   unsigned char Buffer[32*1024];
   unsigned long long FileSize = 0;
   Hashes MD5(Hashes::MD5SUM);
   while (1)
   {
      WaitFd(FD,false);
      int Res = read(FD,Buffer,sizeof(Buffer));
      if (Res == 0)   // EOF: the parent closed its write end
         break;
      if (Res < 0)    // transient read error, retry
         continue;

      MD5.Add(Buffer,Res);
      FileSize += Res;
      for (Files *I = Outputs; I != 0; I = I->Next)
      {
         if (I->TmpFile.Write(Buffer, Res) == false)
         {
            _error->Errno("write",_("IO to subprocess/file failed"));
            break;
         }
      }
   }

   if (_error->PendingError() == true)
      return false;

   /* Now we have to copy the files over, or erase them if they
      have not changed. First find the cheapest decompressor */
   bool Missing = false;
   for (Files *I = Outputs; I != 0; I = I->Next)
   {
      if (I->OldMTime == 0)   // an output did not exist before: must write
      {
         Missing = true;
         break;
      }
   }

   // Check the MD5 of the lowest cost entity.
   while (Missing == false)
   {
      FileFd CompFd;
      if (OpenOld(CompFd) == false)
      {
         // Old file unreadable: treat as changed and write the new one
         _error->Discard();
         break;
      }

      // Compute the hash of the previous (decompressed) output
      Hashes OldMD5(Hashes::MD5SUM);
      unsigned long long NewFileSize = 0;
      while (1)
      {
         unsigned long long Res = 0;
         if (CompFd.Read(Buffer,sizeof(Buffer), &Res) == false)
            return _error->Errno("read",_("Failed to read while computing MD5"));
         if (Res == 0)
            break;
         NewFileSize += Res;
         OldMD5.Add(Buffer,Res);
      }
      CompFd.Close();

      // Check the hash: identical content means keep the old files and
      // simply drop the freshly written temporaries
      if (OldMD5.GetHashString(Hashes::MD5SUM) == MD5.GetHashString(Hashes::MD5SUM) &&
          FileSize == NewFileSize)
      {
         for (Files *I = Outputs; I != 0; I = I->Next)
         {
            I->TmpFile.Close();
            RemoveFile("MultiCompress::Child", I->TmpFile.Name());
         }
         return !_error->PendingError();
      }
      break;   // the while() is only a breakable scope, it never loops
   }

   // Finalize: move the new files into place
   for (Files *I = Outputs; I != 0; I = I->Next)
   {
      // Set the correct file modes
      chmod(I->TmpFile.Name().c_str(),Permissions);

      if (rename(I->TmpFile.Name().c_str(),I->Output.c_str()) != 0)
         _error->Errno("rename",_("Failed to rename %s to %s"),
                       I->TmpFile.Name().c_str(),I->Output.c_str());
      I->TmpFile.Close();
   }

   return !_error->PendingError();
}
+ /*}}}*/
+
diff --git a/ftparchive/multicompress.h b/ftparchive/multicompress.h
new file mode 100644
index 0000000..c237269
--- /dev/null
+++ b/ftparchive/multicompress.h
@@ -0,0 +1,62 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+/* ######################################################################
+
+ MultiCompressor
+
+ Multiple output class. Takes a single FILE* and writes it simultaneously
+ to many compressed files. Then checks if the resulting output is
+ different from any previous output and overwrites the old files. Care is
+ taken to ensure that the new files are not generally readable while they
+ are being written.
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef MULTICOMPRESS_H
+#define MULTICOMPRESS_H
+
+#include <apt-pkg/aptconfiguration.h>
+#include <apt-pkg/fileutl.h>
+
+#include <string>
+#include <stdio.h>
+#include <sys/types.h>
+#include <time.h>
+
// Writes one uncompressed stream (fed through Input) simultaneously to
// several compressed output files, replacing the old files only when
// the content actually changed.
class MultiCompress
{
   // An output file
   struct Files
   {
      std::string Output;                          // final file name
      APT::Configuration::Compressor CompressProg; // how this output is compressed
      Files *Next;                                 // singly linked list
      FileFd TmpFile;                              // Output + ".new" while writing
      pid_t CompressProc;                          // presumably a compressor pid — unused in the code visible here, confirm
      time_t OldMTime;                             // mtime before the run, 0 if the file was absent
   };

   Files *Outputs;      // linked list of all requested outputs
   pid_t Outputter;     // writer child pid, -1 when not running
   mode_t Permissions;  // mode forced onto the final files

   bool Child(int const &Fd);
   bool Start();
   bool Die();

   public:

   // The FD to write to for compression.
   FileFd Input;
   // Touch unchanged outputs older than this many seconds; 0 disables
   unsigned long UpdateMTime;

   bool Finalize(unsigned long long &OutSize);
   bool OpenOld(FileFd &Fd);
   static bool GetStat(std::string const &Output,std::string const &Compress,struct stat &St);

   MultiCompress(std::string const &Output,std::string const &Compress,
                 mode_t const &Permissions, bool const &Write = true);
   ~MultiCompress();
};
+
+#endif
diff --git a/ftparchive/override.cc b/ftparchive/override.cc
new file mode 100644
index 0000000..16fefec
--- /dev/null
+++ b/ftparchive/override.cc
@@ -0,0 +1,288 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+/* ######################################################################
+
+ Override
+
+ Store the override file.
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#include <config.h>
+
+#include <apt-pkg/error.h>
+#include <apt-pkg/strutl.h>
+
+#include <utility>
+#include <ctype.h>
+#include <stdio.h>
+#include <string.h>
+
+#include "override.h"
+
+#include <apti18n.h>
+ /*}}}*/
+
// Override::ReadOverride - Read the override file /*{{{*/
// ---------------------------------------------------------------------
/* This parses the override file and reads it into the map. A line is
   "package priority section [[old-maint] => new-maint]"; for source
   overrides (Source == true) the priority column is absent. Malformed
   lines produce warnings, not errors. */
bool Override::ReadOverride(string const &File,bool const &Source)
{
   // No override file configured is not an error
   if (File.empty() == true)
      return true;

   FILE *F = fopen(File.c_str(),"r");
   if (F == 0)
      return _error->Errno("fopen",_("Unable to open %s"),File.c_str());

   char Line[1000];
   unsigned long long Counter = 0;
   while (fgets(Line,sizeof(Line),F) != 0)
   {
      Counter++;
      Item Itm;

      // Silence everything after a '#' comment marker
      for (char *I = Line; *I != 0; I++)
         if (*I == '#')
            *I = 0;

      // Strip space leading up to the package name, skip blank lines
      char *Pkg = Line;
      for (; isspace(*Pkg) && *Pkg != 0;Pkg++);
      if (*Pkg == 0)
         continue;

// Advance End over whitespace to the start of the next column (Start)
// and then to the end of that column.
#define APT_FIND_NEXT_FIELD \
      for (End++; isspace(*End) != 0 && *End != 0; ++End) \
         /* skip spaces */ ; \
      Start = End; \
      for (; isspace(*End) == 0 && *End != 0; ++End) \
         /* find end of word */ ;

// Warn and skip the line if it ended before the expected column,
// otherwise NUL-terminate the column in place.
#define APT_WARNING_MALFORMED_LINE(FIELD) \
      if (*End == 0) \
      { \
         _error->Warning(_("Malformed override %s line %llu (%s)"),File.c_str(), \
               Counter, FIELD ); \
         continue; \
      } \
      *End = 0;

      // Find the package and zero..
      char *Start;
      char *End = Pkg;
      for (; isspace(*End) == 0 && *End != 0; End++);
      APT_WARNING_MALFORMED_LINE("pkgname");

      APT_FIND_NEXT_FIELD;

      // Find the priority
      if (Source == false)
      {
         APT_WARNING_MALFORMED_LINE("priority");
         Itm.Priority = Start;

         APT_FIND_NEXT_FIELD;
      }

      // Find the Section
      APT_WARNING_MALFORMED_LINE("section");
      Itm.FieldOverride["Section"] = Start;

      // Source override files only have the two columns
      if (Source == true)
      {
         Mapping[Pkg] = Itm;
         continue;
      }

      // Find the => maintainer rewrite rule, if any
      for (End++; isspace(*End) != 0 && *End != 0; End++);
      if (*End != 0)
      {
         Start = End;
         for (; *End != 0 && (End[0] != '=' || End[1] != '>'); End++);
         // Without "=>" the rest of the line is the new maintainer and
         // any old maintainer ("*") matches
         if (*End == 0 || strlen(End) < 4)
         {
            Itm.OldMaint = "*";
            Itm.NewMaint = _strstrip(Start);
         }
         else
         {
            *End = 0;
            Itm.OldMaint = _strstrip(Start);

            End += 3;
            Itm.NewMaint = _strstrip(End);
         }
      }

      Mapping[Pkg] = Itm;
   }

   if (ferror(F))
      _error->Errno("fgets",_("Failed to read the override file %s"),File.c_str());
   fclose(F);
   return true;
}
+ /*}}}*/
+// Override::ReadExtraOverride - Read the extra override file /*{{{*/
+// ---------------------------------------------------------------------
+/* This parses the extra override file and reads it into the map */
+bool Override::ReadExtraOverride(string const &File,bool const &/*Source*/)
+{
+ if (File.empty() == true)
+ return true;
+
+ FILE *F = fopen(File.c_str(),"r");
+ if (F == 0)
+ return _error->Errno("fopen",_("Unable to open %s"),File.c_str());
+
+ char Line[1000];
+ unsigned long long Counter = 0;
+ while (fgets(Line,sizeof(Line),F) != 0)
+ {
+ Counter++;
+
+ // Silence
+ for (char *I = Line; *I != 0; I++)
+ if (*I == '#')
+ *I = 0;
+
+ // Strip space leading up to the package name, skip blank lines
+ char *Pkg = Line;
+ for (; isspace(*Pkg) && *Pkg != 0;Pkg++);
+ if (Pkg == 0)
+ continue;
+
+ // Find the package and zero..
+ char *End = Pkg;
+ for (; isspace(*End) == 0 && *End != 0; End++);
+ if (*End == 0)
+ {
+ _error->Warning(_("Malformed override %s line %llu #1"),File.c_str(),
+ Counter);
+ continue;
+ }
+ *End = 0;
+
+ // Find the field
+ for (End++; isspace(*End) != 0 && *End != 0; End++);
+ char *Field = End;
+ for (; isspace(*End) == 0 && *End != 0; End++);
+ if (*End == 0)
+ {
+ _error->Warning(_("Malformed override %s line %llu #2"),File.c_str(),
+ Counter);
+ continue;
+ }
+ *End = 0;
+
+ // Find the field value
+ for (End++; isspace(*End) != 0 && *End != 0; End++);
+ char *Value = End;
+ for (; *End != 0; End++);
+ for (; isspace(*(End-1)) && End > Value; End--);
+ if (End == Value)
+ {
+ _error->Warning(_("Malformed override %s line %llu #3"),File.c_str(),
+ Counter);
+ continue;
+ }
+ *End = 0;
+
+ Mapping[Pkg].FieldOverride[Field] = Value;
+ }
+
+ if (ferror(F))
+ _error->Errno("fgets",_("Failed to read the override file %s"),File.c_str());
+ fclose(F);
+ return true;
+}
+ /*}}}*/
+
+// Override::GetItem - Get a architecture specific item /*{{{*/
+// ---------------------------------------------------------------------
+/* Returns a override item for the given package and the given architecture.
+ * Treats "all" special
+ */
+Override::Item* Override::GetItem(string const &Package, string const &Architecture)
+{
+ map<string,Item>::const_iterator I = Mapping.find(Package);
+ map<string,Item>::iterator J = Mapping.find(Package + "/" + Architecture);
+
+ if (I == Mapping.end() && J == Mapping.end())
+ {
+ return 0;
+ }
+
+ Item *result = new Item;
+ if (I == Mapping.end()) *result = J->second;
+ else
+ {
+ *result = I->second;
+ if (J != Mapping.end())
+ {
+ Item *R = &J->second;
+ if (R->Priority != "") result->Priority = R->Priority;
+ if (R->OldMaint != "") result->OldMaint = R->OldMaint;
+ if (R->NewMaint != "") result->NewMaint = R->NewMaint;
+ for (map<string,string>::const_iterator foI = R->FieldOverride.begin();
+ foI != R->FieldOverride.end(); ++foI)
+ {
+ result->FieldOverride[foI->first] = foI->second;
+ }
+ }
+ }
+ return result;
+}
+
+
// Override::Item::SwapMaint - Swap the maintainer field if necessary /*{{{*/
// ---------------------------------------------------------------------
/* Returns the new maintainer string after evaluating the rewriting rule. If
   there is a rule but it does not match then the empty string is returned,
   also if there was no rewrite rule the empty string is returned. Failed
   indicates if there was some kind of problem while rewriting. */
string Override::Item::SwapMaint(string const &Orig,bool &Failed)
{
   Failed = false;

   // Degenerate case: no rewrite rule at all
   if (NewMaint.empty() == true)
      return OldMaint;

   // "*" matches any original maintainer
   if (OldMaint == "*")
      return NewMaint;

   /* James: ancient, eliminate it, however it is still being used in the main
      override file. Thus it persists.*/
#if 1
   // Break OldMaint up into little bits on double slash boundaries.
   string::const_iterator End = OldMaint.begin();
   while (1)
   {
      // Scan one " // "-separated alternative; the End+3 guard keeps the
      // End[1]/End[2] lookahead inside the string
      string::const_iterator Start = End;
      for (; End < OldMaint.end() &&
             (End + 3 >= OldMaint.end() || End[0] != ' ' ||
              End[1] != '/' || End[2] != '/'); ++End);
      if (stringcasecmp(Start,End,Orig.begin(),Orig.end()) == 0)
         return NewMaint;

      if (End >= OldMaint.end())
         break;

      // Skip the divider and white space
      for (; End < OldMaint.end() && (*End == '/' || *End == ' '); ++End);
   }
#else
   if (stringcasecmp(OldMaint.begin(),OldMaint.end(),Orig.begin(),Orig.end()) == 0)
      return NewMaint;
#endif

   // A rule existed but none of its alternatives matched
   Failed = true;
   return string();
}
+ /*}}}*/
diff --git a/ftparchive/override.h b/ftparchive/override.h
new file mode 100644
index 0000000..af62a04
--- /dev/null
+++ b/ftparchive/override.h
@@ -0,0 +1,48 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+/* ######################################################################
+
+ Override
+
+ Store the override file.
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef OVERRIDE_H
+#define OVERRIDE_H
+
+#include <map>
+#include <string>
+
+using std::string;
+using std::map;
+
// In-memory representation of the (extra) override files: a map from
// package name (optionally "package/arch") to its override data.
class Override
{
   public:

   struct Item
   {
      string Priority;    // e.g. "optional"; empty for source overrides
      string OldMaint;    // maintainer pattern to replace, "*" = any
      string NewMaint;    // replacement maintainer; empty = no rewrite

      // Arbitrary field replacements ("Section", extra-override fields)
      map<string,string> FieldOverride;
      // Evaluate the maintainer rewrite rule against Orig
      string SwapMaint(string const &Orig,bool &Failed);
      ~Item() {};
   };

   map<string,Item> Mapping;

   // Architecture-less convenience wrapper
   inline Item *GetItem(string const &Package)
   {
      return GetItem(Package, "");
   }
   // Caller owns the returned Item; 0 if the package has no override
   Item *GetItem(string const &Package, string const &Architecture);

   bool ReadOverride(string const &File,bool const &Source = false);
   bool ReadExtraOverride(string const &File,bool const &Source = false);
};
+
+#endif
+
diff --git a/ftparchive/sources.cc b/ftparchive/sources.cc
new file mode 100644
index 0000000..a592243
--- /dev/null
+++ b/ftparchive/sources.cc
@@ -0,0 +1,63 @@
+#include <config.h>
+
+#include <sstream>
+#include <string>
+
+// for memcpy
+#include <cstring>
+
+#include <apt-pkg/error.h>
+#include <apt-pkg/fileutl.h>
+#include <apt-pkg/gpgv.h>
+
+#include "sources.h"
+
+bool DscExtract::TakeDsc(const void *newData, unsigned long long newSize)
+{
+ if (newSize == 0)
+ {
+ // adding two newlines 'off record' for pkgTagSection.Scan() calls
+ Data = "\n\n";
+ Length = 0;
+ return true;
+ }
+
+ Data = std::string((const char*)newData, newSize);
+ // adding two newlines 'off record' for pkgTagSection.Scan() calls
+ Data.append("\n\n");
+ Length = newSize;
+
+ return true;
+}
+
// Read a .dsc file from disk, transparently stripping a clearsign
// wrapper if present (IsClearSigned records whether one was found).
// Length counts the raw bytes read, excluding the appended newlines.
bool DscExtract::Read(std::string FileName)
{
   Data.clear();
   Length = 0;

   FileFd F;
   if (OpenMaybeClearSignedFile(FileName, F) == false)
      return false;

   // The helper hands back a different (temporary) file when the input
   // was clearsigned
   IsClearSigned = (FileName != F.Name());

   std::ostringstream data;
   char buffer[1024];
   do {
      unsigned long long actual = 0;
      if (F.Read(buffer, sizeof(buffer)-1, &actual) == false)
         return _error->Errno("read", "Failed to read dsc file %s", FileName.c_str());
      if (actual == 0)
         break;
      Length += actual;
      // NUL-terminate so the chunk can be streamed as a C string
      buffer[actual] = '\0';
      data << buffer;
   } while(true);

   // adding two newlines 'off record' for pkgTagSection.Scan() calls
   data << "\n\n";
   Data = data.str();
   return true;
}
+
+
diff --git a/ftparchive/sources.h b/ftparchive/sources.h
new file mode 100644
index 0000000..a125ec6
--- /dev/null
+++ b/ftparchive/sources.h
@@ -0,0 +1,24 @@
+#ifndef SOURCES_H
+#define SOURCES_H
+
+#include <apt-pkg/tagfile.h>
+
+#include <string>
+
// Holder for the text of one .dsc (Debian source control) file, loaded
// either from disk or from a cached memory buffer.
class DscExtract
{
   public:
   std::string Data;            // dsc text plus two appended newlines
   pkgTagSection Section;
   unsigned long long Length;   // raw size, excluding the appended newlines
   bool IsClearSigned;          // true if the on-disk file was clearsigned

   bool TakeDsc(const void *Data, unsigned long long Size);
   bool Read(std::string FileName);

   DscExtract() : Length(0), IsClearSigned(false) {};
   ~DscExtract() {};
};
+
+
+#endif
diff --git a/ftparchive/writer.cc b/ftparchive/writer.cc
new file mode 100644
index 0000000..ae427b1
--- /dev/null
+++ b/ftparchive/writer.cc
@@ -0,0 +1,1181 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+/* ######################################################################
+
+ Writer
+
+ The file writer classes. These write various types of output, sources,
+ packages and contents.
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#include <config.h>
+
+#include <apt-pkg/configuration.h>
+#include <apt-pkg/debfile.h>
+#include <apt-pkg/deblistparser.h>
+#include <apt-pkg/error.h>
+#include <apt-pkg/fileutl.h>
+#include <apt-pkg/gpgv.h>
+#include <apt-pkg/hashes.h>
+#include <apt-pkg/pkgcache.h>
+#include <apt-pkg/strutl.h>
+#include <apt-pkg/tagfile-keys.h>
+#include <apt-pkg/tagfile.h>
+
+#include <algorithm>
+#include <ctime>
+#include <iomanip>
+#include <iostream>
+#include <memory>
+#include <sstream>
+#include <utility>
+#include <ctype.h>
+#include <fnmatch.h>
+#include <ftw.h>
+#include <locale.h>
+#include <string.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+#include "apt-ftparchive.h"
+#include "byhash.h"
+#include "cachedb.h"
+#include "multicompress.h"
+#include "writer.h"
+
+#include <apti18n.h>
+ /*}}}*/
+using namespace std;
+FTWScanner *FTWScanner::Owner;
+
+// ConfigToDoHashes - which hashes to generate /*{{{*/
+static void SingleConfigToDoHashes(unsigned int &DoHashes, std::string const &Conf, unsigned int const Flag)
+{
+ if (_config->FindB(Conf, (DoHashes & Flag) == Flag) == true)
+ DoHashes |= Flag;
+ else
+ DoHashes &= ~Flag;
+}
// Apply the MD5/SHA1/SHA256/SHA512 config switches below Conf to DoHashes
static void ConfigToDoHashes(unsigned int &DoHashes, std::string const &Conf)
{
   SingleConfigToDoHashes(DoHashes, Conf + "::MD5", Hashes::MD5SUM);
   SingleConfigToDoHashes(DoHashes, Conf + "::SHA1", Hashes::SHA1SUM);
   SingleConfigToDoHashes(DoHashes, Conf + "::SHA256", Hashes::SHA256SUM);
   SingleConfigToDoHashes(DoHashes, Conf + "::SHA512", Hashes::SHA512SUM);
}
+ /*}}}*/
+
// FTWScanner::FTWScanner - Constructor /*{{{*/
/* Bind the scanner to an output sink (stdout when none is given) and
   load the hash/delink configuration. */
FTWScanner::FTWScanner(FileFd * const GivenOutput, string const &Arch, bool const IncludeArchAll)
   : Arch(Arch), IncludeArchAll(IncludeArchAll), DoHashes(~0)
{
   if (GivenOutput == NULL)
   {
      // No sink supplied: wrap stdout and remember that we own the wrapper
      Output = new FileFd;
      OwnsOutput = true;
      Output->OpenDescriptor(STDOUT_FILENO, FileFd::WriteOnly, false);
   }
   else
   {
      Output = GivenOutput;
      OwnsOutput = false;
   }
   ErrorPrinted = false;
   NoLinkAct = !_config->FindB("APT::FTPArchive::DeLinkAct",true);
   // Start with every hash enabled (~0) and let the config trim the set
   ConfigToDoHashes(DoHashes, "APT::FTPArchive");
}
+ /*}}}*/
+FTWScanner::~FTWScanner()
+{
+ if (Output != NULL && OwnsOutput)
+ delete Output;
+}
// FTWScanner::Scanner - FTW Scanner /*{{{*/
// ---------------------------------------------------------------------
/* This is the FTW scanner, it processes each directory element in the
   directory tree. Unreadable/unstatable entries only produce warnings;
   regular files are handed on to ScannerFile. */
int FTWScanner::ScannerFTW(const char *File,const struct stat * /*sb*/,int Flag)
{
   if (Flag == FTW_DNR)
   {
      Owner->NewLine(1);
      ioprintf(c1out, _("W: Unable to read directory %s\n"), File);
   }
   if (Flag == FTW_NS)
   {
      Owner->NewLine(1);
      ioprintf(c1out, _("W: Unable to stat %s\n"), File);
   }
   // Everything but plain files is ignored
   if (Flag != FTW_F)
      return 0;

   return ScannerFile(File, true);
}
+ /*}}}*/
// Check whether the basename of File matches any of the glob Patterns.
static bool FileMatchesPatterns(char const *const File, std::vector<std::string> const &Patterns) /*{{{*/
{
   // Match only against the last path component
   char const *Base = strrchr(File, '/');
   Base = (Base == nullptr) ? File : Base + 1;

   for (auto const &Pattern : Patterns)
      if (fnmatch(Pattern.c_str(), Base, 0) == 0)
         return true;
   return false;
}
+ /*}}}*/
// Queue a pattern-matching file for later (sorted) processing in
// RecursiveScan(); files are not handled immediately here.
int FTWScanner::ScannerFile(const char *const File, bool const ReadLink) /*{{{*/
{
   if (FileMatchesPatterns(File, Owner->Patterns) == false)
      return 0;

   Owner->FilesToProcess.emplace_back(File, ReadLink);
   return 0;
}
+ /*}}}*/
// Run DoPackage() on one file, resolving symlinks to their target first,
// and print any errors/warnings the handler produced. Always returns 0
// so the scan continues after per-file failures.
int FTWScanner::ProcessFile(const char *const File, bool const ReadLink) /*{{{*/
{
   /* Process it. If the file is a link then resolve it into an absolute
      name.. This works best if the directory components the scanner are
      given are not links themselves. */
   char Jnk[2];
   char *RealPath = NULL;
   Owner->OriginalPath = File;
   // readlink() into a tiny buffer is only used as an "is it a symlink?" test
   if (ReadLink &&
       readlink(File,Jnk,sizeof(Jnk)) != -1 &&
       (RealPath = realpath(File,NULL)) != 0)
   {
      Owner->DoPackage(RealPath);
      free(RealPath);
   }
   else
      Owner->DoPackage(File);

   if (_error->empty() == false)
   {
      // Print any errors or warnings found
      string Err;
      bool SeenPath = false;
      while (_error->empty() == false)
      {
         Owner->NewLine(1);

         bool const Type = _error->PopMessage(Err);
         if (Type == true)
            cerr << _("E: ") << Err << endl;
         else
            cerr << _("W: ") << Err << endl;

         // Track whether any message already names the offending file
         if (Err.find(File) != string::npos)
            SeenPath = true;
      }

      if (SeenPath == false)
         cerr << _("E: Errors apply to file ") << "'" << File << "'" << endl;
      return 0;
   }

   return 0;
}
+ /*}}}*/
// FTWScanner::RecursiveScan - Just scan a directory tree /*{{{*/
// ---------------------------------------------------------------------
/* Walk Dir with ftw(), collect all matching files, then process them
   in sorted order so the generated output is deterministic. */
bool FTWScanner::RecursiveScan(string const &Dir)
{
   /* If noprefix is set then jam the scan root in, so we don't generate
      link followed paths out of control */
   if (InternalPrefix.empty() == true)
   {
      char *RealPath = nullptr;
      if ((RealPath = realpath(Dir.c_str(), nullptr)) == 0)
         return _error->Errno("realpath",_("Failed to resolve %s"),Dir.c_str());
      InternalPrefix = RealPath;
      free(RealPath);
   }

   // Do recursive directory searching
   Owner = this;
   int const Res = ftw(Dir.c_str(),ScannerFTW,30);

   // Error treewalking?
   if (Res != 0)
   {
      if (_error->PendingError() == false)
         _error->Errno("ftw",_("Tree walking failed"));
      return false;
   }

   // Sort by path so runs over the same tree always emit the same order
   using PairType = decltype(*FilesToProcess.cbegin());
   std::sort(FilesToProcess.begin(), FilesToProcess.end(), [](PairType a, PairType b) {
      return a.first < b.first;
   });
   if (not std::all_of(FilesToProcess.cbegin(), FilesToProcess.cend(), [](auto &&it) { return ProcessFile(it.first.c_str(), it.second) == 0; }))
      return false;
   FilesToProcess.clear();
   return true;
}
+ /*}}}*/
// FTWScanner::LoadFileList - Load the file list from a file /*{{{*/
// ---------------------------------------------------------------------
/* This is an alternative to using FTW to locate files, it reads the list
   of files from another file. Relative names are interpreted below Dir;
   absolute names are used as-is. */
bool FTWScanner::LoadFileList(string const &Dir, string const &File)
{
   /* If noprefix is set then jam the scan root in, so we don't generate
      link followed paths out of control */
   if (InternalPrefix.empty() == true)
   {
      char *RealPath = nullptr;
      if ((RealPath = realpath(Dir.c_str(), nullptr)) == 0)
         return _error->Errno("realpath",_("Failed to resolve %s"),Dir.c_str());
      InternalPrefix = RealPath;
      free(RealPath);
   }

   Owner = this;
   FILE *List = fopen(File.c_str(),"r");
   if (List == 0)
      return _error->Errno("fopen",_("Failed to open %s"),File.c_str());

   /* We are a tad tricky here.. We prefix the buffer with the directory
      name, that way if we need a full path with just use line.. Sneaky and
      fully evil. */
   char Line[1000];
   char *FileStart;
   if (Dir.empty() == true || Dir.end()[-1] != '/')
      FileStart = Line + snprintf(Line,sizeof(Line),"%s/",Dir.c_str());
   else
      FileStart = Line + snprintf(Line,sizeof(Line),"%s",Dir.c_str());
   while (fgets(FileStart,sizeof(Line) - (FileStart - Line),List) != 0)
   {
      char *FileName = _strstrip(FileStart);
      if (FileName[0] == 0)
         continue;

      if (FileName[0] != '/')
      {
         // Relative name: slide it down so Line = "Dir/" + name.
         // NOTE(review): copies strlen(FileStart) bytes, which overshoots
         // the stripped name's length when leading spaces were skipped —
         // looks benign within Line[] but worth confirming.
         if (FileName != FileStart)
            memmove(FileStart,FileName,strlen(FileStart));
         FileName = Line;
      }

#if 0
      struct stat St;
      int Flag = FTW_F;
      if (stat(FileName,&St) != 0)
         Flag = FTW_NS;
#endif
      if (FileMatchesPatterns(FileName, Patterns) == false)
         continue;

      if (ProcessFile(FileName, false) != 0)
         break;
   }

   fclose(List);
   return true;
}
+ /*}}}*/
+// FTWScanner::Delink - Delink symlinks /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool FTWScanner::Delink(string &FileName,const char *OriginalPath,
+			unsigned long long &DeLinkBytes,
+			unsigned long long const &FileSize)
+{
+   // See if this isn't an internally prefix'd file name.
+   if (InternalPrefix.empty() == false &&
+       InternalPrefix.length() < FileName.length() &&
+       stringcmp(FileName.begin(),FileName.begin() + InternalPrefix.length(),
+		 InternalPrefix.begin(),InternalPrefix.end()) != 0)
+   {
+      if (DeLinkLimit != 0 && DeLinkBytes/1024 < DeLinkLimit)
+      {
+	 // Tidy up the display
+	 if (DeLinkBytes == 0)
+	    cout << endl;
+
+	 NewLine(1);
+	 ioprintf(c1out, _(" DeLink %s [%s]\n"), (OriginalPath + InternalPrefix.length()),
+		  SizeToStr(FileSize).c_str());
+	 c1out << flush;
+
+	 if (NoLinkAct == false)
+	 {
+	    char OldLink[400];
+	    // readlink(2) does not NUL-terminate, so reserve one byte for
+	    // the terminator we add ourselves before OldLink is used as a
+	    // C string by symlink() below.
+	    ssize_t const LinkLen = readlink(OriginalPath,OldLink,sizeof(OldLink) - 1);
+	    if (LinkLen == -1)
+	       _error->Errno("readlink",_("Failed to readlink %s"),OriginalPath);
+	    else
+	    {
+	       OldLink[LinkLen] = '\0';
+	       if (RemoveFile("FTWScanner::Delink", OriginalPath))
+	       {
+		  if (link(FileName.c_str(),OriginalPath) != 0)
+		  {
+		     // Panic! Restore the symlink
+		     if (symlink(OldLink,OriginalPath) != 0)
+			_error->Errno("symlink", "failed to restore symlink");
+		     return _error->Errno("link",_("*** Failed to link %s to %s"),
+					  FileName.c_str(),
+					  OriginalPath);
+		  }
+	       }
+	    }
+	 }
+
+	 DeLinkBytes += FileSize;
+	 if (DeLinkBytes/1024 >= DeLinkLimit)
+	    ioprintf(c1out, _(" DeLink limit of %sB hit.\n"), SizeToStr(DeLinkBytes).c_str());
+      }
+
+      // Callers continue with the on-disk (non-symlink) path
+      FileName = OriginalPath;
+   }
+
+   return true;
+}
+ /*}}}*/
+// FTWScanner::SetExts - Set extensions to support /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool FTWScanner::SetExts(string const &Vals)
+{
+   // Replace the current pattern set with one pattern per space-separated
+   // extension in Vals, qualified by architecture where one is configured.
+   ClearPatterns();
+   string::size_type Start = 0;
+   // Compare against length() directly: the previous "Start <= length()-1"
+   // underflowed on an empty string and added a bogus catch-all pattern.
+   while (Start < Vals.length())
+   {
+      string::size_type const Space = Vals.find(' ',Start);
+      string::size_type const Length = ((Space == string::npos) ? Vals.length() : Space) - Start;
+      if ( Arch.empty() == false )
+      {
+	 AddPattern(string("*_") + Arch + Vals.substr(Start, Length));
+	 if (IncludeArchAll == true && Arch != "all")
+	    AddPattern(string("*_all") + Vals.substr(Start, Length));
+      }
+      else
+	 AddPattern(string("*") + Vals.substr(Start, Length));
+
+      Start += Length + 1;
+   }
+
+   return true;
+}
+ /*}}}*/
+
+// PackagesWriter::PackagesWriter - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+PackagesWriter::PackagesWriter(FileFd * const GivenOutput, TranslationWriter * const transWriter,
+				string const &DB,string const &Overrides,string const &ExtOverrides,
+				string const &Arch, bool const IncludeArchAll) :
+   FTWScanner(GivenOutput, Arch, IncludeArchAll), Db(DB), Stats(Db.Stats), TransWriter(transWriter)
+{
+   // Scan every binary package flavour (regular, debug and installer debs)
+   SetExts(".deb .ddeb .udeb");
+   DeLinkLimit = 0;
+
+   // Process the command line options
+   ConfigToDoHashes(DoHashes, "APT::FTPArchive::Packages");
+   DoAlwaysStat = _config->FindB("APT::FTPArchive::AlwaysStat", false);
+   DoContents = _config->FindB("APT::FTPArchive::Contents",true);
+   NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false);
+   LongDescription = _config->FindB("APT::FTPArchive::LongDescription",true);
+
+   // Contents generation is disabled when no cache database is loaded
+   if (Db.Loaded() == false)
+      DoContents = false;
+
+   // Read the override file
+   // NOTE(review): NoOverride is forced to true even when the override
+   // file was read successfully - same pattern as SourcesWriter (see the
+   // comment there); kept as-is to preserve behaviour.
+   if (Overrides.empty() == false && Over.ReadOverride(Overrides) == false)
+      return;
+   else
+      NoOverride = true;
+
+   if (ExtOverrides.empty() == false)
+      Over.ReadExtraOverride(ExtOverrides);
+
+   // Surface (non-fatal) problems from override parsing immediately
+   _error->DumpErrors();
+}
+ /*}}}*/
+// PackagesWriter::DoPackage - Process a single package /*{{{*/
+// ---------------------------------------------------------------------
+/* This method takes a package and gets its control information and
+ MD5, SHA1 and SHA256 then writes out a control record with the proper fields
+ rewritten and the path/size/hash appended. */
+// Process one .deb: read control data (and optionally contents/hashes)
+// from the cache database, apply overrides, then emit the rewritten
+// Packages stanza to Output.
+bool PackagesWriter::DoPackage(string FileName)
+{
+   // Pull all the data we need form the DB
+   if (Db.GetFileInfo(FileName,
+                      true, /* DoControl */
+                      DoContents,
+                      true, /* GenContentsOnly */
+                      false, /* DoSource */
+                      DoHashes, DoAlwaysStat) == false)
+   {
+      return false;
+   }
+
+   unsigned long long FileSize = Db.GetFileSize();
+   // Swap a symlinked path for its real target where delinking applies
+   if (Delink(FileName,OriginalPath,Stats.DeLinkBytes,FileSize) == false)
+      return false;
+
+   // Lookup the override information
+   pkgTagSection &Tags = Db.Control.Section;
+   auto const Package = Tags.Find(pkgTagSection::Key::Package).to_string();
+   string Architecture;
+   // if we generate a Packages file for a given arch, we use it to
+   // look for overrides. if we run in "simple" mode without the
+   // "Architectures" variable in the config we use the architecture value
+   // from the deb file
+   if(Arch != "")
+      Architecture = Arch;
+   else
+      Architecture = Tags.Find(pkgTagSection::Key::Architecture).to_string();
+   unique_ptr<Override::Item> OverItem(Over.GetItem(Package, Architecture));
+
+   if (Package.empty() == true)
+      return _error->Error(_("Archive had no package field"));
+
+   // If we need to do any rewriting of the header do it now..
+   if (OverItem.get() == 0)
+   {
+      if (NoOverride == false)
+      {
+	 NewLine(1);
+	 ioprintf(c1out, _(" %s has no override entry\n"), Package.c_str());
+      }
+
+      // Synthesize an override item that keeps the package's own
+      // Section and Priority values
+      OverItem = unique_ptr<Override::Item>(new Override::Item);
+      OverItem->FieldOverride["Section"] = Tags.Find(pkgTagSection::Key::Section).to_string();
+      OverItem->Priority = Tags.Find(pkgTagSection::Key::Priority).to_string();
+   }
+
+   // Strip the DirStrip prefix from the FileName and add the PathPrefix
+   string NewFileName;
+   if (DirStrip.empty() == false &&
+       FileName.length() > DirStrip.length() &&
+       stringcmp(FileName.begin(),FileName.begin() + DirStrip.length(),
+		 DirStrip.begin(),DirStrip.end()) == 0)
+      NewFileName = string(FileName.begin() + DirStrip.length(),FileName.end());
+   else
+      NewFileName = FileName;
+   if (PathPrefix.empty() == false)
+      NewFileName = flCombine(PathPrefix,NewFileName);
+
+   /* Configuration says we don't want to include the long Description
+      in the package file - instead we want to ship a separated file */
+   string desc;
+   if (LongDescription == false) {
+      desc = Tags.Find(pkgTagSection::Key::Description).to_string().append("\n");
+      // Keep only the first (summary) line in the Packages stanza
+      OverItem->FieldOverride["Description"] = desc.substr(0, desc.find('\n')).c_str();
+   }
+
+   // This lists all the changes to the fields we are going to make.
+   std::vector<pkgTagSection::Tag> Changes;
+   Changes.push_back(pkgTagSection::Tag::Rewrite("Size", std::to_string(FileSize)));
+
+   for (HashStringList::const_iterator hs = Db.HashesList.begin(); hs != Db.HashesList.end(); ++hs)
+   {
+      if (hs->HashType() == "MD5Sum")
+	 Changes.push_back(pkgTagSection::Tag::Rewrite("MD5sum", hs->HashValue()));
+      else if (hs->HashType() == "Checksum-FileSize")
+	 // the file size was already emitted via the "Size" field above
+	 continue;
+      else
+	 Changes.push_back(pkgTagSection::Tag::Rewrite(hs->HashType(), hs->HashValue()));
+   }
+   Changes.push_back(pkgTagSection::Tag::Rewrite("Filename", NewFileName));
+   Changes.push_back(pkgTagSection::Tag::Rewrite("Priority", OverItem->Priority));
+   Changes.push_back(pkgTagSection::Tag::Remove("Status"));
+
+   string DescriptionMd5;
+   if (LongDescription == false) {
+      // The md5 over the full description links this stanza with the
+      // Translation record written through TransWriter below
+      Hashes descmd5(Hashes::MD5SUM);
+      descmd5.Add(desc.c_str());
+      DescriptionMd5 = descmd5.GetHashString(Hashes::MD5SUM).HashValue();
+      Changes.push_back(pkgTagSection::Tag::Rewrite("Description-md5", DescriptionMd5));
+      if (TransWriter != NULL)
+	 TransWriter->DoPackage(Package, desc, DescriptionMd5);
+   }
+
+   // Rewrite the maintainer field if necessary
+   bool MaintFailed;
+   string NewMaint = OverItem->SwapMaint(Tags.Find(pkgTagSection::Key::Maintainer).to_string(), MaintFailed);
+   if (MaintFailed == true)
+   {
+      if (NoOverride == false)
+      {
+	 NewLine(1);
+	 ioprintf(c1out, _(" %s maintainer is %s not %s\n"),
+		  Package.c_str(), Tags.Find(pkgTagSection::Key::Maintainer).to_string().c_str(), OverItem->OldMaint.c_str());
+      }
+   }
+
+   if (NewMaint.empty() == false)
+      Changes.push_back(pkgTagSection::Tag::Rewrite("Maintainer", NewMaint));
+
+   // Apply any per-field overrides from the extra override file
+   for (map<string,string>::const_iterator I = OverItem->FieldOverride.begin();
+	I != OverItem->FieldOverride.end(); ++I)
+      Changes.push_back(pkgTagSection::Tag::Rewrite(I->first, I->second));
+
+   // Rewrite and store the fields.
+   if (Tags.Write(*Output, TFRewritePackageOrder, Changes) == false ||
+       Output->Write("\n", 1) == false)
+      return false;
+
+   return Db.Finish();
+}
+ /*}}}*/
+// All members clean up after themselves; the defaulted out-of-line
+// definition keeps the destructor emitted in this translation unit.
+PackagesWriter::~PackagesWriter() = default;				/*{{{*/
+									/*}}}*/
+
+// TranslationWriter::TranslationWriter - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* Create a Translation-Master file for this Packages file */
+TranslationWriter::TranslationWriter(string const &File, string const &TransCompress,
+					mode_t const &Permissions) : Comp(NULL), Output(NULL)
+{
+   // An empty filename disables translation writing entirely: Output
+   // stays NULL and DoPackage becomes a no-op.
+   if (File.empty() == false)
+   {
+      Comp = new MultiCompress(File, TransCompress, Permissions);
+      Output = &Comp->Input;
+   }
+}
+ /*}}}*/
+// TranslationWriter::DoPackage - Process a single package /*{{{*/
+// ---------------------------------------------------------------------
+/* Create a Translation-Master file for this Packages file */
+// Append one record to the Translation-en master file, deduplicated by
+// (package, description-md5) so identical descriptions from different
+// architectures are written only once.
+bool TranslationWriter::DoPackage(string const &Pkg, string const &Desc,
+				  string const &MD5)
+{
+   if (Output == NULL)
+      return true;
+
+   // Different archs can include different versions and therefore
+   // different descriptions - so we need to check for both name and md5.
+   string const Record = Pkg + ":" + MD5;
+
+   // insert() reports in a single lookup whether the record was new;
+   // the old code did a find() followed by an insert()
+   if (Included.insert(Record).second == false)
+      return true;
+
+   std::string out;
+   strprintf(out, "Package: %s\nDescription-md5: %s\nDescription-en: %s\n",
+	     Pkg.c_str(), MD5.c_str(), Desc.c_str());
+   // Propagate write failures instead of silently discarding them
+   return Output->Write(out.c_str(), out.length());
+}
+ /*}}}*/
+// TranslationWriter::~TranslationWriter - Destructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+TranslationWriter::~TranslationWriter()
+{
+   // deleting a null pointer is a no-op, so no separate check is needed
+   delete Comp;
+}
+ /*}}}*/
+
+// SourcesWriter::SourcesWriter - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+SourcesWriter::SourcesWriter(FileFd * const GivenOutput, string const &DB, string const &BOverrides,string const &SOverrides,
+			     string const &ExtOverrides) :
+   FTWScanner(GivenOutput), Db(DB), Stats(Db.Stats)
+{
+   // Only source description files are of interest here
+   AddPattern("*.dsc");
+   DeLinkLimit = 0;
+   Buffer = 0;
+   BufSize = 0;
+
+   // Process the command line options
+   ConfigToDoHashes(DoHashes, "APT::FTPArchive::Sources");
+   NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false);
+   DoAlwaysStat = _config->FindB("APT::FTPArchive::AlwaysStat", false);
+
+   // Read the override file
+   // NOTE(review): a successful read still forces NoOverride to true -
+   // see the rant below; kept as-is to preserve behaviour.
+   if (BOverrides.empty() == false && BOver.ReadOverride(BOverrides) == false)
+      return;
+   else
+      NoOverride = true;
+
+   // WTF?? The logic above: if we can't read binary overrides, don't even try
+   // reading source overrides. if we can read binary overrides, then say there
+   // are no overrides. THIS MAKES NO SENSE! -- ajt@d.o, 2006/02/28
+
+   if (ExtOverrides.empty() == false)
+      SOver.ReadExtraOverride(ExtOverrides);
+
+   // The source override file is optional; a missing file is skipped silently
+   if (SOverrides.empty() == false && FileExists(SOverrides) == true)
+      SOver.ReadOverride(SOverrides,true);
+}
+ /*}}}*/
+// SourcesWriter::DoPackage - Process a single package /*{{{*/
+// Build the body of one checksum field (Files/Checksums-Sha*) for the
+// Sources stanza: the hash line for the .dsc itself followed by the
+// field's original content copied verbatim from the dsc.
+static std::string getDscHash(unsigned int const DoHashes,
+      Hashes::SupportedHashes const DoIt, pkgTagSection &Tags, pkgTagSection::Key const FieldKey,
+      HashString const * const Hash, unsigned long long Size, std::string const &FileName)
+{
+   // Nothing to emit if the hash type wasn't requested, the dsc lacks
+   // the field, or no hash of this type is available for the file.
+   if ((DoHashes & DoIt) != DoIt || not Tags.Exists(FieldKey) || Hash == nullptr)
+      return "";
+   std::string result = "\n ";
+   result.append(Hash->HashValue()).append(" ").append(std::to_string(Size)).append(" ").append(FileName);
+   result.append("\n ").append(Tags.Find(FieldKey).to_string());
+   return result;
+}
+// Process one .dsc: read it via the cache database, apply binary and
+// source overrides, complete missing checksum fields for the listed
+// source files, and emit the rewritten Sources stanza to Output.
+bool SourcesWriter::DoPackage(string FileName)
+{
+   // Pull all the data we need form the DB
+   if (Db.GetFileInfo(FileName,
+                      false, /* DoControl */
+                      false, /* DoContents */
+                      false, /* GenContentsOnly */
+                      true, /* DoSource */
+                      DoHashes, DoAlwaysStat) == false)
+   {
+      return false;
+   }
+
+   // we need to perform a "write" here (this is what finish is doing)
+   // because the call to Db.GetFileInfo() in the loop will change
+   // the "db cursor"
+   Db.Finish();
+
+   pkgTagSection Tags;
+   if (Tags.Scan(Db.Dsc.Data.c_str(), Db.Dsc.Data.length()) == false)
+      return _error->Error("Could not find a record in the DSC '%s'",FileName.c_str());
+
+   if (not Tags.Exists(pkgTagSection::Key::Source))
+      return _error->Error("Could not find a Source entry in the DSC '%s'",FileName.c_str());
+   Tags.Trim();
+
+   // Lookup the override information, finding first the best priority.
+   string BestPrio;
+   auto const Bins = Tags.Find(pkgTagSection::Key::Binary);
+   // NOTE(review): variable-length array - a GCC/Clang extension, not
+   // standard C++; fine for the supported toolchains.
+   char Buffer[Bins.length() + 1];
+   unique_ptr<Override::Item> OverItem(nullptr);
+   if (Bins.empty() == false)
+   {
+      strncpy(Buffer, Bins.data(), Bins.length());
+      Buffer[Bins.length()] = '\0';
+
+      // Ignore too-long errors.
+      char *BinList[400];
+      TokSplitString(',',Buffer,BinList,sizeof(BinList)/sizeof(BinList[0]));
+
+      // Look at all the binaries
+      unsigned char BestPrioV = pkgCache::State::Extra;
+      for (unsigned I = 0; BinList[I] != 0; I++)
+      {
+	 unique_ptr<Override::Item> Itm(BOver.GetItem(BinList[I]));
+	 if (Itm.get() == 0)
+	    continue;
+
+	 // Numerically smaller priority value wins (Required < ... < Extra)
+	 unsigned char NewPrioV = debListParser::GetPrio(Itm->Priority);
+	 if (NewPrioV < BestPrioV || BestPrio.empty() == true)
+	 {
+	    BestPrioV = NewPrioV;
+	    BestPrio = Itm->Priority;
+	 }
+
+	 // The first matching binary's override supplies maintainer/fields
+	 if (OverItem.get() == 0)
+	    OverItem = std::move(Itm);
+      }
+   }
+
+   // If we need to do any rewriting of the header do it now..
+   if (OverItem.get() == 0)
+   {
+      if (NoOverride == false)
+      {
+	 NewLine(1);
+	 ioprintf(c1out, _(" %s has no override entry\n"), Tags.Find(pkgTagSection::Key::Source).to_string().c_str());
+      }
+
+      OverItem.reset(new Override::Item);
+   }
+
+   struct stat St;
+   if (stat(FileName.c_str(), &St) != 0)
+      return _error->Errno("fstat","Failed to stat %s",FileName.c_str());
+
+   auto const Package = Tags.Find(pkgTagSection::Key::Source).to_string();
+   unique_ptr<Override::Item> SOverItem(SOver.GetItem(Package));
+   // const unique_ptr<Override::Item> autoSOverItem(SOverItem);
+   // Fall back: source override -> binary override -> the synthesized item
+   if (SOverItem.get() == 0)
+   {
+      ioprintf(c1out, _(" %s has no source override entry\n"), Package.c_str());
+      SOverItem = unique_ptr<Override::Item>(BOver.GetItem(Package));
+      if (SOverItem.get() == 0)
+      {
+	 ioprintf(c1out, _(" %s has no binary override entry either\n"), Package.c_str());
+	 SOverItem = unique_ptr<Override::Item>(new Override::Item);
+	 *SOverItem = *OverItem;
+      }
+   }
+
+   // Add the dsc to the files hash list
+   string const strippedName = flNotDir(FileName);
+   std::string const Files = getDscHash(DoHashes, Hashes::MD5SUM, Tags, pkgTagSection::Key::Files, Db.HashesList.find("MD5Sum"), St.st_size, strippedName);
+   std::string ChecksumsSha1 = getDscHash(DoHashes, Hashes::SHA1SUM, Tags, pkgTagSection::Key::Checksums_Sha1, Db.HashesList.find("SHA1"), St.st_size, strippedName);
+   std::string ChecksumsSha256 = getDscHash(DoHashes, Hashes::SHA256SUM, Tags, pkgTagSection::Key::Checksums_Sha256, Db.HashesList.find("SHA256"), St.st_size, strippedName);
+   std::string ChecksumsSha512 = getDscHash(DoHashes, Hashes::SHA512SUM, Tags, pkgTagSection::Key::Checksums_Sha512, Db.HashesList.find("SHA512"), St.st_size, strippedName);
+
+   // Strip the DirStrip prefix from the FileName and add the PathPrefix
+   // NOTE(review): the condition tests FileName while the comparison and
+   // result use the OriginalPath member - looks inconsistent; confirm
+   // against the PackagesWriter variant before changing.
+   string NewFileName;
+   if (DirStrip.empty() == false &&
+       FileName.length() > DirStrip.length() &&
+       stringcmp(DirStrip,OriginalPath,OriginalPath + DirStrip.length()) == 0)
+      NewFileName = string(OriginalPath + DirStrip.length());
+   else
+      NewFileName = OriginalPath;
+   if (PathPrefix.empty() == false)
+      NewFileName = flCombine(PathPrefix,NewFileName);
+
+   string Directory = flNotFile(OriginalPath);
+
+   // Perform operation over all of the files
+   string ParseJnk;
+   const char *C = Files.c_str();
+   char *RealPath = NULL;
+   for (;isspace(*C); C++);
+   while (*C != 0)
+   {
+      // Parse each of the elements: "<hash> <size> <name>" per line
+      if (ParseQuoteWord(C,ParseJnk) == false ||
+	  ParseQuoteWord(C,ParseJnk) == false ||
+	  ParseQuoteWord(C,ParseJnk) == false)
+	 return _error->Error("Error parsing file record");
+
+      // NOTE: this local shadows the OriginalPath member used above
+      string OriginalPath = Directory + ParseJnk;
+
+      // Add missing hashes to source files
+      if (((DoHashes & Hashes::SHA1SUM) == Hashes::SHA1SUM && not Tags.Exists(pkgTagSection::Key::Checksums_Sha1)) ||
+          ((DoHashes & Hashes::SHA256SUM) == Hashes::SHA256SUM && not Tags.Exists(pkgTagSection::Key::Checksums_Sha256)) ||
+          ((DoHashes & Hashes::SHA512SUM) == Hashes::SHA512SUM && not Tags.Exists(pkgTagSection::Key::Checksums_Sha512)))
+      {
+	 if (Db.GetFileInfo(OriginalPath,
+			    false, /* DoControl */
+			    false, /* DoContents */
+			    false, /* GenContentsOnly */
+			    false, /* DoSource */
+			    DoHashes,
+			    DoAlwaysStat) == false)
+	 {
+	    return _error->Error("Error getting file info");
+	 }
+
+	 for (HashStringList::const_iterator hs = Db.HashesList.begin(); hs != Db.HashesList.end(); ++hs)
+	 {
+	    if (hs->HashType() == "MD5Sum" || hs->HashType() == "Checksum-FileSize")
+	       continue;
+	    pkgTagSection::Key fieldkey;
+	    std::string * out;
+	    if (hs->HashType() == "SHA1")
+	    {
+	       fieldkey = pkgTagSection::Key::Checksums_Sha1;
+	       out = &ChecksumsSha1;
+	    }
+	    else if (hs->HashType() == "SHA256")
+	    {
+	       fieldkey = pkgTagSection::Key::Checksums_Sha256;
+	       out = &ChecksumsSha256;
+	    }
+	    else if (hs->HashType() == "SHA512")
+	    {
+	       fieldkey = pkgTagSection::Key::Checksums_Sha512;
+	       out = &ChecksumsSha512;
+	    }
+	    else
+	    {
+	       _error->Warning("Ignoring unknown Checksumtype %s in SourcesWriter::DoPackages", hs->HashType().c_str());
+	       continue;
+	    }
+	    // Fields present in the dsc are authoritative; only fill gaps
+	    if (Tags.Exists(fieldkey))
+	       continue;
+	    std::ostringstream streamout;
+	    streamout << "\n " << hs->HashValue() << " " << std::to_string(Db.GetFileSize()) << " " << ParseJnk;
+	    out->append(streamout.str());
+	 }
+
+	 // write back the GetFileInfo() stats data
+	 Db.Finish();
+      }
+
+      // Perform the delinking operation
+      // readlink into a tiny buffer is just a cheap "is this a symlink?" probe
+      char Jnk[2];
+
+      if (readlink(OriginalPath.c_str(),Jnk,sizeof(Jnk)) != -1 &&
+	  (RealPath = realpath(OriginalPath.c_str(),NULL)) != 0)
+      {
+	 string RP = RealPath;
+	 free(RealPath);
+	 if (Delink(RP,OriginalPath.c_str(),Stats.DeLinkBytes,St.st_size) == false)
+	    return false;
+      }
+   }
+
+   Directory = flNotFile(NewFileName);
+   if (Directory.length() > 2)
+      Directory.erase(Directory.end()-1);
+
+   // This lists all the changes to the fields we are going to make.
+   // (5 hardcoded + checksums + maintainer + end marker)
+   std::vector<pkgTagSection::Tag> Changes;
+
+   Changes.push_back(pkgTagSection::Tag::Remove("Source"));
+   Changes.push_back(pkgTagSection::Tag::Rewrite("Package", Package));
+   if (Files.empty() == false)
+      Changes.push_back(pkgTagSection::Tag::Rewrite("Files", Files));
+   else
+      Changes.push_back(pkgTagSection::Tag::Remove("Files"));
+   if (ChecksumsSha1.empty() == false)
+      Changes.push_back(pkgTagSection::Tag::Rewrite("Checksums-Sha1", ChecksumsSha1));
+   else
+      Changes.push_back(pkgTagSection::Tag::Remove("Checksums-Sha1"));
+   if (ChecksumsSha256.empty() == false)
+      Changes.push_back(pkgTagSection::Tag::Rewrite("Checksums-Sha256", ChecksumsSha256));
+   else
+      Changes.push_back(pkgTagSection::Tag::Remove("Checksums-Sha256"));
+   if (ChecksumsSha512.empty() == false)
+      Changes.push_back(pkgTagSection::Tag::Rewrite("Checksums-Sha512", ChecksumsSha512));
+   else
+      Changes.push_back(pkgTagSection::Tag::Remove("Checksums-Sha512"));
+   if (Directory != "./")
+      Changes.push_back(pkgTagSection::Tag::Rewrite("Directory", Directory));
+   Changes.push_back(pkgTagSection::Tag::Rewrite("Priority", BestPrio));
+   Changes.push_back(pkgTagSection::Tag::Remove("Status"));
+
+   // Rewrite the maintainer field if necessary
+   bool MaintFailed;
+   string NewMaint = OverItem->SwapMaint(Tags.Find(pkgTagSection::Key::Maintainer).to_string(), MaintFailed);
+   if (MaintFailed == true)
+   {
+      if (NoOverride == false)
+      {
+	 NewLine(1);
+	 ioprintf(c1out, _(" %s maintainer is %s not %s\n"), Package.c_str(),
+		  Tags.Find(pkgTagSection::Key::Maintainer).to_string().c_str(), OverItem->OldMaint.c_str());
+      }
+   }
+   if (NewMaint.empty() == false)
+      Changes.push_back(pkgTagSection::Tag::Rewrite("Maintainer", NewMaint.c_str()));
+
+   // Apply any per-field overrides from the source override file
+   for (map<string,string>::const_iterator I = SOverItem->FieldOverride.begin();
+	I != SOverItem->FieldOverride.end(); ++I)
+      Changes.push_back(pkgTagSection::Tag::Rewrite(I->first, I->second));
+
+   // Rewrite and store the fields.
+   if (Tags.Write(*Output, TFRewriteSourceOrder, Changes) == false ||
+       Output->Write("\n", 1) == false)
+      return false;
+
+   Stats.Packages++;
+
+   return true;
+}
+ /*}}}*/
+
+// ContentsWriter::ContentsWriter - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+ContentsWriter::ContentsWriter(FileFd * const GivenOutput, string const &DB,
+				string const &Arch, bool const IncludeArchAll) :
+   FTWScanner(GivenOutput, Arch, IncludeArchAll), Db(DB), Stats(Db.Stats)
+
+{
+   // Only regular .deb archives feed Contents (cf. PackagesWriter, which
+   // additionally scans .ddeb/.udeb)
+   SetExts(".deb");
+}
+ /*}}}*/
+// ContentsWriter::DoPackage - Process a single package /*{{{*/
+// ---------------------------------------------------------------------
+/* If Package is the empty string the control record will be parsed to
+ determine what the package name is. */
+bool ContentsWriter::DoPackage(string FileName, string Package)
+{
+   // Control data is only needed when the caller didn't supply the
+   // package name; the contents list is always required here.
+   bool const NeedControl = Package.empty();
+   if (Db.GetFileInfo(FileName,
+                      NeedControl, /* DoControl */
+                      true, /* DoContents */
+                      false, /* GenContentsOnly */
+                      false, /* DoSource */
+                      0, /* DoHashes */
+                      false /* checkMtime */) == false)
+      return false;
+
+   // Parse the package name out of the control record if required
+   if (NeedControl)
+      Package = Db.Control.Section.Find(pkgTagSection::Key::Package).to_string();
+
+   // Merge this package's file list into the contents generator
+   Db.Contents.Add(Gen,Package);
+
+   return Db.Finish();
+}
+ /*}}}*/
+// ContentsWriter::ReadFromPkgs - Read from a packages file /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+// Feed the contents generator from an existing (possibly compressed)
+// Packages file instead of rescanning the archive tree.
+bool ContentsWriter::ReadFromPkgs(string const &PkgFile,string const &PkgCompress)
+{
+   MultiCompress Pkgs(PkgFile,PkgCompress,0,false);
+   if (_error->PendingError() == true)
+      return false;
+
+   // Open the package file
+   FileFd Fd;
+   if (Pkgs.OpenOld(Fd) == false)
+      return false;
+
+   pkgTagFile Tags(&Fd);
+   if (_error->PendingError() == true)
+      return false;
+
+   // Parse.
+   pkgTagSection Section;
+   while (Tags.Step(Section) == true)
+   {
+      auto File = flCombine(Prefix, Section.Find(pkgTagSection::Key::Filename).to_string());
+      auto Package = flCombine(Section.Find(pkgTagSection::Key::Section).to_string(), Section.Find(pkgTagSection::Key::Package).to_string());
+      // Pass File by copy: the previous code moved it into DoPackage and
+      // then read the moved-from string in the error message below.
+      DoPackage(File, std::move(Package));
+      if (_error->empty() == false)
+      {
+	 _error->Error("Errors apply to file '%s'",File.c_str());
+	 _error->DumpErrors();
+      }
+   }
+
+   // Tidy the compressor
+   Fd.Close();
+
+   return true;
+}
+
+ /*}}}*/
+
+// ReleaseWriter::ReleaseWriter - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+// Format a timestamp for Release files, honouring the NumericTimezone
+// configuration switch.
+static std::string formatUTCDateTime(time_t const now)
+{
+   bool const NumericTimezone = _config->FindB("APT::FTPArchive::Release::NumericTimezone", true);
+   // TimeRFC1123 uses GMT to satisfy HTTP/1.1
+   std::string datetime = TimeRFC1123(now, NumericTimezone);
+   if (NumericTimezone)
+      return datetime;
+   // Swap the trailing three-letter "GMT" for the "UTC" spelling
+   auto const lastspace = datetime.rfind(' ');
+   if (likely(lastspace != std::string::npos))
+      datetime.replace(lastspace + 1, 3, "UTC");
+   return datetime;
+}
+// Set up the index-file patterns, then write the Release header fields
+// taken from the APT::FTPArchive::Release::* configuration tree.
+ReleaseWriter::ReleaseWriter(FileFd * const GivenOutput, string const &/*DB*/) : FTWScanner(GivenOutput)
+{
+   if (_config->FindB("APT::FTPArchive::Release::Default-Patterns", true) == true)
+   {
+      AddPattern("Packages");
+      AddPattern("Packages.*");
+      AddPattern("Translation-*");
+      AddPattern("Sources");
+      AddPattern("Sources.*");
+      AddPattern("Release");
+      AddPattern("Contents-*");
+      AddPattern("Index");
+      AddPattern("Index.*");
+      AddPattern("icons-*.tar");
+      AddPattern("icons-*.tar.*");
+      AddPattern("Components-*.yml");
+      AddPattern("Components-*.yml.*");
+      AddPattern("md5sum.txt");
+   }
+   AddPatterns(_config->FindVector("APT::FTPArchive::Release::Patterns"));
+
+   time_t const now = time(NULL);
+   // ValidTime of 0 (the default) means no Valid-Until field is written
+   time_t const validuntil = now + _config->FindI("APT::FTPArchive::Release::ValidTime", 0);
+
+   map<string,bool> BoolFields;
+   map<string,string> Fields;
+   Fields["Origin"] = "";
+   Fields["Label"] = "";
+   Fields["Suite"] = "";
+   Fields["Version"] = "";
+   Fields["Codename"] = "";
+   Fields["Date"] = formatUTCDateTime(now);
+   if (validuntil != now)
+      Fields["Valid-Until"] = formatUTCDateTime(validuntil);
+   Fields["Architectures"] = "";
+   Fields["Components"] = "";
+   Fields["Description"] = "";
+   Fields["Signed-By"] = "";
+   BoolFields["Acquire-By-Hash"] = _config->FindB("APT::FTPArchive::DoByHash", false);
+   BoolFields["NotAutomatic"] = false;
+   BoolFields["ButAutomaticUpgrades"] = false;
+
+   // Read configuration for string fields, but don't output them
+   for (auto &&I : Fields)
+   {
+      string Config = string("APT::FTPArchive::Release::") + I.first;
+      I.second = _config->Find(Config, I.second);
+   }
+
+   // Read configuration for bool fields, and add them to Fields if true
+   for (auto &&I : BoolFields)
+   {
+      string Config = string("APT::FTPArchive::Release::") + I.first;
+      I.second = _config->FindB(Config, I.second);
+      if (I.second)
+         Fields[I.first] = "yes";
+   }
+
+   // All configuration read and stored in Fields; output
+   // (empty fields are suppressed; order follows the map, i.e. sorted)
+   for (auto &&I : Fields)
+   {
+      if (I.second.empty())
+         continue;
+      std::string const out = I.first + ": " + I.second + "\n";
+      Output->Write(out.c_str(), out.length());
+   }
+
+   ConfigToDoHashes(DoHashes, "APT::FTPArchive::Release");
+}
+ /*}}}*/
+// ReleaseWriter::DoPackage - Process a single package /*{{{*/
+// ---------------------------------------------------------------------
+// Hash one index file and record its size/checksums for Finish(); with
+// DoByHash enabled, additionally copy the file into its by-hash/<TYPE>/
+// location next to the original.
+bool ReleaseWriter::DoPackage(string FileName)
+{
+   // Strip the DirStrip prefix from the FileName and add the PathPrefix
+   string NewFileName;
+   if (DirStrip.empty() == false &&
+       FileName.length() > DirStrip.length() &&
+       stringcmp(FileName.begin(),FileName.begin() + DirStrip.length(),
+		 DirStrip.begin(),DirStrip.end()) == 0)
+   {
+      NewFileName = string(FileName.begin() + DirStrip.length(),FileName.end());
+      // Drop any leading slashes left over from the prefix strip
+      while (NewFileName[0] == '/')
+         NewFileName = string(NewFileName.begin() + 1,NewFileName.end());
+   }
+   else
+      NewFileName = FileName;
+
+   if (PathPrefix.empty() == false)
+      NewFileName = flCombine(PathPrefix,NewFileName);
+
+   FileFd fd(FileName, FileFd::ReadOnly);
+
+   if (!fd.IsOpen())
+   {
+      return false;
+   }
+
+   CheckSums[NewFileName].size = fd.Size();
+
+   // Compute all configured hash types in one pass over the file
+   Hashes hs(DoHashes);
+   hs.AddFD(fd);
+   CheckSums[NewFileName].Hashes = hs.GetHashStringList();
+   fd.Close();
+
+   // FIXME: wrong layer in the code(?)
+   // FIXME2: symlink instead of create a copy
+   if (_config->FindB("APT::FTPArchive::DoByHash", false) == true)
+   {
+      std::string Input = FileName;
+      HashStringList hsl = hs.GetHashStringList();
+      for(HashStringList::const_iterator h = hsl.begin();
+          h != hsl.end(); ++h)
+      {
+         if (!h->usable())
+            continue;
+         // Release/InRelease themselves are never mirrored into by-hash
+         if (flNotDir(FileName) == "Release" || flNotDir(FileName) == "InRelease")
+            continue;
+
+         std::string ByHashOutputFile = GenByHashFilename(Input, *h);
+         std::string ByHashOutputDir = flNotFile(ByHashOutputFile);
+         if(!CreateDirectory(flNotFile(Input), ByHashOutputDir))
+            return _error->Warning("can not create dir %s", flNotFile(ByHashOutputFile).c_str());
+
+         // write new hashes
+         FileFd In(Input, FileFd::ReadOnly);
+         FileFd Out(ByHashOutputFile, FileFd::WriteEmpty);
+         if(!CopyFile(In, Out))
+            return _error->Warning("failed to copy %s %s", Input.c_str(), ByHashOutputFile.c_str());
+      }
+   }
+
+   return true;
+}
+
+ /*}}}*/
+// ReleaseWriter::Finish - Output the checksums /*{{{*/
+// ---------------------------------------------------------------------
+// Emit one checksum section of a Release file: a "<Type>:" header line
+// followed by " <hash> <size> <name>" for every file carrying that type.
+static void printChecksumTypeRecord(FileFd &Output, char const * const Type, map<string, ReleaseWriter::CheckSum> const &CheckSums)
+{
+   std::string header;
+   strprintf(header, "%s:\n", Type);
+   Output.Write(header.c_str(), header.length());
+
+   for (auto const &entry : CheckSums)
+   {
+      HashString const * const hash = entry.second.Hashes.find(Type);
+      if (hash == NULL)
+         continue;
+      std::string line;
+      strprintf(line, " %s %16llu %s\n",
+                hash->HashValue().c_str(),
+                entry.second.size,
+                entry.first.c_str());
+      Output.Write(line.c_str(), line.length());
+   }
+}
+// Write the accumulated checksum sections to the Release file, then (with
+// DoByHash enabled) prune old generations from the by-hash directories.
+void ReleaseWriter::Finish()
+{
+   if ((DoHashes & Hashes::MD5SUM) == Hashes::MD5SUM)
+      printChecksumTypeRecord(*Output, "MD5Sum", CheckSums);
+   if ((DoHashes & Hashes::SHA1SUM) == Hashes::SHA1SUM)
+      printChecksumTypeRecord(*Output, "SHA1", CheckSums);
+   if ((DoHashes & Hashes::SHA256SUM) == Hashes::SHA256SUM)
+      printChecksumTypeRecord(*Output, "SHA256", CheckSums);
+   if ((DoHashes & Hashes::SHA512SUM) == Hashes::SHA512SUM)
+      printChecksumTypeRecord(*Output, "SHA512", CheckSums);
+
+   // go by-hash cleanup
+   // CheckSums is sorted by path, so files of one subdirectory are
+   // adjacent; prev marks the start of the current subdirectory run.
+   map<string,ReleaseWriter::CheckSum>::const_iterator prev = CheckSums.begin();
+   if (_config->FindB("APT::FTPArchive::DoByHash", false) == true)
+   {
+      for(map<string,ReleaseWriter::CheckSum>::const_iterator I = CheckSums.begin();
+          I != CheckSums.end(); ++I)
+      {
+         if (I->first == "Release" || I->first == "InRelease")
+            continue;
+
+         // keep iterating until we find a new subdir
+         if(flNotFile(I->first) == flNotFile(prev->first))
+            continue;
+
+         // clean that subdir up
+         int keepFiles = _config->FindI("APT::FTPArchive::By-Hash-Keep", 3);
+         // calculate how many compressors are used (the amount of files
+         // in that subdir generated for this run)
+         keepFiles *= std::distance(prev, I);
+         prev = I;
+
+         HashStringList hsl = prev->second.Hashes;
+         for(HashStringList::const_iterator h = hsl.begin();
+             h != hsl.end(); ++h)
+         {
+
+            if (!h->usable())
+               continue;
+
+            std::string RealFilename = DirStrip+"/"+prev->first;
+            std::string ByHashOutputFile = GenByHashFilename(RealFilename, *h);
+            DeleteAllButMostRecent(flNotFile(ByHashOutputFile), keepFiles);
+         }
+      }
+   }
+}
diff --git a/ftparchive/writer.h b/ftparchive/writer.h
new file mode 100644
index 0000000..9aa2168
--- /dev/null
+++ b/ftparchive/writer.h
@@ -0,0 +1,208 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+/* ######################################################################
+
+ Writer
+
+ The file writer classes. These write various types of output, sources,
+ packages and contents.
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef WRITER_H
+#define WRITER_H
+
+#include <apt-pkg/hashes.h>
+
+#include <iostream>
+#include <map>
+#include <set>
+#include <string>
+#include <utility>
+#include <vector>
+#include <stdio.h>
+#include <stdlib.h>
+#include <sys/types.h>
+
+#include "apt-ftparchive.h"
+#include "cachedb.h"
+#include "contents.h"
+#include "override.h"
+
+using std::string;
+using std::cout;
+using std::endl;
+using std::vector;
+using std::map;
+
+// Base class for the directory-tree scanners shared by all the writers:
+// drives the recursive walk, filename pattern matching and the optional
+// "delinking" (symlink replacement) handling.
+class FTWScanner
+{
+   protected:
+   // Filename glob patterns a file must match to be processed
+   vector<string> Patterns;
+   // Files queued by LoadFileList: path plus a follow-symlink flag
+   vector<std::pair<string, bool>> FilesToProcess;
+   string Arch;
+   bool IncludeArchAll;
+   // Path of the file as originally encountered, before any rewriting
+   const char *OriginalPath;
+   // True once an error has forced a newline into the progress output
+   bool ErrorPrinted;
+
+   // Stuff for the delinker
+   bool NoLinkAct;
+
+   // The C tree-walk callback cannot carry state, so the active scanner
+   // instance is published through this static pointer.
+   static FTWScanner *Owner;
+   static int ScannerFTW(const char *File,const struct stat *sb,int Flag);
+   static int ScannerFile(const char *const File, bool const ReadLink);
+   static int ProcessFile(const char *const File, bool const ReadLink);
+
+   bool Delink(string &FileName,const char *OriginalPath,
+               unsigned long long &Bytes,unsigned long long const &FileSize);
+
+   // Emit a newline before the first error message at this quietness level
+   // so it does not run into the progress line.
+   inline void NewLine(unsigned const &Priority)
+   {
+      if (ErrorPrinted == false && Quiet <= Priority)
+      {
+         c1out << endl;
+         ErrorPrinted = true;
+      }
+   }
+
+   public:
+   FileFd *Output;
+   // Whether the destructor should delete Output -- TODO confirm in writer.cc
+   bool OwnsOutput;
+   // Bitmask of Hashes::* flags selecting which digests to generate
+   unsigned int DoHashes;
+
+   unsigned long DeLinkLimit;
+   string InternalPrefix;
+
+   // Process a single file; implemented by each concrete writer
+   virtual bool DoPackage(string FileName) = 0;
+   bool RecursiveScan(string const &Dir);
+   bool LoadFileList(string const &BaseDir,string const &File);
+   void ClearPatterns() { Patterns.clear(); };
+   void AddPattern(string const &Pattern) { Patterns.push_back(Pattern); };
+   void AddPattern(char const *Pattern) { Patterns.push_back(Pattern); };
+   void AddPatterns(std::vector<std::string> const &patterns) { Patterns.insert(Patterns.end(), patterns.begin(), patterns.end()); };
+   bool SetExts(string const &Vals);
+
+   FTWScanner(FileFd * const Output, string const &Arch = string(), bool const IncludeArchAll = true);
+   virtual ~FTWScanner();
+};
+
+class MultiCompress;
+
+// Writes a Translation-* file carrying package descriptions, compressed
+// through a MultiCompress backend.
+class TranslationWriter
+{
+   // Compression pipeline the translation file is written through
+   MultiCompress *Comp;
+   // Keys already emitted -- presumably the description MD5s passed to
+   // DoPackage, used to skip duplicates; verify against writer.cc
+   std::set<string> Included;
+   FileFd *Output;
+
+   public:
+   bool DoPackage(string const &Pkg, string const &Desc, string const &MD5);
+
+   TranslationWriter(string const &File, string const &TransCompress, mode_t const &Permissions);
+   ~TranslationWriter();
+};
+
+// Generates a Packages index from scanned binary archives, consulting the
+// override tables and caching extracted data in a CacheDB.
+class PackagesWriter : public FTWScanner
+{
+   // Override table (section/priority and extra-field rewrites)
+   Override Over;
+   // Cache database so unchanged archives need not be re-read
+   CacheDB Db;
+
+   public:
+
+   // Some flags
+   bool DoAlwaysStat;      // presumably forces a stat() even on cache hits -- confirm
+   bool NoOverride;        // presumably tolerates packages missing from the override -- confirm
+   bool DoContents;        // also collect data for Contents generation -- confirm
+   bool LongDescription;   // keep the long description in the Packages stanza -- confirm
+
+   // General options
+   string PathPrefix;
+   string DirStrip;
+   struct CacheDB::Stats &Stats;
+   // Optional sink for translated/long descriptions (may be NULL)
+   TranslationWriter * const TransWriter;
+
+   inline bool ReadOverride(string const &File) {return Over.ReadOverride(File);};
+   inline bool ReadExtraOverride(string const &File)
+      {return Over.ReadExtraOverride(File);};
+   virtual bool DoPackage(string FileName) APT_OVERRIDE;
+
+   PackagesWriter(FileFd * const Output, TranslationWriter * const TransWriter, string const &DB,
+                  string const &Overrides,
+                  string const &ExtOverrides = "",
+                  string const &Arch = "",
+                  bool const IncludeArchAll = true);
+   virtual ~PackagesWriter();
+};
+
+// Generates a Contents index mapping shipped file paths to the package
+// that ships them, accumulated in a GenContents tree.
+class ContentsWriter : public FTWScanner
+{
+   CacheDB Db;
+
+   // Accumulates the path -> package mapping printed by Finish()
+   GenContents Gen;
+
+   public:
+
+   // General options
+   struct CacheDB::Stats &Stats;
+   string Prefix;
+
+   bool DoPackage(string FileName,string Package);
+   // Scanner entry point: package name is not yet known, pass it empty
+   virtual bool DoPackage(string FileName) APT_OVERRIDE
+      {return DoPackage(FileName,string());};
+   bool ReadFromPkgs(string const &PkgFile,string const &PkgCompress);
+
+   // Write the accumulated contents listing to the output file
+   void Finish() {Gen.Print(*Output);};
+   inline bool ReadyDB(string const &DB) {return Db.ReadyDB(DB);};
+
+   ContentsWriter(FileFd * const Output, string const &DB, string const &Arch = string(),
+                  bool const IncludeArchAll = true);
+   virtual ~ContentsWriter() {};
+};
+
+// Generates a Sources index from source packages, applying both the
+// binary and the source override tables.
+class SourcesWriter : public FTWScanner
+{
+   CacheDB Db;
+   // Binary override table
+   Override BOver;
+   // Source override table
+   Override SOver;
+   // Heap buffer owned by this class (freed in the destructor), grown to
+   // BufSize -- presumably a reusable read buffer; confirm in writer.cc
+   char *Buffer;
+   unsigned long long BufSize;
+
+   public:
+
+   bool NoOverride;     // presumably tolerates missing override entries -- confirm
+   bool DoAlwaysStat;   // presumably forces a stat() even on cache hits -- confirm
+
+   // General options
+   string PathPrefix;
+   string DirStrip;
+   struct CacheDB::Stats &Stats;
+
+   virtual bool DoPackage(string FileName) APT_OVERRIDE;
+
+   SourcesWriter(FileFd * const Output, string const &DB,string const &BOverrides,string const &SOverrides,
+                 string const &ExtOverrides=string());
+   virtual ~SourcesWriter() {free(Buffer);};
+};
+
+// Generates a Release file: DoPackage records the size and the requested
+// hashes of each index file, Finish() emits the per-hash checksum sections
+// (and the optional by-hash cleanup).
+class ReleaseWriter : public FTWScanner
+{
+public:
+   ReleaseWriter(FileFd * const Output, string const &DB);
+   virtual bool DoPackage(string FileName) APT_OVERRIDE;
+   void Finish();
+
+   // General options
+   string PathPrefix;
+   string DirStrip;
+
+   // Per-file record: every requested hash plus the file size
+   struct CheckSum
+   {
+      HashStringList Hashes;
+      // Limited by FileFd::Size()
+      unsigned long long size;
+      ~CheckSum() {};
+   };
+protected:
+   // Ordered map from the (DirStrip-relative) filename to its record;
+   // the ordering groups files of the same directory together, which the
+   // by-hash cleanup in Finish() relies on.
+   map<string,struct CheckSum> CheckSums;
+};
+
+#endif