authorArch Librarian <arch@canonical.com>2004-09-20 16:56:32 +0000
committerArch Librarian <arch@canonical.com>2004-09-20 16:56:32 +0000
commitb2e465d6d32d2dc884f58b94acb7e35f671a87fe (patch)
tree5928383b9bde7b0ba9812e6526ad746466e558f7 /ftparchive
parent00b47c98ca4a4349686a082eba6d77decbb03a4d (diff)
downloadapt-b2e465d6d32d2dc884f58b94acb7e35f671a87fe.tar.gz
Join with aliencode
Author: jgg Date: 2001-02-20 07:03:16 GMT Join with aliencode
Diffstat (limited to 'ftparchive')
-rw-r--r--  ftparchive/apt-ftparchive.cc   | 919
-rw-r--r--  ftparchive/apt-ftparchive.h    |  28
-rw-r--r--  ftparchive/cachedb.cc          | 284
-rw-r--r--  ftparchive/cachedb.h           | 119
-rw-r--r--  ftparchive/contents.cc         | 401
-rw-r--r--  ftparchive/contents.h          |  89
-rw-r--r--  ftparchive/makefile            |  20
-rw-r--r--  ftparchive/multicompress.cc    | 494
-rw-r--r--  ftparchive/multicompress.h     |  80
-rw-r--r--  ftparchive/override.cc         | 180
-rw-r--r--  ftparchive/override.h          |  50
-rw-r--r--  ftparchive/writer.cc           | 756
-rw-r--r--  ftparchive/writer.h            | 145
13 files changed, 3565 insertions, 0 deletions
diff --git a/ftparchive/apt-ftparchive.cc b/ftparchive/apt-ftparchive.cc
new file mode 100644
index 00000000..055d876d
--- /dev/null
+++ b/ftparchive/apt-ftparchive.cc
@@ -0,0 +1,919 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: apt-ftparchive.cc,v 1.2 2001/02/20 07:03:18 jgg Exp $
+/* ######################################################################
+
+ apt-ftparchive - Efficient work-alike for dpkg-scanpackages
+
+ Let contents be disabled from the conf
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#ifdef __GNUG__
+#pragma implementation "apt-ftparchive.h"
+#endif
+
+#include "apt-ftparchive.h"
+
+#include <apt-pkg/error.h>
+#include <apt-pkg/configuration.h>
+#include <apt-pkg/cmndline.h>
+#include <apt-pkg/strutl.h>
+#include <config.h>
+#include <apti18n.h>
+#include <algorithm>
+
+#include <sys/time.h>
+#include <regex.h>
+
+#include "contents.h"
+#include "multicompress.h"
+#include "writer.h"
+ /*}}}*/
+
+ostream c0out(0);
+ostream c1out(0);
+ostream c2out(0);
+ofstream devnull("/dev/null");
+unsigned Quiet = 0;
+
+// struct PackageMap - List of all package files in the config file /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+struct PackageMap
+{
+ // General Stuff
+ string BaseDir;
+ string InternalPrefix;
+ string FLFile;
+ string PkgExt;
+ string SrcExt;
+
+ // Stuff for the Package File
+ string PkgFile;
+ string BinCacheDB;
+ string BinOverride;
+
+ // Stuff for the Source File
+ string SrcFile;
+ string SrcOverride;
+
+ // Contents
+ string Contents;
+ string ContentsHead;
+
+ // Random things
+ string Tag;
+ string PkgCompress;
+ string CntCompress;
+ string SrcCompress;
+ string PathPrefix;
+ unsigned int DeLinkLimit;
+ mode_t Permissions;
+
+ bool ContentsDone;
+ bool PkgDone;
+ bool SrcDone;
+ time_t ContentsMTime;
+
+ struct ContentsCompare : public binary_function<PackageMap,PackageMap,bool>
+ {
+ inline bool operator() (const PackageMap &x,const PackageMap &y)
+ {return x.ContentsMTime < y.ContentsMTime;};
+ };
+
+ struct DBCompare : public binary_function<PackageMap,PackageMap,bool>
+ {
+ inline bool operator() (const PackageMap &x,const PackageMap &y)
+ {return x.BinCacheDB < y.BinCacheDB;};
+ };
+
+ void GetGeneral(Configuration &Setup,Configuration &Block);
+ bool GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats);
+ bool GenSources(Configuration &Setup,struct CacheDB::Stats &Stats);
+ bool GenContents(Configuration &Setup,
+ PackageMap *Begin,PackageMap *End,
+ unsigned long &Left);
+
+ PackageMap() : DeLinkLimit(0), Permissions(1), ContentsDone(false),
+ PkgDone(false), SrcDone(false), ContentsMTime(0) {};
+};
+ /*}}}*/
+
+// PackageMap::GetGeneral - Common per-section definitions /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+void PackageMap::GetGeneral(Configuration &Setup,Configuration &Block)
+{
+ PathPrefix = Block.Find("PathPrefix");
+
+ if (Block.FindB("External-Links",true) == false)
+ DeLinkLimit = Setup.FindI("Default::DeLinkLimit",UINT_MAX);
+ else
+ DeLinkLimit = 0;
+
+ PkgCompress = Block.Find("Packages::Compress",
+ Setup.Find("Default::Packages::Compress",". gzip").c_str());
+ CntCompress = Block.Find("Contents::Compress",
+ Setup.Find("Default::Contents::Compress",". gzip").c_str());
+ SrcCompress = Block.Find("Sources::Compress",
+ Setup.Find("Default::Sources::Compress",". gzip").c_str());
+
+ SrcExt = Block.Find("Sources::Extensions",
+ Setup.Find("Default::Sources::Extensions",".dsc").c_str());
+ PkgExt = Block.Find("Packages::Extensions",
+ Setup.Find("Default::Packages::Extensions",".deb").c_str());
+
+ Permissions = Setup.FindI("Default::FileMode",0644);
+
+ if (FLFile.empty() == false)
+ FLFile = flCombine(Setup.Find("Dir::FileListDir"),FLFile);
+
+ if (Contents == " ")
+ Contents= string();
+}
+ /*}}}*/
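+
+// A minimal config sketch for the keys GetGeneral() reads; the values
+// shown are simply the defaults hard-coded above:
+//
+//    Default
+//    {
+//       Packages::Compress ". gzip";
+//       Contents::Compress ". gzip";
+//       Sources::Compress ". gzip";
+//       Packages::Extensions ".deb";
+//       Sources::Extensions ".dsc";
+//       FileMode 0644;
+//    };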
+// PackageMap::GenPackages - Actually generate a Package file /*{{{*/
+// ---------------------------------------------------------------------
+/* This generates the Package File described by this object. */
+bool PackageMap::GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats)
+{
+ if (PkgFile.empty() == true)
+ return true;
+
+ string ArchiveDir = Setup.FindDir("Dir::ArchiveDir");
+ string OverrideDir = Setup.FindDir("Dir::OverrideDir");
+ string CacheDir = Setup.FindDir("Dir::CacheDir");
+
+ struct timeval StartTime;
+ gettimeofday(&StartTime,0);
+
+ PkgDone = true;
+
+ // Create a package writer object.
+ PackagesWriter Packages(flCombine(CacheDir,BinCacheDB),
+ flCombine(OverrideDir,BinOverride));
+ if (PkgExt.empty() == false && Packages.SetExts(PkgExt) == false)
+ return _error->Error("Package extension list is too long");
+ if (_error->PendingError() == true)
+ return _error->Error("Error Processing directory %s",BaseDir.c_str());
+
+ Packages.PathPrefix = PathPrefix;
+ Packages.DirStrip = ArchiveDir;
+ Packages.InternalPrefix = flCombine(ArchiveDir,InternalPrefix);
+
+ Packages.Stats.DeLinkBytes = Stats.DeLinkBytes;
+ Packages.DeLinkLimit = DeLinkLimit;
+
+ // Create a compressor object
+ MultiCompress Comp(flCombine(ArchiveDir,PkgFile),
+ PkgCompress,Permissions);
+ Packages.Output = Comp.Input;
+ if (_error->PendingError() == true)
+ return _error->Error("Error Processing directory %s",BaseDir.c_str());
+
+ c0out << ' ' << BaseDir << ":" << flush;
+
+ // Do recursive directory searching
+ if (FLFile.empty() == true)
+ {
+ if (Packages.RecursiveScan(flCombine(ArchiveDir,BaseDir)) == false)
+ return false;
+ }
+ else
+ {
+ if (Packages.LoadFileList(ArchiveDir,FLFile) == false)
+ return false;
+ }
+
+ Packages.Output = 0; // Just in case
+
+ // Finish compressing
+ unsigned long Size;
+ if (Comp.Finalize(Size) == false)
+ {
+ c0out << endl;
+ return _error->Error("Error Processing directory %s",BaseDir.c_str());
+ }
+
+ if (Size != 0)
+ c0out << " New "
+ << SizeToStr(Size) << "B ";
+ else
+ c0out << ' ';
+
+ struct timeval NewTime;
+ gettimeofday(&NewTime,0);
+ double Delta = NewTime.tv_sec - StartTime.tv_sec +
+ (NewTime.tv_usec - StartTime.tv_usec)/1000000.0;
+
+ c0out << Packages.Stats.Packages << " files " <<
+/* SizeToStr(Packages.Stats.MD5Bytes) << "B/" << */
+ SizeToStr(Packages.Stats.Bytes) << "B " <<
+ TimeToStr((long)Delta) << endl;
+
+ Stats.Add(Packages.Stats);
+ Stats.DeLinkBytes = Packages.Stats.DeLinkBytes;
+
+ return !_error->PendingError();
+}
+ /*}}}*/
+// PackageMap::GenSources - Actually generate a Package file /*{{{*/
+// ---------------------------------------------------------------------
+/* This generates the Sources File described by this object. */
+bool PackageMap::GenSources(Configuration &Setup,struct CacheDB::Stats &Stats)
+{
+ if (SrcFile.empty() == true)
+ return true;
+
+ string ArchiveDir = Setup.FindDir("Dir::ArchiveDir");
+ string OverrideDir = Setup.FindDir("Dir::OverrideDir");
+ string CacheDir = Setup.FindDir("Dir::CacheDir");
+
+ struct timeval StartTime;
+ gettimeofday(&StartTime,0);
+
+ SrcDone = true;
+
+ // Create a package writer object.
+ SourcesWriter Sources(flCombine(OverrideDir,BinOverride),
+ flCombine(OverrideDir,SrcOverride));
+ if (SrcExt.empty() == false && Sources.SetExts(SrcExt) == false)
+ return _error->Error("Source extension list is too long");
+ if (_error->PendingError() == true)
+ return _error->Error("Error Processing directory %s",BaseDir.c_str());
+
+ Sources.PathPrefix = PathPrefix;
+ Sources.DirStrip = ArchiveDir;
+ Sources.InternalPrefix = flCombine(ArchiveDir,InternalPrefix);
+
+ Sources.DeLinkLimit = DeLinkLimit;
+ Sources.Stats.DeLinkBytes = Stats.DeLinkBytes;
+
+ // Create a compressor object
+ MultiCompress Comp(flCombine(ArchiveDir,SrcFile),
+ SrcCompress,Permissions);
+ Sources.Output = Comp.Input;
+ if (_error->PendingError() == true)
+ return _error->Error("Error Processing directory %s",BaseDir.c_str());
+
+ c0out << ' ' << BaseDir << ":" << flush;
+
+ // Do recursive directory searching
+ if (FLFile.empty() == true)
+ {
+ if (Sources.RecursiveScan(flCombine(ArchiveDir,BaseDir))== false)
+ return false;
+ }
+ else
+ {
+ if (Sources.LoadFileList(ArchiveDir,FLFile) == false)
+ return false;
+ }
+ Sources.Output = 0; // Just in case
+
+ // Finish compressing
+ unsigned long Size;
+ if (Comp.Finalize(Size) == false)
+ {
+ c0out << endl;
+ return _error->Error("Error Processing directory %s",BaseDir.c_str());
+ }
+
+ if (Size != 0)
+ c0out << " New "
+ << SizeToStr(Size) << "B ";
+ else
+ c0out << ' ';
+
+ struct timeval NewTime;
+ gettimeofday(&NewTime,0);
+ double Delta = NewTime.tv_sec - StartTime.tv_sec +
+ (NewTime.tv_usec - StartTime.tv_usec)/1000000.0;
+
+ c0out << Sources.Stats.Packages << " pkgs in " <<
+ TimeToStr((long)Delta) << endl;
+
+ Stats.Add(Sources.Stats);
+ Stats.DeLinkBytes = Sources.Stats.DeLinkBytes;
+
+ return !_error->PendingError();
+}
+ /*}}}*/
+// PackageMap::GenContents - Actually generate a Contents file /*{{{*/
+// ---------------------------------------------------------------------
+/* This generates the contents file partially described by this object.
+ It searches the given iterator range for other package files that map
+ into this contents file and includes their data as well when building. */
+bool PackageMap::GenContents(Configuration &Setup,
+ PackageMap *Begin,PackageMap *End,
+ unsigned long &Left)
+{
+ if (Contents.empty() == true)
+ return true;
+
+ if (Left == 0)
+ return true;
+
+ string ArchiveDir = Setup.FindDir("Dir::ArchiveDir");
+ string CacheDir = Setup.FindDir("Dir::CacheDir");
+ string OverrideDir = Setup.FindDir("Dir::OverrideDir");
+
+ struct timeval StartTime;
+ gettimeofday(&StartTime,0);
+
+ // Create a package writer object.
+ ContentsWriter Contents("");
+ if (PkgExt.empty() == false && Contents.SetExts(PkgExt) == false)
+ return _error->Error("Package extension list is too long");
+ if (_error->PendingError() == true)
+ return false;
+
+ MultiCompress Comp(flCombine(ArchiveDir,this->Contents),
+ CntCompress,Permissions);
+ Comp.UpdateMTime = Setup.FindI("Default::ContentsAge",10)*24*60*60;
+ Contents.Output = Comp.Input;
+ if (_error->PendingError() == true)
+ return false;
+
+ // Write the header out.
+ if (ContentsHead.empty() == false)
+ {
+ FileFd Head(flCombine(OverrideDir,ContentsHead),FileFd::ReadOnly);
+ if (_error->PendingError() == true)
+ return false;
+
+ unsigned long Size = Head.Size();
+ unsigned char Buf[4096];
+ while (Size != 0)
+ {
+ unsigned long ToRead = Size;
+ if (Size > sizeof(Buf))
+ ToRead = sizeof(Buf);
+
+ if (Head.Read(Buf,ToRead) == false)
+ return false;
+
+ if (fwrite(Buf,1,ToRead,Comp.Input) != ToRead)
+ return _error->Errno("fwrite","Error writing header to contents file");
+
+ Size -= ToRead;
+ }
+ }
+
+ /* Go over all the package file records and parse all the package
+ files associated with this contents file into one great big honking
+ memory structure, then dump the sorted version */
+ c0out << ' ' << this->Contents << ":" << flush;
+ for (PackageMap *I = Begin; I != End; I++)
+ {
+ if (I->Contents != this->Contents)
+ continue;
+
+ Contents.Prefix = ArchiveDir;
+ Contents.ReadyDB(flCombine(CacheDir,I->BinCacheDB));
+ Contents.ReadFromPkgs(flCombine(ArchiveDir,I->PkgFile),
+ I->PkgCompress);
+
+ I->ContentsDone = true;
+ }
+
+ Contents.Finish();
+
+ // Finish compressing
+ unsigned long Size;
+ if (Comp.Finalize(Size) == false || _error->PendingError() == true)
+ {
+ c0out << endl;
+ return _error->Error("Error Processing Contents %s",
+ this->Contents.c_str());
+ }
+
+ if (Size != 0)
+ {
+ c0out << " New " << SizeToStr(Size) << "B ";
+ if (Left > Size)
+ Left -= Size;
+ else
+ Left = 0;
+ }
+ else
+ c0out << ' ';
+
+ struct timeval NewTime;
+ gettimeofday(&NewTime,0);
+ double Delta = NewTime.tv_sec - StartTime.tv_sec +
+ (NewTime.tv_usec - StartTime.tv_usec)/1000000.0;
+
+ c0out << Contents.Stats.Packages << " files " <<
+ SizeToStr(Contents.Stats.Bytes) << "B " <<
+ TimeToStr((long)Delta) << endl;
+
+ return true;
+}
+ /*}}}*/
+
+// LoadTree - Load a 'tree' section from the Generate Config /*{{{*/
+// ---------------------------------------------------------------------
+/* This populates the PkgList with all the possible permutations of the
+ section/arch lists. */
+void LoadTree(vector<PackageMap> &PkgList,Configuration &Setup)
+{
+ // Load the defaults
+ string DDir = Setup.Find("TreeDefault::Directory",
+ "$(DIST)/$(SECTION)/binary-$(ARCH)/");
+ string DSDir = Setup.Find("TreeDefault::SrcDirectory",
+ "$(DIST)/$(SECTION)/source/");
+ string DPkg = Setup.Find("TreeDefault::Packages",
+ "$(DIST)/$(SECTION)/binary-$(ARCH)/Packages");
+ string DIPrfx = Setup.Find("TreeDefault::InternalPrefix",
+ "$(DIST)/$(SECTION)/");
+ string DContents = Setup.Find("TreeDefault::Contents",
+ "$(DIST)/Contents-$(ARCH)");
+ string DContentsH = Setup.Find("TreeDefault::Contents::Header","");
+ string DBCache = Setup.Find("TreeDefault::BinCacheDB",
+ "packages-$(ARCH).db");
+ string DSources = Setup.Find("TreeDefault::Sources",
+ "$(DIST)/$(SECTION)/source/Sources");
+ string DFLFile = Setup.Find("TreeDefault::FileList", "");
+ string DSFLFile = Setup.Find("TreeDefault::SourceFileList", "");
+
+ // Process 'tree' type sections
+ const Configuration::Item *Top = Setup.Tree("tree");
+ for (Top = (Top == 0?0:Top->Child); Top != 0;)
+ {
+ Configuration Block(Top);
+ string Dist = Top->Tag;
+
+ // Parse the sections
+ const char *Sections = Block.Find("Sections").c_str();
+ string Section;
+ while (ParseQuoteWord(Sections,Section) == true)
+ {
+ const char *Archs = Block.Find("Architectures").c_str();
+ string Arch;
+ while (ParseQuoteWord(Archs,Arch) == true)
+ {
+ struct SubstVar Vars[] = {{"$(DIST)",&Dist},
+ {"$(SECTION)",&Section},
+ {"$(ARCH)",&Arch},
+ {}};
+ PackageMap Itm;
+
+ Itm.BinOverride = SubstVar(Block.Find("BinOverride"),Vars);
+ Itm.InternalPrefix = SubstVar(Block.Find("InternalPrefix",DIPrfx.c_str()),Vars);
+
+ if (stringcasecmp(Arch,"source") == 0)
+ {
+ Itm.SrcOverride = SubstVar(Block.Find("SrcOverride"),Vars);
+ Itm.BaseDir = SubstVar(Block.Find("SrcDirectory",DSDir.c_str()),Vars);
+ Itm.SrcFile = SubstVar(Block.Find("Sources",DSources.c_str()),Vars);
+ Itm.Tag = SubstVar("$(DIST)/$(SECTION)/source",Vars);
+ Itm.FLFile = SubstVar(Block.Find("SourceFileList",DSFLFile.c_str()),Vars);
+ }
+ else
+ {
+ Itm.BinCacheDB = SubstVar(Block.Find("BinCacheDB",DBCache.c_str()),Vars);
+ Itm.BaseDir = SubstVar(Block.Find("Directory",DDir.c_str()),Vars);
+ Itm.PkgFile = SubstVar(Block.Find("Packages",DPkg.c_str()),Vars);
+ Itm.Tag = SubstVar("$(DIST)/$(SECTION)/$(ARCH)",Vars);
+ Itm.Contents = SubstVar(Block.Find("Contents",DContents.c_str()),Vars);
+ Itm.ContentsHead = SubstVar(Block.Find("Contents::Header",DContentsH.c_str()),Vars);
+ Itm.FLFile = SubstVar(Block.Find("FileList",DFLFile.c_str()),Vars);
+ }
+
+ Itm.GetGeneral(Setup,Block);
+ PkgList.push_back(Itm);
+ }
+ }
+
+ Top = Top->Next;
+ }
+}
+ /*}}}*/
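+
+// Expansion sketch: a hypothetical section such as
+//
+//    tree "potato" { Sections "main"; Architectures "i386 source"; };
+//
+// makes the loops above substitute $(DIST)=potato, $(SECTION)=main and
+// $(ARCH)=i386 into the TreeDefault templates, so
+//
+//    SubstVar("$(DIST)/$(SECTION)/binary-$(ARCH)/Packages",Vars)
+//
+// yields "potato/main/binary-i386/Packages", while the special
+// architecture "source" selects the Sources/SrcDirectory set instead.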
+// LoadBinDir - Load a 'bindirectory' section from the Generate Config /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+void LoadBinDir(vector<PackageMap> &PkgList,Configuration &Setup)
+{
+ // Process 'bindirectory' type sections
+ const Configuration::Item *Top = Setup.Tree("bindirectory");
+ for (Top = (Top == 0?0:Top->Child); Top != 0;)
+ {
+ Configuration Block(Top);
+
+ PackageMap Itm;
+ Itm.PkgFile = Block.Find("Packages");
+ Itm.SrcFile = Block.Find("Sources");
+ Itm.BinCacheDB = Block.Find("BinCacheDB");
+ Itm.BinOverride = Block.Find("BinOverride");
+ Itm.SrcOverride = Block.Find("SrcOverride");
+ Itm.BaseDir = Top->Tag;
+ Itm.FLFile = Block.Find("FileList");
+ Itm.InternalPrefix = Block.Find("InternalPrefix",Top->Tag.c_str());
+ Itm.Contents = Block.Find("Contents");
+ Itm.ContentsHead = Block.Find("Contents::Header");
+
+ Itm.GetGeneral(Setup,Block);
+ PkgList.push_back(Itm);
+
+ Top = Top->Next;
+ }
+}
+ /*}}}*/
+
+// ShowHelp - Show the help text /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool ShowHelp(CommandLine &CmdL)
+{
+ ioprintf(cout,_("%s %s for %s %s compiled on %s %s\n"),PACKAGE,VERSION,
+ COMMON_OS,COMMON_CPU,__DATE__,__TIME__);
+ if (_config->FindB("version") == true)
+ return true;
+
+ cout <<
+ "Usage: apt-ftparchive [options] command\n"
+ "Commands: packges binarypath [overridefile [pathprefix]]\n"
+ " sources srcpath [overridefile [pathprefix]]\n"
+ " contents path\n"
+ " generate config [groups]\n"
+ " clean config\n"
+ "\n"
+ "apt-ftparchive generates index files for Debian archives. It supports\n"
+ "many styles of generation from fully automated to functional replacements\n"
+ "for dpkg-scanpackages and dpkg-scansources\n"
+ "\n"
+ "apt-ftparchive generates Package files from a tree of .debs. The\n"
+ "Package file contains the contents of all the control fields from\n"
+ "each package as well as the MD5 hash and filesize. An override file\n"
+ "is supported to force the value of Priority and Section.\n"
+ "\n"
+ "Similarly apt-ftparchive generates Sources files from a tree of .dscs.\n"
+ "The --source-override option can be used to specify a src override file\n"
+ "\n"
+ "The 'packages' and 'sources' command should be run in the root of the\n"
+ "tree. BinaryPath should point to the base of the recursive search and \n"
+ "override file should contian the override flags. Pathprefix is\n"
+ "appended to the filename fields if present. Example usage from the \n"
+ "debian archive:\n"
+ " apt-ftparchive packages dists/potato/main/binary-i386/ > \\\n"
+ " dists/potato/main/binary-i386/Packages\n"
+ "\n"
+ "Options:\n"
+ " -h This help text\n"
+ " --md5 Control MD5 generation\n"
+ " -s=? Source override file\n"
+ " -q Quiet\n"
+ " -d=? Select the optional caching database\n"
+ " --no-delink Enable delinking debug mode\n"
+ " --contents Control contents file generation\n"
+ " -c=? Read this configuration file\n"
+ " -o=? Set an arbitary configuration option" << endl;
+
+ return true;
+}
+ /*}}}*/
+// SimpleGenPackages - Generate a Packages file for a directory tree /*{{{*/
+// ---------------------------------------------------------------------
+/* This emulates dpkg-scanpackages's command-line interface, 'mostly'. */
+bool SimpleGenPackages(CommandLine &CmdL)
+{
+ if (CmdL.FileSize() < 2)
+ return ShowHelp(CmdL);
+
+ string Override;
+ if (CmdL.FileSize() >= 3)
+ Override = CmdL.FileList[2];
+
+ // Create a package writer object.
+ PackagesWriter Packages(_config->Find("APT::FTPArchive::DB"),
+ Override);
+ if (_error->PendingError() == true)
+ return false;
+
+ if (CmdL.FileSize() >= 4)
+ Packages.PathPrefix = CmdL.FileList[3];
+
+ // Do recursive directory searching
+ if (Packages.RecursiveScan(CmdL.FileList[1]) == false)
+ return false;
+
+ return true;
+}
+ /*}}}*/
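+
+// Command-line sketch, mirroring the help text above (the paths and the
+// override file name are examples):
+//
+//    apt-ftparchive packages dists/potato/main/binary-i386/ \
+//       override.potato.main > Packages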
+// SimpleGenContents - Generate a Contents listing /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool SimpleGenContents(CommandLine &CmdL)
+{
+ if (CmdL.FileSize() < 2)
+ return ShowHelp(CmdL);
+
+ // Create a package writer object.
+ ContentsWriter Contents(_config->Find("APT::FTPArchive::DB"));
+ if (_error->PendingError() == true)
+ return false;
+
+ // Do recursive directory searching
+ if (Contents.RecursiveScan(CmdL.FileList[1]) == false)
+ return false;
+
+ Contents.Finish();
+
+ return true;
+}
+ /*}}}*/
+// SimpleGenSources - Generate a Sources file for a directory tree /*{{{*/
+// ---------------------------------------------------------------------
+/* This emulates dpkg-scansources's command-line interface, 'mostly'. */
+bool SimpleGenSources(CommandLine &CmdL)
+{
+ if (CmdL.FileSize() < 2)
+ return ShowHelp(CmdL);
+
+ string Override;
+ if (CmdL.FileSize() >= 3)
+ Override = CmdL.FileList[2];
+
+ string SOverride;
+ if (Override.empty() == false)
+ SOverride = Override + ".src";
+
+ SOverride = _config->Find("APT::FTPArchive::SourceOverride",
+ SOverride.c_str());
+
+ // Create a package writer object.
+ SourcesWriter Sources(Override,SOverride);
+ if (_error->PendingError() == true)
+ return false;
+
+ if (CmdL.FileSize() >= 4)
+ Sources.PathPrefix = CmdL.FileList[3];
+
+ // Do recursive directory searching
+ if (Sources.RecursiveScan(CmdL.FileList[1]) == false)
+ return false;
+
+ return true;
+}
+ /*}}}*/
+// Generate - Full generate, using a config file /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool Generate(CommandLine &CmdL)
+{
+ struct CacheDB::Stats SrcStats;
+ if (CmdL.FileSize() < 2)
+ return ShowHelp(CmdL);
+
+ struct timeval StartTime;
+ gettimeofday(&StartTime,0);
+ struct CacheDB::Stats Stats;
+
+ // Read the configuration file.
+ Configuration Setup;
+ if (ReadConfigFile(Setup,CmdL.FileList[1],true) == false)
+ return false;
+
+ vector<PackageMap> PkgList;
+ LoadTree(PkgList,Setup);
+ LoadBinDir(PkgList,Setup);
+
+ // Sort by cache DB to improve IO locality.
+ stable_sort(PkgList.begin(),PkgList.end(),PackageMap::DBCompare());
+
+ // Generate packages
+ if (CmdL.FileSize() <= 2)
+ {
+ for (PackageMap *I = PkgList.begin(); I != PkgList.end(); I++)
+ if (I->GenPackages(Setup,Stats) == false)
+ _error->DumpErrors();
+ for (PackageMap *I = PkgList.begin(); I != PkgList.end(); I++)
+ if (I->GenSources(Setup,SrcStats) == false)
+ _error->DumpErrors();
+ }
+ else
+ {
+ // Make a choice list out of the package list..
+ RxChoiceList *List = new RxChoiceList[2*PkgList.size()+1];
+ RxChoiceList *End = List;
+ for (PackageMap *I = PkgList.begin(); I != PkgList.end(); I++)
+ {
+ End->UserData = I;
+ End->Str = I->BaseDir.c_str();
+ End++;
+
+ End->UserData = I;
+ End->Str = I->Tag.c_str();
+ End++;
+ }
+ End->Str = 0;
+
+ // Regex it
+ if (RegexChoice(List,CmdL.FileList + 2,CmdL.FileList + CmdL.FileSize()) == 0)
+ {
+ delete [] List;
+ return _error->Error("No selections matched");
+ }
+ _error->DumpErrors();
+
+ // Do the generation for Packages
+ for (End = List; End->Str != 0; End++)
+ {
+ if (End->Hit == false)
+ continue;
+
+ PackageMap *I = (PackageMap *)End->UserData;
+ if (I->PkgDone == true)
+ continue;
+ if (I->GenPackages(Setup,Stats) == false)
+ _error->DumpErrors();
+ }
+
+ // Do the generation for Sources
+ for (End = List; End->Str != 0; End++)
+ {
+ if (End->Hit == false)
+ continue;
+
+ PackageMap *I = (PackageMap *)End->UserData;
+ if (I->SrcDone == true)
+ continue;
+ if (I->GenSources(Setup,SrcStats) == false)
+ _error->DumpErrors();
+ }
+
+ delete [] List;
+ }
+
+ if (_config->FindB("APT::FTPArchive::Contents",true) == false)
+ return true;
+
+ c1out << "Done Packages, Starting contents." << endl;
+
+ // Sort the contents file list by date
+ string ArchiveDir = Setup.FindDir("Dir::ArchiveDir");
+ for (PackageMap *I = PkgList.begin(); I != PkgList.end(); I++)
+ {
+ struct stat A;
+ if (MultiCompress::GetStat(flCombine(ArchiveDir,I->Contents),
+ I->CntCompress,A) == false)
+ time(&I->ContentsMTime);
+ else
+ I->ContentsMTime = A.st_mtime;
+ }
+ stable_sort(PkgList.begin(),PkgList.end(),PackageMap::ContentsCompare());
+
+ /* Now for Contents.. The process here is to do a make-like dependency
+ check. Each contents file is verified to be newer than the package files
+ that describe the debs it indexes. Since the package files contain
+ hashes of the .debs this means they have not changed either so the
+ contents must be up to date. */
+ unsigned long MaxContentsChange = Setup.FindI("Default::MaxContentsChange",UINT_MAX)*1024;
+ for (PackageMap *I = PkgList.begin(); I != PkgList.end(); I++)
+ {
+ // This record is not relevant
+ if (I->ContentsDone == true ||
+ I->Contents.empty() == true)
+ continue;
+
+ // Do not do everything if the user specified sections.
+ if (CmdL.FileSize() > 2 && I->PkgDone == false)
+ continue;
+
+ struct stat A,B;
+ if (MultiCompress::GetStat(flCombine(ArchiveDir,I->Contents),I->CntCompress,A) == true)
+ {
+ if (MultiCompress::GetStat(flCombine(ArchiveDir,I->PkgFile),I->PkgCompress,B) == false)
+ {
+ _error->Warning("Some files are missing in the package file group `%s'",I->PkgFile.c_str());
+ continue;
+ }
+
+ if (A.st_mtime > B.st_mtime)
+ continue;
+ }
+
+ if (I->GenContents(Setup,PkgList.begin(),PkgList.end(),
+ MaxContentsChange) == false)
+ _error->DumpErrors();
+
+ // Hit the limit?
+ if (MaxContentsChange == 0)
+ {
+ c1out << "Hit contents update byte limit" << endl;
+ break;
+ }
+ }
+
+ struct timeval NewTime;
+ gettimeofday(&NewTime,0);
+ double Delta = NewTime.tv_sec - StartTime.tv_sec +
+ (NewTime.tv_usec - StartTime.tv_usec)/1000000.0;
+ c1out << "Done. " << SizeToStr(Stats.Bytes) << "B in " << Stats.Packages
+ << " archives. Took " << TimeToStr((long)Delta) << endl;
+
+ return true;
+}
+ /*}}}*/
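+
+// Command-line sketch: with only a config file everything is built,
+//
+//    apt-ftparchive generate ftparchive.conf
+//
+// while any further arguments are treated as regex selections matched
+// against each entry's BaseDir and Tag via RegexChoice above, e.g.
+//
+//    apt-ftparchive generate ftparchive.conf "potato/main/.*"
+//
+// (the config file name here is hypothetical).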
+// Clean - Clean out the databases /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool Clean(CommandLine &CmdL)
+{
+ if (CmdL.FileSize() != 2)
+ return ShowHelp(CmdL);
+
+ // Read the configuration file.
+ Configuration Setup;
+ if (ReadConfigFile(Setup,CmdL.FileList[1],true) == false)
+ return false;
+
+ vector<PackageMap> PkgList;
+ LoadTree(PkgList,Setup);
+ LoadBinDir(PkgList,Setup);
+
+ // Sort by cache DB to improve IO locality.
+ stable_sort(PkgList.begin(),PkgList.end(),PackageMap::DBCompare());
+
+ string CacheDir = Setup.FindDir("Dir::CacheDir");
+
+ for (PackageMap *I = PkgList.begin(); I != PkgList.end(); )
+ {
+ c0out << I->BinCacheDB << endl;
+ CacheDB DB(flCombine(CacheDir,I->BinCacheDB));
+ if (DB.Clean() == false)
+ _error->DumpErrors();
+
+ string CacheDB = I->BinCacheDB;
+ for (; I != PkgList.end() && I->BinCacheDB == CacheDB; I++);
+ }
+
+ return true;
+}
+ /*}}}*/
+
+int main(int argc, const char *argv[])
+{
+ CommandLine::Args Args[] = {
+ {'h',"help","help",0},
+ {0,"md5","APT::FTPArchive::MD5",0},
+ {'v',"version","version",0},
+ {'d',"db","APT::FTPArchive::DB",CommandLine::HasArg},
+ {'s',"source-override","APT::FTPArchive::SourceOverride",CommandLine::HasArg},
+ {'q',"quiet","quiet",CommandLine::IntLevel},
+ {'q',"silent","quiet",CommandLine::IntLevel},
+ {0,"delink","APT::FTPArchive::DeLinkAct",0},
+ {0,"readonly","APT::FTPArchive::ReadOnlyDB",0},
+ {0,"contents","APT::FTPArchive::Contents",0},
+ {'c',"config-file",0,CommandLine::ConfigFile},
+ {'o',"option",0,CommandLine::ArbItem},
+ {0,0,0,0}};
+ CommandLine::Dispatch Cmds[] = {{"packages",&SimpleGenPackages},
+ {"contents",&SimpleGenContents},
+ {"sources",&SimpleGenSources},
+ {"generate",&Generate},
+ {"clean",&Clean},
+ {"help",&ShowHelp},
+ {0,0}};
+
+ // Parse the command line and initialize the package library
+ CommandLine CmdL(Args,_config);
+ if (CmdL.Parse(argc,argv) == false)
+ {
+ _error->DumpErrors();
+ return 100;
+ }
+
+ // See if the help should be shown
+ if (_config->FindB("help") == true ||
+ _config->FindB("version") == true ||
+ CmdL.FileSize() == 0)
+ {
+ ShowHelp(CmdL);
+ return 0;
+ }
+
+ // Setup the output streams
+ c0out.rdbuf(cout.rdbuf());
+ c1out.rdbuf(cout.rdbuf());
+ c2out.rdbuf(cout.rdbuf());
+ Quiet = _config->FindI("quiet",0);
+ if (Quiet > 0)
+ c0out.rdbuf(devnull.rdbuf());
+ if (Quiet > 1)
+ c1out.rdbuf(devnull.rdbuf());
+
+ // Match the operation
+ CmdL.DispatchArg(Cmds);
+
+ if (_error->empty() == false)
+ {
+ bool Errors = _error->PendingError();
+ _error->DumpErrors();
+ return Errors == true?100:0;
+ }
+ return 0;
+}
diff --git a/ftparchive/apt-ftparchive.h b/ftparchive/apt-ftparchive.h
new file mode 100644
index 00000000..c228903b
--- /dev/null
+++ b/ftparchive/apt-ftparchive.h
@@ -0,0 +1,28 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: apt-ftparchive.h,v 1.2 2001/02/20 07:03:18 jgg Exp $
+/* ######################################################################
+
+ apt-ftparchive - Globals
+
+ The global output streams and quiet level shared by the writer
+ classes, which produce the sources, packages and contents output.
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef APT_FTPARCHIVE_H
+#define APT_FTPARCHIVE_H
+
+#ifdef __GNUG__
+#pragma interface "apt-ftparchive.h"
+#endif
+
+#include <fstream>
+
+extern ostream c0out;
+extern ostream c1out;
+extern ostream c2out;
+extern ofstream devnull;
+extern unsigned Quiet;
+
+#endif
diff --git a/ftparchive/cachedb.cc b/ftparchive/cachedb.cc
new file mode 100644
index 00000000..dd63f215
--- /dev/null
+++ b/ftparchive/cachedb.cc
@@ -0,0 +1,284 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: cachedb.cc,v 1.2 2001/02/20 07:03:18 jgg Exp $
+/* ######################################################################
+
+ CacheDB
+
+ Simple uniform interface to a cache database.
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#ifdef __GNUG__
+#pragma implementation "cachedb.h"
+#endif
+
+#include "cachedb.h"
+
+#include <apt-pkg/error.h>
+#include <apt-pkg/md5.h>
+#include <apt-pkg/strutl.h>
+#include <apt-pkg/configuration.h>
+
+#include <netinet/in.h> // htonl, etc
+ /*}}}*/
+
+// CacheDB::ReadyDB - Ready the DB2 /*{{{*/
+// ---------------------------------------------------------------------
+/* This opens the DB2 file for caching package information */
+bool CacheDB::ReadyDB(string DB)
+{
+ ReadOnly = _config->FindB("APT::FTPArchive::ReadOnlyDB",false);
+
+ // Close the old DB
+ if (Dbp != 0)
+ Dbp->close(Dbp,0);
+
+ /* Check if the DB was disabled while running and deal with a
+ corrupted DB */
+ if (DBFailed() == true)
+ {
+ _error->Warning("DB was corrupted, file renamed to %s.old",DBFile.c_str());
+ rename(DBFile.c_str(),(DBFile+".old").c_str());
+ }
+
+ DBLoaded = false;
+ Dbp = 0;
+ DBFile = string();
+
+ if (DB.empty())
+ return true;
+
+ if ((errno = db_open(DB.c_str(),DB_HASH,
+ (ReadOnly?DB_RDONLY:DB_CREATE),
+ 0644,0,0,&Dbp)) != 0)
+ {
+ Dbp = 0;
+ return _error->Errno("db_open","Unable to open DB2 file %s",DB.c_str());
+ }
+
+ DBFile = DB;
+ DBLoaded = true;
+ return true;
+}
+ /*}}}*/
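+
+// Usage sketch, as in Clean() in apt-ftparchive.cc (the DB name echoes
+// the BinCacheDB default "packages-$(ARCH).db"):
+//
+//    CacheDB DB("packages-i386.db");
+//    if (DB.Clean() == false)
+//       _error->DumpErrors();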
+// CacheDB::SetFile - Select a file to be working with /*{{{*/
+// ---------------------------------------------------------------------
+/* All future actions will be performed against this file */
+bool CacheDB::SetFile(string FileName,struct stat St,FileFd *Fd)
+{
+ delete DebFile;
+ DebFile = 0;
+ this->FileName = FileName;
+ this->Fd = Fd;
+ this->FileStat = St;
+ memset(&CurStat,0,sizeof(CurStat));
+
+ Stats.Bytes += St.st_size;
+ Stats.Packages++;
+
+ if (DBLoaded == false)
+ return true;
+
+ InitQuery("st");
+
+ // Ensure alignment of the returned structure
+ Data.data = &CurStat;
+ Data.ulen = sizeof(CurStat);
+ Data.flags = DB_DBT_USERMEM;
+ // Lookup the stat info and confirm the file is unchanged
+ if (Get() == true)
+ {
+ if (CurStat.st_mtime != htonl(St.st_mtime))
+ {
+ CurStat.st_mtime = htonl(St.st_mtime);
+ CurStat.Flags = 0;
+ _error->Warning("File date has changed %s",FileName.c_str());
+ }
+ }
+ else
+ {
+ CurStat.st_mtime = htonl(St.st_mtime);
+ CurStat.Flags = 0;
+ }
+ CurStat.Flags = ntohl(CurStat.Flags);
+ OldStat = CurStat;
+ return true;
+}
+ /*}}}*/
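+
+// Key layout sketch: InitQuery() in cachedb.h forms the record key as
+// "<type>:<FileName>", so for a hypothetical pool/foo_1.0.deb the stat
+// record lives under "st:pool/foo_1.0.deb" and the same file gains
+// "cl:", "cn:" and "m5:" records for control, contents and MD5 data.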
+// CacheDB::LoadControl - Load Control information /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool CacheDB::LoadControl()
+{
+ // Try to read the control information out of the DB.
+ if ((CurStat.Flags & FlControl) == FlControl)
+ {
+ // Lookup the control information
+ InitQuery("cl");
+ if (Get() == true && Control.TakeControl(Data.data,Data.size) == true)
+ return true;
+ CurStat.Flags &= ~FlControl;
+ }
+
+ // Create a deb instance to read the archive
+ if (DebFile == 0)
+ {
+ DebFile = new debDebFile(*Fd);
+ if (_error->PendingError() == true)
+ return false;
+ }
+
+ Stats.Misses++;
+ if (Control.Read(*DebFile) == false)
+ return false;
+
+ if (Control.Control == 0)
+ return _error->Error("Archive has no control record");
+
+ // Write back the control information
+ InitQuery("cl");
+ if (Put(Control.Control,Control.Length) == true)
+ CurStat.Flags |= FlControl;
+ return true;
+}
+ /*}}}*/
+// CacheDB::LoadContents - Load the File Listing /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool CacheDB::LoadContents(bool GenOnly)
+{
+ // Try to read the control information out of the DB.
+ if ((CurStat.Flags & FlContents) == FlContents)
+ {
+ if (GenOnly == true)
+ return true;
+
+ // Lookup the contents information
+ InitQuery("cn");
+ if (Get() == true)
+ {
+ if (Contents.TakeContents(Data.data,Data.size) == true)
+ return true;
+ }
+
+ CurStat.Flags &= ~FlContents;
+ }
+
+ // Create a deb instance to read the archive
+ if (DebFile == 0)
+ {
+ DebFile = new debDebFile(*Fd);
+ if (_error->PendingError() == true)
+ return false;
+ }
+
+ if (Contents.Read(*DebFile) == false)
+ return false;
+
+ // Write back the control information
+ InitQuery("cn");
+ if (Put(Contents.Data,Contents.CurSize) == true)
+ CurStat.Flags |= FlContents;
+ return true;
+}
+ /*}}}*/
+// CacheDB::GetMD5 - Get the MD5 hash /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool CacheDB::GetMD5(string &MD5Res,bool GenOnly)
+{
+ // Try to read the control information out of the DB.
+ if ((CurStat.Flags & FlMD5) == FlMD5)
+ {
+ if (GenOnly == true)
+ return true;
+
+ InitQuery("m5");
+ if (Get() == true)
+ {
+ MD5Res = string((char *)Data.data,Data.size);
+ return true;
+ }
+ CurStat.Flags &= ~FlMD5;
+ }
+
+ Stats.MD5Bytes += FileStat.st_size;
+
+ MD5Summation MD5;
+ if (Fd->Seek(0) == false || MD5.AddFD(Fd->Fd(),FileStat.st_size) == false)
+ return false;
+
+ MD5Res = MD5.Result();
+ InitQuery("m5");
+ if (Put(MD5Res.begin(),MD5Res.length()) == true)
+ CurStat.Flags |= FlMD5;
+ return true;
+}
+ /*}}}*/
+// CacheDB::Finish - Write back the cache structure /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool CacheDB::Finish()
+{
+ // Optimize away some writes.
+ if (CurStat.Flags == OldStat.Flags &&
+ CurStat.st_mtime == OldStat.st_mtime)
+ return true;
+
+ // Write the stat information
+ CurStat.Flags = htonl(CurStat.Flags);
+ InitQuery("st");
+ Put(&CurStat,sizeof(CurStat));
+ CurStat.Flags = ntohl(CurStat.Flags);
+ return true;
+}
+ /*}}}*/
+// CacheDB::Clean - Clean the Database /*{{{*/
+// ---------------------------------------------------------------------
+/* Tidy the database by removing files that no longer exist at all. */
+bool CacheDB::Clean()
+{
+ if (DBLoaded == false)
+ return true;
+
+ /* I'm not sure what VERSION_MINOR should be here.. 2.4.14 certainly
+ needs the lower one and 2.7.7 needs the upper.. */
+#if DB_VERSION_MAJOR >= 2 && DB_VERSION_MINOR >= 7
+ DBC *Cursor;
+ if ((errno = Dbp->cursor(Dbp,0,&Cursor,0)) != 0)
+ return _error->Error("Unable to get a cursor");
+#else
+ DBC *Cursor;
+ if ((errno = Dbp->cursor(Dbp,0,&Cursor)) != 0)
+ return _error->Error("Unable to get a cursor");
+#endif
+
+ DBT Key;
+ DBT Data;
+ memset(&Key,0,sizeof(Key));
+ memset(&Data,0,sizeof(Data));
+ while ((errno = Cursor->c_get(Cursor,&Key,&Data,DB_NEXT)) == 0)
+ {
+ const char *Colon = (char *)Key.data;
+ for (; Colon != (char *)Key.data+Key.size && *Colon != ':'; Colon++);
+ if ((char *)Key.data+Key.size - Colon > 2)
+ {
+ if (stringcmp((char *)Key.data,Colon,"st") == 0 ||
+ stringcmp((char *)Key.data,Colon,"cn") == 0 ||
+ stringcmp((char *)Key.data,Colon,"m5") == 0 ||
+ stringcmp((char *)Key.data,Colon,"cl") == 0)
+ {
+ if (FileExists(string(Colon+1,(const char *)Key.data+Key.size)) == true)
+ continue;
+ }
+ }
+
+ Cursor->c_del(Cursor,0);
+ }
+
+ return true;
+}
+ /*}}}*/
diff --git a/ftparchive/cachedb.h b/ftparchive/cachedb.h
new file mode 100644
index 00000000..89b1a232
--- /dev/null
+++ b/ftparchive/cachedb.h
@@ -0,0 +1,119 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: cachedb.h,v 1.2 2001/02/20 07:03:18 jgg Exp $
+/* ######################################################################
+
+ CacheDB
+
+ Simple uniform interface to a cache database.
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef CACHEDB_H
+#define CACHEDB_H
+
+#ifdef __GNUG__
+#pragma interface "cachedb.h"
+#endif
+
+#include <db2/db.h>
+#include <string>
+#include <apt-pkg/debfile.h>
+#include <inttypes.h>
+#include <sys/stat.h>
+#include <errno.h>
+
+#include "contents.h"
+
+class CacheDB
+{
+ protected:
+
+ // Database state/access
+ DBT Key;
+ DBT Data;
+ char TmpKey[600];
+ DB *Dbp;
+ bool DBLoaded;
+ bool ReadOnly;
+ string DBFile;
+
+ // Generate a key for the DB of a given type
+ inline void InitQuery(const char *Type)
+ {
+ memset(&Key,0,sizeof(Key));
+ memset(&Data,0,sizeof(Data));
+ Key.data = TmpKey;
+ Key.size = snprintf(TmpKey,sizeof(TmpKey),"%s:%s",Type,FileName.c_str());
+ }
+
+ inline bool Get()
+ {
+ return Dbp->get(Dbp,0,&Key,&Data,0) == 0;
+ };
+ inline bool Put(const void *In,unsigned long Length)
+ {
+ if (ReadOnly == true)
+ return true;
+ Data.size = Length;
+ Data.data = (void *)In;
+ if (DBLoaded == true && (errno = Dbp->put(Dbp,0,&Key,&Data,0)) != 0)
+ {
+ DBLoaded = false;
+ return false;
+ }
+ return true;
+ }
+
+ // Stat info stored in the DB, Fixed types since it is written to disk.
+ enum FlagList {FlControl = (1<<0),FlMD5=(1<<1),FlContents=(1<<2)};
+ struct StatStore
+ {
+ uint32_t st_mtime;
+ uint32_t Flags;
+ } CurStat;
+ struct StatStore OldStat;
+
+ // 'set' state
+ string FileName;
+ struct stat FileStat;
+ FileFd *Fd;
+ debDebFile *DebFile;
+
+ public:
+
+ // Data collection helpers
+ debDebFile::MemControlExtract Control;
+ ContentsExtract Contents;
+
+ // Runtime statistics
+ struct Stats
+ {
+ double Bytes;
+ double MD5Bytes;
+ unsigned long Packages;
+ unsigned long Misses;
+ unsigned long DeLinkBytes;
+
+ inline void Add(const Stats &S) {Bytes += S.Bytes; MD5Bytes += S.MD5Bytes;
+ Packages += S.Packages; Misses += S.Misses; DeLinkBytes += S.DeLinkBytes;};
+ Stats() : Bytes(0), MD5Bytes(0), Packages(0), Misses(0), DeLinkBytes(0) {};
+ } Stats;
+
+ bool ReadyDB(string DB);
+ inline bool DBFailed() {return Dbp != 0 && DBLoaded == false;};
+ inline bool Loaded() {return DBLoaded == true;};
+
+ bool SetFile(string FileName,struct stat St,FileFd *Fd);
+ bool LoadControl();
+ bool LoadContents(bool GenOnly);
+ bool GetMD5(string &MD5Res,bool GenOnly);
+ bool Finish();
+
+ bool Clean();
+
+ CacheDB(string DB) : Dbp(0), DebFile(0) {ReadyDB(DB);};
+ ~CacheDB() {ReadyDB(string()); delete DebFile;};
+};
+
+#endif
diff --git a/ftparchive/contents.cc b/ftparchive/contents.cc
new file mode 100644
index 00000000..145a6878
--- /dev/null
+++ b/ftparchive/contents.cc
@@ -0,0 +1,401 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: contents.cc,v 1.2 2001/02/20 07:03:18 jgg Exp $
+/* ######################################################################
+
+ contents - Archive contents generator
+
+ The GenContents class is a back end for an archive contents generator.
+ It takes a list of per-deb file names and merges it into a memory
+ database of all previous output. This database is stored as a set
+ of binary trees linked across directories to form a tree of all files+dirs
+ given to it. The tree will also be sorted as it is built up, thus
+ removing the massive sort time overhead.
+
+ By breaking all the pathnames into components and storing them
+ separately a space savings is realized by not duplicating the string
+ over and over again. Ultimately this saving is sacrificed to storage of
+ the tree structure itself but the tree structure yields a speed gain
+ in the sorting and processing. Ultimately it takes about 5 seconds to
+ do 141000 nodes and about 5 meg of ram.
+
+ The tree looks something like:
+
+   usr/
+    / \             / libslang
+  bin/ lib/ --> libc6
+    / \         \ libfoo
+ games/  sbin/
+
+ The --> is the DirDown link
+
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#include "contents.h"
+
+#include <apt-pkg/extracttar.h>
+#include <apt-pkg/error.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <malloc.h>
+ /*}}}*/
+
+// GenContents::~GenContents - Free allocated memory /*{{{*/
+// ---------------------------------------------------------------------
+/* Since all our allocations are static big-block allocations all that is
+ needed is to free all of them. */
+GenContents::~GenContents()
+{
+ while (BlockList != 0)
+ {
+ BigBlock *Old = BlockList;
+ BlockList = Old->Next;
+ free(Old->Block);
+ delete Old;
+ }
+}
+ /*}}}*/
+// GenContents::Mystrdup - Custom strdup /*{{{*/
+// ---------------------------------------------------------------------
+/* This strdup also uses a large block allocator to eliminate glibc
+ overhead */
+char *GenContents::Mystrdup(const char *From)
+{
+ unsigned int Len = strlen(From) + 1;
+ if (StrLeft <= Len)
+ {
+ StrLeft = 4096*10;
+ StrPool = (char *)malloc(StrLeft);
+
+ BigBlock *Block = new BigBlock;
+ Block->Block = StrPool;
+ Block->Next = BlockList;
+ BlockList = Block;
+ }
+
+ memcpy(StrPool,From,Len);
+ StrLeft -= Len;
+
+ char *Res = StrPool;
+ StrPool += Len;
+ return Res;
+}
+ /*}}}*/
+// GenContents::Node::operator new - Big block allocator /*{{{*/
+// ---------------------------------------------------------------------
+/* This eliminates glibc's malloc overhead by allocating large blocks and
+ having a continuous set of Nodes. This takes about 8 bytes off each node's
+ space needs. Freeing is not supported. */
+void *GenContents::Node::operator new(size_t Amount,GenContents *Owner)
+{
+ if (Owner->NodeLeft == 0)
+ {
+ Owner->NodeLeft = 10000;
+ Owner->NodePool = (Node *)malloc(Amount*Owner->NodeLeft);
+ BigBlock *Block = new BigBlock;
+ Block->Block = Owner->NodePool;
+ Block->Next = Owner->BlockList;
+ Owner->BlockList = Block;
+ }
+
+ Owner->NodeLeft--;
+ return Owner->NodePool++;
+}
+ /*}}}*/
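+
+// Pool sizing note: strings are carved from 40KB blocks (4096*10 above)
+// and nodes from 10000-element blocks, so the ~141000-node run quoted in
+// the file header needs only a few dozen malloc calls in total.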
+// GenContents::Grab - Grab a new node representing Name under Top /*{{{*/
+// ---------------------------------------------------------------------
+/* This grabs a new node representing the pathname component Name under
+ the node Top. The node is given the name Package. It is assumed that Name
+ is inside of Top. If an already-entered duplicate name is found then
+ a note is made on the Dup list and the previous in-tree node is returned. */
+GenContents::Node *GenContents::Grab(GenContents::Node *Top,const char *Name,
+ const char *Package)
+{
+ /* We drop down to the next dir level each call. This simplifies
+ the calling routine */
+ if (Top->DirDown == 0)
+ {
+ Node *Item = new(this) Node;
+ Item->Path = Mystrdup(Name);
+ Item->Package = Package;
+ Top->DirDown = Item;
+ return Item;
+ }
+ Top = Top->DirDown;
+
+ int Res;
+ while (1)
+ {
+ Res = strcmp(Name,Top->Path);
+
+ // Collision!
+ if (Res == 0)
+ {
+ // See if this is the same package (multi-version dup)
+ if (Top->Package == Package ||
+ strcasecmp(Top->Package,Package) == 0)
+ return Top;
+
+ // Look for an already existing Dup
+ for (Node *I = Top->Dups; I != 0; I = I->Dups)
+ if (I->Package == Package ||
+ strcasecmp(I->Package,Package) == 0)
+ return Top;
+
+ // Add the dup in
+ Node *Item = new(this) Node;
+ Item->Path = Top->Path;
+ Item->Package = Package;
+ Item->Dups = Top->Dups;
+ Top->Dups = Item;
+ return Top;
+ }
+
+ // Continue to traverse the tree
+ if (Res < 0)
+ {
+ if (Top->BTreeLeft == 0)
+ break;
+ Top = Top->BTreeLeft;
+ }
+ else
+ {
+ if (Top->BTreeRight == 0)
+ break;
+ Top = Top->BTreeRight;
+ }
+ }
+
+ // The item was not found in the tree
+ Node *Item = new(this) Node;
+ Item->Path = Mystrdup(Name);
+ Item->Package = Package;
+
+ // Link it into the tree
+ if (Res < 0)
+ {
+ Item->BTreeLeft = Top->BTreeLeft;
+ Top->BTreeLeft = Item;
+ }
+ else
+ {
+ Item->BTreeRight = Top->BTreeRight;
+ Top->BTreeRight = Item;
+ }
+
+ return Item;
+}
+ /*}}}*/
+// GenContents::Add - Add a path to the tree /*{{{*/
+// ---------------------------------------------------------------------
+/* This takes a full pathname and adds it into the tree. We split the
+ pathname into directory fragments adding each one as we go. Technically
+ in output from tar this should result in hitting previous items. */
+void GenContents::Add(const char *Dir,const char *Package)
+{
+ Node *Root = &this->Root;
+
+ // Drop leading slashes
+ while (*Dir == '/' && *Dir != 0)
+ Dir++;
+
+ // Run over the string and grab out each bit up to and including a /
+ const char *Start = Dir;
+ const char *I = Dir;
+ while (*I != 0)
+ {
+ if (*I != '/' || I - Start <= 1)
+ {
+ I++;
+ continue;
+ }
+ I++;
+
+ // Copy the path fragment over
+ char Tmp[1024];
+ strncpy(Tmp,Start,I - Start);
+ Tmp[I - Start] = 0;
+
+ // Grab a node for it
+ Root = Grab(Root,Tmp,Package);
+
+ Start = I;
+ }
+
+ // The final component if it does not have a trailing /
+ if (I - Start >= 1)
+ Root = Grab(Root,Start,Package);
+}
+ /*}}}*/
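+
+// Walk-through sketch: Add("/usr/bin/gcc","gcc") drops the leading '/',
+// then grabs "usr/" and "bin/" as directory fragments and finally the
+// file component "gcc", each hanging off the node grabbed before it.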
+// GenContents::WriteSpace - Write a given number of white space chars /*{{{*/
+// ---------------------------------------------------------------------
+/* We mod 8 it and write tabs where possible. */
+void GenContents::WriteSpace(FILE *Out,unsigned int Current,unsigned int Target)
+{
+ if (Target <= Current)
+ Target = Current + 1;
+
+ /* Now we write tabs so long as the next tab stop would not pass
+ the target */
+ for (; (Current/8 + 1)*8 < Target; Current = (Current/8 + 1)*8)
+ fputc('\t',Out);
+
+ // Fill the last bit with spaces
+ for (; Current < Target; Current++)
+ fputc(' ',Out);
+}
+ /*}}}*/
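+
+// Worked example: WriteSpace(Out,11,60) emits tabs landing on columns
+// 16,24,32,40,48,56 (six tabs), then four spaces to reach column 60.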
+// GenContents::Print - Display the tree /*{{{*/
+// ---------------------------------------------------------------------
+/* This is the final result function. It takes the tree and recursively
+ calls itself and runs over each section of the tree printing out
+ the pathname and the hit packages. We use Buf to build the pathname
+ summed over all the directory parents of this node. */
+void GenContents::Print(FILE *Out)
+{
+ char Buffer[1024];
+ DoPrint(Out,&Root,Buffer);
+}
+void GenContents::DoPrint(FILE *Out,GenContents::Node *Top, char *Buf)
+{
+ if (Top == 0)
+ return;
+
+ // Go left
+ DoPrint(Out,Top->BTreeLeft,Buf);
+
+ // Print the current dir location and then descend to lower dirs
+ char *OldEnd = Buf + strlen(Buf);
+ if (Top->Path != 0)
+ {
+ strcat(Buf,Top->Path);
+
+ // Do not show the item if it is a directory with dups
+ if (Top->Path[strlen(Top->Path)-1] != '/' /*|| Top->Dups == 0*/)
+ {
+ fputs(Buf,Out);
+ WriteSpace(Out,strlen(Buf),60);
+ for (Node *I = Top; I != 0; I = I->Dups)
+ {
+ if (I != Top)
+ fputc(',',Out);
+ fputs(I->Package,Out);
+ }
+ fputc('\n',Out);
+ }
+ }
+
+ // Go along the directory link
+ DoPrint(Out,Top->DirDown,Buf);
+ *OldEnd = 0;
+
+ // Go right
+ DoPrint(Out,Top->BTreeRight,Buf);
+}
+ /*}}}*/
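+
+// Output sketch: each emitted line is the path padded to column 60
+// followed by the comma-separated hit packages, e.g. (entries are
+// hypothetical)
+//
+//    usr/bin/gcc                                  devel/gcc
+//    usr/lib/libc.so.6                            base/libc6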
+
+// ContentsExtract::Read - Read the archive /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool ContentsExtract::Read(debDebFile &Deb)
+{
+ Reset();
+
+ // Get the archive member and position the file
+ const ARArchive::Member *Member = Deb.GotoMember("data.tar.gz");
+ if (Member == 0)
+ return false;
+
+ // Extract it.
+ ExtractTar Tar(Deb.GetFile(),Member->Size);
+ if (Tar.Go(*this) == false)
+ return false;
+ return true;
+}
+ /*}}}*/
+// ContentsExtract::DoItem - Extract an item /*{{{*/
+// ---------------------------------------------------------------------
+/* This just tacks the name onto the end of our memory buffer */
+bool ContentsExtract::DoItem(Item &Itm,int &Fd)
+{
+ unsigned long Len = strlen(Itm.Name);
+
+ // Strip leading ./'s
+ if (Itm.Name[0] == '.' && Itm.Name[1] == '/')
+ {
+ // == './'
+ if (Len == 2)
+ return true;
+
+ Len -= 2;
+ Itm.Name += 2;
+ }
+
+ // Allocate more storage for the string list
+ if (CurSize + Len + 2 >= MaxSize || Data == 0)
+ {
+ if (MaxSize == 0)
+ MaxSize = 512*1024/2;
+ char *NewData = (char *)realloc(Data,MaxSize*2);
+ if (NewData == 0)
+ return _error->Error("realloc - Failed to allocate memory");
+ Data = NewData;
+ MaxSize *= 2;
+ }
+
+ strcpy(Data+CurSize,Itm.Name);
+ CurSize += Len + 1;
+ return true;
+}
+ /*}}}*/
+// ContentsExtract::TakeContents - Load the contents data /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool ContentsExtract::TakeContents(const void *NewData,unsigned long Length)
+{
+ if (Length == 0)
+ {
+ CurSize = 0;
+ return true;
+ }
+
+ // Allocate more storage for the string list
+ if (Length + 2 >= MaxSize || Data == 0)
+ {
+ if (MaxSize == 0)
+ MaxSize = 512*1024/2;
+ while (MaxSize*2 <= Length)
+ MaxSize *= 2;
+
+ char *NewData = (char *)realloc(Data,MaxSize*2);
+ if (NewData == 0)
+ return _error->Error("realloc - Failed to allocate memory");
+ Data = NewData;
+ MaxSize *= 2;
+ }
+ memcpy(Data,NewData,Length);
+ CurSize = Length;
+
+ return Data[CurSize-1] == 0;
+}
+ /*}}}*/
+// ContentsExtract::Add - Read the contents data into the sorter /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+void ContentsExtract::Add(GenContents &Contents,string Package)
+{
+ const char *Start = Data;
+ char *Pkg = Contents.Mystrdup(Package.c_str());
+ for (const char *I = Data; I < Data + CurSize; I++)
+ {
+ if (*I == 0)
+ {
+ Contents.Add(Start,Pkg);
+ Start = ++I;
+ }
+ }
+}
+ /*}}}*/
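+
+// Data layout sketch: Data holds the extracted names back to back, each
+// NUL-terminated ("usr/bin/gcc\0usr/doc/\0..."), so the loop above splits
+// on the terminators and feeds every path into the sorter.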
diff --git a/ftparchive/contents.h b/ftparchive/contents.h
new file mode 100644
index 00000000..d8457cd4
--- /dev/null
+++ b/ftparchive/contents.h
@@ -0,0 +1,89 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: contents.h,v 1.2 2001/02/20 07:03:18 jgg Exp $
+/* ######################################################################
+
+ contents - Contents of archive things.
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef CONTENTS_H
+#define CONTENTS_H
+
+#include <stdlib.h>
+#include <stdio.h>
+#include <apt-pkg/debfile.h>
+#include <apt-pkg/dirstream.h>
+
+class GenContents
+{
+ struct Node
+ {
+ // Binary Tree links
+ Node *BTreeLeft;
+ Node *BTreeRight;
+ Node *DirDown;
+ Node *Dups;
+ const char *Path;
+ const char *Package;
+
+ void *operator new(size_t Amount,GenContents *Owner);
+ void operator delete(void *) {};
+
+ Node() : BTreeLeft(0), BTreeRight(0), DirDown(0), Dups(0),
+ Path(0), Package(0) {};
+ };
+ friend struct Node;
+
+ struct BigBlock
+ {
+ void *Block;
+ BigBlock *Next;
+ };
+
+ Node Root;
+
+ // Big block allocation pools
+ BigBlock *BlockList;
+ char *StrPool;
+ unsigned long StrLeft;
+ Node *NodePool;
+ unsigned long NodeLeft;
+
+ Node *Grab(Node *Top,const char *Name,const char *Package);
+ void WriteSpace(FILE *Out,unsigned int Current,unsigned int Target);
+ void DoPrint(FILE *Out,Node *Top, char *Buf);
+
+ public:
+
+ char *Mystrdup(const char *From);
+ void Add(const char *Dir,const char *Package);
+ void Print(FILE *Out);
+
+ GenContents() : BlockList(0), StrPool(0), StrLeft(0),
+ NodePool(0), NodeLeft(0) {};
+ ~GenContents();
+};
+
+class ContentsExtract : public pkgDirStream
+{
+ public:
+
+ // The Data Block
+ char *Data;
+ unsigned long MaxSize;
+ unsigned long CurSize;
+ void AddData(const char *Text);
+
+ bool Read(debDebFile &Deb);
+
+ virtual bool DoItem(Item &Itm,int &Fd);
+ void Reset() {CurSize = 0;};
+ bool TakeContents(const void *Data,unsigned long Length);
+ void Add(GenContents &Contents,string Package);
+
+ ContentsExtract() : Data(0), MaxSize(0), CurSize(0) {};
+ virtual ~ContentsExtract() {delete [] Data;};
+};
+
+#endif
diff --git a/ftparchive/makefile b/ftparchive/makefile
new file mode 100644
index 00000000..ca6d8f9f
--- /dev/null
+++ b/ftparchive/makefile
@@ -0,0 +1,20 @@
+# -*- make -*-
+BASE=..
+SUBDIR=ftparchive
+
+# Bring in the default rules
+include ../buildlib/defaults.mak
+
+# The apt-ftparchive program
+ifdef DB2LIB
+PROGRAM=apt-ftparchive
+SLIBS = -lapt-pkg -lapt-inst $(DB2LIB)
+LIB_MAKES = apt-pkg/makefile apt-inst/makefile
+SOURCE = apt-ftparchive.cc cachedb.cc writer.cc contents.cc override.cc \
+ multicompress.cc
+include $(PROGRAM_H)
+else
+PROGRAM=apt-ftparchive
+MESSAGE="Must have db2 to build apt-ftparchive"
+include $(FAIL_H)
+endif # ifdef DB2LIB
diff --git a/ftparchive/multicompress.cc b/ftparchive/multicompress.cc
new file mode 100644
index 00000000..932ad675
--- /dev/null
+++ b/ftparchive/multicompress.cc
@@ -0,0 +1,494 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: multicompress.cc,v 1.2 2001/02/20 07:03:18 jgg Exp $
+/* ######################################################################
+
+ MultiCompressor
+
+ This class is very complicated in order to optimize for the common
+ case of its use, writing a large set of compressed files that are
+ different from the old set. It spawns off compressors in parallel
+ to maximize compression throughput and has a separate task managing
+ the data going into the compressors.
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#ifdef __GNUG__
+#pragma implementation "multicompress.h"
+#endif
+
+#include "multicompress.h"
+
+#include <apt-pkg/strutl.h>
+#include <apt-pkg/error.h>
+#include <apt-pkg/md5.h>
+
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <utime.h>
+#include <unistd.h>
+ /*}}}*/
+
+const MultiCompress::CompType MultiCompress::Compressors[] =
+ {{".","",0,0,0,1},
+ {"gzip",".gz","gzip","-9n","-d",2},
+ {"bzip2",".bz2","bzip2","-9","-d",3},
+ {}};
+
+// MultiCompress::MultiCompress - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* Setup the file outputs, compression modes and fork the writer child */
+MultiCompress::MultiCompress(string Output,string Compress,
+ mode_t Permissions,bool Write)
+{
+ Outputs = 0;
+ Outputter = -1;
+ Input = 0;
+ UpdateMTime = 0;
+ this->Permissions = Permissions;
+
+ /* Parse the compression string, a space-separated list of compression
+ types */
+ string::const_iterator I = Compress.begin();
+ for (; I != Compress.end();)
+ {
+ for (; I != Compress.end() && isspace(*I); I++);
+
+ // Grab a word
+ string::const_iterator Start = I;
+ for (; I != Compress.end() && !isspace(*I); I++);
+
+ // Find the matching compressor
+ const CompType *Comp = Compressors;
+ for (; Comp->Name != 0; Comp++)
+ if (stringcmp(Start,I,Comp->Name) == 0)
+ break;
+
+ // Hmm.. unknown.
+ if (Comp->Name == 0)
+ {
+ _error->Warning("Unknown Compresison Algorithm '%s'",string(Start,I).c_str());
+ continue;
+ }
+
+ // Create and link in a new output
+ Files *NewOut = new Files;
+ NewOut->Next = Outputs;
+ Outputs = NewOut;
+ NewOut->CompressProg = Comp;
+ NewOut->Output = Output+Comp->Extension;
+
+ struct stat St;
+ if (stat(NewOut->Output.c_str(),&St) == 0)
+ NewOut->OldMTime = St.st_mtime;
+ else
+ NewOut->OldMTime = 0;
+ }
+
+ if (Write == false)
+ return;
+
+ /* Open all the temp files now so we can report any errors. The file is
+ made unreadable to prevent people from touching it during creation. */
+ for (Files *I = Outputs; I != 0; I = I->Next)
+ I->TmpFile.Open(I->Output + ".new",FileFd::WriteEmpty,0600);
+ if (_error->PendingError() == true)
+ return;
+
+ if (Outputs == 0)
+ {
+ _error->Error("Compressed output %s needs a compression set",Output.c_str());
+ return;
+ }
+
+ Start();
+}
+ /*}}}*/
+// MultiCompress::~MultiCompress - Destructor /*{{{*/
+// ---------------------------------------------------------------------
+/* Just erase the file linked list. */
+MultiCompress::~MultiCompress()
+{
+ Die();
+
+ for (; Outputs != 0;)
+ {
+ Files *Tmp = Outputs->Next;
+ delete Outputs;
+ Outputs = Tmp;
+ }
+}
+ /*}}}*/
+// MultiCompress::GetStat - Get stat information for compressed files /*{{{*/
+// ---------------------------------------------------------------------
+/* This checks each compressed file to make sure it exists and returns
+ stat information for a random file from the collection. False means
+ one or more of the files is missing. */
+bool MultiCompress::GetStat(string Output,string Compress,struct stat &St)
+{
+ /* Parse the compression string, a space-separated list of compression
+ types */
+ string::const_iterator I = Compress.begin();
+ bool DidStat = false;
+ for (; I != Compress.end();)
+ {
+ for (; I != Compress.end() && isspace(*I); I++);
+
+ // Grab a word
+ string::const_iterator Start = I;
+ for (; I != Compress.end() && !isspace(*I); I++);
+
+ // Find the matching compressor
+ const CompType *Comp = Compressors;
+ for (; Comp->Name != 0; Comp++)
+ if (stringcmp(Start,I,Comp->Name) == 0)
+ break;
+
+ // Hmm.. unknown.
+ if (Comp->Name == 0)
+ continue;
+
+ string Name = Output+Comp->Extension;
+ if (stat(Name.c_str(),&St) != 0)
+ return false;
+ DidStat = true;
+ }
+ return DidStat;
+}
+ /*}}}*/
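+// Editor's sketch (hypothetical caller): GetStat is how a generator can
+// decide whether the complete compressed set already exists before
+// rewriting it:
+//
+//    struct stat St;
+//    if (MultiCompress::GetStat("binary-i386/Packages",". gzip",St) == false)
+//       ; // Packages or Packages.gz is missing, regenerate the set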
+// MultiCompress::Start - Start up the writer child /*{{{*/
+// ---------------------------------------------------------------------
+/* Fork a child and setup the communication pipe. */
+bool MultiCompress::Start()
+{
+ // Create a data pipe
+ int Pipe[2] = {-1,-1};
+ if (pipe(Pipe) != 0)
+ return _error->Errno("pipe","Failed to create IPC pipe to subprocess");
+ for (int I = 0; I != 2; I++)
+ SetCloseExec(Pipe[I],true);
+
+ // The child..
+ Outputter = fork();
+ if (Outputter == 0)
+ {
+ close(Pipe[1]);
+ Child(Pipe[0]);
+ if (_error->PendingError() == true)
+ {
+ _error->DumpErrors();
+ _exit(100);
+ }
+ _exit(0);
+ };
+
+ /* Tidy up the temp files, we open them in the constructor so as to
+ get proper error reporting. Close them now. */
+ for (Files *I = Outputs; I != 0; I = I->Next)
+ I->TmpFile.Close();
+
+ close(Pipe[0]);
+ Input = fdopen(Pipe[1],"w");
+ if (Input == 0)
+ return _error->Errno("fdopen","Failed to create FILE*");
+
+ if (Outputter == -1)
+ return _error->Errno("fork","Failed to fork");
+ return true;
+}
+ /*}}}*/
+// MultiCompress::Die - Clean up the writer /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool MultiCompress::Die()
+{
+ if (Input == 0)
+ return true;
+
+ fclose(Input);
+ Input = 0;
+ bool Res = ExecWait(Outputter,"Compress Child",false);
+ Outputter = -1;
+ return Res;
+}
+ /*}}}*/
+// MultiCompress::Finalize - Finish up writing /*{{{*/
+// ---------------------------------------------------------------------
+/* This is only necessary for statistics reporting. */
+bool MultiCompress::Finalize(unsigned long &OutSize)
+{
+ OutSize = 0;
+ if (Input == 0 || Die() == false)
+ return false;
+
+ time_t Now;
+ time(&Now);
+
+ // Check the mtimes to see if the files were replaced.
+ bool Changed = false;
+ for (Files *I = Outputs; I != 0; I = I->Next)
+ {
+ struct stat St;
+ if (stat(I->Output.c_str(),&St) != 0)
+ return _error->Error("Internal Error, Failed to create %s",
+ I->Output.c_str());
+
+ if (I->OldMTime != St.st_mtime)
+ Changed = true;
+ else
+ {
+ // Update the mtime if necessary
+ if (UpdateMTime > 0 &&
+ (Now - St.st_mtime > (signed)UpdateMTime || St.st_mtime > Now))
+ {
+ struct utimbuf Buf;
+ Buf.actime = Buf.modtime = Now;
+ utime(I->Output.c_str(),&Buf);
+ Changed = true;
+ }
+ }
+
+ // Force the file permissions
+      if ((St.st_mode & 07777) != Permissions)
+ chmod(I->Output.c_str(),Permissions);
+
+ OutSize += St.st_size;
+ }
+
+ if (Changed == false)
+ OutSize = 0;
+
+ return true;
+}
+ /*}}}*/
+// MultiCompress::OpenCompress - Open the compressor /*{{{*/
+// ---------------------------------------------------------------------
+/* This opens the compressor, either in compress mode or decompress
+   mode. FileFd is always the file side of the compressor; OutFd is the
+   created pipe end, which is the compressor's input when compressing and
+   its output when decompressing. */
+bool MultiCompress::OpenCompress(const CompType *Prog,int &Pid,int FileFd,
+ int &OutFd,bool Comp)
+{
+ Pid = -1;
+
+ // No compression
+ if (Prog->Binary == 0)
+ {
+ OutFd = dup(FileFd);
+ return true;
+ }
+
+ // Create a data pipe
+ int Pipe[2] = {-1,-1};
+ if (pipe(Pipe) != 0)
+ return _error->Errno("pipe","Failed to create subprocess IPC");
+ for (int J = 0; J != 2; J++)
+ SetCloseExec(Pipe[J],true);
+
+ if (Comp == true)
+ OutFd = Pipe[1];
+ else
+ OutFd = Pipe[0];
+
+ // The child..
+ Pid = ExecFork();
+ if (Pid == 0)
+ {
+ if (Comp == true)
+ {
+ dup2(FileFd,STDOUT_FILENO);
+ dup2(Pipe[0],STDIN_FILENO);
+ }
+ else
+ {
+ dup2(FileFd,STDIN_FILENO);
+ dup2(Pipe[1],STDOUT_FILENO);
+ }
+
+ SetCloseExec(STDOUT_FILENO,false);
+ SetCloseExec(STDIN_FILENO,false);
+
+ const char *Args[3];
+ Args[0] = Prog->Binary;
+ if (Comp == true)
+ Args[1] = Prog->CompArgs;
+ else
+ Args[1] = Prog->UnCompArgs;
+ Args[2] = 0;
+ execvp(Args[0],(char **)Args);
+ cerr << "Failed to exec compressor " << Args[0] << endl;
+ _exit(100);
+ };
+ if (Comp == true)
+ close(Pipe[0]);
+ else
+ close(Pipe[1]);
+ return true;
+}
+ /*}}}*/
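+// Editor's note (illustration only): the fd plumbing above amounts to
+//
+//    compress:   caller writes OutFd (Pipe[1]) -> "gzip -9n" -> FileFd
+//    decompress: FileFd -> "gzip -d" -> OutFd (Pipe[0]) read by caller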
+// MultiCompress::OpenOld - Open an old file /*{{{*/
+// ---------------------------------------------------------------------
+/* This opens one of the original output files, possibly decompressing it. */
+bool MultiCompress::OpenOld(int &Fd,int &Proc)
+{
+ Files *Best = Outputs;
+ for (Files *I = Outputs; I != 0; I = I->Next)
+ if (Best->CompressProg->Cost > I->CompressProg->Cost)
+ Best = I;
+
+ // Open the file
+ FileFd F(Best->Output,FileFd::ReadOnly);
+ if (_error->PendingError() == true)
+ return false;
+
+ // Decompress the file so we can read it
+ if (OpenCompress(Best->CompressProg,Proc,F.Fd(),Fd,false) == false)
+ return false;
+
+ return true;
+}
+ /*}}}*/
+// MultiCompress::CloseOld - Close the old file /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool MultiCompress::CloseOld(int Fd,int Proc)
+{
+ close(Fd);
+ if (Proc != -1)
+ if (ExecWait(Proc,"decompressor",false) == false)
+ return false;
+ return true;
+}
+ /*}}}*/
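+// Editor's sketch (hypothetical caller, mirroring the use in
+// ContentsWriter::ReadFromPkgs): OpenOld/CloseOld pair up to read back
+// the cheapest existing output in decompressed form:
+//
+//    int Fd = -1, Proc = -1;
+//    if (Comp.OpenOld(Fd,Proc) == true)
+//    {
+//       // read(Fd,...) now yields the decompressed old data
+//       Comp.CloseOld(Fd,Proc);
+//    }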
+// MultiCompress::Child - The writer child /*{{{*/
+// ---------------------------------------------------------------------
+/* The child process forks a bunch of compression children and takes
+   input on FD and passes it to all the compressor children. On the way it
+ computes the MD5 of the raw data. After this the raw data in the
+ original files is compared to see if this data is new. If the data
+ is new then the temp files are renamed, otherwise they are erased. */
+bool MultiCompress::Child(int FD)
+{
+ // Start the compression children.
+ for (Files *I = Outputs; I != 0; I = I->Next)
+ {
+ if (OpenCompress(I->CompressProg,I->CompressProc,I->TmpFile.Fd(),
+ I->Fd,true) == false)
+ return false;
+ }
+
+ /* Okay, now we just feed data from FD to all the other FDs. Also
+ stash a hash of the data to use later. */
+ SetNonBlock(FD,false);
+ unsigned char Buffer[32*1024];
+ unsigned long FileSize = 0;
+ MD5Summation MD5;
+ while (1)
+ {
+ WaitFd(FD,false);
+ int Res = read(FD,Buffer,sizeof(Buffer));
+ if (Res == 0)
+ break;
+ if (Res < 0)
+ continue;
+
+ MD5.Add(Buffer,Res);
+ FileSize += Res;
+ for (Files *I = Outputs; I != 0; I = I->Next)
+ {
+ if (write(I->Fd,Buffer,Res) != Res)
+ {
+ _error->Errno("write","IO to subprocess/file failed");
+ break;
+ }
+ }
+ }
+
+ // Close all the writers
+ for (Files *I = Outputs; I != 0; I = I->Next)
+ close(I->Fd);
+
+ // Wait for the compressors to exit
+ for (Files *I = Outputs; I != 0; I = I->Next)
+ {
+ if (I->CompressProc != -1)
+ ExecWait(I->CompressProc,I->CompressProg->Binary,false);
+ }
+
+ if (_error->PendingError() == true)
+ return false;
+
+ /* Now we have to copy the files over, or erase them if they
+ have not changed. First find the cheapest decompressor */
+ bool Missing = false;
+ for (Files *I = Outputs; I != 0; I = I->Next)
+ {
+ if (I->OldMTime == 0)
+ {
+ Missing = true;
+ break;
+ }
+ }
+
+ // Check the MD5 of the lowest cost entity.
+ while (Missing == false)
+ {
+ int CompFd = -1;
+ int Proc = -1;
+ if (OpenOld(CompFd,Proc) == false)
+ {
+ _error->Discard();
+ break;
+ }
+
+ // Compute the hash
+ MD5Summation OldMD5;
+ unsigned long NewFileSize = 0;
+ while (1)
+ {
+ int Res = read(CompFd,Buffer,sizeof(Buffer));
+ if (Res == 0)
+ break;
+ if (Res < 0)
+ return _error->Errno("read","Failed to read while computing MD5");
+ NewFileSize += Res;
+ OldMD5.Add(Buffer,Res);
+ }
+
+ // Tidy the compressor
+ if (CloseOld(CompFd,Proc) == false)
+ return false;
+
+ // Check the hash
+ if (OldMD5.Result() == MD5.Result() &&
+ FileSize == NewFileSize)
+ {
+ for (Files *I = Outputs; I != 0; I = I->Next)
+ {
+ I->TmpFile.Close();
+ if (unlink(I->TmpFile.Name().c_str()) != 0)
+ _error->Errno("unlink","Problem unlinking %s",
+ I->TmpFile.Name().c_str());
+ }
+ return !_error->PendingError();
+ }
+ break;
+ }
+
+ // Finalize
+ for (Files *I = Outputs; I != 0; I = I->Next)
+ {
+ // Set the correct file modes
+ fchmod(I->TmpFile.Fd(),Permissions);
+
+ if (rename(I->TmpFile.Name().c_str(),I->Output.c_str()) != 0)
+ _error->Errno("rename","Failed to rename %s to %s",
+ I->TmpFile.Name().c_str(),I->Output.c_str());
+ I->TmpFile.Close();
+ }
+
+ return !_error->PendingError();
+}
+ /*}}}*/
+
diff --git a/ftparchive/multicompress.h b/ftparchive/multicompress.h
new file mode 100644
index 00000000..212dec63
--- /dev/null
+++ b/ftparchive/multicompress.h
@@ -0,0 +1,80 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: multicompress.h,v 1.2 2001/02/20 07:03:18 jgg Exp $
+/* ######################################################################
+
+ MultiCompressor
+
+   Multiple output class. Takes a single FILE* and writes it simultaneously
+   to many compressed files. It then checks whether the resulting output
+   differs from the previous output and replaces the old files only if it
+   has changed. Care is taken to ensure that the new files are not generally
+   readable while they are being written.
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef MULTICOMPRESS_H
+#define MULTICOMPRESS_H
+
+#ifdef __GNUG__
+#pragma interface "multicompress.h"
+#endif
+
+#include <string>
+#include <apt-pkg/fileutl.h>
+#include <stdio.h>
+#include <sys/types.h>
+
+class MultiCompress
+{
+ // Enumeration of all supported compressors
+ struct CompType
+ {
+ const char *Name;
+ const char *Extension;
+ const char *Binary;
+ const char *CompArgs;
+ const char *UnCompArgs;
+ unsigned char Cost;
+ };
+
+ // An output file
+ struct Files
+ {
+ string Output;
+ const CompType *CompressProg;
+ Files *Next;
+ FileFd TmpFile;
+ pid_t CompressProc;
+ time_t OldMTime;
+ int Fd;
+ };
+
+ Files *Outputs;
+ pid_t Outputter;
+ mode_t Permissions;
+ static const CompType Compressors[];
+
+ bool OpenCompress(const CompType *Prog,int &Pid,int FileFd,
+ int &OutFd,bool Comp);
+ bool Child(int Fd);
+ bool Start();
+ bool Die();
+
+ public:
+
+   // The FILE* to write to for compression.
+ FILE *Input;
+ unsigned long UpdateMTime;
+
+ bool Finalize(unsigned long &OutSize);
+ bool OpenOld(int &Fd,int &Proc);
+ bool CloseOld(int Fd,int Proc);
+ static bool GetStat(string Output,string Compress,struct stat &St);
+
+ MultiCompress(string Output,string Compress,mode_t Permissions,
+ bool Write = true);
+ ~MultiCompress();
+};
+
+#endif
diff --git a/ftparchive/override.cc b/ftparchive/override.cc
new file mode 100644
index 00000000..93cc34e8
--- /dev/null
+++ b/ftparchive/override.cc
@@ -0,0 +1,180 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: override.cc,v 1.2 2001/02/20 07:03:18 jgg Exp $
+/* ######################################################################
+
+ Override
+
+ Store the override file.
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#ifdef __GNUG__
+#pragma implementation "override.h"
+#endif
+
+#include "override.h"
+
+#include <apt-pkg/strutl.h>
+#include <apt-pkg/error.h>
+
+#include <stdio.h>
+ /*}}}*/
+
+// Override::ReadOverride - Read the override file /*{{{*/
+// ---------------------------------------------------------------------
+/* This parses the override file and reads it into the map */
+bool Override::ReadOverride(string File,bool Source)
+{
+ if (File.empty() == true)
+ return true;
+
+ FILE *F = fopen(File.c_str(),"r");
+ if (F == 0)
+ return _error->Errno("fopen","Unable to open %s",File.c_str());
+
+ char Line[500];
+ unsigned long Counter = 0;
+ while (fgets(Line,sizeof(Line),F) != 0)
+ {
+ Counter++;
+ Item Itm;
+
+      // Strip comments
+ for (char *I = Line; *I != 0; I++)
+ if (*I == '#')
+ *I = 0;
+
+ // Strip space leading up to the package name, skip blank lines
+ char *Pkg = Line;
+ for (; isspace(*Pkg) && *Pkg != 0;Pkg++);
+      if (*Pkg == 0)
+ continue;
+
+ // Find the package and zero..
+ char *Start = Pkg;
+ char *End = Pkg;
+ for (; isspace(*End) == 0 && *End != 0; End++);
+ if (*End == 0)
+ {
+ _error->Warning("Malformed override %s line %lu #1",File.c_str(),
+ Counter);
+ continue;
+ }
+ *End = 0;
+
+ // Find the priority
+ if (Source == false)
+ {
+ for (End++; isspace(*End) != 0 && *End != 0; End++);
+ Start = End;
+ for (; isspace(*End) == 0 && *End != 0; End++);
+ if (*End == 0)
+ {
+ _error->Warning("Malformed override %s line %lu #2",File.c_str(),
+ Counter);
+ continue;
+ }
+ *End = 0;
+ Itm.Priority = Start;
+ }
+
+ // Find the Section
+ for (End++; isspace(*End) != 0 && *End != 0; End++);
+ Start = End;
+ for (; isspace(*End) == 0 && *End != 0; End++);
+ if (*End == 0)
+ {
+ _error->Warning("Malformed override %s line %lu #3",File.c_str(),
+ Counter);
+ continue;
+ }
+ *End = 0;
+ Itm.Section = Start;
+
+ // Source override files only have the two columns
+ if (Source == true)
+ {
+ Mapping[Pkg] = Itm;
+ continue;
+ }
+
+ // Find the =>
+ for (End++; isspace(*End) != 0 && *End != 0; End++);
+ if (*End != 0)
+ {
+ Start = End;
+ for (; *End != 0 && (End[0] != '=' || End[1] != '>'); End++);
+ if (*End == 0 || strlen(End) < 4)
+ {
+ Itm.OldMaint = "*";
+ Itm.NewMaint = _strstrip(Start);
+ }
+ else
+ {
+ *End = 0;
+ Itm.OldMaint = _strstrip(Start);
+
+ End += 3;
+ Itm.NewMaint = _strstrip(End);
+ }
+ }
+
+ Mapping[Pkg] = Itm;
+ }
+
+ if (ferror(F))
+ _error->Errno("fgets","Failed to read the override file %s",File.c_str());
+ fclose(F);
+ return true;
+}
+ /*}}}*/
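+// Editor's note: a binary override line has the general shape (data here
+// is hypothetical)
+//
+//    apt   important   admin   Old Maint <old@example> => New Maint <new@example>
+//
+// where the maintainer rewrite rule is optional; with Source == true only
+// the package and section columns are read.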
+// Override::Item::SwapMaint - Swap the maintainer field if necessary /*{{{*/
+// ---------------------------------------------------------------------
+/* Returns the new maintainer string after evaluating the rewriting rule.
+   The empty string is returned if there is a rule but it does not match,
+   and also when there was no rewrite rule at all. Failed indicates
+   whether there was some kind of problem while rewriting. */
+string Override::Item::SwapMaint(string Orig,bool &Failed)
+{
+ Failed = false;
+
+ // Degenerate case..
+ if (NewMaint.empty() == true)
+ return OldMaint;
+
+ if (OldMaint == "*")
+ return NewMaint;
+
+   /* James: this is ancient and should be eliminated, but it is still being
+      used in the main override file, so it persists. */
+#if 1
+ // Break OldMaint up into little bits on double slash boundaries.
+ string::iterator End = OldMaint.begin();
+ while (1)
+ {
+ string::iterator Start = End;
+ for (; End < OldMaint.end() &&
+ (End + 3 >= OldMaint.end() || End[0] != ' ' ||
+ End[1] != '/' || End[2] != '/'); End++);
+ if (stringcasecmp(Start,End,Orig.begin(),Orig.end()) == 0)
+ return NewMaint;
+
+ if (End >= OldMaint.end())
+ break;
+
+ // Skip the divider and white space
+ for (; End < OldMaint.end() && (*End == '/' || *End == ' '); End++);
+ }
+#else
+ if (stringcasecmp(OldMaint.begin(),OldMaint.end(),Orig.begin(),Orig.end()) == 0)
+ return NewMaint;
+#endif
+
+ Failed = true;
+ return string();
+}
+ /*}}}*/
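+// Editor's sketch (hypothetical values): with OldMaint = "A <a@x> // B <b@y>"
+// and NewMaint = "C <c@z>", SwapMaint("b <b@y>",F) matches the second
+// alternative case-insensitively and returns "C <c@z>"; an unlisted
+// maintainer sets Failed and returns the empty string.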
diff --git a/ftparchive/override.h b/ftparchive/override.h
new file mode 100644
index 00000000..63f123c4
--- /dev/null
+++ b/ftparchive/override.h
@@ -0,0 +1,50 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: override.h,v 1.2 2001/02/20 07:03:18 jgg Exp $
+/* ######################################################################
+
+ Override
+
+ Store the override file.
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef OVERRIDE_H
+#define OVERRIDE_H
+
+#ifdef __GNUG__
+#pragma interface "override.h"
+#endif
+
+#include <map>
+#include <string>
+
+class Override
+{
+ public:
+
+ struct Item
+ {
+ string Priority;
+ string Section;
+ string OldMaint;
+ string NewMaint;
+
+ string SwapMaint(string Orig,bool &Failed);
+ };
+
+ map<string,Item> Mapping;
+
+ inline Item *GetItem(string Package)
+ {
+ map<string,Item>::iterator I = Mapping.find(Package);
+ if (I == Mapping.end())
+ return 0;
+ return &I->second;
+ };
+
+ bool ReadOverride(string File,bool Source = false);
+};
+
+#endif
+
diff --git a/ftparchive/writer.cc b/ftparchive/writer.cc
new file mode 100644
index 00000000..7aea8931
--- /dev/null
+++ b/ftparchive/writer.cc
@@ -0,0 +1,756 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: writer.cc,v 1.2 2001/02/20 07:03:18 jgg Exp $
+/* ######################################################################
+
+ Writer
+
+ The file writer classes. These write various types of output, sources,
+ packages and contents.
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#ifdef __GNUG__
+#pragma implementation "writer.h"
+#endif
+
+#include "writer.h"
+
+#include <apt-pkg/strutl.h>
+#include <apt-pkg/error.h>
+#include <apt-pkg/configuration.h>
+#include <apt-pkg/md5.h>
+#include <apt-pkg/deblistparser.h>
+
+#include <sys/types.h>
+#include <unistd.h>
+#include <ftw.h>
+
+#include "cachedb.h"
+#include "apt-ftparchive.h"
+#include "multicompress.h"
+ /*}}}*/
+
+FTWScanner *FTWScanner::Owner;
+
+// FTWScanner::FTWScanner - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+FTWScanner::FTWScanner()
+{
+ ErrorPrinted = false;
+ NoLinkAct = !_config->FindB("APT::FTPArchive::DeLinkAct",true);
+ TmpExt = 0;
+ Ext[0] = 0;
+ RealPath = 0;
+ long PMax = pathconf(".",_PC_PATH_MAX);
+ if (PMax > 0)
+ RealPath = new char[PMax];
+}
+ /*}}}*/
+// FTWScanner::Scanner - FTW Scanner /*{{{*/
+// ---------------------------------------------------------------------
+/* This is the FTW scanner, it processes each directory element in the
+ directory tree. */
+int FTWScanner::Scanner(const char *File,const struct stat *sb,int Flag)
+{
+ if (Flag == FTW_DNR)
+ {
+ Owner->NewLine(1);
+ c1out << "W: Unable to read directory " << File << endl;
+ }
+ if (Flag == FTW_NS)
+ {
+ Owner->NewLine(1);
+ c1out << "W: Unable to stat " << File << endl;
+ }
+ if (Flag != FTW_F)
+ return 0;
+
+   // See if the file ends in one of the requested extensions
+ if (strlen(File) < 4)
+ return 0;
+
+ unsigned CurExt = 0;
+ for (; Owner->Ext[CurExt] != 0; CurExt++)
+ if (strcmp(File+strlen(File)-strlen(Owner->Ext[CurExt]),
+ Owner->Ext[CurExt]) == 0)
+ break;
+ if (Owner->Ext[CurExt] == 0)
+ return 0;
+
+   /* Process it. If the file is a link then resolve it into an absolute
+      name. This works best if the directory components given to the
+      scanner are not links themselves. */
+ char Jnk[2];
+ Owner->OriginalPath = File;
+ if (Owner->RealPath != 0 && readlink(File,Jnk,sizeof(Jnk)) != -1 &&
+ realpath(File,Owner->RealPath) != 0)
+ Owner->DoPackage(Owner->RealPath);
+ else
+ Owner->DoPackage(File);
+
+ if (_error->empty() == false)
+ {
+ // Print any errors or warnings found
+ string Err;
+ bool SeenPath = false;
+ while (_error->empty() == false)
+ {
+ Owner->NewLine(1);
+
+ bool Type = _error->PopMessage(Err);
+ if (Type == true)
+ c1out << "E: " << Err << endl;
+ else
+ c1out << "W: " << Err << endl;
+
+ if (Err.find(File) != string::npos)
+ SeenPath = true;
+ }
+
+ if (SeenPath == false)
+ cerr << "E: Errors apply to file '" << File << "'" << endl;
+ return 0;
+ }
+
+ return 0;
+}
+ /*}}}*/
+// FTWScanner::RecursiveScan - Just scan a directory tree /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool FTWScanner::RecursiveScan(string Dir)
+{
+   /* If no internal prefix is set then jam the scan root in, so we don't
+      generate link-followed paths out of control */
+ if (InternalPrefix.empty() == true)
+ {
+ if (realpath(Dir.c_str(),RealPath) == 0)
+ return _error->Errno("realpath","Failed to resolve %s",Dir.c_str());
+ InternalPrefix = RealPath;
+ }
+
+ // Do recursive directory searching
+ Owner = this;
+ int Res = ftw(Dir.c_str(),Scanner,30);
+
+ // Error treewalking?
+ if (Res != 0)
+ {
+ if (_error->PendingError() == false)
+ _error->Errno("ftw","Tree walking failed");
+ return false;
+ }
+
+ return true;
+}
+ /*}}}*/
+// FTWScanner::LoadFileList - Load the file list from a file /*{{{*/
+// ---------------------------------------------------------------------
+/* This is an alternative to using FTW to locate files, it reads the list
+ of files from another file. */
+bool FTWScanner::LoadFileList(string Dir,string File)
+{
+   /* If no internal prefix is set then jam the scan root in, so we don't
+      generate link-followed paths out of control */
+ if (InternalPrefix.empty() == true)
+ {
+ if (realpath(Dir.c_str(),RealPath) == 0)
+ return _error->Errno("realpath","Failed to resolve %s",Dir.c_str());
+ InternalPrefix = RealPath;
+ }
+
+ Owner = this;
+ FILE *List = fopen(File.c_str(),"r");
+ if (List == 0)
+ return _error->Errno("fopen","Failed to open %s",File.c_str());
+
+   /* We are a tad tricky here.. We prefix the buffer with the directory
+      name, that way if we need a full path we just use Line. Sneaky and
+      fully evil. */
+ char Line[1000];
+ char *FileStart;
+ if (Dir.empty() == true || Dir.end()[-1] != '/')
+ FileStart = Line + snprintf(Line,sizeof(Line),"%s/",Dir.c_str());
+ else
+ FileStart = Line + snprintf(Line,sizeof(Line),"%s",Dir.c_str());
+ while (fgets(FileStart,sizeof(Line) - (FileStart - Line),List) != 0)
+ {
+ char *FileName = _strstrip(FileStart);
+ if (FileName[0] == 0)
+ continue;
+
+ if (FileName[0] != '/')
+ {
+ if (FileName != FileStart)
+            memmove(FileStart,FileName,strlen(FileName)+1);
+ FileName = Line;
+ }
+
+ struct stat St;
+ int Flag = FTW_F;
+ if (stat(FileName,&St) != 0)
+ Flag = FTW_NS;
+
+ if (Scanner(FileName,&St,Flag) != 0)
+ break;
+ }
+
+ fclose(List);
+ return true;
+}
+ /*}}}*/
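+// Editor's note (hypothetical content): the file list is one name per
+// line; relative names are resolved against Dir and absolute names are
+// used as-is, e.g.:
+//
+//    pool/main/e/example/example_1.0_i386.deb
+//    /mirror/pool/main/o/other/other_2.0_i386.deb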
+// FTWScanner::Delink - Delink symlinks /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool FTWScanner::Delink(string &FileName,const char *OriginalPath,
+ unsigned long &DeLinkBytes,
+ struct stat &St)
+{
+   // See if this is not an internally prefixed file name.
+ if (InternalPrefix.empty() == false &&
+ InternalPrefix.length() < FileName.length() &&
+ stringcmp(FileName.begin(),FileName.begin() + InternalPrefix.length(),
+ InternalPrefix.begin(),InternalPrefix.end()) != 0)
+ {
+ if (DeLinkLimit != 0 && DeLinkBytes/1024 < DeLinkLimit)
+ {
+ // Tidy up the display
+ if (DeLinkBytes == 0)
+ cout << endl;
+
+ NewLine(1);
+ c1out << " DeLink " << (OriginalPath + InternalPrefix.length())
+ << " [" << SizeToStr(St.st_size) << "B]" << endl << flush;
+
+ if (NoLinkAct == false)
+ {
+ char OldLink[400];
+ if (readlink(OriginalPath,OldLink,sizeof(OldLink)) == -1)
+ _error->Errno("readlink","Failed to readlink %s",OriginalPath);
+ else
+ {
+ if (unlink(OriginalPath) != 0)
+ _error->Errno("unlink","Failed to unlink %s",OriginalPath);
+ else
+ {
+ if (link(FileName.c_str(),OriginalPath) != 0)
+ {
+ // Panic! Restore the symlink
+ symlink(OldLink,OriginalPath);
+ return _error->Errno("link","*** Failed to link %s to %s",
+ FileName.c_str(),
+ OriginalPath);
+ }
+ }
+ }
+ }
+
+ DeLinkBytes += St.st_size;
+ if (DeLinkBytes/1024 >= DeLinkLimit)
+ c1out << " DeLink limit of " << SizeToStr(DeLinkBytes) << "B hit." << endl;
+ }
+
+ FileName = OriginalPath;
+ }
+
+ return true;
+}
+ /*}}}*/
+// FTWScanner::SetExts - Set extensions to support /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool FTWScanner::SetExts(string Vals)
+{
+ delete [] TmpExt;
+ TmpExt = new char[Vals.length()+1];
+ strcpy(TmpExt,Vals.c_str());
+ return TokSplitString(' ',TmpExt,(char **)Ext,sizeof(Ext)/sizeof(Ext[0]));
+}
+ /*}}}*/
+
+// PackagesWriter::PackagesWriter - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+PackagesWriter::PackagesWriter(string DB,string Overrides) :
+ Db(DB),Stats(Db.Stats)
+{
+ Output = stdout;
+ Ext[0] = ".deb";
+ Ext[1] = 0;
+ DeLinkLimit = 0;
+
+ // Process the command line options
+ DoMD5 = _config->FindB("APT::FTPArchive::MD5",true);
+ DoContents = _config->FindB("APT::FTPArchive::Contents",true);
+ NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false);
+
+ if (Db.Loaded() == false)
+ DoContents = false;
+
+ // Read the override file
+   if (Overrides.empty() == false)
+   {
+      if (Over.ReadOverride(Overrides) == false)
+         return;
+   }
+   else
+      NoOverride = true;
+ _error->DumpErrors();
+}
+ /*}}}*/
+// PackagesWriter::DoPackage - Process a single package /*{{{*/
+// ---------------------------------------------------------------------
+/* This method takes a package and gets its control information and
+ MD5 then writes out a control record with the proper fields rewritten
+ and the path/size/hash appended. */
+bool PackagesWriter::DoPackage(string FileName)
+{
+ // Open the archive
+ FileFd F(FileName,FileFd::ReadOnly);
+ if (_error->PendingError() == true)
+ return false;
+
+ // Stat the file for later
+ struct stat St;
+ if (fstat(F.Fd(),&St) != 0)
+ return _error->Errno("fstat","Failed to stat %s",FileName.c_str());
+
+   // Pull all the data we need from the DB
+ string MD5Res;
+ if (Db.SetFile(FileName,St,&F) == false ||
+ Db.LoadControl() == false ||
+ (DoContents == true && Db.LoadContents(true) == false) ||
+ (DoMD5 == true && Db.GetMD5(MD5Res,false) == false))
+ return false;
+
+ if (Delink(FileName,OriginalPath,Stats.DeLinkBytes,St) == false)
+ return false;
+
+   // Look up the override information
+ pkgTagSection &Tags = Db.Control.Section;
+ string Package = Tags.FindS("Package");
+ Override::Item Tmp;
+ Override::Item *OverItem = Over.GetItem(Package);
+
+ if (Package.empty() == true)
+ return _error->Error("Archive had no package field");
+
+ // If we need to do any rewriting of the header do it now..
+ if (OverItem == 0)
+ {
+ if (NoOverride == false)
+ {
+ NewLine(1);
+ c1out << " " << Package << " has no override entry" << endl;
+ }
+
+ OverItem = &Tmp;
+ Tmp.Section = Tags.FindS("Section");
+ Tmp.Priority = Tags.FindS("Priority");
+ }
+
+ char Size[40];
+ sprintf(Size,"%lu",St.st_size);
+
+ // Strip the DirStrip prefix from the FileName and add the PathPrefix
+ string NewFileName;
+ if (DirStrip.empty() == false &&
+ FileName.length() > DirStrip.length() &&
+ stringcmp(FileName.begin(),FileName.begin() + DirStrip.length(),
+ DirStrip.begin(),DirStrip.end()) == 0)
+ NewFileName = string(FileName.begin() + DirStrip.length(),FileName.end());
+ else
+ NewFileName = FileName;
+ if (PathPrefix.empty() == false)
+ NewFileName = flCombine(PathPrefix,NewFileName);
+
+ // This lists all the changes to the fields we are going to make.
+ TFRewriteData Changes[] = {{"Size",Size},
+ {"MD5sum",MD5Res.c_str()},
+ {"Filename",NewFileName.c_str()},
+ {"Section",OverItem->Section.c_str()},
+ {"Priority",OverItem->Priority.c_str()},
+ {"Status",0},
+ {"Optional",0},
+ {}, // For maintainer
+ {}, // For Suggests
+ {}};
+ unsigned int End = 0;
+ for (End = 0; Changes[End].Tag != 0; End++);
+
+ // Rewrite the maintainer field if necessary
+ bool MaintFailed;
+ string NewMaint = OverItem->SwapMaint(Tags.FindS("Maintainer"),MaintFailed);
+ if (MaintFailed == true)
+ {
+ if (NoOverride == false)
+ {
+ NewLine(1);
+ c1out << " " << Package << " maintainer is " <<
+ Tags.FindS("Maintainer") << " not " <<
+ OverItem->OldMaint << endl;
+ }
+ }
+
+ if (NewMaint.empty() == false)
+ {
+ Changes[End].Rewrite = NewMaint.c_str();
+ Changes[End++].Tag = "Maintainer";
+ }
+
+   /* Get rid of the Optional tag. This is an ugly, ugly, ugly hack that
+      dpkg-scanpackages does.. well, sort of. dpkg-scanpackages just renames
+      the field, but dpkg appends it to Suggests. So we do the append bit too;
+      at least that way the status file and package file will remain similar.
+      There are other transforms, but Optional is the only legacy one still
+      in use. */
+ string OptionalStr = Tags.FindS("Optional");
+ if (OptionalStr.empty() == false)
+ {
+ if (Tags.FindS("Suggests").empty() == false)
+ OptionalStr = Tags.FindS("Suggests") + ", " + OptionalStr;
+ Changes[End].Rewrite = OptionalStr.c_str();
+ Changes[End++].Tag = "Suggests";
+ }
+
+ // Rewrite and store the fields.
+ if (TFRewrite(Output,Tags,TFRewritePackageOrder,Changes) == false)
+ return false;
+ fprintf(Output,"\n");
+
+ return Db.Finish();
+}
+ /*}}}*/
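+// Editor's sketch of the emitted record (all values hypothetical): the
+// .deb's control section is written out with Section/Priority taken from
+// the override and Filename/Size/MD5sum appended:
+//
+//    Package: example
+//    Priority: optional
+//    Section: admin
+//    Maintainer: New Maint <new@example>
+//    Filename: pool/main/e/example/example_1.0_i386.deb
+//    Size: 561384
+//    MD5sum: <32 hex digits>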
+
+// SourcesWriter::SourcesWriter - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+SourcesWriter::SourcesWriter(string BOverrides,string SOverrides)
+{
+ Output = stdout;
+ Ext[0] = ".dsc";
+ Ext[1] = 0;
+ DeLinkLimit = 0;
+ Buffer = 0;
+ BufSize = 0;
+
+ // Process the command line options
+ NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false);
+
+ // Read the override file
+   if (BOverrides.empty() == false)
+   {
+      if (BOver.ReadOverride(BOverrides) == false)
+         return;
+   }
+   else
+      NoOverride = true;
+
+ if (SOverrides.empty() == false && FileExists(SOverrides) == true &&
+ SOver.ReadOverride(SOverrides,true) == false)
+ return;
+// _error->DumpErrors();
+}
+ /*}}}*/
+// SourcesWriter::DoPackage - Process a single package /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool SourcesWriter::DoPackage(string FileName)
+{
+ // Open the archive
+ FileFd F(FileName,FileFd::ReadOnly);
+ if (_error->PendingError() == true)
+ return false;
+
+ // Stat the file for later
+ struct stat St;
+ if (fstat(F.Fd(),&St) != 0)
+ return _error->Errno("fstat","Failed to stat %s",FileName.c_str());
+
+ if (St.st_size > 128*1024)
+ return _error->Error("DSC file '%s' is too large!",FileName.c_str());
+
+ if (BufSize < (unsigned)St.st_size+1)
+ {
+ BufSize = St.st_size+1;
+ Buffer = (char *)realloc(Buffer,St.st_size+1);
+ }
+
+ if (F.Read(Buffer,St.st_size) == false)
+ return false;
+
+ // Hash the file
+ char *Start = Buffer;
+ char *BlkEnd = Buffer + St.st_size;
+ MD5Summation MD5;
+ MD5.Add((unsigned char *)Start,BlkEnd - Start);
+
+ // Add an extra \n to the end, just in case
+ *BlkEnd++ = '\n';
+
+ /* Remove the PGP trailer. Some .dsc's have this without a blank line
+ before */
+ const char *Key = "-----BEGIN PGP SIGNATURE-----";
+ for (char *MsgEnd = Start; MsgEnd < BlkEnd - strlen(Key) -1; MsgEnd++)
+ {
+ if (*MsgEnd == '\n' && strncmp(MsgEnd+1,Key,strlen(Key)) == 0)
+ {
+ MsgEnd[1] = '\n';
+ break;
+ }
+ }
+
+   /* Read records until we locate the Source record. This neatly skips the
+      GPG header (which is RFC822 formatted) without any trouble. */
+ pkgTagSection Tags;
+ do
+ {
+ unsigned Pos;
+ if (Tags.Scan(Start,BlkEnd - Start) == false)
+ return _error->Error("Could not find a record in the DSC '%s'",FileName.c_str());
+ if (Tags.Find("Source",Pos) == true)
+ break;
+ Start += Tags.size();
+ }
+ while (1);
+ Tags.Trim();
+
+   // Look up the override information, finding the best priority first.
+ string BestPrio;
+ char Buffer[1000];
+ string Bins = Tags.FindS("Binary");
+ Override::Item *OverItem = 0;
+ if (Bins.empty() == false && Bins.length() < sizeof(Buffer))
+ {
+ strcpy(Buffer,Bins.c_str());
+
+ // Ignore too-long errors.
+ char *BinList[400];
+ TokSplitString(',',Buffer,BinList,sizeof(BinList)/sizeof(BinList[0]));
+
+ // Look at all the binaries
+ unsigned char BestPrioV = pkgCache::State::Extra;
+ for (unsigned I = 0; BinList[I] != 0; I++)
+ {
+ Override::Item *Itm = BOver.GetItem(BinList[I]);
+ if (Itm == 0)
+ continue;
+ if (OverItem == 0)
+ OverItem = Itm;
+
+ unsigned char NewPrioV = debListParser::GetPrio(Itm->Priority);
+ if (NewPrioV < BestPrioV || BestPrio.empty() == true)
+ {
+ BestPrioV = NewPrioV;
+ BestPrio = Itm->Priority;
+ }
+ }
+ }
+
+ // If we need to do any rewriting of the header do it now..
+ Override::Item Tmp;
+ if (OverItem == 0)
+ {
+ if (NoOverride == false)
+ {
+ NewLine(1);
+ c1out << " " << Tags.FindS("Source") << " has no override entry" << endl;
+ }
+
+ OverItem = &Tmp;
+ }
+
+ Override::Item *SOverItem = SOver.GetItem(Tags.FindS("Source"));
+ if (SOverItem == 0)
+ {
+ SOverItem = BOver.GetItem(Tags.FindS("Source"));
+ if (SOverItem == 0)
+ SOverItem = OverItem;
+ }
+
+ // Add the dsc to the files hash list
+ char Files[1000];
+ snprintf(Files,sizeof(Files),"\n %s %lu %s\n %s",
+ string(MD5.Result()).c_str(),St.st_size,
+ flNotDir(FileName).c_str(),
+ Tags.FindS("Files").c_str());
+
+ // Strip the DirStrip prefix from the FileName and add the PathPrefix
+ string NewFileName;
+ if (DirStrip.empty() == false &&
+ FileName.length() > DirStrip.length() &&
+ stringcmp(OriginalPath,OriginalPath + DirStrip.length(),
+ DirStrip.begin(),DirStrip.end()) == 0)
+ NewFileName = string(OriginalPath + DirStrip.length());
+ else
+ NewFileName = OriginalPath;
+ if (PathPrefix.empty() == false)
+ NewFileName = flCombine(PathPrefix,NewFileName);
+
+ string Directory = flNotFile(OriginalPath);
+ string Package = Tags.FindS("Source");
+
+ // Perform the delinking operation over all of the files
+ string ParseJnk;
+ const char *C = Files;
+ for (;isspace(*C); C++);
+ while (*C != 0)
+ {
+ // Parse each of the elements
+ if (ParseQuoteWord(C,ParseJnk) == false ||
+ ParseQuoteWord(C,ParseJnk) == false ||
+ ParseQuoteWord(C,ParseJnk) == false)
+ return _error->Error("Error parsing file record");
+
+ char Jnk[2];
+ string OriginalPath = Directory + ParseJnk;
+ if (RealPath != 0 && readlink(OriginalPath.c_str(),Jnk,sizeof(Jnk)) != -1 &&
+ realpath(OriginalPath.c_str(),RealPath) != 0)
+ {
+ string RP = RealPath;
+ if (Delink(RP,OriginalPath.c_str(),Stats.DeLinkBytes,St) == false)
+ return false;
+ }
+ }
+
+ Directory = flNotFile(NewFileName);
+ if (Directory.length() > 2)
+ Directory.erase(Directory.end()-1);
+
+ // This lists all the changes to the fields we are going to make.
+ TFRewriteData Changes[] = {{"Source",Package.c_str(),"Package"},
+ {"Files",Files},
+ {"Directory",Directory.c_str()},
+ {"Section",SOverItem->Section.c_str()},
+ {"Priority",BestPrio.c_str()},
+ {"Status",0},
+ {}, // For maintainer
+ {}};
+ unsigned int End = 0;
+ for (End = 0; Changes[End].Tag != 0; End++);
+
+ // Rewrite the maintainer field if necessary
+ bool MaintFailed;
+ string NewMaint = OverItem->SwapMaint(Tags.FindS("Maintainer"),MaintFailed);
+ if (MaintFailed == true)
+ {
+ if (NoOverride == false)
+ {
+ NewLine(1);
+ c1out << " " << Package << " maintainer is " <<
+ Tags.FindS("Maintainer") << " not " <<
+ OverItem->OldMaint << endl;
+ }
+ }
+ if (NewMaint.empty() == false)
+ {
+ Changes[End].Rewrite = NewMaint.c_str();
+ Changes[End++].Tag = "Maintainer";
+ }
+
+ // Rewrite and store the fields.
+ if (TFRewrite(Output,Tags,TFRewriteSourceOrder,Changes) == false)
+ return false;
+ fprintf(Output,"\n");
+
+ Stats.Packages++;
+
+ return true;
+}
+ /*}}}*/
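+// Editor's sketch (hypothetical values): the emitted Sources stanza
+// renames "Source:" to "Package:" and carries the rewritten Directory
+// plus a Files list with the .dsc's own hash prepended:
+//
+//    Package: example
+//    Directory: pool/main/e/example
+//    Files:
+//     <md5 of .dsc> 810 example_1.0.dsc
+//     <md5 of tarball> 102400 example_1.0.tar.gz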
+
+// ContentsWriter::ContentsWriter - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+ContentsWriter::ContentsWriter(string DB) :
+ Db(DB), Stats(Db.Stats)
+
+{
+ Ext[0] = ".deb";
+ Ext[1] = 0;
+ Output = stdout;
+}
+ /*}}}*/
+// ContentsWriter::DoPackage - Process a single package /*{{{*/
+// ---------------------------------------------------------------------
+/* If Package is the empty string the control record will be parsed to
+ determine what the package name is. */
+bool ContentsWriter::DoPackage(string FileName,string Package)
+{
+ // Open the archive
+ FileFd F(FileName,FileFd::ReadOnly);
+ if (_error->PendingError() == true)
+ return false;
+
+ // Stat the file for later
+ struct stat St;
+ if (fstat(F.Fd(),&St) != 0)
+ return _error->Errno("fstat","Failed too stat %s",FileName.c_str());
+
+ // Ready the DB
+ if (Db.SetFile(FileName,St,&F) == false ||
+ Db.LoadContents(false) == false)
+ return false;
+
+ // Parse the package name
+ if (Package.empty() == true)
+ {
+ if (Db.LoadControl() == false)
+ return false;
+ Package = Db.Control.Section.FindS("Package");
+ }
+
+ Db.Contents.Add(Gen,Package);
+
+ return Db.Finish();
+}
+ /*}}}*/
+// ContentsWriter::ReadFromPkgs - Read from a packages file /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool ContentsWriter::ReadFromPkgs(string PkgFile,string PkgCompress)
+{
+ MultiCompress Pkgs(PkgFile,PkgCompress,0,false);
+ if (_error->PendingError() == true)
+ return false;
+
+ // Open the package file
+ int CompFd = -1;
+ int Proc = -1;
+ if (Pkgs.OpenOld(CompFd,Proc) == false)
+ return false;
+
+ // No auto-close FD
+ FileFd Fd(CompFd,false);
+ pkgTagFile Tags(&Fd);
+ if (_error->PendingError() == true)
+ {
+ Pkgs.CloseOld(CompFd,Proc);
+ return false;
+ }
+
+ // Parse.
+ pkgTagSection Section;
+ while (Tags.Step(Section) == true)
+ {
+ string File = flCombine(Prefix,Section.FindS("FileName"));
+ string Package = Section.FindS("Section");
+ if (Package.empty() == false && Package.end()[-1] != '/')
+ {
+ Package += '/';
+ Package += Section.FindS("Package");
+ }
+ else
+ Package += Section.FindS("Package");
+
+ DoPackage(File,Package);
+ if (_error->empty() == false)
+ {
+ _error->Error("Errors apply to file '%s'",File.c_str());
+ _error->DumpErrors();
+ }
+ }
+
+ // Tidy the compressor
+ if (Pkgs.CloseOld(CompFd,Proc) == false)
+ return false;
+
+ return true;
+}
+ /*}}}*/
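+// Editor's note: Contents entries are keyed "section/package", so a
+// package "example" in section "admin" is fed to the generator as
+// "admin/example", matching the Section/Package concatenation above.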
diff --git a/ftparchive/writer.h b/ftparchive/writer.h
new file mode 100644
index 00000000..a5fb6f52
--- /dev/null
+++ b/ftparchive/writer.h
@@ -0,0 +1,145 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: writer.h,v 1.2 2001/02/20 07:03:18 jgg Exp $
+/* ######################################################################
+
+ Writer
+
+ The file writer classes. These write various types of output, sources,
+ packages and contents.
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef WRITER_H
+#define WRITER_H
+
+#ifdef __GNUG__
+#pragma interface "writer.h"
+#endif
+
+#include <string>
+#include <stdio.h>
+
+#include "cachedb.h"
+#include "override.h"
+#include "apt-ftparchive.h"
+
+class FTWScanner
+{
+ protected:
+
+ char *TmpExt;
+ const char *Ext[10];
+ const char *OriginalPath;
+ char *RealPath;
+ bool ErrorPrinted;
+
+ // Stuff for the delinker
+ bool NoLinkAct;
+
+ static FTWScanner *Owner;
+ static int Scanner(const char *File,const struct stat *sb,int Flag);
+
+ bool Delink(string &FileName,const char *OriginalPath,
+ unsigned long &Bytes,struct stat &St);
+
+ inline void NewLine(unsigned Priority)
+ {
+ if (ErrorPrinted == false && Quiet <= Priority)
+ {
+ cout << endl;
+ ErrorPrinted = true;
+ }
+ }
+
+ public:
+
+ unsigned long DeLinkLimit;
+ string InternalPrefix;
+
+ virtual bool DoPackage(string FileName) = 0;
+ bool RecursiveScan(string Dir);
+ bool LoadFileList(string BaseDir,string File);
+ bool SetExts(string Vals);
+
+ FTWScanner();
+ virtual ~FTWScanner() {delete [] RealPath; delete [] TmpExt;};
+};
+
+class PackagesWriter : public FTWScanner
+{
+ Override Over;
+ CacheDB Db;
+
+ public:
+
+ // Some flags
+ bool DoMD5;
+ bool NoOverride;
+ bool DoContents;
+
+ // General options
+ string PathPrefix;
+ string DirStrip;
+ FILE *Output;
+ struct CacheDB::Stats &Stats;
+
+ inline bool ReadOverride(string File) {return Over.ReadOverride(File);};
+ virtual bool DoPackage(string FileName);
+
+ PackagesWriter(string DB,string Overrides);
+ virtual ~PackagesWriter() {};
+};
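+// Editor's sketch (hypothetical paths): a driver along the lines of
+// apt-ftparchive would use the writer roughly as
+//
+//    PackagesWriter W("cache.db","override");
+//    W.DirStrip = "/org/ftp.example.org/ftp/";
+//    W.RecursiveScan("/org/ftp.example.org/ftp/pool/");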
+
+class ContentsWriter : public FTWScanner
+{
+ CacheDB Db;
+
+ GenContents Gen;
+
+ public:
+
+ // General options
+ FILE *Output;
+ struct CacheDB::Stats &Stats;
+ string Prefix;
+
+ bool DoPackage(string FileName,string Package);
+ virtual bool DoPackage(string FileName)
+ {return DoPackage(FileName,string());};
+ bool ReadFromPkgs(string PkgFile,string PkgCompress);
+
+ void Finish() {Gen.Print(Output);};
+ inline bool ReadyDB(string DB) {return Db.ReadyDB(DB);};
+
+ ContentsWriter(string DB);
+ virtual ~ContentsWriter() {};
+};
+
+class SourcesWriter : public FTWScanner
+{
+ Override BOver;
+ Override SOver;
+ char *Buffer;
+ unsigned long BufSize;
+
+ public:
+
+ bool NoOverride;
+
+ // General options
+ string PathPrefix;
+ string DirStrip;
+ FILE *Output;
+ struct CacheDB::Stats Stats;
+
+/* inline bool ReadBinOverride(string File) {return BOver.ReadOverride(File);};
+ bool ReadSrcOverride(string File); // {return BOver.ReadOverride(File);};*/
+ virtual bool DoPackage(string FileName);
+
+ SourcesWriter(string BOverrides,string SOverrides);
+ virtual ~SourcesWriter() {free(Buffer);};
+};
+
+
+#endif