Diffstat (limited to 'ftparchive')
-rw-r--r--  ftparchive/apt-ftparchive.cc |  73
-rw-r--r--  ftparchive/cachedb.cc        |  30
-rw-r--r--  ftparchive/cachedb.h         |  22
-rw-r--r--  ftparchive/contents.cc       |   2
-rw-r--r--  ftparchive/contents.h        |   2
-rw-r--r--  ftparchive/multicompress.cc  |  14
-rw-r--r--  ftparchive/multicompress.h   |  12
-rw-r--r--  ftparchive/override.cc       |  16
-rw-r--r--  ftparchive/override.h        |  10
-rw-r--r--  ftparchive/writer.cc         | 168
-rw-r--r--  ftparchive/writer.h          |  60
11 files changed, 263 insertions(+), 146 deletions(-)
diff --git a/ftparchive/apt-ftparchive.cc b/ftparchive/apt-ftparchive.cc
index 5b6b3940c..6f9fa7ab3 100644
--- a/ftparchive/apt-ftparchive.cc
+++ b/ftparchive/apt-ftparchive.cc
@@ -16,6 +16,7 @@
#include <apt-pkg/configuration.h>
#include <apt-pkg/cmndline.h>
#include <apt-pkg/strutl.h>
+#include <apt-pkg/init.h>
#include <config.h>
#include <apti18n.h>
#include <algorithm>
@@ -62,6 +63,10 @@ struct PackageMap
string SrcOverride;
string SrcExtraOverride;
+ // Translation master file
+ bool LongDesc;
+ TranslationWriter *TransWriter;
+
// Contents
string Contents;
string ContentsHead;
@@ -100,8 +105,9 @@ struct PackageMap
vector<PackageMap>::iterator End,
unsigned long &Left);
- PackageMap() : DeLinkLimit(0), Permissions(1), ContentsDone(false),
- PkgDone(false), SrcDone(false), ContentsMTime(0) {};
+ PackageMap() : LongDesc(true), TransWriter(NULL), DeLinkLimit(0), Permissions(1),
+ ContentsDone(false), PkgDone(false), SrcDone(false),
+ ContentsMTime(0) {};
};
/*}}}*/
@@ -129,8 +135,6 @@ void PackageMap::GetGeneral(Configuration &Setup,Configuration &Block)
PkgExt = Block.Find("Packages::Extensions",
Setup.Find("Default::Packages::Extensions",".deb").c_str());
- Permissions = Setup.FindI("Default::FileMode",0644);
-
if (FLFile.empty() == false)
FLFile = flCombine(Setup.Find("Dir::FileListDir"),FLFile);
@@ -169,6 +173,9 @@ bool PackageMap::GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats)
Packages.DirStrip = ArchiveDir;
Packages.InternalPrefix = flCombine(ArchiveDir,InternalPrefix);
+ Packages.TransWriter = TransWriter;
+ Packages.LongDescription = LongDesc;
+
Packages.Stats.DeLinkBytes = Stats.DeLinkBytes;
Packages.DeLinkLimit = DeLinkLimit;
@@ -333,7 +340,7 @@ bool PackageMap::GenContents(Configuration &Setup,
gettimeofday(&StartTime,0);
// Create a package writer object.
- ContentsWriter Contents("");
+ ContentsWriter Contents("", Arch);
if (PkgExt.empty() == false && Contents.SetExts(PkgExt) == false)
return _error->Error(_("Package extension list is too long"));
if (_error->PendingError() == true)
@@ -436,6 +443,8 @@ void LoadTree(vector<PackageMap> &PkgList,Configuration &Setup)
"$(DIST)/$(SECTION)/source/");
string DPkg = Setup.Find("TreeDefault::Packages",
"$(DIST)/$(SECTION)/binary-$(ARCH)/Packages");
+ string DTrans = Setup.Find("TreeDefault::Translation",
+ "$(DIST)/$(SECTION)/i18n/Translation-en");
string DIPrfx = Setup.Find("TreeDefault::InternalPrefix",
"$(DIST)/$(SECTION)/");
string DContents = Setup.Find("TreeDefault::Contents",
@@ -448,6 +457,12 @@ void LoadTree(vector<PackageMap> &PkgList,Configuration &Setup)
string DFLFile = Setup.Find("TreeDefault::FileList", "");
string DSFLFile = Setup.Find("TreeDefault::SourceFileList", "");
+ int const Permissions = Setup.FindI("Default::FileMode",0644);
+
+ bool const LongDescription = Setup.FindB("Default::LongDescription",
+ _config->FindB("APT::FTPArchive::LongDescription", true));
+ string const TranslationCompress = Setup.Find("Default::Translation::Compress",". gzip").c_str();
+
// Process 'tree' type sections
const Configuration::Item *Top = Setup.Tree("tree");
for (Top = (Top == 0?0:Top->Child); Top != 0;)
@@ -461,17 +476,30 @@ void LoadTree(vector<PackageMap> &PkgList,Configuration &Setup)
string Section;
while (ParseQuoteWord(Sections,Section) == true)
{
- string Tmp2 = Block.Find("Architectures");
string Arch;
+ struct SubstVar const Vars[] = {{"$(DIST)",&Dist},
+ {"$(SECTION)",&Section},
+ {"$(ARCH)",&Arch},
+ {}};
+ mode_t const Perms = Block.FindI("FileMode", Permissions);
+ bool const LongDesc = Block.FindB("LongDescription", LongDescription);
+ TranslationWriter *TransWriter;
+ if (DTrans.empty() == false && LongDesc == false)
+ {
+ string const TranslationFile = flCombine(Setup.FindDir("Dir::ArchiveDir"),
+ SubstVar(Block.Find("Translation", DTrans.c_str()), Vars));
+ string const TransCompress = Block.Find("Translation::Compress", TranslationCompress);
+ TransWriter = new TranslationWriter(TranslationFile, TransCompress, Perms);
+ }
+ else
+ TransWriter = NULL;
+
+ string const Tmp2 = Block.Find("Architectures");
const char *Archs = Tmp2.c_str();
while (ParseQuoteWord(Archs,Arch) == true)
{
- struct SubstVar Vars[] = {{"$(DIST)",&Dist},
- {"$(SECTION)",&Section},
- {"$(ARCH)",&Arch},
- {}};
PackageMap Itm;
-
+ Itm.Permissions = Perms;
Itm.BinOverride = SubstVar(Block.Find("BinOverride"),Vars);
Itm.InternalPrefix = SubstVar(Block.Find("InternalPrefix",DIPrfx.c_str()),Vars);
@@ -491,6 +519,12 @@ void LoadTree(vector<PackageMap> &PkgList,Configuration &Setup)
Itm.PkgFile = SubstVar(Block.Find("Packages",DPkg.c_str()),Vars);
Itm.Tag = SubstVar("$(DIST)/$(SECTION)/$(ARCH)",Vars);
Itm.Arch = Arch;
+ Itm.LongDesc = LongDesc;
+ if (TransWriter != NULL)
+ {
+ TransWriter->IncreaseRefCounter();
+ Itm.TransWriter = TransWriter;
+ }
Itm.Contents = SubstVar(Block.Find("Contents",DContents.c_str()),Vars);
Itm.ContentsHead = SubstVar(Block.Find("Contents::Header",DContentsH.c_str()),Vars);
Itm.FLFile = SubstVar(Block.Find("FileList",DFLFile.c_str()),Vars);
@@ -500,6 +534,9 @@ void LoadTree(vector<PackageMap> &PkgList,Configuration &Setup)
Itm.GetGeneral(Setup,Block);
PkgList.push_back(Itm);
}
+ // we didn't use this TransWriter, so we can release it
+ if (TransWriter != NULL && TransWriter->GetRefCounter() == 0)
+ delete TransWriter;
}
Top = Top->Next;
@@ -606,7 +643,7 @@ bool SimpleGenPackages(CommandLine &CmdL)
// Create a package writer object.
PackagesWriter Packages(_config->Find("APT::FTPArchive::DB"),
- Override, "");
+ Override, "", _config->Find("APT::FTPArchive::Architecture"));
if (_error->PendingError() == true)
return false;
@@ -629,7 +666,7 @@ bool SimpleGenContents(CommandLine &CmdL)
return ShowHelp(CmdL);
// Create a package writer object.
- ContentsWriter Contents(_config->Find("APT::FTPArchive::DB"));
+ ContentsWriter Contents(_config->Find("APT::FTPArchive::DB"), _config->Find("APT::FTPArchive::Architecture"));
if (_error->PendingError() == true)
return false;
@@ -788,7 +825,12 @@ bool Generate(CommandLine &CmdL)
delete [] List;
}
-
+
+ // close the Translation master files
+ for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
+ if (I->TransWriter != NULL && I->TransWriter->DecreaseRefCounter() == 0)
+ delete I->TransWriter;
+
if (_config->FindB("APT::FTPArchive::Contents",true) == false)
return true;
@@ -910,6 +952,7 @@ int main(int argc, const char *argv[])
{0,"delink","APT::FTPArchive::DeLinkAct",0},
{0,"readonly","APT::FTPArchive::ReadOnlyDB",0},
{0,"contents","APT::FTPArchive::Contents",0},
+ {'a',"arch","APT::FTPArchive::Architecture",CommandLine::HasArg},
{'c',"config-file",0,CommandLine::ConfigFile},
{'o',"option",0,CommandLine::ArbItem},
{0,0,0,0}};
@@ -924,7 +967,7 @@ int main(int argc, const char *argv[])
// Parse the command line and initialize the package library
CommandLine CmdL(Args,_config);
- if (CmdL.Parse(argc,argv) == false)
+ if (pkgInitConfig(*_config) == false || CmdL.Parse(argc,argv) == false)
{
_error->DumpErrors();
return 100;
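
The LoadTree()/Generate() changes above share one TranslationWriter per tree section across all architectures and only free it once the last user has finished. Below is a minimal standalone sketch of that manual reference-counting scheme; the SharedWriter/Consumer names are invented for illustration, the real counterparts being TranslationWriter and the PackageMap list.

#include <cstdio>
#include <vector>

struct SharedWriter
{
   unsigned short RefCounter;
   SharedWriter() : RefCounter(0) {}
   ~SharedWriter() { std::printf("writer closed\n"); }
   void IncreaseRefCounter() { ++RefCounter; }
   unsigned short DecreaseRefCounter() { return (RefCounter == 0) ? 0 : --RefCounter; }
   unsigned short GetRefCounter() const { return RefCounter; }
};

struct Consumer { SharedWriter *Writer; };

int main()
{
   std::vector<Consumer> List;
   SharedWriter *W = new SharedWriter();        // one writer per tree section
   for (int Arch = 0; Arch < 3; ++Arch)         // every architecture takes a reference
   {
      W->IncreaseRefCounter();
      Consumer C;
      C.Writer = W;
      List.push_back(C);
   }
   if (W->GetRefCounter() == 0)                 // no architecture used it: drop it now
      delete W;
   // ... package generation would run here ...
   for (std::vector<Consumer>::iterator I = List.begin(); I != List.end(); ++I)
      if (I->Writer != NULL && I->Writer->DecreaseRefCounter() == 0)
         delete I->Writer;                      // last reference closes the file
   return 0;
}

This mirrors why Generate() only deletes a PackageMap's TransWriter when DecreaseRefCounter() returns 0.
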
diff --git a/ftparchive/cachedb.cc b/ftparchive/cachedb.cc
index e02f0e1b6..b04244347 100644
--- a/ftparchive/cachedb.cc
+++ b/ftparchive/cachedb.cc
@@ -26,7 +26,7 @@
// CacheDB::ReadyDB - Ready the DB2 /*{{{*/
// ---------------------------------------------------------------------
/* This opens the DB2 file for caching package information */
-bool CacheDB::ReadyDB(string DB)
+bool CacheDB::ReadyDB(string const &DB)
{
int err;
@@ -102,9 +102,9 @@ bool CacheDB::OpenFile()
// ---------------------------------------------------------------------
/* This gets the size from the database if it's there. If we need
* to look at the file, also get the mtime from the file. */
-bool CacheDB::GetFileStat()
+bool CacheDB::GetFileStat(bool const &doStat)
{
- if ((CurStat.Flags & FlSize) == FlSize)
+ if ((CurStat.Flags & FlSize) == FlSize && doStat == false)
{
/* Already worked out the file size */
}
@@ -160,9 +160,9 @@ bool CacheDB::GetCurStat()
/*}}}*/
// CacheDB::GetFileInfo - Get all the info about the file /*{{{*/
// ---------------------------------------------------------------------
-bool CacheDB::GetFileInfo(string FileName, bool DoControl, bool DoContents,
- bool GenContentsOnly,
- bool DoMD5, bool DoSHA1, bool DoSHA256)
+bool CacheDB::GetFileInfo(string const &FileName, bool const &DoControl, bool const &DoContents,
+ bool const &GenContentsOnly, bool const &DoMD5, bool const &DoSHA1,
+ bool const &DoSHA256, bool const &checkMtime)
{
this->FileName = FileName;
@@ -171,14 +171,18 @@ bool CacheDB::GetFileInfo(string FileName, bool DoControl, bool DoContents,
return false;
}
OldStat = CurStat;
-
- if (GetFileStat() == false)
+
+ if (GetFileStat(checkMtime) == false)
{
delete Fd;
Fd = NULL;
return false;
}
+ /* if mtime changed, update CurStat from disk */
+ if (checkMtime == true && OldStat.mtime != CurStat.mtime)
+ CurStat.Flags = FlSize;
+
Stats.Bytes += CurStat.FileSize;
Stats.Packages++;
@@ -247,7 +251,7 @@ bool CacheDB::LoadControl()
// CacheDB::LoadContents - Load the File Listing /*{{{*/
// ---------------------------------------------------------------------
/* */
-bool CacheDB::LoadContents(bool GenOnly)
+bool CacheDB::LoadContents(bool const &GenOnly)
{
// Try to read the control information out of the DB.
if ((CurStat.Flags & FlContents) == FlContents)
@@ -297,7 +301,7 @@ static string bytes2hex(uint8_t *bytes, size_t length) {
return string(space);
}
-static inline unsigned char xdig2num(char dig) {
+static inline unsigned char xdig2num(char const &dig) {
if (isdigit(dig)) return dig - '0';
if ('a' <= dig && dig <= 'f') return dig - 'a' + 10;
if ('A' <= dig && dig <= 'F') return dig - 'A' + 10;
@@ -318,7 +322,7 @@ static void hex2bytes(uint8_t *bytes, const char *hex, int length) {
// CacheDB::GetMD5 - Get the MD5 hash /*{{{*/
// ---------------------------------------------------------------------
/* */
-bool CacheDB::GetMD5(bool GenOnly)
+bool CacheDB::GetMD5(bool const &GenOnly)
{
// Try to read the control information out of the DB.
if ((CurStat.Flags & FlMD5) == FlMD5)
@@ -349,7 +353,7 @@ bool CacheDB::GetMD5(bool GenOnly)
// CacheDB::GetSHA1 - Get the SHA1 hash /*{{{*/
// ---------------------------------------------------------------------
/* */
-bool CacheDB::GetSHA1(bool GenOnly)
+bool CacheDB::GetSHA1(bool const &GenOnly)
{
// Try to read the control information out of the DB.
if ((CurStat.Flags & FlSHA1) == FlSHA1)
@@ -380,7 +384,7 @@ bool CacheDB::GetSHA1(bool GenOnly)
// CacheDB::GetSHA256 - Get the SHA256 hash /*{{{*/
// ---------------------------------------------------------------------
/* */
-bool CacheDB::GetSHA256(bool GenOnly)
+bool CacheDB::GetSHA256(bool const &GenOnly)
{
// Try to read the control information out of the DB.
if ((CurStat.Flags & FlSHA256) == FlSHA256)
diff --git a/ftparchive/cachedb.h b/ftparchive/cachedb.h
index c10f41ecc..0ba80909a 100644
--- a/ftparchive/cachedb.h
+++ b/ftparchive/cachedb.h
@@ -49,7 +49,7 @@ class CacheDB
{
return Dbp->get(Dbp,0,&Key,&Data,0) == 0;
};
- inline bool Put(const void *In,unsigned long Length)
+ inline bool Put(const void *In,unsigned long const &Length)
{
if (ReadOnly == true)
return true;
@@ -63,13 +63,13 @@ class CacheDB
return true;
}
bool OpenFile();
- bool GetFileStat();
+ bool GetFileStat(bool const &doStat = false);
bool GetCurStat();
bool LoadControl();
- bool LoadContents(bool GenOnly);
- bool GetMD5(bool GenOnly);
- bool GetSHA1(bool GenOnly);
- bool GetSHA256(bool GenOnly);
+ bool LoadContents(bool const &GenOnly);
+ bool GetMD5(bool const &GenOnly);
+ bool GetSHA1(bool const &GenOnly);
+ bool GetSHA256(bool const &GenOnly);
// Stat info stored in the DB, Fixed types since it is written to disk.
enum FlagList {FlControl = (1<<0),FlMD5=(1<<1),FlContents=(1<<2),
@@ -117,20 +117,20 @@ class CacheDB
Stats() : Bytes(0), MD5Bytes(0), SHA1Bytes(0), SHA256Bytes(0), Packages(0), Misses(0), DeLinkBytes(0) {};
} Stats;
- bool ReadyDB(string DB);
+ bool ReadyDB(string const &DB);
inline bool DBFailed() {return Dbp != 0 && DBLoaded == false;};
inline bool Loaded() {return DBLoaded == true;};
inline off_t GetFileSize(void) {return CurStat.FileSize;}
- bool SetFile(string FileName,struct stat St,FileFd *Fd);
- bool GetFileInfo(string FileName, bool DoControl, bool DoContents,
- bool GenContentsOnly, bool DoMD5, bool DoSHA1, bool DoSHA256);
+ bool SetFile(string const &FileName,struct stat St,FileFd *Fd);
+ bool GetFileInfo(string const &FileName, bool const &DoControl, bool const &DoContents, bool const &GenContentsOnly,
+ bool const &DoMD5, bool const &DoSHA1, bool const &DoSHA256, bool const &checkMtime = false);
bool Finish();
bool Clean();
- CacheDB(string DB) : Dbp(0), Fd(NULL), DebFile(0) {ReadyDB(DB);};
+ CacheDB(string const &DB) : Dbp(0), Fd(NULL), DebFile(0) {ReadyDB(DB);};
~CacheDB() {ReadyDB(string()); delete DebFile;};
};
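
The new doStat/checkMtime path is the backend for APT::FTPArchive::AlwaysStat: when the stored mtime no longer matches the file on disk, every cached flag except the freshly re-read size is dropped, so control data and hashes are recomputed. A rough standalone sketch of that invalidation logic follows; the flag values past FlContents are not visible in this hunk and are assumed here, and the struct is reproduced only for illustration (the real code is CacheDB::GetFileStat() and GetFileInfo()).

#include <sys/stat.h>

enum FlagList { FlControl = (1<<0), FlMD5 = (1<<1), FlContents = (1<<2),
                FlSize = (1<<3), FlSHA1 = (1<<4), FlSHA256 = (1<<5) };

struct StatStore
{
   unsigned int Flags;
   unsigned long mtime;
   unsigned long FileSize;
};

// Returns false if the file cannot be stat()ed; otherwise refreshes the cached
// size/mtime and clears stale flags when the mtime changed and checking is on.
bool RefreshStat(char const *FileName, StatStore &Cur, bool const checkMtime)
{
   StatStore const Old = Cur;
   if ((Cur.Flags & FlSize) != FlSize || checkMtime == true)
   {
      struct stat St;
      if (stat(FileName, &St) != 0)
         return false;
      Cur.FileSize = St.st_size;
      Cur.mtime = St.st_mtime;
      Cur.Flags |= FlSize;
   }
   if (checkMtime == true && Old.mtime != Cur.mtime)
      Cur.Flags = FlSize;                       // forget control, contents and hashes
   return true;
}
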
diff --git a/ftparchive/contents.cc b/ftparchive/contents.cc
index fb1438f74..b761d9204 100644
--- a/ftparchive/contents.cc
+++ b/ftparchive/contents.cc
@@ -399,7 +399,7 @@ bool ContentsExtract::TakeContents(const void *NewData,unsigned long Length)
// ContentsExtract::Add - Read the contents data into the sorter /*{{{*/
// ---------------------------------------------------------------------
/* */
-void ContentsExtract::Add(GenContents &Contents,string Package)
+void ContentsExtract::Add(GenContents &Contents,string const &Package)
{
const char *Start = Data;
char *Pkg = Contents.Mystrdup(Package.c_str());
diff --git a/ftparchive/contents.h b/ftparchive/contents.h
index d8457cd45..5b5092b66 100644
--- a/ftparchive/contents.h
+++ b/ftparchive/contents.h
@@ -80,7 +80,7 @@ class ContentsExtract : public pkgDirStream
virtual bool DoItem(Item &Itm,int &Fd);
void Reset() {CurSize = 0;};
bool TakeContents(const void *Data,unsigned long Length);
- void Add(GenContents &Contents,string Package);
+ void Add(GenContents &Contents,string const &Package);
ContentsExtract() : Data(0), MaxSize(0), CurSize(0) {};
virtual ~ContentsExtract() {delete [] Data;};
diff --git a/ftparchive/multicompress.cc b/ftparchive/multicompress.cc
index 16cef9769..bb4beedf9 100644
--- a/ftparchive/multicompress.cc
+++ b/ftparchive/multicompress.cc
@@ -40,14 +40,14 @@ const MultiCompress::CompType MultiCompress::Compressors[] =
// MultiCompress::MultiCompress - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* Setup the file outputs, compression modes and fork the writer child */
-MultiCompress::MultiCompress(string Output,string Compress,
- mode_t Permissions,bool Write)
+MultiCompress::MultiCompress(string const &Output,string const &Compress,
+ mode_t const &Permissions,bool const &Write) :
+ Permissions(Permissions)
{
Outputs = 0;
Outputter = -1;
Input = 0;
UpdateMTime = 0;
- this->Permissions = Permissions;
/* Parse the compression string, a space separated lists of compresison
types */
@@ -126,7 +126,7 @@ MultiCompress::~MultiCompress()
/* This checks each compressed file to make sure it exists and returns
stat information for a random file from the collection. False means
one or more of the files is missing. */
-bool MultiCompress::GetStat(string Output,string Compress,struct stat &St)
+bool MultiCompress::GetStat(string const &Output,string const &Compress,struct stat &St)
{
/* Parse the compression string, a space separated lists of compresison
types */
@@ -268,8 +268,8 @@ bool MultiCompress::Finalize(unsigned long &OutSize)
/* This opens the compressor, either in compress mode or decompress
mode. FileFd is always the compressor input/output file,
OutFd is the created pipe, Input for Compress, Output for Decompress. */
-bool MultiCompress::OpenCompress(const CompType *Prog,pid_t &Pid,int FileFd,
- int &OutFd,bool Comp)
+bool MultiCompress::OpenCompress(const CompType *Prog,pid_t &Pid,int const &FileFd,
+ int &OutFd,bool const &Comp)
{
Pid = -1;
@@ -369,7 +369,7 @@ bool MultiCompress::CloseOld(int Fd,pid_t Proc)
computes the MD5 of the raw data. After this the raw data in the
original files is compared to see if this data is new. If the data
is new then the temp files are renamed, otherwise they are erased. */
-bool MultiCompress::Child(int FD)
+bool MultiCompress::Child(int const &FD)
{
// Start the compression children.
for (Files *I = Outputs; I != 0; I = I->Next)
diff --git a/ftparchive/multicompress.h b/ftparchive/multicompress.h
index a65077e73..3ac3b8fb2 100644
--- a/ftparchive/multicompress.h
+++ b/ftparchive/multicompress.h
@@ -53,9 +53,9 @@ class MultiCompress
mode_t Permissions;
static const CompType Compressors[];
- bool OpenCompress(const CompType *Prog,pid_t &Pid,int FileFd,
- int &OutFd,bool Comp);
- bool Child(int Fd);
+ bool OpenCompress(const CompType *Prog,pid_t &Pid,int const &FileFd,
+ int &OutFd,bool const &Comp);
+ bool Child(int const &Fd);
bool Start();
bool Die();
@@ -68,10 +68,10 @@ class MultiCompress
bool Finalize(unsigned long &OutSize);
bool OpenOld(int &Fd,pid_t &Proc);
bool CloseOld(int Fd,pid_t Proc);
- static bool GetStat(string Output,string Compress,struct stat &St);
+ static bool GetStat(string const &Output,string const &Compress,struct stat &St);
- MultiCompress(string Output,string Compress,mode_t Permissions,
- bool Write = true);
+ MultiCompress(string const &Output,string const &Compress,
+ mode_t const &Permissions, bool const &Write = true);
~MultiCompress();
};
diff --git a/ftparchive/override.cc b/ftparchive/override.cc
index 6f40bc865..3cf10b89b 100644
--- a/ftparchive/override.cc
+++ b/ftparchive/override.cc
@@ -24,7 +24,7 @@
// Override::ReadOverride - Read the override file /*{{{*/
// ---------------------------------------------------------------------
/* This parses the override file and reads it into the map */
-bool Override::ReadOverride(string File,bool Source)
+bool Override::ReadOverride(string const &File,bool const &Source)
{
if (File.empty() == true)
return true;
@@ -132,7 +132,7 @@ bool Override::ReadOverride(string File,bool Source)
// Override::ReadExtraOverride - Read the extra override file /*{{{*/
// ---------------------------------------------------------------------
/* This parses the extra override file and reads it into the map */
-bool Override::ReadExtraOverride(string File,bool Source)
+bool Override::ReadExtraOverride(string const &File,bool const &Source)
{
if (File.empty() == true)
return true;
@@ -209,9 +209,9 @@ bool Override::ReadExtraOverride(string File,bool Source)
/* Returns a override item for the given package and the given architecture.
* Treats "all" special
*/
-Override::Item* Override::GetItem(string Package, string Architecture)
+Override::Item* Override::GetItem(string const &Package, string const &Architecture)
{
- map<string,Item>::iterator I = Mapping.find(Package);
+ map<string,Item>::const_iterator I = Mapping.find(Package);
map<string,Item>::iterator J = Mapping.find(Package + "/" + Architecture);
if (I == Mapping.end() && J == Mapping.end())
@@ -230,7 +230,7 @@ Override::Item* Override::GetItem(string Package, string Architecture)
if (R->Priority != "") result->Priority = R->Priority;
if (R->OldMaint != "") result->OldMaint = R->OldMaint;
if (R->NewMaint != "") result->NewMaint = R->NewMaint;
- for (map<string,string>::iterator foI = R->FieldOverride.begin();
+ for (map<string,string>::const_iterator foI = R->FieldOverride.begin();
foI != R->FieldOverride.end(); foI++)
{
result->FieldOverride[foI->first] = foI->second;
@@ -247,7 +247,7 @@ Override::Item* Override::GetItem(string Package, string Architecture)
there is a rule but it does not match then the empty string is returned,
also if there was no rewrite rule the empty string is returned. Failed
indicates if there was some kind of problem while rewriting. */
-string Override::Item::SwapMaint(string Orig,bool &Failed)
+string Override::Item::SwapMaint(string const &Orig,bool &Failed)
{
Failed = false;
@@ -262,10 +262,10 @@ string Override::Item::SwapMaint(string Orig,bool &Failed)
override file. Thus it persists.*/
#if 1
// Break OldMaint up into little bits on double slash boundaries.
- string::iterator End = OldMaint.begin();
+ string::const_iterator End = OldMaint.begin();
while (1)
{
- string::iterator Start = End;
+ string::const_iterator Start = End;
for (; End < OldMaint.end() &&
(End + 3 >= OldMaint.end() || End[0] != ' ' ||
End[1] != '/' || End[2] != '/'); End++);
diff --git a/ftparchive/override.h b/ftparchive/override.h
index f270556eb..c5cacc2b4 100644
--- a/ftparchive/override.h
+++ b/ftparchive/override.h
@@ -31,20 +31,20 @@ class Override
string NewMaint;
map<string,string> FieldOverride;
- string SwapMaint(string Orig,bool &Failed);
+ string SwapMaint(string const &Orig,bool &Failed);
~Item() {};
};
map<string,Item> Mapping;
- inline Item *GetItem(string Package)
+ inline Item *GetItem(string const &Package)
{
return GetItem(Package, "");
}
- Item *GetItem(string Package, string Architecture);
+ Item *GetItem(string const &Package, string const &Architecture);
- bool ReadOverride(string File,bool Source = false);
- bool ReadExtraOverride(string File,bool Source = false);
+ bool ReadOverride(string const &File,bool const &Source = false);
+ bool ReadExtraOverride(string const &File,bool const &Source = false);
};
#endif
diff --git a/ftparchive/writer.cc b/ftparchive/writer.cc
index 95b73a84d..6cda29b21 100644
--- a/ftparchive/writer.cc
+++ b/ftparchive/writer.cc
@@ -28,6 +28,7 @@
#include <ftw.h>
#include <fnmatch.h>
#include <iostream>
+#include <sstream>
#include <memory>
#include "cachedb.h"
@@ -54,7 +55,7 @@ inline void SetTFRewriteData(struct TFRewriteData &tfrd,
// FTWScanner::FTWScanner - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
-FTWScanner::FTWScanner()
+FTWScanner::FTWScanner(string const &Arch): Arch(Arch)
{
ErrorPrinted = false;
NoLinkAct = !_config->FindB("APT::FTPArchive::DeLinkAct",true);
@@ -85,7 +86,7 @@ int FTWScanner::ScannerFTW(const char *File,const struct stat *sb,int Flag)
// FTWScanner::ScannerFile - File Scanner /*{{{*/
// ---------------------------------------------------------------------
/* */
-int FTWScanner::ScannerFile(const char *File, bool ReadLink)
+int FTWScanner::ScannerFile(const char *File, bool const &ReadLink)
{
const char *LastComponent = strrchr(File, '/');
char *RealPath = NULL;
@@ -95,7 +96,7 @@ int FTWScanner::ScannerFile(const char *File, bool ReadLink)
else
LastComponent++;
- vector<string>::iterator I;
+ vector<string>::const_iterator I;
for(I = Owner->Patterns.begin(); I != Owner->Patterns.end(); ++I)
{
if (fnmatch((*I).c_str(), LastComponent, 0) == 0)
@@ -128,7 +129,7 @@ int FTWScanner::ScannerFile(const char *File, bool ReadLink)
{
Owner->NewLine(1);
- bool Type = _error->PopMessage(Err);
+ bool const Type = _error->PopMessage(Err);
if (Type == true)
cerr << _("E: ") << Err << endl;
else
@@ -149,7 +150,7 @@ int FTWScanner::ScannerFile(const char *File, bool ReadLink)
// FTWScanner::RecursiveScan - Just scan a directory tree /*{{{*/
// ---------------------------------------------------------------------
/* */
-bool FTWScanner::RecursiveScan(string Dir)
+bool FTWScanner::RecursiveScan(string const &Dir)
{
char *RealPath = NULL;
/* If noprefix is set then jam the scan root in, so we don't generate
@@ -164,7 +165,7 @@ bool FTWScanner::RecursiveScan(string Dir)
// Do recursive directory searching
Owner = this;
- int Res = ftw(Dir.c_str(),ScannerFTW,30);
+ int const Res = ftw(Dir.c_str(),ScannerFTW,30);
// Error treewalking?
if (Res != 0)
@@ -181,7 +182,7 @@ bool FTWScanner::RecursiveScan(string Dir)
// ---------------------------------------------------------------------
/* This is an alternative to using FTW to locate files, it reads the list
of files from another file. */
-bool FTWScanner::LoadFileList(string Dir,string File)
+bool FTWScanner::LoadFileList(string const &Dir, string const &File)
{
char *RealPath = NULL;
/* If noprefix is set then jam the scan root in, so we don't generate
@@ -241,7 +242,7 @@ bool FTWScanner::LoadFileList(string Dir,string File)
/* */
bool FTWScanner::Delink(string &FileName,const char *OriginalPath,
unsigned long &DeLinkBytes,
- off_t FileSize)
+ off_t const &FileSize)
{
// See if this isn't an internaly prefix'd file name.
if (InternalPrefix.empty() == false &&
@@ -298,26 +299,26 @@ bool FTWScanner::Delink(string &FileName,const char *OriginalPath,
// PackagesWriter::PackagesWriter - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
-PackagesWriter::PackagesWriter(string DB,string Overrides,string ExtOverrides,
- string aArch) :
- Db(DB),Stats(Db.Stats), Arch(aArch)
+PackagesWriter::PackagesWriter(string const &DB,string const &Overrides,string const &ExtOverrides,
+ string const &Arch) :
+ FTWScanner(Arch), Db(DB), Stats(Db.Stats), TransWriter(NULL)
{
Output = stdout;
- SetExts(".deb .udeb .foo .bar .baz");
- AddPattern("*.deb");
+ SetExts(".deb .udeb");
DeLinkLimit = 0;
// Process the command line options
DoMD5 = _config->FindB("APT::FTPArchive::MD5",true);
DoSHA1 = _config->FindB("APT::FTPArchive::SHA1",true);
DoSHA256 = _config->FindB("APT::FTPArchive::SHA256",true);
+ DoAlwaysStat = _config->FindB("APT::FTPArchive::AlwaysStat", false);
DoContents = _config->FindB("APT::FTPArchive::Contents",true);
NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false);
LongDescription = _config->FindB("APT::FTPArchive::LongDescription",true);
if (Db.Loaded() == false)
DoContents = false;
-
+
// Read the override file
if (Overrides.empty() == false && Over.ReadOverride(Overrides) == false)
return;
@@ -333,23 +334,22 @@ PackagesWriter::PackagesWriter(string DB,string Overrides,string ExtOverrides,
// FTWScanner::SetExts - Set extensions to support /*{{{*/
// ---------------------------------------------------------------------
/* */
-bool FTWScanner::SetExts(string Vals)
+bool FTWScanner::SetExts(string const &Vals)
{
ClearPatterns();
string::size_type Start = 0;
while (Start <= Vals.length()-1)
{
- string::size_type Space = Vals.find(' ',Start);
- string::size_type Length;
- if (Space == string::npos)
+ string::size_type const Space = Vals.find(' ',Start);
+ string::size_type const Length = ((Space == string::npos) ? Vals.length() : Space) - Start;
+ if ( Arch.empty() == false )
{
- Length = Vals.length()-Start;
+ AddPattern(string("*_") + Arch + Vals.substr(Start, Length));
+ AddPattern(string("*_all") + Vals.substr(Start, Length));
}
else
- {
- Length = Space-Start;
- }
- AddPattern(string("*") + Vals.substr(Start, Length));
+ AddPattern(string("*") + Vals.substr(Start, Length));
+
Start += Length + 1;
}
@@ -365,7 +365,7 @@ bool FTWScanner::SetExts(string Vals)
bool PackagesWriter::DoPackage(string FileName)
{
// Pull all the data we need form the DB
- if (Db.GetFileInfo(FileName, true, DoContents, true, DoMD5, DoSHA1, DoSHA256)
+ if (Db.GetFileInfo(FileName, true, DoContents, true, DoMD5, DoSHA1, DoSHA256, DoAlwaysStat)
== false)
{
return false;
@@ -449,6 +449,8 @@ bool PackagesWriter::DoPackage(string FileName)
descmd5.Add(desc.c_str());
DescriptionMd5 = descmd5.Result().Value();
SetTFRewriteData(Changes[End++], "Description-md5", DescriptionMd5.c_str());
+ if (TransWriter != NULL)
+ TransWriter->DoPackage(Package, desc, DescriptionMd5);
}
// Rewrite the maintainer field if necessary
@@ -480,7 +482,7 @@ bool PackagesWriter::DoPackage(string FileName)
SetTFRewriteData(Changes[End++], "Suggests", OptionalStr.c_str());
}
- for (map<string,string>::iterator I = OverItem->FieldOverride.begin();
+ for (map<string,string>::const_iterator I = OverItem->FieldOverride.begin();
I != OverItem->FieldOverride.end(); I++)
SetTFRewriteData(Changes[End++],I->first.c_str(),I->second.c_str());
@@ -495,11 +497,60 @@ bool PackagesWriter::DoPackage(string FileName)
}
/*}}}*/
+// TranslationWriter::TranslationWriter - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* Create a Translation-Master file for this Packages file */
+TranslationWriter::TranslationWriter(string const &File, string const &TransCompress,
+ mode_t const &Permissions) : Output(NULL),
+ RefCounter(0)
+{
+ if (File.empty() == true)
+ return;
+
+ Comp = new MultiCompress(File, TransCompress, Permissions);
+ Output = Comp->Input;
+}
+ /*}}}*/
+// TranslationWriter::DoPackage - Process a single package /*{{{*/
+// ---------------------------------------------------------------------
+/* Create a Translation-Master file for this Packages file */
+bool TranslationWriter::DoPackage(string const &Pkg, string const &Desc,
+ string const &MD5)
+{
+ if (Output == NULL)
+ return true;
+
+ // Different archs can include different versions and therefore
+ // different descriptions - so we need to check for both name and md5.
+ string const Record = Pkg + ":" + MD5;
+
+ if (Included.find(Record) != Included.end())
+ return true;
+
+ fprintf(Output, "Package: %s\nDescription-md5: %s\nDescription-en: %s\n",
+ Pkg.c_str(), MD5.c_str(), Desc.c_str());
+
+ Included.insert(Record);
+ return true;
+}
+ /*}}}*/
+// TranslationWriter::~TranslationWriter - Destructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+TranslationWriter::~TranslationWriter()
+{
+ if (Comp == NULL)
+ return;
+
+ delete Comp;
+}
+ /*}}}*/
+
// SourcesWriter::SourcesWriter - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
-SourcesWriter::SourcesWriter(string BOverrides,string SOverrides,
- string ExtOverrides)
+SourcesWriter::SourcesWriter(string const &BOverrides,string const &SOverrides,
+ string const &ExtOverrides)
{
Output = stdout;
AddPattern("*.dsc");
@@ -656,23 +707,20 @@ bool SourcesWriter::DoPackage(string FileName)
// Add the dsc to the files hash list
string const strippedName = flNotDir(FileName);
- char Files[1000];
- snprintf(Files,sizeof(Files),"\n %s %lu %s\n %s",
- string(MD5.Result()).c_str(),St.st_size,
- strippedName.c_str(),
- Tags.FindS("Files").c_str());
-
- char ChecksumsSha1[1000];
- snprintf(ChecksumsSha1,sizeof(ChecksumsSha1),"\n %s %lu %s\n %s",
- string(SHA1.Result()).c_str(),St.st_size,
- strippedName.c_str(),
- Tags.FindS("Checksums-Sha1").c_str());
-
- char ChecksumsSha256[1000];
- snprintf(ChecksumsSha256,sizeof(ChecksumsSha256),"\n %s %lu %s\n %s",
- string(SHA256.Result()).c_str(),St.st_size,
- strippedName.c_str(),
- Tags.FindS("Checksums-Sha256").c_str());
+ std::ostringstream ostreamFiles;
+ ostreamFiles << "\n " << string(MD5.Result()) << " " << St.st_size << " "
+ << strippedName << "\n " << Tags.FindS("Files");
+ string const Files = ostreamFiles.str();
+
+ std::ostringstream ostreamSha1;
+ ostreamSha1 << "\n " << string(SHA1.Result()) << " " << St.st_size << " "
+ << strippedName << "\n " << Tags.FindS("Checksums-Sha1");
+ string const ChecksumsSha1 = ostreamSha1.str();
+
+ std::ostringstream ostreamSha256;
+ ostreamSha256 << "\n " << string(SHA256.Result()) << " " << St.st_size << " "
+ << strippedName << "\n " << Tags.FindS("Checksums-Sha256");
+ string const ChecksumsSha256 = ostreamSha256.str();
// Strip the DirStrip prefix from the FileName and add the PathPrefix
string NewFileName;
@@ -690,7 +738,7 @@ bool SourcesWriter::DoPackage(string FileName)
// Perform the delinking operation over all of the files
string ParseJnk;
- const char *C = Files;
+ const char *C = Files.c_str();
char *RealPath = NULL;
for (;isspace(*C); C++);
while (*C != 0)
@@ -723,9 +771,9 @@ bool SourcesWriter::DoPackage(string FileName)
unsigned int End = 0;
SetTFRewriteData(Changes[End++],"Source",Package.c_str(),"Package");
- SetTFRewriteData(Changes[End++],"Files",Files);
- SetTFRewriteData(Changes[End++],"Checksums-Sha1",ChecksumsSha1);
- SetTFRewriteData(Changes[End++],"Checksums-Sha256",ChecksumsSha256);
+ SetTFRewriteData(Changes[End++],"Files",Files.c_str());
+ SetTFRewriteData(Changes[End++],"Checksums-Sha1",ChecksumsSha1.c_str());
+ SetTFRewriteData(Changes[End++],"Checksums-Sha256",ChecksumsSha256.c_str());
if (Directory != "./")
SetTFRewriteData(Changes[End++],"Directory",Directory.c_str());
SetTFRewriteData(Changes[End++],"Priority",BestPrio.c_str());
@@ -746,7 +794,7 @@ bool SourcesWriter::DoPackage(string FileName)
if (NewMaint.empty() == false)
SetTFRewriteData(Changes[End++], "Maintainer", NewMaint.c_str());
- for (map<string,string>::iterator I = SOverItem->FieldOverride.begin();
+ for (map<string,string>::const_iterator I = SOverItem->FieldOverride.begin();
I != SOverItem->FieldOverride.end(); I++)
SetTFRewriteData(Changes[End++],I->first.c_str(),I->second.c_str());
@@ -766,11 +814,11 @@ bool SourcesWriter::DoPackage(string FileName)
// ContentsWriter::ContentsWriter - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
-ContentsWriter::ContentsWriter(string DB) :
- Db(DB), Stats(Db.Stats)
+ContentsWriter::ContentsWriter(string const &DB, string const &Arch) :
+ FTWScanner(Arch), Db(DB), Stats(Db.Stats)
{
- AddPattern("*.deb");
+ SetExts(".deb");
Output = stdout;
}
/*}}}*/
@@ -778,9 +826,9 @@ ContentsWriter::ContentsWriter(string DB) :
// ---------------------------------------------------------------------
/* If Package is the empty string the control record will be parsed to
determine what the package name is. */
-bool ContentsWriter::DoPackage(string FileName,string Package)
+bool ContentsWriter::DoPackage(string FileName, string Package)
{
- if (!Db.GetFileInfo(FileName, Package.empty(), true, false, false, false, false))
+ if (!Db.GetFileInfo(FileName, Package.empty(), true, false, false, false, false, false))
{
return false;
}
@@ -799,7 +847,7 @@ bool ContentsWriter::DoPackage(string FileName,string Package)
// ContentsWriter::ReadFromPkgs - Read from a packages file /*{{{*/
// ---------------------------------------------------------------------
/* */
-bool ContentsWriter::ReadFromPkgs(string PkgFile,string PkgCompress)
+bool ContentsWriter::ReadFromPkgs(string const &PkgFile,string const &PkgCompress)
{
MultiCompress Pkgs(PkgFile,PkgCompress,0,false);
if (_error->PendingError() == true)
@@ -854,7 +902,7 @@ bool ContentsWriter::ReadFromPkgs(string PkgFile,string PkgCompress)
// ReleaseWriter::ReleaseWriter - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
-ReleaseWriter::ReleaseWriter(string DB)
+ReleaseWriter::ReleaseWriter(string const &DB)
{
AddPattern("Packages");
AddPattern("Packages.gz");
@@ -868,7 +916,7 @@ ReleaseWriter::ReleaseWriter(string DB)
AddPattern("md5sum.txt");
Output = stdout;
- time_t now = time(NULL);
+ time_t const now = time(NULL);
char datestr[128];
if (strftime(datestr, sizeof(datestr), "%a, %d %b %Y %H:%M:%S UTC",
gmtime(&now)) == 0)
@@ -955,7 +1003,7 @@ bool ReleaseWriter::DoPackage(string FileName)
void ReleaseWriter::Finish()
{
fprintf(Output, "MD5Sum:\n");
- for(map<string,struct CheckSum>::iterator I = CheckSums.begin();
+ for(map<string,struct CheckSum>::const_iterator I = CheckSums.begin();
I != CheckSums.end();
++I)
{
@@ -966,7 +1014,7 @@ void ReleaseWriter::Finish()
}
fprintf(Output, "SHA1:\n");
- for(map<string,struct CheckSum>::iterator I = CheckSums.begin();
+ for(map<string,struct CheckSum>::const_iterator I = CheckSums.begin();
I != CheckSums.end();
++I)
{
@@ -977,7 +1025,7 @@ void ReleaseWriter::Finish()
}
fprintf(Output, "SHA256:\n");
- for(map<string,struct CheckSum>::iterator I = CheckSums.begin();
+ for(map<string,struct CheckSum>::const_iterator I = CheckSums.begin();
I != CheckSums.end();
++I)
{
diff --git a/ftparchive/writer.h b/ftparchive/writer.h
index 9aaadbdab..3123a7f46 100644
--- a/ftparchive/writer.h
+++ b/ftparchive/writer.h
@@ -19,8 +19,10 @@
#include <iostream>
#include <vector>
#include <map>
+#include <set>
#include "cachedb.h"
+#include "multicompress.h"
#include "override.h"
#include "apt-ftparchive.h"
@@ -34,6 +36,7 @@ class FTWScanner
{
protected:
vector<string> Patterns;
+ string Arch;
const char *OriginalPath;
bool ErrorPrinted;
@@ -42,12 +45,12 @@ class FTWScanner
static FTWScanner *Owner;
static int ScannerFTW(const char *File,const struct stat *sb,int Flag);
- static int ScannerFile(const char *File, bool ReadLink);
+ static int ScannerFile(const char *File, bool const &ReadLink);
bool Delink(string &FileName,const char *OriginalPath,
- unsigned long &Bytes,off_t FileSize);
+ unsigned long &Bytes,off_t const &FileSize);
- inline void NewLine(unsigned Priority)
+ inline void NewLine(unsigned const &Priority)
{
if (ErrorPrinted == false && Quiet <= Priority)
{
@@ -62,13 +65,31 @@ class FTWScanner
string InternalPrefix;
virtual bool DoPackage(string FileName) = 0;
- bool RecursiveScan(string Dir);
- bool LoadFileList(string BaseDir,string File);
+ bool RecursiveScan(string const &Dir);
+ bool LoadFileList(string const &BaseDir,string const &File);
void ClearPatterns() { Patterns.clear(); };
- void AddPattern(string Pattern) { Patterns.push_back(Pattern); };
- bool SetExts(string Vals);
+ void AddPattern(string const &Pattern) { Patterns.push_back(Pattern); };
+ bool SetExts(string const &Vals);
- FTWScanner();
+ FTWScanner(string const &Arch = string());
+};
+
+class TranslationWriter
+{
+ MultiCompress *Comp;
+ FILE *Output;
+ std::set<string> Included;
+ unsigned short RefCounter;
+
+ public:
+ void IncreaseRefCounter() { ++RefCounter; };
+ unsigned short DecreaseRefCounter() { return (RefCounter == 0) ? 0 : --RefCounter; };
+ unsigned short GetRefCounter() const { return RefCounter; };
+ bool DoPackage(string const &Pkg, string const &Desc, string const &MD5);
+
+ TranslationWriter(string const &File, string const &TransCompress, mode_t const &Permissions);
+ TranslationWriter() : Comp(NULL), Output(NULL), RefCounter(0) {};
+ ~TranslationWriter();
};
class PackagesWriter : public FTWScanner
@@ -82,6 +103,7 @@ class PackagesWriter : public FTWScanner
bool DoMD5;
bool DoSHA1;
bool DoSHA256;
+ bool DoAlwaysStat;
bool NoOverride;
bool DoContents;
bool LongDescription;
@@ -91,15 +113,15 @@ class PackagesWriter : public FTWScanner
string DirStrip;
FILE *Output;
struct CacheDB::Stats &Stats;
- string Arch;
+ TranslationWriter *TransWriter;
- inline bool ReadOverride(string File) {return Over.ReadOverride(File);};
- inline bool ReadExtraOverride(string File)
+ inline bool ReadOverride(string const &File) {return Over.ReadOverride(File);};
+ inline bool ReadExtraOverride(string const &File)
{return Over.ReadExtraOverride(File);};
virtual bool DoPackage(string FileName);
- PackagesWriter(string DB,string Overrides,string ExtOverrides=string(),
- string Arch=string());
+ PackagesWriter(string const &DB,string const &Overrides,string const &ExtOverrides=string(),
+ string const &Arch=string());
virtual ~PackagesWriter() {};
};
@@ -119,12 +141,12 @@ class ContentsWriter : public FTWScanner
bool DoPackage(string FileName,string Package);
virtual bool DoPackage(string FileName)
{return DoPackage(FileName,string());};
- bool ReadFromPkgs(string PkgFile,string PkgCompress);
+ bool ReadFromPkgs(string const &PkgFile,string const &PkgCompress);
void Finish() {Gen.Print(Output);};
- inline bool ReadyDB(string DB) {return Db.ReadyDB(DB);};
+ inline bool ReadyDB(string const &DB) {return Db.ReadyDB(DB);};
- ContentsWriter(string DB);
+ ContentsWriter(string const &DB, string const &Arch = string());
virtual ~ContentsWriter() {};
};
@@ -147,15 +169,15 @@ class SourcesWriter : public FTWScanner
virtual bool DoPackage(string FileName);
- SourcesWriter(string BOverrides,string SOverrides,
- string ExtOverrides=string());
+ SourcesWriter(string const &BOverrides,string const &SOverrides,
+ string const &ExtOverrides=string());
virtual ~SourcesWriter() {free(Buffer);};
};
class ReleaseWriter : public FTWScanner
{
public:
- ReleaseWriter(string DB);
+ ReleaseWriter(string const &DB);
virtual bool DoPackage(string FileName);
void Finish();