From 9c24493fabefe1a2549eaab81770dbe6f24916d9 Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Thu, 26 Nov 2009 23:46:49 +0100 Subject: Add APT::FTPArchive::LongDescription to disable the inclusion of the LongDescriptions in the generated Packages file. --- ftparchive/writer.cc | 21 +++++++++++++++++++-- ftparchive/writer.h | 1 + 2 files changed, 20 insertions(+), 2 deletions(-) (limited to 'ftparchive') diff --git a/ftparchive/writer.cc b/ftparchive/writer.cc index 293e851f5..b2ebdca8a 100644 --- a/ftparchive/writer.cc +++ b/ftparchive/writer.cc @@ -308,6 +308,7 @@ PackagesWriter::PackagesWriter(string DB,string Overrides,string ExtOverrides, DoSHA256 = _config->FindB("APT::FTPArchive::SHA256",true); DoContents = _config->FindB("APT::FTPArchive::Contents",true); NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false); + LongDescription = _config->FindB("APT::FTPArchive::LongDescription",true); if (Db.Loaded() == false) DoContents = false; @@ -414,10 +415,18 @@ bool PackagesWriter::DoPackage(string FileName) NewFileName = FileName; if (PathPrefix.empty() == false) NewFileName = flCombine(PathPrefix,NewFileName); - + + /* Configuration says we don't want to include the long Description + in the package file - instead we want to ship a separated file */ + string desc; + if (LongDescription == false) { + desc = Tags.FindS("Description").append("\n"); + OverItem->FieldOverride["Description"] = desc.substr(0, desc.find('\n')).c_str(); + } + // This lists all the changes to the fields we are going to make. // (7 hardcoded + maintainer + suggests + end marker) - TFRewriteData Changes[6+2+OverItem->FieldOverride.size()+1]; + TFRewriteData Changes[6+2+OverItem->FieldOverride.size()+1+1]; unsigned int End = 0; SetTFRewriteData(Changes[End++], "Size", Size); @@ -429,6 +438,14 @@ bool PackagesWriter::DoPackage(string FileName) SetTFRewriteData(Changes[End++], "Status", 0); SetTFRewriteData(Changes[End++], "Optional", 0); + string DescriptionMd5; + if (LongDescription == false) { + MD5Summation descmd5; + descmd5.Add(desc.c_str()); + DescriptionMd5 = descmd5.Result().Value(); + SetTFRewriteData(Changes[End++], "Description-md5", DescriptionMd5.c_str()); + } + // Rewrite the maintainer field if necessary bool MaintFailed; string NewMaint = OverItem->SwapMaint(Tags.FindS("Maintainer"),MaintFailed); diff --git a/ftparchive/writer.h b/ftparchive/writer.h index 6e161c752..e76438900 100644 --- a/ftparchive/writer.h +++ b/ftparchive/writer.h @@ -86,6 +86,7 @@ class PackagesWriter : public FTWScanner bool DoSHA256; bool NoOverride; bool DoContents; + bool LongDescription; // General options string PathPrefix; -- cgit v1.2.3 From ff574e76beb97c101924c2d4746b5a2dbb862f19 Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Sat, 28 Nov 2009 02:12:36 +0100 Subject: add APT::FTPArchive::AlwaysStat to disable the too aggressive caching if versions are build multiply times (not recommend) Patch by Christoph Goehre, thanks! (Closes: #463260) --- ftparchive/cachedb.cc | 14 +++++++++----- ftparchive/cachedb.h | 4 ++-- ftparchive/writer.cc | 5 +++-- ftparchive/writer.h | 1 + 4 files changed, 15 insertions(+), 9 deletions(-) (limited to 'ftparchive') diff --git a/ftparchive/cachedb.cc b/ftparchive/cachedb.cc index dfda827b6..c352aa53c 100644 --- a/ftparchive/cachedb.cc +++ b/ftparchive/cachedb.cc @@ -102,9 +102,9 @@ bool CacheDB::OpenFile() // --------------------------------------------------------------------- /* This gets the size from the database if it's there. 
If we need * to look at the file, also get the mtime from the file. */ -bool CacheDB::GetFileStat() +bool CacheDB::GetFileStat(bool const &doStat) { - if ((CurStat.Flags & FlSize) == FlSize) + if ((CurStat.Flags & FlSize) == FlSize && doStat == false) { /* Already worked out the file size */ } @@ -162,7 +162,7 @@ bool CacheDB::GetCurStat() // --------------------------------------------------------------------- bool CacheDB::GetFileInfo(string FileName, bool DoControl, bool DoContents, bool GenContentsOnly, - bool DoMD5, bool DoSHA1, bool DoSHA256) + bool DoMD5, bool DoSHA1, bool DoSHA256, bool const &checkMtime) { this->FileName = FileName; @@ -171,14 +171,18 @@ bool CacheDB::GetFileInfo(string FileName, bool DoControl, bool DoContents, return false; } OldStat = CurStat; - - if (GetFileStat() == false) + + if (GetFileStat(checkMtime) == false) { delete Fd; Fd = NULL; return false; } + /* if mtime changed, update CurStat from disk */ + if (checkMtime == true && OldStat.mtime != CurStat.mtime) + CurStat.Flags = FlSize; + Stats.Bytes += CurStat.FileSize; Stats.Packages++; diff --git a/ftparchive/cachedb.h b/ftparchive/cachedb.h index c10f41ecc..15add459c 100644 --- a/ftparchive/cachedb.h +++ b/ftparchive/cachedb.h @@ -63,7 +63,7 @@ class CacheDB return true; } bool OpenFile(); - bool GetFileStat(); + bool GetFileStat(bool const &doStat = false); bool GetCurStat(); bool LoadControl(); bool LoadContents(bool GenOnly); @@ -125,7 +125,7 @@ class CacheDB bool SetFile(string FileName,struct stat St,FileFd *Fd); bool GetFileInfo(string FileName, bool DoControl, bool DoContents, - bool GenContentsOnly, bool DoMD5, bool DoSHA1, bool DoSHA256); + bool GenContentsOnly, bool DoMD5, bool DoSHA1, bool DoSHA256, bool const &checkMtime = false); bool Finish(); bool Clean(); diff --git a/ftparchive/writer.cc b/ftparchive/writer.cc index b2ebdca8a..6756021f8 100644 --- a/ftparchive/writer.cc +++ b/ftparchive/writer.cc @@ -306,6 +306,7 @@ PackagesWriter::PackagesWriter(string DB,string Overrides,string ExtOverrides, DoMD5 = _config->FindB("APT::FTPArchive::MD5",true); DoSHA1 = _config->FindB("APT::FTPArchive::SHA1",true); DoSHA256 = _config->FindB("APT::FTPArchive::SHA256",true); + DoAlwaysStat = _config->FindB("APT::FTPArchive::AlwaysStat", false); DoContents = _config->FindB("APT::FTPArchive::Contents",true); NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false); LongDescription = _config->FindB("APT::FTPArchive::LongDescription",true); @@ -360,7 +361,7 @@ bool FTWScanner::SetExts(string Vals) bool PackagesWriter::DoPackage(string FileName) { // Pull all the data we need form the DB - if (Db.GetFileInfo(FileName, true, DoContents, true, DoMD5, DoSHA1, DoSHA256) + if (Db.GetFileInfo(FileName, true, DoContents, true, DoMD5, DoSHA1, DoSHA256, DoAlwaysStat) == false) { return false; @@ -753,7 +754,7 @@ ContentsWriter::ContentsWriter(string DB) : determine what the package name is. 
*/ bool ContentsWriter::DoPackage(string FileName,string Package) { - if (!Db.GetFileInfo(FileName, Package.empty(), true, false, false, false, false)) + if (!Db.GetFileInfo(FileName, Package.empty(), true, false, false, false, false, false)) { return false; } diff --git a/ftparchive/writer.h b/ftparchive/writer.h index e76438900..8864461d5 100644 --- a/ftparchive/writer.h +++ b/ftparchive/writer.h @@ -84,6 +84,7 @@ class PackagesWriter : public FTWScanner bool DoMD5; bool DoSHA1; bool DoSHA256; + bool DoAlwaysStat; bool NoOverride; bool DoContents; bool LongDescription; -- cgit v1.2.3 From 9209ec4701d9f6c21d4ae9ebb648d94a1f32665a Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Sat, 28 Nov 2009 03:19:52 +0100 Subject: tell every method in ftparchive/ that const& is sexy --- ftparchive/cachedb.cc | 18 +++++++++--------- ftparchive/cachedb.h | 20 ++++++++++---------- ftparchive/contents.cc | 2 +- ftparchive/contents.h | 2 +- ftparchive/multicompress.cc | 14 +++++++------- ftparchive/multicompress.h | 12 ++++++------ ftparchive/override.cc | 16 ++++++++-------- ftparchive/override.h | 10 +++++----- ftparchive/writer.cc | 44 ++++++++++++++++++++++---------------------- ftparchive/writer.h | 34 +++++++++++++++++----------------- 10 files changed, 86 insertions(+), 86 deletions(-) (limited to 'ftparchive') diff --git a/ftparchive/cachedb.cc b/ftparchive/cachedb.cc index c352aa53c..64638459a 100644 --- a/ftparchive/cachedb.cc +++ b/ftparchive/cachedb.cc @@ -26,7 +26,7 @@ // CacheDB::ReadyDB - Ready the DB2 /*{{{*/ // --------------------------------------------------------------------- /* This opens the DB2 file for caching package information */ -bool CacheDB::ReadyDB(string DB) +bool CacheDB::ReadyDB(string const &DB) { int err; @@ -160,9 +160,9 @@ bool CacheDB::GetCurStat() /*}}}*/ // CacheDB::GetFileInfo - Get all the info about the file /*{{{*/ // --------------------------------------------------------------------- -bool CacheDB::GetFileInfo(string FileName, bool DoControl, bool DoContents, - bool GenContentsOnly, - bool DoMD5, bool DoSHA1, bool DoSHA256, bool const &checkMtime) +bool CacheDB::GetFileInfo(string const &FileName, bool const &DoControl, bool const &DoContents, + bool const &GenContentsOnly, bool const &DoMD5, bool const &DoSHA1, + bool const &DoSHA256, bool const &checkMtime) { this->FileName = FileName; @@ -251,7 +251,7 @@ bool CacheDB::LoadControl() // CacheDB::LoadContents - Load the File Listing /*{{{*/ // --------------------------------------------------------------------- /* */ -bool CacheDB::LoadContents(bool GenOnly) +bool CacheDB::LoadContents(bool const &GenOnly) { // Try to read the control information out of the DB. if ((CurStat.Flags & FlContents) == FlContents) @@ -301,7 +301,7 @@ static string bytes2hex(uint8_t *bytes, size_t length) { return string(space); } -static inline unsigned char xdig2num(char dig) { +static inline unsigned char xdig2num(char const &dig) { if (isdigit(dig)) return dig - '0'; if ('a' <= dig && dig <= 'f') return dig - 'a' + 10; if ('A' <= dig && dig <= 'F') return dig - 'A' + 10; @@ -322,7 +322,7 @@ static void hex2bytes(uint8_t *bytes, const char *hex, int length) { // CacheDB::GetMD5 - Get the MD5 hash /*{{{*/ // --------------------------------------------------------------------- /* */ -bool CacheDB::GetMD5(bool GenOnly) +bool CacheDB::GetMD5(bool const &GenOnly) { // Try to read the control information out of the DB. 
if ((CurStat.Flags & FlMD5) == FlMD5) @@ -353,7 +353,7 @@ bool CacheDB::GetMD5(bool GenOnly) // CacheDB::GetSHA1 - Get the SHA1 hash /*{{{*/ // --------------------------------------------------------------------- /* */ -bool CacheDB::GetSHA1(bool GenOnly) +bool CacheDB::GetSHA1(bool const &GenOnly) { // Try to read the control information out of the DB. if ((CurStat.Flags & FlSHA1) == FlSHA1) @@ -384,7 +384,7 @@ bool CacheDB::GetSHA1(bool GenOnly) // CacheDB::GetSHA256 - Get the SHA256 hash /*{{{*/ // --------------------------------------------------------------------- /* */ -bool CacheDB::GetSHA256(bool GenOnly) +bool CacheDB::GetSHA256(bool const &GenOnly) { // Try to read the control information out of the DB. if ((CurStat.Flags & FlSHA256) == FlSHA256) diff --git a/ftparchive/cachedb.h b/ftparchive/cachedb.h index 15add459c..0ba80909a 100644 --- a/ftparchive/cachedb.h +++ b/ftparchive/cachedb.h @@ -49,7 +49,7 @@ class CacheDB { return Dbp->get(Dbp,0,&Key,&Data,0) == 0; }; - inline bool Put(const void *In,unsigned long Length) + inline bool Put(const void *In,unsigned long const &Length) { if (ReadOnly == true) return true; @@ -66,10 +66,10 @@ class CacheDB bool GetFileStat(bool const &doStat = false); bool GetCurStat(); bool LoadControl(); - bool LoadContents(bool GenOnly); - bool GetMD5(bool GenOnly); - bool GetSHA1(bool GenOnly); - bool GetSHA256(bool GenOnly); + bool LoadContents(bool const &GenOnly); + bool GetMD5(bool const &GenOnly); + bool GetSHA1(bool const &GenOnly); + bool GetSHA256(bool const &GenOnly); // Stat info stored in the DB, Fixed types since it is written to disk. enum FlagList {FlControl = (1<<0),FlMD5=(1<<1),FlContents=(1<<2), @@ -117,20 +117,20 @@ class CacheDB Stats() : Bytes(0), MD5Bytes(0), SHA1Bytes(0), SHA256Bytes(0), Packages(0), Misses(0), DeLinkBytes(0) {}; } Stats; - bool ReadyDB(string DB); + bool ReadyDB(string const &DB); inline bool DBFailed() {return Dbp != 0 && DBLoaded == false;}; inline bool Loaded() {return DBLoaded == true;}; inline off_t GetFileSize(void) {return CurStat.FileSize;} - bool SetFile(string FileName,struct stat St,FileFd *Fd); - bool GetFileInfo(string FileName, bool DoControl, bool DoContents, - bool GenContentsOnly, bool DoMD5, bool DoSHA1, bool DoSHA256, bool const &checkMtime = false); + bool SetFile(string const &FileName,struct stat St,FileFd *Fd); + bool GetFileInfo(string const &FileName, bool const &DoControl, bool const &DoContents, bool const &GenContentsOnly, + bool const &DoMD5, bool const &DoSHA1, bool const &DoSHA256, bool const &checkMtime = false); bool Finish(); bool Clean(); - CacheDB(string DB) : Dbp(0), Fd(NULL), DebFile(0) {ReadyDB(DB);}; + CacheDB(string const &DB) : Dbp(0), Fd(NULL), DebFile(0) {ReadyDB(DB);}; ~CacheDB() {ReadyDB(string()); delete DebFile;}; }; diff --git a/ftparchive/contents.cc b/ftparchive/contents.cc index 1f2cbcc3d..693c36f9a 100644 --- a/ftparchive/contents.cc +++ b/ftparchive/contents.cc @@ -399,7 +399,7 @@ bool ContentsExtract::TakeContents(const void *NewData,unsigned long Length) // ContentsExtract::Add - Read the contents data into the sorter /*{{{*/ // --------------------------------------------------------------------- /* */ -void ContentsExtract::Add(GenContents &Contents,string Package) +void ContentsExtract::Add(GenContents &Contents,string const &Package) { const char *Start = Data; char *Pkg = Contents.Mystrdup(Package.c_str()); diff --git a/ftparchive/contents.h b/ftparchive/contents.h index d8457cd45..5b5092b66 100644 --- a/ftparchive/contents.h +++ 
b/ftparchive/contents.h @@ -80,7 +80,7 @@ class ContentsExtract : public pkgDirStream virtual bool DoItem(Item &Itm,int &Fd); void Reset() {CurSize = 0;}; bool TakeContents(const void *Data,unsigned long Length); - void Add(GenContents &Contents,string Package); + void Add(GenContents &Contents,string const &Package); ContentsExtract() : Data(0), MaxSize(0), CurSize(0) {}; virtual ~ContentsExtract() {delete [] Data;}; diff --git a/ftparchive/multicompress.cc b/ftparchive/multicompress.cc index 2fc8efcbf..7c91d34fe 100644 --- a/ftparchive/multicompress.cc +++ b/ftparchive/multicompress.cc @@ -40,14 +40,14 @@ const MultiCompress::CompType MultiCompress::Compressors[] = // MultiCompress::MultiCompress - Constructor /*{{{*/ // --------------------------------------------------------------------- /* Setup the file outputs, compression modes and fork the writer child */ -MultiCompress::MultiCompress(string Output,string Compress, - mode_t Permissions,bool Write) +MultiCompress::MultiCompress(string const &Output,string const &Compress, + mode_t const &Permissions,bool const &Write) : + Permissions(Permissions) { Outputs = 0; Outputter = -1; Input = 0; UpdateMTime = 0; - this->Permissions = Permissions; /* Parse the compression string, a space separated lists of compresison types */ @@ -126,7 +126,7 @@ MultiCompress::~MultiCompress() /* This checks each compressed file to make sure it exists and returns stat information for a random file from the collection. False means one or more of the files is missing. */ -bool MultiCompress::GetStat(string Output,string Compress,struct stat &St) +bool MultiCompress::GetStat(string const &Output,string const &Compress,struct stat &St) { /* Parse the compression string, a space separated lists of compresison types */ @@ -268,8 +268,8 @@ bool MultiCompress::Finalize(unsigned long &OutSize) /* This opens the compressor, either in compress mode or decompress mode. FileFd is always the compressor input/output file, OutFd is the created pipe, Input for Compress, Output for Decompress. */ -bool MultiCompress::OpenCompress(const CompType *Prog,pid_t &Pid,int FileFd, - int &OutFd,bool Comp) +bool MultiCompress::OpenCompress(const CompType *Prog,pid_t &Pid,int const &FileFd, + int &OutFd,bool const &Comp) { Pid = -1; @@ -369,7 +369,7 @@ bool MultiCompress::CloseOld(int Fd,pid_t Proc) computes the MD5 of the raw data. After this the raw data in the original files is compared to see if this data is new. If the data is new then the temp files are renamed, otherwise they are erased. */ -bool MultiCompress::Child(int FD) +bool MultiCompress::Child(int const &FD) { // Start the compression children. 
for (Files *I = Outputs; I != 0; I = I->Next) diff --git a/ftparchive/multicompress.h b/ftparchive/multicompress.h index a65077e73..3ac3b8fb2 100644 --- a/ftparchive/multicompress.h +++ b/ftparchive/multicompress.h @@ -53,9 +53,9 @@ class MultiCompress mode_t Permissions; static const CompType Compressors[]; - bool OpenCompress(const CompType *Prog,pid_t &Pid,int FileFd, - int &OutFd,bool Comp); - bool Child(int Fd); + bool OpenCompress(const CompType *Prog,pid_t &Pid,int const &FileFd, + int &OutFd,bool const &Comp); + bool Child(int const &Fd); bool Start(); bool Die(); @@ -68,10 +68,10 @@ class MultiCompress bool Finalize(unsigned long &OutSize); bool OpenOld(int &Fd,pid_t &Proc); bool CloseOld(int Fd,pid_t Proc); - static bool GetStat(string Output,string Compress,struct stat &St); + static bool GetStat(string const &Output,string const &Compress,struct stat &St); - MultiCompress(string Output,string Compress,mode_t Permissions, - bool Write = true); + MultiCompress(string const &Output,string const &Compress, + mode_t const &Permissions, bool const &Write = true); ~MultiCompress(); }; diff --git a/ftparchive/override.cc b/ftparchive/override.cc index 6f40bc865..3cf10b89b 100644 --- a/ftparchive/override.cc +++ b/ftparchive/override.cc @@ -24,7 +24,7 @@ // Override::ReadOverride - Read the override file /*{{{*/ // --------------------------------------------------------------------- /* This parses the override file and reads it into the map */ -bool Override::ReadOverride(string File,bool Source) +bool Override::ReadOverride(string const &File,bool const &Source) { if (File.empty() == true) return true; @@ -132,7 +132,7 @@ bool Override::ReadOverride(string File,bool Source) // Override::ReadExtraOverride - Read the extra override file /*{{{*/ // --------------------------------------------------------------------- /* This parses the extra override file and reads it into the map */ -bool Override::ReadExtraOverride(string File,bool Source) +bool Override::ReadExtraOverride(string const &File,bool const &Source) { if (File.empty() == true) return true; @@ -209,9 +209,9 @@ bool Override::ReadExtraOverride(string File,bool Source) /* Returns a override item for the given package and the given architecture. * Treats "all" special */ -Override::Item* Override::GetItem(string Package, string Architecture) +Override::Item* Override::GetItem(string const &Package, string const &Architecture) { - map::iterator I = Mapping.find(Package); + map::const_iterator I = Mapping.find(Package); map::iterator J = Mapping.find(Package + "/" + Architecture); if (I == Mapping.end() && J == Mapping.end()) @@ -230,7 +230,7 @@ Override::Item* Override::GetItem(string Package, string Architecture) if (R->Priority != "") result->Priority = R->Priority; if (R->OldMaint != "") result->OldMaint = R->OldMaint; if (R->NewMaint != "") result->NewMaint = R->NewMaint; - for (map::iterator foI = R->FieldOverride.begin(); + for (map::const_iterator foI = R->FieldOverride.begin(); foI != R->FieldOverride.end(); foI++) { result->FieldOverride[foI->first] = foI->second; @@ -247,7 +247,7 @@ Override::Item* Override::GetItem(string Package, string Architecture) there is a rule but it does not match then the empty string is returned, also if there was no rewrite rule the empty string is returned. Failed indicates if there was some kind of problem while rewriting. 
*/ -string Override::Item::SwapMaint(string Orig,bool &Failed) +string Override::Item::SwapMaint(string const &Orig,bool &Failed) { Failed = false; @@ -262,10 +262,10 @@ string Override::Item::SwapMaint(string Orig,bool &Failed) override file. Thus it persists.*/ #if 1 // Break OldMaint up into little bits on double slash boundaries. - string::iterator End = OldMaint.begin(); + string::const_iterator End = OldMaint.begin(); while (1) { - string::iterator Start = End; + string::const_iterator Start = End; for (; End < OldMaint.end() && (End + 3 >= OldMaint.end() || End[0] != ' ' || End[1] != '/' || End[2] != '/'); End++); diff --git a/ftparchive/override.h b/ftparchive/override.h index f270556eb..c5cacc2b4 100644 --- a/ftparchive/override.h +++ b/ftparchive/override.h @@ -31,20 +31,20 @@ class Override string NewMaint; map FieldOverride; - string SwapMaint(string Orig,bool &Failed); + string SwapMaint(string const &Orig,bool &Failed); ~Item() {}; }; map Mapping; - inline Item *GetItem(string Package) + inline Item *GetItem(string const &Package) { return GetItem(Package, ""); } - Item *GetItem(string Package, string Architecture); + Item *GetItem(string const &Package, string const &Architecture); - bool ReadOverride(string File,bool Source = false); - bool ReadExtraOverride(string File,bool Source = false); + bool ReadOverride(string const &File,bool const &Source = false); + bool ReadExtraOverride(string const &File,bool const &Source = false); }; #endif diff --git a/ftparchive/writer.cc b/ftparchive/writer.cc index 6756021f8..bf6e9f617 100644 --- a/ftparchive/writer.cc +++ b/ftparchive/writer.cc @@ -89,7 +89,7 @@ int FTWScanner::ScannerFTW(const char *File,const struct stat *sb,int Flag) // FTWScanner::ScannerFile - File Scanner /*{{{*/ // --------------------------------------------------------------------- /* */ -int FTWScanner::ScannerFile(const char *File, bool ReadLink) +int FTWScanner::ScannerFile(const char *File, bool const &ReadLink) { const char *LastComponent = strrchr(File, '/'); if (LastComponent == NULL) @@ -97,7 +97,7 @@ int FTWScanner::ScannerFile(const char *File, bool ReadLink) else LastComponent++; - vector::iterator I; + vector::const_iterator I; for(I = Owner->Patterns.begin(); I != Owner->Patterns.end(); ++I) { if (fnmatch((*I).c_str(), LastComponent, 0) == 0) @@ -127,7 +127,7 @@ int FTWScanner::ScannerFile(const char *File, bool ReadLink) { Owner->NewLine(1); - bool Type = _error->PopMessage(Err); + bool const Type = _error->PopMessage(Err); if (Type == true) cerr << _("E: ") << Err << endl; else @@ -148,7 +148,7 @@ int FTWScanner::ScannerFile(const char *File, bool ReadLink) // FTWScanner::RecursiveScan - Just scan a directory tree /*{{{*/ // --------------------------------------------------------------------- /* */ -bool FTWScanner::RecursiveScan(string Dir) +bool FTWScanner::RecursiveScan(string const &Dir) { /* If noprefix is set then jam the scan root in, so we don't generate link followed paths out of control */ @@ -161,7 +161,7 @@ bool FTWScanner::RecursiveScan(string Dir) // Do recursive directory searching Owner = this; - int Res = ftw(Dir.c_str(),ScannerFTW,30); + int const Res = ftw(Dir.c_str(),ScannerFTW,30); // Error treewalking? if (Res != 0) @@ -178,7 +178,7 @@ bool FTWScanner::RecursiveScan(string Dir) // --------------------------------------------------------------------- /* This is an alternative to using FTW to locate files, it reads the list of files from another file. 
*/ -bool FTWScanner::LoadFileList(string Dir,string File) +bool FTWScanner::LoadFileList(string const &Dir, string const &File) { /* If noprefix is set then jam the scan root in, so we don't generate link followed paths out of control */ @@ -236,7 +236,7 @@ bool FTWScanner::LoadFileList(string Dir,string File) /* */ bool FTWScanner::Delink(string &FileName,const char *OriginalPath, unsigned long &DeLinkBytes, - off_t FileSize) + off_t const &FileSize) { // See if this isn't an internaly prefix'd file name. if (InternalPrefix.empty() == false && @@ -293,8 +293,8 @@ bool FTWScanner::Delink(string &FileName,const char *OriginalPath, // PackagesWriter::PackagesWriter - Constructor /*{{{*/ // --------------------------------------------------------------------- /* */ -PackagesWriter::PackagesWriter(string DB,string Overrides,string ExtOverrides, - string aArch) : +PackagesWriter::PackagesWriter(string const &DB,string const &Overrides,string const &ExtOverrides, + string const &aArch) : Db(DB),Stats(Db.Stats), Arch(aArch) { Output = stdout; @@ -329,7 +329,7 @@ PackagesWriter::PackagesWriter(string DB,string Overrides,string ExtOverrides, // FTWScanner::SetExts - Set extensions to support /*{{{*/ // --------------------------------------------------------------------- /* */ -bool FTWScanner::SetExts(string Vals) +bool FTWScanner::SetExts(string const &Vals) { ClearPatterns(); string::size_type Start = 0; @@ -476,7 +476,7 @@ bool PackagesWriter::DoPackage(string FileName) SetTFRewriteData(Changes[End++], "Suggests", OptionalStr.c_str()); } - for (map::iterator I = OverItem->FieldOverride.begin(); + for (map::const_iterator I = OverItem->FieldOverride.begin(); I != OverItem->FieldOverride.end(); I++) SetTFRewriteData(Changes[End++],I->first.c_str(),I->second.c_str()); @@ -494,8 +494,8 @@ bool PackagesWriter::DoPackage(string FileName) // SourcesWriter::SourcesWriter - Constructor /*{{{*/ // --------------------------------------------------------------------- /* */ -SourcesWriter::SourcesWriter(string BOverrides,string SOverrides, - string ExtOverrides) +SourcesWriter::SourcesWriter(string const &BOverrides,string const &SOverrides, + string const &ExtOverrides) { Output = stdout; AddPattern("*.dsc"); @@ -720,7 +720,7 @@ bool SourcesWriter::DoPackage(string FileName) if (NewMaint.empty() == false) SetTFRewriteData(Changes[End++], "Maintainer", NewMaint.c_str()); - for (map::iterator I = SOverItem->FieldOverride.begin(); + for (map::const_iterator I = SOverItem->FieldOverride.begin(); I != SOverItem->FieldOverride.end(); I++) SetTFRewriteData(Changes[End++],I->first.c_str(),I->second.c_str()); @@ -740,7 +740,7 @@ bool SourcesWriter::DoPackage(string FileName) // ContentsWriter::ContentsWriter - Constructor /*{{{*/ // --------------------------------------------------------------------- /* */ -ContentsWriter::ContentsWriter(string DB) : +ContentsWriter::ContentsWriter(string const &DB) : Db(DB), Stats(Db.Stats) { @@ -752,7 +752,7 @@ ContentsWriter::ContentsWriter(string DB) : // --------------------------------------------------------------------- /* If Package is the empty string the control record will be parsed to determine what the package name is. 
*/ -bool ContentsWriter::DoPackage(string FileName,string Package) +bool ContentsWriter::DoPackage(string FileName, string Package) { if (!Db.GetFileInfo(FileName, Package.empty(), true, false, false, false, false, false)) { @@ -773,7 +773,7 @@ bool ContentsWriter::DoPackage(string FileName,string Package) // ContentsWriter::ReadFromPkgs - Read from a packages file /*{{{*/ // --------------------------------------------------------------------- /* */ -bool ContentsWriter::ReadFromPkgs(string PkgFile,string PkgCompress) +bool ContentsWriter::ReadFromPkgs(string const &PkgFile,string const &PkgCompress) { MultiCompress Pkgs(PkgFile,PkgCompress,0,false); if (_error->PendingError() == true) @@ -828,7 +828,7 @@ bool ContentsWriter::ReadFromPkgs(string PkgFile,string PkgCompress) // ReleaseWriter::ReleaseWriter - Constructor /*{{{*/ // --------------------------------------------------------------------- /* */ -ReleaseWriter::ReleaseWriter(string DB) +ReleaseWriter::ReleaseWriter(string const &DB) { AddPattern("Packages"); AddPattern("Packages.gz"); @@ -842,7 +842,7 @@ ReleaseWriter::ReleaseWriter(string DB) AddPattern("md5sum.txt"); Output = stdout; - time_t now = time(NULL); + time_t const now = time(NULL); char datestr[128]; if (strftime(datestr, sizeof(datestr), "%a, %d %b %Y %H:%M:%S UTC", gmtime(&now)) == 0) @@ -929,7 +929,7 @@ bool ReleaseWriter::DoPackage(string FileName) void ReleaseWriter::Finish() { fprintf(Output, "MD5Sum:\n"); - for(map::iterator I = CheckSums.begin(); + for(map::const_iterator I = CheckSums.begin(); I != CheckSums.end(); ++I) { @@ -940,7 +940,7 @@ void ReleaseWriter::Finish() } fprintf(Output, "SHA1:\n"); - for(map::iterator I = CheckSums.begin(); + for(map::const_iterator I = CheckSums.begin(); I != CheckSums.end(); ++I) { @@ -951,7 +951,7 @@ void ReleaseWriter::Finish() } fprintf(Output, "SHA256:\n"); - for(map::iterator I = CheckSums.begin(); + for(map::const_iterator I = CheckSums.begin(); I != CheckSums.end(); ++I) { diff --git a/ftparchive/writer.h b/ftparchive/writer.h index 8864461d5..ad58dee0a 100644 --- a/ftparchive/writer.h +++ b/ftparchive/writer.h @@ -43,12 +43,12 @@ class FTWScanner static FTWScanner *Owner; static int ScannerFTW(const char *File,const struct stat *sb,int Flag); - static int ScannerFile(const char *File, bool ReadLink); + static int ScannerFile(const char *File, bool const &ReadLink); bool Delink(string &FileName,const char *OriginalPath, - unsigned long &Bytes,off_t FileSize); + unsigned long &Bytes,off_t const &FileSize); - inline void NewLine(unsigned Priority) + inline void NewLine(unsigned const &Priority) { if (ErrorPrinted == false && Quiet <= Priority) { @@ -63,11 +63,11 @@ class FTWScanner string InternalPrefix; virtual bool DoPackage(string FileName) = 0; - bool RecursiveScan(string Dir); - bool LoadFileList(string BaseDir,string File); + bool RecursiveScan(string const &Dir); + bool LoadFileList(string const &BaseDir,string const &File); void ClearPatterns() { Patterns.clear(); }; - void AddPattern(string Pattern) { Patterns.push_back(Pattern); }; - bool SetExts(string Vals); + void AddPattern(string const &Pattern) { Patterns.push_back(Pattern); }; + bool SetExts(string const &Vals); FTWScanner(); virtual ~FTWScanner() {delete [] RealPath;}; @@ -96,13 +96,13 @@ class PackagesWriter : public FTWScanner struct CacheDB::Stats &Stats; string Arch; - inline bool ReadOverride(string File) {return Over.ReadOverride(File);}; - inline bool ReadExtraOverride(string File) + inline bool ReadOverride(string const &File) {return 
Over.ReadOverride(File);}; + inline bool ReadExtraOverride(string const &File) {return Over.ReadExtraOverride(File);}; virtual bool DoPackage(string FileName); - PackagesWriter(string DB,string Overrides,string ExtOverrides=string(), - string Arch=string()); + PackagesWriter(string const &DB,string const &Overrides,string const &ExtOverrides=string(), + string const &Arch=string()); virtual ~PackagesWriter() {}; }; @@ -122,12 +122,12 @@ class ContentsWriter : public FTWScanner bool DoPackage(string FileName,string Package); virtual bool DoPackage(string FileName) {return DoPackage(FileName,string());}; - bool ReadFromPkgs(string PkgFile,string PkgCompress); + bool ReadFromPkgs(string const &PkgFile,string const &PkgCompress); void Finish() {Gen.Print(Output);}; - inline bool ReadyDB(string DB) {return Db.ReadyDB(DB);}; + inline bool ReadyDB(string const &DB) {return Db.ReadyDB(DB);}; - ContentsWriter(string DB); + ContentsWriter(string const &DB); virtual ~ContentsWriter() {}; }; @@ -150,15 +150,15 @@ class SourcesWriter : public FTWScanner virtual bool DoPackage(string FileName); - SourcesWriter(string BOverrides,string SOverrides, - string ExtOverrides=string()); + SourcesWriter(string const &BOverrides,string const &SOverrides, + string const &ExtOverrides=string()); virtual ~SourcesWriter() {free(Buffer);}; }; class ReleaseWriter : public FTWScanner { public: - ReleaseWriter(string DB); + ReleaseWriter(string const &DB); virtual bool DoPackage(string FileName); void Finish(); -- cgit v1.2.3 From c6474fb6ff482b0457674986a82afab0a3749af2 Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Tue, 1 Dec 2009 00:28:26 +0100 Subject: fix a few typos in strings, comments and manpage of apt-ftparchive thanks Karl Goetz! (Closes: #558757) --- ftparchive/apt-ftparchive.cc | 4 ++-- ftparchive/cachedb.cc | 6 +++--- ftparchive/contents.cc | 2 +- ftparchive/multicompress.cc | 2 +- ftparchive/writer.cc | 2 +- 5 files changed, 8 insertions(+), 8 deletions(-) (limited to 'ftparchive') diff --git a/ftparchive/apt-ftparchive.cc b/ftparchive/apt-ftparchive.cc index d0dea7768..5b6b3940c 100644 --- a/ftparchive/apt-ftparchive.cc +++ b/ftparchive/apt-ftparchive.cc @@ -3,7 +3,7 @@ // $Id: apt-ftparchive.cc,v 1.8.2.3 2004/01/02 22:01:48 mdz Exp $ /* ###################################################################### - apt-scanpackages - Efficient work-alike for dpkg-scanpackages + apt-ftparchive - Efficient work-alike for dpkg-scanpackages Let contents be disabled from the conf @@ -792,7 +792,7 @@ bool Generate(CommandLine &CmdL) if (_config->FindB("APT::FTPArchive::Contents",true) == false) return true; - c1out << "Done Packages, Starting contents." << endl; + c1out << "Packages done, Starting contents." << endl; // Sort the contents file list by date string ArchiveDir = Setup.FindDir("Dir::ArchiveDir"); diff --git a/ftparchive/cachedb.cc b/ftparchive/cachedb.cc index 64638459a..b04244347 100644 --- a/ftparchive/cachedb.cc +++ b/ftparchive/cachedb.cc @@ -69,7 +69,7 @@ bool CacheDB::ReadyDB(string const &DB) // apt 0.6.44 if (err == EINVAL) { - _error->Error(_("DB format is invalid. If you upgraded from a older version of apt, please remove and re-create the database.")); + _error->Error(_("DB format is invalid. 
If you upgraded from an older version of apt, please remove and re-create the database.")); } if (err) { @@ -83,7 +83,7 @@ bool CacheDB::ReadyDB(string const &DB) return true; } /*}}}*/ -// CacheDB::OpenFile - Open the filei /*{{{*/ +// CacheDB::OpenFile - Open the file /*{{{*/ // --------------------------------------------------------------------- /* */ bool CacheDB::OpenFile() @@ -139,7 +139,7 @@ bool CacheDB::GetCurStat() if (DBLoaded) { - /* First see if thre is anything about it + /* First see if there is anything about it in the database */ /* Get the flags (and mtime) */ diff --git a/ftparchive/contents.cc b/ftparchive/contents.cc index 693c36f9a..b761d9204 100644 --- a/ftparchive/contents.cc +++ b/ftparchive/contents.cc @@ -13,7 +13,7 @@ removing the massive sort time overhead. By breaking all the pathnames into components and storing them - separately a space savings is realized by not duplicating the string + separately a space saving is realized by not duplicating the string over and over again. Ultimately this saving is sacrificed to storage of the tree structure itself but the tree structure yields a speed gain in the sorting and processing. Ultimately it takes about 5 seconds to diff --git a/ftparchive/multicompress.cc b/ftparchive/multicompress.cc index 7c91d34fe..bb4beedf9 100644 --- a/ftparchive/multicompress.cc +++ b/ftparchive/multicompress.cc @@ -365,7 +365,7 @@ bool MultiCompress::CloseOld(int Fd,pid_t Proc) // MultiCompress::Child - The writer child /*{{{*/ // --------------------------------------------------------------------- /* The child process forks a bunch of compression children and takes - input on FD and passes it to all the compressor childer. On the way it + input on FD and passes it to all the compressor child. On the way it computes the MD5 of the raw data. After this the raw data in the original files is compared to see if this data is new. If the data is new then the temp files are renamed, otherwise they are erased. */ diff --git a/ftparchive/writer.cc b/ftparchive/writer.cc index bf6e9f617..5547c6aa5 100644 --- a/ftparchive/writer.cc +++ b/ftparchive/writer.cc @@ -464,7 +464,7 @@ bool PackagesWriter::DoPackage(string FileName) SetTFRewriteData(Changes[End++], "Maintainer", NewMaint.c_str()); /* Get rid of the Optional tag. This is an ugly, ugly, ugly hack that - dpkg-scanpackages does.. Well sort of. dpkg-scanpackages just does renaming + dpkg-scanpackages does. Well sort of. dpkg-scanpackages just does renaming but dpkg does this append bit. So we do the append bit, at least that way the status file and package file will remain similar. There are other transforms but optional is the only legacy one still in use for some lazy reason. 
*/ -- cgit v1.2.3 From f99da9089c1122bdb47173171d3c6a692fb4d439 Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Sat, 30 Jan 2010 23:57:27 +0100 Subject: * ftparchive/writer.cc: - generate sha1 and sha256 checksums for dsc (Closes: #567343) --- ftparchive/writer.cc | 30 +++++++++++++++++++++++++----- 1 file changed, 25 insertions(+), 5 deletions(-) (limited to 'ftparchive') diff --git a/ftparchive/writer.cc b/ftparchive/writer.cc index 5547c6aa5..b9dd554b3 100644 --- a/ftparchive/writer.cc +++ b/ftparchive/writer.cc @@ -555,7 +555,12 @@ bool SourcesWriter::DoPackage(string FileName) char *BlkEnd = Buffer + St.st_size; MD5Summation MD5; MD5.Add((unsigned char *)Start,BlkEnd - Start); - + + SHA1Summation SHA1; + SHA256Summation SHA256; + SHA1.Add((unsigned char *)Start,BlkEnd - Start); + SHA256.Add((unsigned char *)Start,BlkEnd - Start); + // Add an extra \n to the end, just in case *BlkEnd++ = '\n'; @@ -646,12 +651,25 @@ bool SourcesWriter::DoPackage(string FileName) } // Add the dsc to the files hash list + string const strippedName = flNotDir(FileName); char Files[1000]; snprintf(Files,sizeof(Files),"\n %s %lu %s\n %s", string(MD5.Result()).c_str(),St.st_size, - flNotDir(FileName).c_str(), + strippedName.c_str(), Tags.FindS("Files").c_str()); - + + char ChecksumsSha1[1000]; + snprintf(ChecksumsSha1,sizeof(ChecksumsSha1),"\n %s %lu %s\n %s", + string(SHA1.Result()).c_str(),St.st_size, + strippedName.c_str(), + Tags.FindS("Checksums-Sha1").c_str()); + + char ChecksumsSha256[1000]; + snprintf(ChecksumsSha256,sizeof(ChecksumsSha256),"\n %s %lu %s\n %s", + string(SHA256.Result()).c_str(),St.st_size, + strippedName.c_str(), + Tags.FindS("Checksums-Sha256").c_str()); + // Strip the DirStrip prefix from the FileName and add the PathPrefix string NewFileName; if (DirStrip.empty() == false && @@ -694,12 +712,14 @@ bool SourcesWriter::DoPackage(string FileName) Directory.erase(Directory.end()-1); // This lists all the changes to the fields we are going to make. - // (5 hardcoded + maintainer + end marker) - TFRewriteData Changes[5+1+SOverItem->FieldOverride.size()+1]; + // (5 hardcoded + checksums + maintainer + end marker) + TFRewriteData Changes[5+2+1+SOverItem->FieldOverride.size()+1]; unsigned int End = 0; SetTFRewriteData(Changes[End++],"Source",Package.c_str(),"Package"); SetTFRewriteData(Changes[End++],"Files",Files); + SetTFRewriteData(Changes[End++],"Checksums-Sha1",ChecksumsSha1); + SetTFRewriteData(Changes[End++],"Checksums-Sha256",ChecksumsSha256); if (Directory != "./") SetTFRewriteData(Changes[End++],"Directory",Directory.c_str()); SetTFRewriteData(Changes[End++],"Priority",BestPrio.c_str()); -- cgit v1.2.3 From 319810767180e5c57c296b06c93e3ebec9f36a8e Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Thu, 18 Feb 2010 00:05:14 +0100 Subject: - add --arch option for apt-ftparchive packages and contents commands - if an arch is given accept only *_all.deb and *_arch.deb instead of *.deb. Thanks Stephan Bosch for the patch! (Closes: #319710) --- ftparchive/apt-ftparchive.cc | 7 ++++--- ftparchive/writer.cc | 30 ++++++++++++++---------------- ftparchive/writer.h | 6 +++--- 3 files changed, 21 insertions(+), 22 deletions(-) (limited to 'ftparchive') diff --git a/ftparchive/apt-ftparchive.cc b/ftparchive/apt-ftparchive.cc index 5b6b3940c..f1a182e52 100644 --- a/ftparchive/apt-ftparchive.cc +++ b/ftparchive/apt-ftparchive.cc @@ -333,7 +333,7 @@ bool PackageMap::GenContents(Configuration &Setup, gettimeofday(&StartTime,0); // Create a package writer object. 
- ContentsWriter Contents(""); + ContentsWriter Contents("", Arch); if (PkgExt.empty() == false && Contents.SetExts(PkgExt) == false) return _error->Error(_("Package extension list is too long")); if (_error->PendingError() == true) @@ -606,7 +606,7 @@ bool SimpleGenPackages(CommandLine &CmdL) // Create a package writer object. PackagesWriter Packages(_config->Find("APT::FTPArchive::DB"), - Override, ""); + Override, "", _config->Find("APT::FTPArchive::Architecture")); if (_error->PendingError() == true) return false; @@ -629,7 +629,7 @@ bool SimpleGenContents(CommandLine &CmdL) return ShowHelp(CmdL); // Create a package writer object. - ContentsWriter Contents(_config->Find("APT::FTPArchive::DB")); + ContentsWriter Contents(_config->Find("APT::FTPArchive::DB"), _config->Find("APT::FTPArchive::Architecture")); if (_error->PendingError() == true) return false; @@ -910,6 +910,7 @@ int main(int argc, const char *argv[]) {0,"delink","APT::FTPArchive::DeLinkAct",0}, {0,"readonly","APT::FTPArchive::ReadOnlyDB",0}, {0,"contents","APT::FTPArchive::Contents",0}, + {'a',"arch","APT::FTPArchive::Architecture",CommandLine::HasArg}, {'c',"config-file",0,CommandLine::ConfigFile}, {'o',"option",0,CommandLine::ArbItem}, {0,0,0,0}}; diff --git a/ftparchive/writer.cc b/ftparchive/writer.cc index 18a3de0c2..9e5b7d4f3 100644 --- a/ftparchive/writer.cc +++ b/ftparchive/writer.cc @@ -54,7 +54,7 @@ inline void SetTFRewriteData(struct TFRewriteData &tfrd, // FTWScanner::FTWScanner - Constructor /*{{{*/ // --------------------------------------------------------------------- /* */ -FTWScanner::FTWScanner() +FTWScanner::FTWScanner(string const &Arch): Arch(Arch) { ErrorPrinted = false; NoLinkAct = !_config->FindB("APT::FTPArchive::DeLinkAct",true); @@ -299,12 +299,11 @@ bool FTWScanner::Delink(string &FileName,const char *OriginalPath, // --------------------------------------------------------------------- /* */ PackagesWriter::PackagesWriter(string const &DB,string const &Overrides,string const &ExtOverrides, - string const &aArch) : - Db(DB),Stats(Db.Stats), Arch(aArch) + string const &Arch) : + FTWScanner(Arch), Db(DB), Stats(Db.Stats) { Output = stdout; - SetExts(".deb .udeb .foo .bar .baz"); - AddPattern("*.deb"); + SetExts(".deb .udeb"); DeLinkLimit = 0; // Process the command line options @@ -340,17 +339,16 @@ bool FTWScanner::SetExts(string const &Vals) string::size_type Start = 0; while (Start <= Vals.length()-1) { - string::size_type Space = Vals.find(' ',Start); - string::size_type Length; - if (Space == string::npos) + string::size_type const Space = Vals.find(' ',Start); + string::size_type const Length = ((Space == string::npos) ? 
Vals.length() : Space) - Start; + if ( Arch.empty() == false ) { - Length = Vals.length()-Start; + AddPattern(string("*_") + Arch + Vals.substr(Start, Length)); + AddPattern(string("*_all") + Vals.substr(Start, Length)); } else - { - Length = Space-Start; - } - AddPattern(string("*") + Vals.substr(Start, Length)); + AddPattern(string("*") + Vals.substr(Start, Length)); + Start += Length + 1; } @@ -767,11 +765,11 @@ bool SourcesWriter::DoPackage(string FileName) // ContentsWriter::ContentsWriter - Constructor /*{{{*/ // --------------------------------------------------------------------- /* */ -ContentsWriter::ContentsWriter(string const &DB) : - Db(DB), Stats(Db.Stats) +ContentsWriter::ContentsWriter(string const &DB, string const &Arch) : + FTWScanner(Arch), Db(DB), Stats(Db.Stats) { - AddPattern("*.deb"); + SetExts(".deb"); Output = stdout; } /*}}}*/ diff --git a/ftparchive/writer.h b/ftparchive/writer.h index 520e91dd6..af7ba4edd 100644 --- a/ftparchive/writer.h +++ b/ftparchive/writer.h @@ -34,6 +34,7 @@ class FTWScanner { protected: vector Patterns; + string Arch; const char *OriginalPath; bool ErrorPrinted; @@ -68,7 +69,7 @@ class FTWScanner void AddPattern(string const &Pattern) { Patterns.push_back(Pattern); }; bool SetExts(string const &Vals); - FTWScanner(); + FTWScanner(string const &Arch = string()); }; class PackagesWriter : public FTWScanner @@ -92,7 +93,6 @@ class PackagesWriter : public FTWScanner string DirStrip; FILE *Output; struct CacheDB::Stats &Stats; - string Arch; inline bool ReadOverride(string const &File) {return Over.ReadOverride(File);}; inline bool ReadExtraOverride(string const &File) @@ -125,7 +125,7 @@ class ContentsWriter : public FTWScanner void Finish() {Gen.Print(Output);}; inline bool ReadyDB(string const &DB) {return Db.ReadyDB(DB);}; - ContentsWriter(string const &DB); + ContentsWriter(string const &DB, string const &Arch = string()); virtual ~ContentsWriter() {}; }; -- cgit v1.2.3 From 66905344357d03c206d99964a0d941b261f7146c Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Mon, 22 Mar 2010 18:25:37 +0100 Subject: * ftparchive/writer.cc: - write LongDescriptions if they shouldn't be included in Packages file into i18n/Translation-en by default. It is ensured that each package+description is listed only ones in the Translation file even if we generate multiple Packages file in one run. The file is only generated in "generate" - the simple file commands can't create it by now. Also, the LongDescription is currently a global setting, so generating archives with and without LongDescriptions in the Packages file in the same run are currently not possible. 
--- ftparchive/apt-ftparchive.cc | 47 +++++++++++++++++++++++++++++++++------- ftparchive/writer.cc | 51 ++++++++++++++++++++++++++++++++++++++++++-- ftparchive/writer.h | 19 +++++++++++++++++ 3 files changed, 107 insertions(+), 10 deletions(-) (limited to 'ftparchive') diff --git a/ftparchive/apt-ftparchive.cc b/ftparchive/apt-ftparchive.cc index f1a182e52..e69c88ddd 100644 --- a/ftparchive/apt-ftparchive.cc +++ b/ftparchive/apt-ftparchive.cc @@ -62,6 +62,9 @@ struct PackageMap string SrcOverride; string SrcExtraOverride; + // Translation master file + TranslationWriter *TransWriter; + // Contents string Contents; string ContentsHead; @@ -100,8 +103,9 @@ struct PackageMap vector::iterator End, unsigned long &Left); - PackageMap() : DeLinkLimit(0), Permissions(1), ContentsDone(false), - PkgDone(false), SrcDone(false), ContentsMTime(0) {}; + PackageMap() : TransWriter(NULL), DeLinkLimit(0), Permissions(1), + ContentsDone(false), PkgDone(false), SrcDone(false), + ContentsMTime(0) {}; }; /*}}}*/ @@ -169,6 +173,8 @@ bool PackageMap::GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats) Packages.DirStrip = ArchiveDir; Packages.InternalPrefix = flCombine(ArchiveDir,InternalPrefix); + Packages.TransWriter = TransWriter; + Packages.Stats.DeLinkBytes = Stats.DeLinkBytes; Packages.DeLinkLimit = DeLinkLimit; @@ -436,6 +442,8 @@ void LoadTree(vector &PkgList,Configuration &Setup) "$(DIST)/$(SECTION)/source/"); string DPkg = Setup.Find("TreeDefault::Packages", "$(DIST)/$(SECTION)/binary-$(ARCH)/Packages"); + string DTrans = Setup.Find("TreeDefault::Translation", + "$(DIST)/$(SECTION)/i18n/Translation-en"); string DIPrfx = Setup.Find("TreeDefault::InternalPrefix", "$(DIST)/$(SECTION)/"); string DContents = Setup.Find("TreeDefault::Contents", @@ -461,15 +469,25 @@ void LoadTree(vector &PkgList,Configuration &Setup) string Section; while (ParseQuoteWord(Sections,Section) == true) { - string Tmp2 = Block.Find("Architectures"); string Arch; + struct SubstVar const Vars[] = {{"$(DIST)",&Dist}, + {"$(SECTION)",&Section}, + {"$(ARCH)",&Arch}, + {}}; + TranslationWriter *TransWriter; + if (DTrans.empty() == false) + { + string const TranslationFile = flCombine(Setup.FindDir("Dir::ArchiveDir"), + SubstVar(Block.Find("Translation", DTrans.c_str()), Vars)); + TransWriter = new TranslationWriter(TranslationFile); + } + else + TransWriter = NULL; + + string const Tmp2 = Block.Find("Architectures"); const char *Archs = Tmp2.c_str(); while (ParseQuoteWord(Archs,Arch) == true) { - struct SubstVar Vars[] = {{"$(DIST)",&Dist}, - {"$(SECTION)",&Section}, - {"$(ARCH)",&Arch}, - {}}; PackageMap Itm; Itm.BinOverride = SubstVar(Block.Find("BinOverride"),Vars); @@ -491,6 +509,11 @@ void LoadTree(vector &PkgList,Configuration &Setup) Itm.PkgFile = SubstVar(Block.Find("Packages",DPkg.c_str()),Vars); Itm.Tag = SubstVar("$(DIST)/$(SECTION)/$(ARCH)",Vars); Itm.Arch = Arch; + if (TransWriter != NULL) + { + TransWriter->IncreaseRefCounter(); + Itm.TransWriter = TransWriter; + } Itm.Contents = SubstVar(Block.Find("Contents",DContents.c_str()),Vars); Itm.ContentsHead = SubstVar(Block.Find("Contents::Header",DContentsH.c_str()),Vars); Itm.FLFile = SubstVar(Block.Find("FileList",DFLFile.c_str()),Vars); @@ -500,6 +523,9 @@ void LoadTree(vector &PkgList,Configuration &Setup) Itm.GetGeneral(Setup,Block); PkgList.push_back(Itm); } + // we didn't use this TransWriter, so we can release it + if (TransWriter != NULL && TransWriter->GetRefCounter() == 0) + delete TransWriter; } Top = Top->Next; @@ -788,7 +814,12 @@ bool 
Generate(CommandLine &CmdL) delete [] List; } - + + // close the Translation master files + for (vector::iterator I = PkgList.begin(); I != PkgList.end(); I++) + if (I->TransWriter != NULL && I->TransWriter->DecreaseRefCounter() == 0) + delete I->TransWriter; + if (_config->FindB("APT::FTPArchive::Contents",true) == false) return true; diff --git a/ftparchive/writer.cc b/ftparchive/writer.cc index 9e5b7d4f3..b395903b7 100644 --- a/ftparchive/writer.cc +++ b/ftparchive/writer.cc @@ -300,7 +300,7 @@ bool FTWScanner::Delink(string &FileName,const char *OriginalPath, /* */ PackagesWriter::PackagesWriter(string const &DB,string const &Overrides,string const &ExtOverrides, string const &Arch) : - FTWScanner(Arch), Db(DB), Stats(Db.Stats) + FTWScanner(Arch), Db(DB), Stats(Db.Stats), TransWriter(NULL) { Output = stdout; SetExts(".deb .udeb"); @@ -317,7 +317,7 @@ PackagesWriter::PackagesWriter(string const &DB,string const &Overrides,string c if (Db.Loaded() == false) DoContents = false; - + // Read the override file if (Overrides.empty() == false && Over.ReadOverride(Overrides) == false) return; @@ -448,6 +448,8 @@ bool PackagesWriter::DoPackage(string FileName) descmd5.Add(desc.c_str()); DescriptionMd5 = descmd5.Result().Value(); SetTFRewriteData(Changes[End++], "Description-md5", DescriptionMd5.c_str()); + if (TransWriter != NULL) + TransWriter->DoPackage(Package, desc, DescriptionMd5); } // Rewrite the maintainer field if necessary @@ -494,6 +496,51 @@ bool PackagesWriter::DoPackage(string FileName) } /*}}}*/ +// TranslationWriter::TranslationWriter - Constructor /*{{{*/ +// --------------------------------------------------------------------- +/* Create a Translation-Master file for this Packages file */ +TranslationWriter::TranslationWriter(string const &File) : Output(NULL), + RefCounter(0) +{ + if (File.empty() == true) + return; + + Output = fopen(File.c_str(), "w"); +} + /*}}}*/ +// TranslationWriter::DoPackage - Process a single package /*{{{*/ +// --------------------------------------------------------------------- +/* Create a Translation-Master file for this Packages file */ +bool TranslationWriter::DoPackage(string const &Pkg, string const &Desc, + string const &MD5) +{ + if (Output == NULL) + return true; + + // Different archs can include different versions and therefore + // different descriptions - so we need to check for both name and md5. 
+ string const Record = Pkg + ":" + MD5; + + if (Included.find(Record) != Included.end()) + return true; + + fprintf(Output, "Package: %s\nDescription-md5: %s\nDescription-en: %s\n", + Pkg.c_str(), MD5.c_str(), Desc.c_str()); + + Included.insert(Record); + return true; +} + /*}}}*/ +// TranslationWriter::~TranslationWriter - Destructor /*{{{*/ +// --------------------------------------------------------------------- +/* */ +TranslationWriter::~TranslationWriter() +{ + if (Output != NULL) + fclose(Output); +} + /*}}}*/ + // SourcesWriter::SourcesWriter - Constructor /*{{{*/ // --------------------------------------------------------------------- /* */ diff --git a/ftparchive/writer.h b/ftparchive/writer.h index af7ba4edd..2afd1af1f 100644 --- a/ftparchive/writer.h +++ b/ftparchive/writer.h @@ -19,6 +19,7 @@ #include #include #include +#include #include "cachedb.h" #include "override.h" @@ -72,6 +73,23 @@ class FTWScanner FTWScanner(string const &Arch = string()); }; +class TranslationWriter +{ + FILE *Output; + std::set Included; + unsigned short RefCounter; + + public: + void IncreaseRefCounter() { ++RefCounter; }; + unsigned short DecreaseRefCounter() { return (RefCounter == 0) ? 0 : --RefCounter; }; + unsigned short GetRefCounter() const { return RefCounter; }; + bool DoPackage(string const &Pkg, string const &Desc, string const &MD5); + + TranslationWriter(string const &File); + TranslationWriter() : Output(NULL), RefCounter(0) {}; + ~TranslationWriter(); +}; + class PackagesWriter : public FTWScanner { Override Over; @@ -93,6 +111,7 @@ class PackagesWriter : public FTWScanner string DirStrip; FILE *Output; struct CacheDB::Stats &Stats; + TranslationWriter *TransWriter; inline bool ReadOverride(string const &File) {return Over.ReadOverride(File);}; inline bool ReadExtraOverride(string const &File) -- cgit v1.2.3 From b34d4b4745e4f7ead090a03e5efe35bd2e2e82b5 Mon Sep 17 00:00:00 2001 From: Julian Andres Klode Date: Fri, 26 Mar 2010 15:35:36 +0100 Subject: * ftparchive/apt-ftparchive.cc: - Read default configuration (Closes: #383257) --- ftparchive/apt-ftparchive.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'ftparchive') diff --git a/ftparchive/apt-ftparchive.cc b/ftparchive/apt-ftparchive.cc index f1a182e52..5456dd474 100644 --- a/ftparchive/apt-ftparchive.cc +++ b/ftparchive/apt-ftparchive.cc @@ -925,7 +925,7 @@ int main(int argc, const char *argv[]) // Parse the command line and initialize the package library CommandLine CmdL(Args,_config); - if (CmdL.Parse(argc,argv) == false) + if (pkgInitConfig(*_config) == false || CmdL.Parse(argc,argv) == false) { _error->DumpErrors(); return 100; -- cgit v1.2.3 From 3b1fffc35f3eb277d92f38c687c270edd1d8550d Mon Sep 17 00:00:00 2001 From: Julian Andres Klode Date: Fri, 26 Mar 2010 16:37:16 +0100 Subject: ftparchive/apt-ftparchive.cc: Include apt-pkg/init.h. 
--- ftparchive/apt-ftparchive.cc | 1 + 1 file changed, 1 insertion(+) (limited to 'ftparchive') diff --git a/ftparchive/apt-ftparchive.cc b/ftparchive/apt-ftparchive.cc index 5456dd474..4c26f79b8 100644 --- a/ftparchive/apt-ftparchive.cc +++ b/ftparchive/apt-ftparchive.cc @@ -16,6 +16,7 @@ #include #include #include +#include #include #include #include -- cgit v1.2.3 From 4e794c509becfd7e2bddfddc1205dc81397a48bd Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Fri, 26 Mar 2010 19:18:21 +0100 Subject: Inclusion of Long Descriptions in the Packages files can be set now also in TreeDefaults and Tree to support generation of archives which should support and which shouldn't support splitted out Translation-en files in the same run. --- ftparchive/apt-ftparchive.cc | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) (limited to 'ftparchive') diff --git a/ftparchive/apt-ftparchive.cc b/ftparchive/apt-ftparchive.cc index e69c88ddd..f3e91d90d 100644 --- a/ftparchive/apt-ftparchive.cc +++ b/ftparchive/apt-ftparchive.cc @@ -63,6 +63,7 @@ struct PackageMap string SrcExtraOverride; // Translation master file + bool LongDesc; TranslationWriter *TransWriter; // Contents @@ -103,7 +104,7 @@ struct PackageMap vector::iterator End, unsigned long &Left); - PackageMap() : TransWriter(NULL), DeLinkLimit(0), Permissions(1), + PackageMap() : LongDesc(true), TransWriter(NULL), DeLinkLimit(0), Permissions(1), ContentsDone(false), PkgDone(false), SrcDone(false), ContentsMTime(0) {}; }; @@ -174,6 +175,7 @@ bool PackageMap::GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats) Packages.InternalPrefix = flCombine(ArchiveDir,InternalPrefix); Packages.TransWriter = TransWriter; + Packages.LongDescription = LongDesc; Packages.Stats.DeLinkBytes = Stats.DeLinkBytes; Packages.DeLinkLimit = DeLinkLimit; @@ -456,6 +458,9 @@ void LoadTree(vector &PkgList,Configuration &Setup) string DFLFile = Setup.Find("TreeDefault::FileList", ""); string DSFLFile = Setup.Find("TreeDefault::SourceFileList", ""); + bool const LongDescription = Setup.FindB("TreeDefault::LongDescription", + _config->FindB("APT::FTPArchive::LongDescription", true)); + // Process 'tree' type sections const Configuration::Item *Top = Setup.Tree("tree"); for (Top = (Top == 0?0:Top->Child); Top != 0;) @@ -474,8 +479,9 @@ void LoadTree(vector &PkgList,Configuration &Setup) {"$(SECTION)",&Section}, {"$(ARCH)",&Arch}, {}}; + bool const LongDesc = Block.FindB("LongDescription", LongDescription); TranslationWriter *TransWriter; - if (DTrans.empty() == false) + if (DTrans.empty() == false && LongDesc == false) { string const TranslationFile = flCombine(Setup.FindDir("Dir::ArchiveDir"), SubstVar(Block.Find("Translation", DTrans.c_str()), Vars)); @@ -509,6 +515,7 @@ void LoadTree(vector &PkgList,Configuration &Setup) Itm.PkgFile = SubstVar(Block.Find("Packages",DPkg.c_str()),Vars); Itm.Tag = SubstVar("$(DIST)/$(SECTION)/$(ARCH)",Vars); Itm.Arch = Arch; + Itm.LongDesc = LongDesc; if (TransWriter != NULL) { TransWriter->IncreaseRefCounter(); -- cgit v1.2.3 From 34f1d96cf5657b5e34cd9880dccfa2028fa16b13 Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Fri, 26 Mar 2010 22:38:50 +0100 Subject: Switch the TranslationWriter to use MultiCompress to be able to generate the compressed files as we want them and to prevent the file to be replaced without a reason which could save us from steady redownloads of a file with the same content. 
From 34f1d96cf5657b5e34cd9880dccfa2028fa16b13 Mon Sep 17 00:00:00 2001
From: David Kalnischkies
Date: Fri, 26 Mar 2010 22:38:50 +0100
Subject: Switch the TranslationWriter to MultiCompress so that the compressed
 files are generated the way we want them, and so that the file is not
 replaced without reason, which spares clients repeated downloads of a file
 whose content has not changed.

---
 ftparchive/apt-ftparchive.cc | 13 ++++++-----
 ftparchive/writer.cc         | 12 ++++++++----
 ftparchive/writer.h          |  6 ++++--
 3 files changed, 20 insertions(+), 11 deletions(-)

(limited to 'ftparchive')

diff --git a/ftparchive/apt-ftparchive.cc b/ftparchive/apt-ftparchive.cc
index f3e91d90d..46831b385 100644
--- a/ftparchive/apt-ftparchive.cc
+++ b/ftparchive/apt-ftparchive.cc
@@ -134,8 +134,6 @@ void PackageMap::GetGeneral(Configuration &Setup,Configuration &Block)
    PkgExt = Block.Find("Packages::Extensions",
                Setup.Find("Default::Packages::Extensions",".deb").c_str());

-   Permissions = Setup.FindI("Default::FileMode",0644);
-
    if (FLFile.empty() == false)
       FLFile = flCombine(Setup.Find("Dir::FileListDir"),FLFile);

@@ -458,8 +456,11 @@ void LoadTree(vector<PackageMap> &PkgList,Configuration &Setup)
    string DFLFile = Setup.Find("TreeDefault::FileList", "");
    string DSFLFile = Setup.Find("TreeDefault::SourceFileList", "");

-   bool const LongDescription = Setup.FindB("TreeDefault::LongDescription",
+   int const Permissions = Setup.FindI("Default::FileMode",0644);
+
+   bool const LongDescription = Setup.FindB("Default::LongDescription",
                                        _config->FindB("APT::FTPArchive::LongDescription", true));
+   string const TranslationCompress = Setup.Find("Default::Translation::Compress",". gzip").c_str();

    // Process 'tree' type sections
    const Configuration::Item *Top = Setup.Tree("tree");
    for (Top = (Top == 0?0:Top->Child); Top != 0;)
@@ -479,13 +480,15 @@ void LoadTree(vector<PackageMap> &PkgList,Configuration &Setup)
                        {"$(SECTION)",&Section},
                        {"$(ARCH)",&Arch},
                        {}};
+      mode_t const Perms = Block.FindI("FileMode", Permissions);
       bool const LongDesc = Block.FindB("LongDescription", LongDescription);
       TranslationWriter *TransWriter;
       if (DTrans.empty() == false && LongDesc == false)
       {
         string const TranslationFile = flCombine(Setup.FindDir("Dir::ArchiveDir"),
                        SubstVar(Block.Find("Translation", DTrans.c_str()), Vars));
-        TransWriter = new TranslationWriter(TranslationFile);
+        string const TransCompress = Block.Find("Translation::Compress", TranslationCompress);
+        TransWriter = new TranslationWriter(TranslationFile, TransCompress, Perms);
       }
       else
         TransWriter = NULL;
@@ -495,7 +498,7 @@ void LoadTree(vector<PackageMap> &PkgList,Configuration &Setup)
       while (ParseQuoteWord(Archs,Arch) == true)
       {
         PackageMap Itm;
-
+        Itm.Permissions = Perms;
         Itm.BinOverride = SubstVar(Block.Find("BinOverride"),Vars);
         Itm.InternalPrefix = SubstVar(Block.Find("InternalPrefix",DIPrfx.c_str()),Vars);

diff --git a/ftparchive/writer.cc b/ftparchive/writer.cc
index b395903b7..45a8d212b 100644
--- a/ftparchive/writer.cc
+++ b/ftparchive/writer.cc
@@ -499,13 +499,15 @@ bool PackagesWriter::DoPackage(string FileName)
 // TranslationWriter::TranslationWriter - Constructor /*{{{*/
 // ---------------------------------------------------------------------
 /* Create a Translation-Master file for this Packages file */
-TranslationWriter::TranslationWriter(string const &File) : Output(NULL),
+TranslationWriter::TranslationWriter(string const &File, string const &TransCompress,
+                                     mode_t const &Permissions) : Output(NULL),
                                      RefCounter(0)
 {
    if (File.empty() == true)
      return;

-   Output = fopen(File.c_str(), "w");
+   Comp = new MultiCompress(File, TransCompress, Permissions);
+   Output = Comp->Input;
 }
 /*}}}*/
 // TranslationWriter::DoPackage - Process a single package /*{{{*/
@@ -536,8 +538,10 @@ bool TranslationWriter::DoPackage(string const &Pkg, string const &Desc,
 /* */
 TranslationWriter::~TranslationWriter()
 {
-   if (Output != NULL)
-      fclose(Output);
+   if (Comp == NULL)
+      return;
+
+   delete Comp;
 }
 /*}}}*/

diff --git a/ftparchive/writer.h b/ftparchive/writer.h
index 2afd1af1f..3123a7f46 100644
--- a/ftparchive/writer.h
+++ b/ftparchive/writer.h
@@ -22,6 +22,7 @@
 #include

 #include "cachedb.h"
+#include "multicompress.h"
 #include "override.h"
 #include "apt-ftparchive.h"

@@ -75,6 +76,7 @@ class FTWScanner

 class TranslationWriter
 {
+   MultiCompress *Comp;
    FILE *Output;
    std::set<string> Included;
    unsigned short RefCounter;
@@ -85,8 +87,8 @@ class TranslationWriter
    unsigned short GetRefCounter() const { return RefCounter; };
    bool DoPackage(string const &Pkg, string const &Desc, string const &MD5);

-   TranslationWriter(string const &File);
-   TranslationWriter() : Output(NULL), RefCounter(0) {};
+   TranslationWriter(string const &File, string const &TransCompress, mode_t const &Permissions);
+   TranslationWriter() : Comp(NULL), Output(NULL), RefCounter(0) {};
    ~TranslationWriter();
 };
-- 
cgit v1.2.3
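A configuration sketch for the new compression and permission knobs follows; it is illustrative only and not part of the patch. The key names (Default::FileMode, Default::Translation::Compress, per-tree Translation::Compress) come from the lookups added above; the ". gzip" default visible in the patch is assumed to mean "an uncompressed copy plus a gzip copy", matching the other Compress options, and the bzip2 override is a made-up example value.

   // Hypothetical apt-ftparchive.conf fragment
   Default {
      FileMode "0644";                   // permissions of the generated index files
      Translation::Compress ". gzip";    // keep an uncompressed and a .gz Translation-en file
   };

   tree "dists/unstable" {
      Sections "main";
      Architectures "amd64 source";
      // Per-tree override: only ship a bzip2-compressed Translation-en file for this tree.
      Translation::Compress "bzip2";
   };
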
From bf99a6d3307af667a23fe09bfc437a553bbbd182 Mon Sep 17 00:00:00 2001
From: David Kalnischkies
Date: Fri, 23 Apr 2010 17:13:02 +0200
Subject: * ftparchive/writer.cc:
   - remove the 999 character limit on the Files and Checksums rewrite
     (Closes: #577759)

---
 ftparchive/writer.cc | 40 +++++++++++++++++++---------------------
 1 file changed, 19 insertions(+), 21 deletions(-)

(limited to 'ftparchive')

diff --git a/ftparchive/writer.cc b/ftparchive/writer.cc
index 45a8d212b..6cda29b21 100644
--- a/ftparchive/writer.cc
+++ b/ftparchive/writer.cc
@@ -28,6 +28,7 @@
 #include
 #include
 #include
+#include
 #include

 #include "cachedb.h"
@@ -706,23 +707,20 @@ bool SourcesWriter::DoPackage(string FileName)

    // Add the dsc to the files hash list
    string const strippedName = flNotDir(FileName);
-   char Files[1000];
-   snprintf(Files,sizeof(Files),"\n %s %lu %s\n %s",
-            string(MD5.Result()).c_str(),St.st_size,
-            strippedName.c_str(),
-            Tags.FindS("Files").c_str());
-
-   char ChecksumsSha1[1000];
-   snprintf(ChecksumsSha1,sizeof(ChecksumsSha1),"\n %s %lu %s\n %s",
-            string(SHA1.Result()).c_str(),St.st_size,
-            strippedName.c_str(),
-            Tags.FindS("Checksums-Sha1").c_str());
-
-   char ChecksumsSha256[1000];
-   snprintf(ChecksumsSha256,sizeof(ChecksumsSha256),"\n %s %lu %s\n %s",
-            string(SHA256.Result()).c_str(),St.st_size,
-            strippedName.c_str(),
-            Tags.FindS("Checksums-Sha256").c_str());
+   std::ostringstream ostreamFiles;
+   ostreamFiles << "\n " << string(MD5.Result()) << " " << St.st_size << " "
+                << strippedName << "\n " << Tags.FindS("Files");
+   string const Files = ostreamFiles.str();
+
+   std::ostringstream ostreamSha1;
+   ostreamSha1 << "\n " << string(SHA1.Result()) << " " << St.st_size << " "
+               << strippedName << "\n " << Tags.FindS("Checksums-Sha1");
+   string const ChecksumsSha1 = ostreamSha1.str();
+
+   std::ostringstream ostreamSha256;
+   ostreamSha256 << "\n " << string(SHA256.Result()) << " " << St.st_size << " "
+                 << strippedName << "\n " << Tags.FindS("Checksums-Sha256");
+   string const ChecksumsSha256 = ostreamSha256.str();

    // Strip the DirStrip prefix from the FileName and add the PathPrefix
    string NewFileName;
@@ -740,7 +738,7 @@ bool SourcesWriter::DoPackage(string FileName)

    // Perform the delinking operation over all of the files
    string ParseJnk;
-   const char *C = Files;
+   const char *C = Files.c_str();
    char *RealPath = NULL;
    for (;isspace(*C); C++);
    while (*C != 0)
@@ -773,9 +771,9 @@ bool SourcesWriter::DoPackage(string FileName)

    unsigned int End = 0;
    SetTFRewriteData(Changes[End++],"Source",Package.c_str(),"Package");
-   SetTFRewriteData(Changes[End++],"Files",Files);
-   SetTFRewriteData(Changes[End++],"Checksums-Sha1",ChecksumsSha1);
-   SetTFRewriteData(Changes[End++],"Checksums-Sha256",ChecksumsSha256);
+   SetTFRewriteData(Changes[End++],"Files",Files.c_str());
+   SetTFRewriteData(Changes[End++],"Checksums-Sha1",ChecksumsSha1.c_str());
+   SetTFRewriteData(Changes[End++],"Checksums-Sha256",ChecksumsSha256.c_str());
    if (Directory != "./")
       SetTFRewriteData(Changes[End++],"Directory",Directory.c_str());
    SetTFRewriteData(Changes[End++],"Priority",BestPrio.c_str());
-- 
cgit v1.2.3


From f5cd2dbfc493127b18d9ea2b0bb258b736b91cd4 Mon Sep 17 00:00:00 2001
From: David Kalnischkies
Date: Sat, 29 May 2010 19:10:19 +0200
Subject: * ftparchive/writer.h:
   - add a virtual destructor to the FTWScanner class (for cppcheck)

---
 ftparchive/writer.h | 1 +
 1 file changed, 1 insertion(+)

(limited to 'ftparchive')

diff --git a/ftparchive/writer.h b/ftparchive/writer.h
index 3123a7f46..c08ddea85 100644
--- a/ftparchive/writer.h
+++ b/ftparchive/writer.h
@@ -72,6 +72,7 @@ class FTWScanner
    bool SetExts(string const &Vals);

    FTWScanner(string const &Arch = string());
+   virtual ~FTWScanner();
 };

 class TranslationWriter
-- 
cgit v1.2.3


From 4c265635a8417b857a3a8f537c74313d5533da9b Mon Sep 17 00:00:00 2001
From: David Kalnischkies
Date: Mon, 31 May 2010 12:58:20 +0200
Subject: I managed to commit broken code… which (at least in my mind) worked
 yesterday. Strange… anyway, only small fixes.
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 ftparchive/writer.h | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

(limited to 'ftparchive')

diff --git a/ftparchive/writer.h b/ftparchive/writer.h
index c08ddea85..49d430c47 100644
--- a/ftparchive/writer.h
+++ b/ftparchive/writer.h
@@ -72,7 +72,7 @@ class FTWScanner
    bool SetExts(string const &Vals);

    FTWScanner(string const &Arch = string());
-   virtual ~FTWScanner();
+   virtual ~FTWScanner() {};
 };

 class TranslationWriter
-- 
cgit v1.2.3


From c99e48ec26e693d9aa4a2a9f868284f7aa49784d Mon Sep 17 00:00:00 2001
From: David Kalnischkies
Date: Wed, 9 Jun 2010 00:12:14 +0200
Subject: * ftparchive/writer.cc:
   - add a ValidTime option to generate a Valid-Until header in the Release file

---
 ftparchive/writer.cc | 10 ++++++++++
 1 file changed, 10 insertions(+)

(limited to 'ftparchive')

diff --git a/ftparchive/writer.cc b/ftparchive/writer.cc
index 6cda29b21..650eec57c 100644
--- a/ftparchive/writer.cc
+++ b/ftparchive/writer.cc
@@ -924,6 +924,15 @@ ReleaseWriter::ReleaseWriter(string const &DB)
       datestr[0] = '\0';
    }

+   time_t const validuntil = now + _config->FindI("APT::FTPArchive::Release::ValidTime", 0);
+   char validstr[128];
+   if (now == validuntil ||
+       strftime(validstr, sizeof(validstr), "%a, %d %b %Y %H:%M:%S UTC",
+       gmtime(&validuntil)) == 0)
+   {
+      validstr[0] = '\0';
+   }
+
    map<string,string> Fields;
    Fields["Origin"] = "";
    Fields["Label"] = "";
    Fields["Suite"] = "";
    Fields["Version"] = "";
    Fields["Codename"] = "";
    Fields["Date"] = datestr;
+   Fields["Valid-Until"] = validstr;
    Fields["Architectures"] = "";
    Fields["Components"] = "";
    Fields["Description"] = "";
-- 
cgit v1.2.3
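As a usage sketch for the new option: ValidTime is read in seconds and added to the current time, so a value of 864000 puts Valid-Until ten days after Date. The fragment below is an illustration, not part of the patch; the Origin and Suite values and the dists/unstable path are invented, and the other Release fields are filled the same way via APT::FTPArchive::Release::*.

   // Hypothetical apt-ftparchive.conf fragment
   APT::FTPArchive::Release {
      Origin "Example";
      Suite "unstable";
      ValidTime "864000";   // seconds; 864000 s = 10 days after the Date field
   };
   // Alternatively, pass it on the command line:
   //   apt-ftparchive -o APT::FTPArchive::Release::ValidTime=864000 release dists/unstable > dists/unstable/Release
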