From ce928105d7279c5604f034740b04dc6a745fb859 Mon Sep 17 00:00:00 2001 From: Michael Vogt Date: Fri, 4 Apr 2014 14:30:17 +0200 Subject: Implement CacheDB for source packages in apt-ftparchive --- apt-pkg/contrib/hashes.cc | 2 +- apt-pkg/contrib/hashes.h | 2 + ftparchive/apt-ftparchive.cc | 30 ++++++- ftparchive/cachedb.cc | 146 +++++++++++++++++++++---------- ftparchive/cachedb.h | 28 +++++- ftparchive/makefile | 2 +- ftparchive/sources.cc | 47 ++++++++++ ftparchive/sources.h | 32 +++++++ ftparchive/writer.cc | 107 ++++++++++++----------- ftparchive/writer.h | 2 +- test/integration/framework | 5 +- test/integration/test-apt-ftparchive | 164 +++++++++++++++++++++++++++++++++++ 12 files changed, 459 insertions(+), 108 deletions(-) create mode 100644 ftparchive/sources.cc create mode 100644 ftparchive/sources.h create mode 100755 test/integration/test-apt-ftparchive diff --git a/apt-pkg/contrib/hashes.cc b/apt-pkg/contrib/hashes.cc index 1fce0d75f..15f83615d 100644 --- a/apt-pkg/contrib/hashes.cc +++ b/apt-pkg/contrib/hashes.cc @@ -133,7 +133,7 @@ bool Hashes::AddFD(int const Fd,unsigned long long Size, bool const addMD5, bool const addSHA1, bool const addSHA256, bool const addSHA512) { unsigned char Buf[64*64]; - bool const ToEOF = (Size == 0); + bool const ToEOF = (Size == UntilEOF); while (Size != 0 || ToEOF) { unsigned long long n = sizeof(Buf); diff --git a/apt-pkg/contrib/hashes.h b/apt-pkg/contrib/hashes.h index 5cd1af03b..7a62f8a8f 100644 --- a/apt-pkg/contrib/hashes.h +++ b/apt-pkg/contrib/hashes.h @@ -78,6 +78,8 @@ class Hashes SHA256Summation SHA256; SHA512Summation SHA512; + static const int UntilEOF = 0; + inline bool Add(const unsigned char *Data,unsigned long long Size) { return MD5.Add(Data,Size) && SHA1.Add(Data,Size) && SHA256.Add(Data,Size) && SHA512.Add(Data,Size); diff --git a/ftparchive/apt-ftparchive.cc b/ftparchive/apt-ftparchive.cc index 692f19e25..c1614398b 100644 --- a/ftparchive/apt-ftparchive.cc +++ b/ftparchive/apt-ftparchive.cc @@ -62,6 +62,7 @@ struct PackageMap // Stuff for the Package File string PkgFile; string BinCacheDB; + string SrcCacheDB; string BinOverride; string ExtraOverride; @@ -106,6 +107,12 @@ struct PackageMap inline bool operator() (const PackageMap &x,const PackageMap &y) {return x.BinCacheDB < y.BinCacheDB;}; }; + + struct SrcDBCompare : public binary_function + { + inline bool operator() (const PackageMap &x,const PackageMap &y) + {return x.SrcCacheDB < y.SrcCacheDB;}; + }; void GetGeneral(Configuration &Setup,Configuration &Block); bool GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats); @@ -232,11 +239,14 @@ bool PackageMap::GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats) gettimeofday(&NewTime,0); double Delta = NewTime.tv_sec - StartTime.tv_sec + (NewTime.tv_usec - StartTime.tv_usec)/1000000.0; - + c0out << Packages.Stats.Packages << " files " << /* SizeToStr(Packages.Stats.MD5Bytes) << "B/" << */ SizeToStr(Packages.Stats.Bytes) << "B " << TimeToStr((long)Delta) << endl; + + if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true) + c0out << " Misses in Cache: " << Packages.Stats.Misses<< endl; Stats.Add(Packages.Stats); Stats.DeLinkBytes = Packages.Stats.DeLinkBytes; @@ -263,7 +273,7 @@ bool PackageMap::GenSources(Configuration &Setup,struct CacheDB::Stats &Stats) SrcDone = true; // Create a package writer object. 
- SourcesWriter Sources(_config->Find("APT::FTPArchive::DB"), + SourcesWriter Sources(flCombine(CacheDir, SrcCacheDB), flCombine(OverrideDir,BinOverride), flCombine(OverrideDir,SrcOverride), flCombine(OverrideDir,SrcExtraOverride)); @@ -323,6 +333,9 @@ bool PackageMap::GenSources(Configuration &Setup,struct CacheDB::Stats &Stats) c0out << Sources.Stats.Packages << " pkgs in " << TimeToStr((long)Delta) << endl; + if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true) + c0out << " Misses in Cache: " << Sources.Stats.Misses << endl; + Stats.Add(Sources.Stats); Stats.DeLinkBytes = Sources.Stats.DeLinkBytes; @@ -435,6 +448,9 @@ bool PackageMap::GenContents(Configuration &Setup, double Delta = NewTime.tv_sec - StartTime.tv_sec + (NewTime.tv_usec - StartTime.tv_usec)/1000000.0; + if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true) + c0out << " Misses in Cache: " << Contents.Stats.Misses<< endl; + c0out << Contents.Stats.Packages << " files " << SizeToStr(Contents.Stats.Bytes) << "B " << TimeToStr((long)Delta) << endl; @@ -465,6 +481,8 @@ static void LoadTree(vector &PkgList,Configuration &Setup) string DContentsH = Setup.Find("TreeDefault::Contents::Header",""); string DBCache = Setup.Find("TreeDefault::BinCacheDB", "packages-$(ARCH).db"); + string SrcDBCache = Setup.Find("TreeDefault::SrcCacheDB", + "sources-$(SECTION).db"); string DSources = Setup.Find("TreeDefault::Sources", "$(DIST)/$(SECTION)/source/Sources"); string DFLFile = Setup.Find("TreeDefault::FileList", ""); @@ -524,6 +542,7 @@ static void LoadTree(vector &PkgList,Configuration &Setup) Itm.Tag = SubstVar("$(DIST)/$(SECTION)/source",Vars); Itm.FLFile = SubstVar(Block.Find("SourceFileList",DSFLFile.c_str()),Vars); Itm.SrcExtraOverride = SubstVar(Block.Find("SrcExtraOverride"),Vars); + Itm.SrcCacheDB = SubstVar(Block.Find("SrcCacheDB",SrcDBCache.c_str()),Vars); } else { @@ -573,6 +592,7 @@ static void LoadBinDir(vector &PkgList,Configuration &Setup) Itm.PkgFile = Block.Find("Packages"); Itm.SrcFile = Block.Find("Sources"); Itm.BinCacheDB = Block.Find("BinCacheDB"); + Itm.SrcCacheDB = Block.Find("SrcCacheDB"); Itm.BinOverride = Block.Find("BinOverride"); Itm.ExtraOverride = Block.Find("ExtraOverride"); Itm.SrcExtraOverride = Block.Find("SrcExtraOverride"); @@ -777,6 +797,7 @@ static bool Generate(CommandLine &CmdL) // Sort by cache DB to improve IO locality. 
stable_sort(PkgList.begin(),PkgList.end(),PackageMap::DBCompare()); + stable_sort(PkgList.begin(),PkgList.end(),PackageMap::SrcDBCompare()); // Generate packages if (CmdL.FileSize() <= 2) @@ -947,8 +968,11 @@ static bool Clean(CommandLine &CmdL) _error->DumpErrors(); string CacheDB = I->BinCacheDB; - for (; I != PkgList.end() && I->BinCacheDB == CacheDB; ++I); + for (; I != PkgList.end() && I->BinCacheDB == CacheDB; ++I) + ; } + + // FIXME: clean for the SourcesDB return true; } diff --git a/ftparchive/cachedb.cc b/ftparchive/cachedb.cc index 523c6b5fa..a63b5b9d9 100644 --- a/ftparchive/cachedb.cc +++ b/ftparchive/cachedb.cc @@ -20,6 +20,7 @@ #include #include #include +#include #include // htonl, etc #include @@ -99,13 +100,32 @@ bool CacheDB::OpenFile() Fd = new FileFd(FileName,FileFd::ReadOnly); if (_error->PendingError() == true) { - delete Fd; - Fd = NULL; - return false; + CloseFile(); + return false; } return true; } /*}}}*/ +void CacheDB::CloseFile() +{ + delete Fd; + Fd = NULL; +} + +bool CacheDB::OpenDebFile() +{ + DebFile = new debDebFile(*Fd); + if (_error->PendingError() == true) + return false; + return true; +} + +void CacheDB::CloseDebFile() +{ + delete DebFile; + DebFile = NULL; +} + // CacheDB::GetFileStat - Get stats from the file /*{{{*/ // --------------------------------------------------------------------- /* This gets the size from the database if it's there. If we need @@ -168,56 +188,94 @@ bool CacheDB::GetCurStat() /*}}}*/ // CacheDB::GetFileInfo - Get all the info about the file /*{{{*/ // --------------------------------------------------------------------- -bool CacheDB::GetFileInfo(std::string const &FileName, bool const &DoControl, bool const &DoContents, - bool const &GenContentsOnly, bool const &DoMD5, bool const &DoSHA1, - bool const &DoSHA256, bool const &DoSHA512, +bool CacheDB::GetFileInfo(std::string const &FileName, bool const &DoControl, + bool const &DoContents, + bool const &GenContentsOnly, + bool const &DoSource, + bool const &DoMD5, bool const &DoSHA1, + bool const &DoSHA256, bool const &DoSHA512, bool const &checkMtime) { - this->FileName = FileName; + bool result = true; + this->FileName = FileName; - if (GetCurStat() == false) + if (GetCurStat() == false) { - return false; + return false; } OldStat = CurStat; - if (GetFileStat(checkMtime) == false) - { - delete Fd; - Fd = NULL; - return false; - } + if (GetFileStat(checkMtime) == false) + { + CloseFile(); + return false; + } /* if mtime changed, update CurStat from disk */ if (checkMtime == true && OldStat.mtime != CurStat.mtime) - CurStat.Flags = FlSize; - - Stats.Bytes += CurStat.FileSize; - Stats.Packages++; - - if ((DoControl && LoadControl() == false) - || (DoContents && LoadContents(GenContentsOnly) == false) - || (DoMD5 && GetMD5(false) == false) - || (DoSHA1 && GetSHA1(false) == false) - || (DoSHA256 && GetSHA256(false) == false) - || (DoSHA512 && GetSHA512(false) == false) - ) - { - delete Fd; - Fd = NULL; - delete DebFile; - DebFile = NULL; - return false; - } + CurStat.Flags = FlSize; - delete Fd; - Fd = NULL; - delete DebFile; - DebFile = NULL; + Stats.Bytes += CurStat.FileSize; + Stats.Packages++; - return true; + if ((DoControl && LoadControl() == false) + || (DoContents && LoadContents(GenContentsOnly) == false) + || (DoSource && LoadSource() == false) + || (DoMD5 && GetMD5(false) == false) + || (DoSHA1 && GetSHA1(false) == false) + || (DoSHA256 && GetSHA256(false) == false) + || (DoSHA512 && GetSHA512(false) == false) + ) + { + result = false; + } + + CloseFile(); + 
CloseDebFile(); + + return result; } /*}}}*/ + +bool CacheDB::LoadSource() +{ + // Try to read the control information out of the DB. + if ((CurStat.Flags & FlSource) == FlSource) + { + // Lookup the control information + InitQuery("cs"); + if (Get() == true && Dsc.TakeDsc(Data.data, Data.size) == true) + return true; + CurStat.Flags &= ~FlSource; + } + + if (Fd == NULL && OpenFile() == false) + { + return false; + } + + // Read the .dsc file + if (Fd == NULL) + { + if(OpenFile() == false) + return false; + } + + Stats.Misses++; + if (Dsc.Read(FileName) == false) + return false; + + if (Dsc.Data == 0) + return _error->Error(_("Failed to read .dsc")); + + // Write back the control information + InitQuery("cs"); + if (Put(Dsc.Data, Dsc.Length) == true) + CurStat.Flags |= FlSource; + + return true; +} + // CacheDB::LoadControl - Load Control information /*{{{*/ // --------------------------------------------------------------------- /* */ @@ -238,11 +296,10 @@ bool CacheDB::LoadControl() return false; } // Create a deb instance to read the archive - if (DebFile == 0) + if (DebFile == NULL) { - DebFile = new debDebFile(*Fd); - if (_error->PendingError() == true) - return false; + if(OpenDebFile() == false) + return false; } Stats.Misses++; @@ -288,8 +345,7 @@ bool CacheDB::LoadContents(bool const &GenOnly) // Create a deb instance to read the archive if (DebFile == 0) { - DebFile = new debDebFile(*Fd); - if (_error->PendingError() == true) + if(OpenDebFile() == false) return false; } diff --git a/ftparchive/cachedb.h b/ftparchive/cachedb.h index 49b9a0ef5..4e33c8635 100644 --- a/ftparchive/cachedb.h +++ b/ftparchive/cachedb.h @@ -22,9 +22,11 @@ #include #include "contents.h" +#include "sources.h" class FileFd; + class CacheDB { protected: @@ -65,10 +67,16 @@ class CacheDB return true; } bool OpenFile(); + void CloseFile(); + + bool OpenDebFile(); + void CloseDebFile(); + bool GetFileStat(bool const &doStat = false); bool GetCurStat(); bool LoadControl(); bool LoadContents(bool const &GenOnly); + bool LoadSource(); bool GetMD5(bool const &GenOnly); bool GetSHA1(bool const &GenOnly); bool GetSHA256(bool const &GenOnly); @@ -77,7 +85,8 @@ class CacheDB // Stat info stored in the DB, Fixed types since it is written to disk. 
enum FlagList {FlControl = (1<<0),FlMD5=(1<<1),FlContents=(1<<2), FlSize=(1<<3), FlSHA1=(1<<4), FlSHA256=(1<<5), - FlSHA512=(1<<6)}; + FlSHA512=(1<<6), FlSource=(1<<7), + }; struct StatStore { @@ -101,6 +110,8 @@ class CacheDB // Data collection helpers debDebFile::MemControlExtract Control; ContentsExtract Contents; + DscExtract Dsc; + std::string MD5Res; std::string SHA1Res; std::string SHA256Res; @@ -139,8 +150,19 @@ class CacheDB inline unsigned long long GetFileSize(void) {return CurStat.FileSize;} bool SetFile(std::string const &FileName,struct stat St,FileFd *Fd); - bool GetFileInfo(std::string const &FileName, bool const &DoControl, bool const &DoContents, bool const &GenContentsOnly, - bool const &DoMD5, bool const &DoSHA1, bool const &DoSHA256, bool const &DoSHA512, bool const &checkMtime = false); + + // terrible old overloaded interface + bool GetFileInfo(std::string const &FileName, + bool const &DoControl, + bool const &DoContents, + bool const &GenContentsOnly, + bool const &DoSource, + bool const &DoMD5, + bool const &DoSHA1, + bool const &DoSHA256, + bool const &DoSHA512, + bool const &checkMtime = false); + bool Finish(); bool Clean(); diff --git a/ftparchive/makefile b/ftparchive/makefile index c53ecff72..d1ffe182a 100644 --- a/ftparchive/makefile +++ b/ftparchive/makefile @@ -12,7 +12,7 @@ PROGRAM=apt-ftparchive SLIBS = -lapt-pkg -lapt-inst $(BDBLIB) $(INTLLIBS) LIB_MAKES = apt-pkg/makefile apt-inst/makefile SOURCE = apt-ftparchive.cc cachedb.cc writer.cc contents.cc override.cc \ - multicompress.cc + multicompress.cc sources.cc include $(PROGRAM_H) else PROGRAM=apt-ftparchive diff --git a/ftparchive/sources.cc b/ftparchive/sources.cc new file mode 100644 index 000000000..d0878a70a --- /dev/null +++ b/ftparchive/sources.cc @@ -0,0 +1,47 @@ +#include +#include + +// for memcpy +#include + +#include +#include + +#include "sources.h" + +bool DscExtract::TakeDsc(const void *newData, unsigned long newSize) +{ + if(newSize > maxSize) + return _error->Error("DSC data is too large %lu!", newSize); + + if (newSize == 0) + { + Length = 0; + return true; + } + memcpy(Data, newData, newSize); + Length = newSize; + + return true; +} + +bool DscExtract::Read(std::string FileName) +{ + FileFd F; + if (OpenMaybeClearSignedFile(FileName, F) == false) + return false; + + unsigned long long const FSize = F.FileSize(); + if(FSize > maxSize) + return _error->Error("DSC file '%s' is too large!",FileName.c_str()); + + if (F.Read(Data, FSize) == false) + return false; + Length = FSize; + + IsClearSigned = (FileName != F.Name()); + + return true; +} + + diff --git a/ftparchive/sources.h b/ftparchive/sources.h new file mode 100644 index 000000000..91e0b1376 --- /dev/null +++ b/ftparchive/sources.h @@ -0,0 +1,32 @@ +#ifndef SOURCES_H +#define SOURCES_H + +#include + +class DscExtract +{ + public: + //FIXME: do we really need to enforce a maximum size of the dsc file? 
+ static const int maxSize = 128*1024; + + char *Data; + pkgTagSection Section; + unsigned long Length; + bool IsClearSigned; + + bool TakeDsc(const void *Data, unsigned long Size); + bool Read(std::string FileName); + + DscExtract() : Data(0), Length(0) { + Data = new char[maxSize]; + }; + ~DscExtract() { + if(Data != NULL) { + delete [] Data; + Data = NULL; + } + }; +}; + + +#endif diff --git a/ftparchive/writer.cc b/ftparchive/writer.cc index 153c4fb42..7c1c9cc03 100644 --- a/ftparchive/writer.cc +++ b/ftparchive/writer.cc @@ -385,10 +385,14 @@ bool FTWScanner::SetExts(string const &Vals) bool PackagesWriter::DoPackage(string FileName) { // Pull all the data we need form the DB - if (Db.GetFileInfo(FileName, true, DoContents, true, DoMD5, DoSHA1, DoSHA256, DoSHA512, DoAlwaysStat) - == false) + if (Db.GetFileInfo(FileName, + true, /* DoControl */ + DoContents, + true, /* GenContentsOnly */ + false, /* DoSource */ + DoMD5, DoSHA1, DoSHA256, DoSHA512, DoAlwaysStat) == false) { - return false; + return false; } unsigned long long FileSize = Db.GetFileSize(); @@ -614,59 +618,36 @@ SourcesWriter::SourcesWriter(string const &DB, string const &BOverrides,string c /* */ bool SourcesWriter::DoPackage(string FileName) { - // Open the archive - FileFd F; - if (OpenMaybeClearSignedFile(FileName, F) == false) - return false; - - unsigned long long const FSize = F.FileSize(); - //FIXME: do we really need to enforce a maximum size of the dsc file? - if (FSize > 128*1024) - return _error->Error("DSC file '%s' is too large!",FileName.c_str()); - - if (BufSize < FSize + 2) + // Pull all the data we need form the DB + if (Db.GetFileInfo(FileName, + false, /* DoControl */ + false, /* DoContents */ + false, /* GenContentsOnly */ + true, /* DoSource */ + DoMD5, DoSHA1, DoSHA256, DoSHA512, DoAlwaysStat) == false) { - BufSize = FSize + 2; - Buffer = (char *)realloc(Buffer , BufSize); - } - - if (F.Read(Buffer, FSize) == false) return false; + } - // Stat the file for later (F might be clearsigned, so not F.FileSize()) - struct stat St; - if (stat(FileName.c_str(), &St) != 0) - return _error->Errno("fstat","Failed to stat %s",FileName.c_str()); + // we need to perform a "write" here (this is what finish is doing) + // because the call to Db.GetFileInfo() in the loop will change + // the "db cursor" + Db.Finish(); - // Hash the file - char *Start = Buffer; - char *BlkEnd = Buffer + FSize; - - Hashes DscHashes; - if (FSize == (unsigned long long) St.st_size) - { - if (DoMD5 == true) - DscHashes.MD5.Add((unsigned char *)Start,BlkEnd - Start); - if (DoSHA1 == true) - DscHashes.SHA1.Add((unsigned char *)Start,BlkEnd - Start); - if (DoSHA256 == true) - DscHashes.SHA256.Add((unsigned char *)Start,BlkEnd - Start); - if (DoSHA512 == true) - DscHashes.SHA512.Add((unsigned char *)Start,BlkEnd - Start); - } - else - { - FileFd DscFile(FileName, FileFd::ReadOnly); - DscHashes.AddFD(DscFile, St.st_size, DoMD5, DoSHA1, DoSHA256, DoSHA512); - } + // read stuff + char *Start = Db.Dsc.Data; + char *BlkEnd = Db.Dsc.Data + Db.Dsc.Length; // Add extra \n to the end, just in case (as in clearsigned they are missing) *BlkEnd++ = '\n'; *BlkEnd++ = '\n'; pkgTagSection Tags; - if (Tags.Scan(Start,BlkEnd - Start) == false || Tags.Exists("Source") == false) + if (Tags.Scan(Start,BlkEnd - Start) == false) return _error->Error("Could not find a record in the DSC '%s'",FileName.c_str()); + + if (Tags.Exists("Source") == false) + return _error->Error("Could not find a Source entry in the DSC '%s'",FileName.c_str()); Tags.Trim(); // Lookup the 
overide information, finding first the best priority. @@ -714,6 +695,10 @@ bool SourcesWriter::DoPackage(string FileName) OverItem = auto_ptr(new Override::Item); } + struct stat St; + if (stat(FileName.c_str(), &St) != 0) + return _error->Errno("fstat","Failed to stat %s",FileName.c_str()); + auto_ptr SOverItem(SOver.GetItem(Tags.FindS("Source"))); // const auto_ptr autoSOverItem(SOverItem); if (SOverItem.get() == 0) @@ -732,23 +717,23 @@ bool SourcesWriter::DoPackage(string FileName) string const strippedName = flNotDir(FileName); std::ostringstream ostreamFiles; if (DoMD5 == true && Tags.Exists("Files")) - ostreamFiles << "\n " << string(DscHashes.MD5.Result()) << " " << St.st_size << " " + ostreamFiles << "\n " << Db.MD5Res.c_str() << " " << St.st_size << " " << strippedName << "\n " << Tags.FindS("Files"); string const Files = ostreamFiles.str(); std::ostringstream ostreamSha1; if (DoSHA1 == true && Tags.Exists("Checksums-Sha1")) - ostreamSha1 << "\n " << string(DscHashes.SHA1.Result()) << " " << St.st_size << " " + ostreamSha1 << "\n " << string(Db.SHA1Res.c_str()) << " " << St.st_size << " " << strippedName << "\n " << Tags.FindS("Checksums-Sha1"); std::ostringstream ostreamSha256; if (DoSHA256 == true && Tags.Exists("Checksums-Sha256")) - ostreamSha256 << "\n " << string(DscHashes.SHA256.Result()) << " " << St.st_size << " " + ostreamSha256 << "\n " << string(Db.SHA256Res.c_str()) << " " << St.st_size << " " << strippedName << "\n " << Tags.FindS("Checksums-Sha256"); std::ostringstream ostreamSha512; if (DoSHA512 == true && Tags.Exists("Checksums-Sha512")) - ostreamSha512 << "\n " << string(DscHashes.SHA512.Result()) << " " << St.st_size << " " + ostreamSha512 << "\n " << string(Db.SHA512Res.c_str()) << " " << St.st_size << " " << strippedName << "\n " << Tags.FindS("Checksums-Sha512"); // Strip the DirStrip prefix from the FileName and add the PathPrefix @@ -785,8 +770,13 @@ bool SourcesWriter::DoPackage(string FileName) (DoSHA256 == true && !Tags.Exists("Checksums-Sha256")) || (DoSHA512 == true && !Tags.Exists("Checksums-Sha512"))) { - if (Db.GetFileInfo(OriginalPath, false, false, false, DoMD5, DoSHA1, DoSHA256, DoSHA512, DoAlwaysStat) - == false) + if (Db.GetFileInfo(OriginalPath, + false, /* DoControl */ + false, /* DoContents */ + false, /* GenContentsOnly */ + false, /* DoSource */ + DoMD5, DoSHA1, DoSHA256, DoSHA512, + DoAlwaysStat) == false) { return _error->Error("Error getting file info"); } @@ -802,6 +792,9 @@ bool SourcesWriter::DoPackage(string FileName) if (DoSHA512 == true && !Tags.Exists("Checksums-Sha512")) ostreamSha512 << "\n " << string(Db.SHA512Res) << " " << Db.GetFileSize() << " " << ParseJnk; + + // write back the GetFileInfo() stats data + Db.Finish(); } // Perform the delinking operation @@ -872,7 +865,7 @@ bool SourcesWriter::DoPackage(string FileName) Stats.Packages++; - return Db.Finish(); + return true; } /*}}}*/ @@ -893,7 +886,15 @@ ContentsWriter::ContentsWriter(string const &DB, string const &Arch) : determine what the package name is. 
*/ bool ContentsWriter::DoPackage(string FileName, string Package) { - if (!Db.GetFileInfo(FileName, Package.empty(), true, false, false, false, false, false)) + if (!Db.GetFileInfo(FileName, + Package.empty(), /* DoControl */ + true, /* DoContents */ + false, /* GenContentsOnly */ + false, /* DoSource */ + false, /* DoMD5 */ + false, /* DoSHA1 */ + false, /* DoSHA256 */ + false)) /* DoSHA512 */ { return false; } diff --git a/ftparchive/writer.h b/ftparchive/writer.h index 86884dcfc..b1a653e7d 100644 --- a/ftparchive/writer.h +++ b/ftparchive/writer.h @@ -174,7 +174,7 @@ class SourcesWriter : public FTWScanner string PathPrefix; string DirStrip; FILE *Output; - struct CacheDB::Stats Stats; + struct CacheDB::Stats &Stats; virtual bool DoPackage(string FileName); diff --git a/test/integration/framework b/test/integration/framework index 1c6f041b0..fae21eac4 100644 --- a/test/integration/framework +++ b/test/integration/framework @@ -128,7 +128,10 @@ dpkgcheckbuilddeps() { } gdb() { echo "gdb: run »$*«" - APT_CONFIG=aptconfig.conf LD_LIBRARY_PATH=${LIBRARYPATH} command gdb ${BUILDDIRECTORY}/$1 --args "$@" + CMD="$1" + shift + + APT_CONFIG=aptconfig.conf LD_LIBRARY_PATH=${LIBRARYPATH} command gdb ${BUILDDIRECTORY}/$CMD --args ${BUILDDIRECTORY}/$CMD "$@" } gpg() { # see apt-key for the whole trickery. Setup is done in setupenvironment diff --git a/test/integration/test-apt-ftparchive b/test/integration/test-apt-ftparchive new file mode 100755 index 000000000..b05c15c47 --- /dev/null +++ b/test/integration/test-apt-ftparchive @@ -0,0 +1,164 @@ +#!/bin/sh +set -e + +assert_correct_sources_file() { + testequal "Package: bar +Binary: bar +Version: 1.0 +Architecture: all +Format: 3.0 (native) +Directory: pool/main +Files: + 7b57dd065e51de5905288a5104d4bef5 406 bar_1.0.dsc + d41d8cd98f00b204e9800998ecf8427e 0 bar_1.0.tar.gz +Package-List: + bar deb admin extra +Checksums-Sha1: + 17a40b76715f393ab7fd6485c9392a02f1adf903 406 bar_1.0.dsc + da39a3ee5e6b4b0d3255bfef95601890afd80709 0 bar_1.0.tar.gz +Checksums-Sha256: + d9d7507f66a89258b6920aca47747d7a30e0e64b09ecabbf02b2efbdabf840a9 406 bar_1.0.dsc + e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 bar_1.0.tar.gz +Checksums-Sha512: + ee0a9bfb6614159b45203fc29487d4f37387993ca0e6d6f27b80010498f3731d75753188ece307508ae9af0259bd11a6af15a1a38f0b87dbd5ea1273b7a7d53e 406 bar_1.0.dsc + cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e 0 bar_1.0.tar.gz + +Package: foo +Binary: foo +Version: 1.0 +Architecture: all +Format: 3.0 (native) +Directory: pool/main +Files: + d144826e6f02831c1933e910c92cd7e0 171 foo_1.0.dsc + d41d8cd98f00b204e9800998ecf8427e 0 foo_1.0.tar.gz +Package-List: + foo deb admin extra +Checksums-Sha1: + 979306aa3ccff3d61bba062bb6977e2493c6f907 171 foo_1.0.dsc + da39a3ee5e6b4b0d3255bfef95601890afd80709 0 foo_1.0.tar.gz +Checksums-Sha256: + 8c780af8b5a6d5b3c2e2f9518940beebea52ac6d6ad7b52c082dc925cfe5b532 171 foo_1.0.dsc + e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 foo_1.0.tar.gz +Checksums-Sha512: + 3da0240fd764657c2f3661b4d750578a9a99b0580591b133756379d48117ebda87a5ed2467f513200d6e7eaf51422cbe91c15720eef7fb4bba2cc8ff81ebc547 171 foo_1.0.dsc + cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e 0 foo_1.0.tar.gz +" cat ./aptarchive/dists/test/main/source/Sources +} + +create_source_files() { + NAME="$1" + REQUEST_CLEARSIGN="$2" + + 
TARFILE="aptarchive/pool/main/${NAME}_1.0.tar.gz" + DSC_FILE="aptarchive/pool/main/${NAME}_1.0.dsc" + touch $TARFILE + if [ "$REQUEST_CLEARSIGN" = "CLEARSIGN" ]; then + printf -- "-----BEGIN PGP SIGNED MESSAGE-----\n\n" > $DSC_FILE + fi + cat >> $DSC_FILE << EOF +Format: 3.0 (native) +Source: $NAME +Binary: $NAME +Architecture: all +Version: 1.0 +Package-List: + $NAME deb admin extra +Files: + $(md5sum $TARFILE|cut -f1 -d' ') $(stat --print="%s" $TARFILE) ${NAME}_1.0.tar.gz +EOF + if [ "$REQUEST_CLEARSIGN" = "CLEARSIGN" ]; then + cat >> $DSC_FILE < apt-ftparchive.conf <<"EOF" +Dir { + ArchiveDir "./aptarchive"; + OverrideDir "./aptarchive-overrides"; + CacheDir "./aptarchive-cache"; +}; + +Default { + Packages::Compress ". gzip bzip2"; + Contents::Compress ". gzip bzip2"; + LongDescription "false"; +}; + +TreeDefault { + BinCacheDB "packages-$(SECTION)-$(ARCH).db"; + SrcCacheDB "sources-$(SECTION).db"; + + Directory "pool/$(SECTION)"; + SrcDirectory "pool/$(SECTION)"; + + Packages "$(DIST)/$(SECTION)/binary-$(ARCH)/Packages"; + Sources "$(DIST)/$(SECTION)/source/Sources"; + Contents "$(DIST)/Contents-$(ARCH)"; +}; + +Tree "dists/test" { + Sections "main"; + Architectures "source"; + +}; +EOF + + +# generate (no cachedb) +aptftparchive generate apt-ftparchive.conf -o APT::FTPArchive::ShowCacheMisses=1 2> stats-out.txt +testequal " Misses in Cache: 2" grep Misses stats-out.txt +assert_correct_sources_file + + +# generate again out of the cache +rm -f ./aptarchive/dists/test/main/source/Sources +aptftparchive generate apt-ftparchive.conf -o APT::FTPArchive::ShowCacheMisses=1 2> stats-out.txt +testequal " Misses in Cache: 0" grep Misses stats-out.txt +assert_correct_sources_file + +# generate invalid files +mkdir aptarchive/pool/invalid +printf "meep" > aptarchive/pool/invalid/invalid_1.0.dsc +testequal " +E: Could not find a Source entry in the DSC 'aptarchive/pool/invalid/invalid_1.0.dsc'" aptftparchive sources aptarchive/pool/invalid +rm -f aptarchive/pool/invalid/invalid_1.0.dsc + +dd if=/dev/zero of="aptarchive/pool/invalid/toobig_1.0.dsc" bs=1k count=129 2>/dev/null +testequal " +E: DSC file 'aptarchive/pool/invalid/toobig_1.0.dsc' is too large!" 
aptftparchive sources aptarchive/pool/invalid + + -- cgit v1.2.3 From 0a3b93fc3da95c5cbeb18b2d92738cbd50e95d83 Mon Sep 17 00:00:00 2001 From: Michael Vogt Date: Fri, 4 Apr 2014 15:36:42 +0200 Subject: add test for binary cachedb and contents generation --- ftparchive/cachedb.cc | 1 + test/integration/test-apt-ftparchive | 164 ----------------------- test/integration/test-apt-ftparchive-cachedb | 93 +++++++++++++ test/integration/test-apt-ftparchive-src-cachedb | 162 ++++++++++++++++++++++ 4 files changed, 256 insertions(+), 164 deletions(-) delete mode 100755 test/integration/test-apt-ftparchive create mode 100755 test/integration/test-apt-ftparchive-cachedb create mode 100755 test/integration/test-apt-ftparchive-src-cachedb diff --git a/ftparchive/cachedb.cc b/ftparchive/cachedb.cc index a63b5b9d9..f6cbad668 100644 --- a/ftparchive/cachedb.cc +++ b/ftparchive/cachedb.cc @@ -349,6 +349,7 @@ bool CacheDB::LoadContents(bool const &GenOnly) return false; } + Stats.Misses++; if (Contents.Read(*DebFile) == false) return false; diff --git a/test/integration/test-apt-ftparchive b/test/integration/test-apt-ftparchive deleted file mode 100755 index b05c15c47..000000000 --- a/test/integration/test-apt-ftparchive +++ /dev/null @@ -1,164 +0,0 @@ -#!/bin/sh -set -e - -assert_correct_sources_file() { - testequal "Package: bar -Binary: bar -Version: 1.0 -Architecture: all -Format: 3.0 (native) -Directory: pool/main -Files: - 7b57dd065e51de5905288a5104d4bef5 406 bar_1.0.dsc - d41d8cd98f00b204e9800998ecf8427e 0 bar_1.0.tar.gz -Package-List: - bar deb admin extra -Checksums-Sha1: - 17a40b76715f393ab7fd6485c9392a02f1adf903 406 bar_1.0.dsc - da39a3ee5e6b4b0d3255bfef95601890afd80709 0 bar_1.0.tar.gz -Checksums-Sha256: - d9d7507f66a89258b6920aca47747d7a30e0e64b09ecabbf02b2efbdabf840a9 406 bar_1.0.dsc - e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 bar_1.0.tar.gz -Checksums-Sha512: - ee0a9bfb6614159b45203fc29487d4f37387993ca0e6d6f27b80010498f3731d75753188ece307508ae9af0259bd11a6af15a1a38f0b87dbd5ea1273b7a7d53e 406 bar_1.0.dsc - cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e 0 bar_1.0.tar.gz - -Package: foo -Binary: foo -Version: 1.0 -Architecture: all -Format: 3.0 (native) -Directory: pool/main -Files: - d144826e6f02831c1933e910c92cd7e0 171 foo_1.0.dsc - d41d8cd98f00b204e9800998ecf8427e 0 foo_1.0.tar.gz -Package-List: - foo deb admin extra -Checksums-Sha1: - 979306aa3ccff3d61bba062bb6977e2493c6f907 171 foo_1.0.dsc - da39a3ee5e6b4b0d3255bfef95601890afd80709 0 foo_1.0.tar.gz -Checksums-Sha256: - 8c780af8b5a6d5b3c2e2f9518940beebea52ac6d6ad7b52c082dc925cfe5b532 171 foo_1.0.dsc - e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 foo_1.0.tar.gz -Checksums-Sha512: - 3da0240fd764657c2f3661b4d750578a9a99b0580591b133756379d48117ebda87a5ed2467f513200d6e7eaf51422cbe91c15720eef7fb4bba2cc8ff81ebc547 171 foo_1.0.dsc - cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e 0 foo_1.0.tar.gz -" cat ./aptarchive/dists/test/main/source/Sources -} - -create_source_files() { - NAME="$1" - REQUEST_CLEARSIGN="$2" - - TARFILE="aptarchive/pool/main/${NAME}_1.0.tar.gz" - DSC_FILE="aptarchive/pool/main/${NAME}_1.0.dsc" - touch $TARFILE - if [ "$REQUEST_CLEARSIGN" = "CLEARSIGN" ]; then - printf -- "-----BEGIN PGP SIGNED MESSAGE-----\n\n" > $DSC_FILE - fi - cat >> $DSC_FILE << EOF -Format: 3.0 (native) -Source: $NAME -Binary: $NAME -Architecture: all 
-Version: 1.0 -Package-List: - $NAME deb admin extra -Files: - $(md5sum $TARFILE|cut -f1 -d' ') $(stat --print="%s" $TARFILE) ${NAME}_1.0.tar.gz -EOF - if [ "$REQUEST_CLEARSIGN" = "CLEARSIGN" ]; then - cat >> $DSC_FILE < apt-ftparchive.conf <<"EOF" -Dir { - ArchiveDir "./aptarchive"; - OverrideDir "./aptarchive-overrides"; - CacheDir "./aptarchive-cache"; -}; - -Default { - Packages::Compress ". gzip bzip2"; - Contents::Compress ". gzip bzip2"; - LongDescription "false"; -}; - -TreeDefault { - BinCacheDB "packages-$(SECTION)-$(ARCH).db"; - SrcCacheDB "sources-$(SECTION).db"; - - Directory "pool/$(SECTION)"; - SrcDirectory "pool/$(SECTION)"; - - Packages "$(DIST)/$(SECTION)/binary-$(ARCH)/Packages"; - Sources "$(DIST)/$(SECTION)/source/Sources"; - Contents "$(DIST)/Contents-$(ARCH)"; -}; - -Tree "dists/test" { - Sections "main"; - Architectures "source"; - -}; -EOF - - -# generate (no cachedb) -aptftparchive generate apt-ftparchive.conf -o APT::FTPArchive::ShowCacheMisses=1 2> stats-out.txt -testequal " Misses in Cache: 2" grep Misses stats-out.txt -assert_correct_sources_file - - -# generate again out of the cache -rm -f ./aptarchive/dists/test/main/source/Sources -aptftparchive generate apt-ftparchive.conf -o APT::FTPArchive::ShowCacheMisses=1 2> stats-out.txt -testequal " Misses in Cache: 0" grep Misses stats-out.txt -assert_correct_sources_file - -# generate invalid files -mkdir aptarchive/pool/invalid -printf "meep" > aptarchive/pool/invalid/invalid_1.0.dsc -testequal " -E: Could not find a Source entry in the DSC 'aptarchive/pool/invalid/invalid_1.0.dsc'" aptftparchive sources aptarchive/pool/invalid -rm -f aptarchive/pool/invalid/invalid_1.0.dsc - -dd if=/dev/zero of="aptarchive/pool/invalid/toobig_1.0.dsc" bs=1k count=129 2>/dev/null -testequal " -E: DSC file 'aptarchive/pool/invalid/toobig_1.0.dsc' is too large!" aptftparchive sources aptarchive/pool/invalid - - diff --git a/test/integration/test-apt-ftparchive-cachedb b/test/integration/test-apt-ftparchive-cachedb new file mode 100755 index 000000000..2a3bfce99 --- /dev/null +++ b/test/integration/test-apt-ftparchive-cachedb @@ -0,0 +1,93 @@ +#!/bin/sh +set -e + +ensure_correct_packages_file() { + testequal "Package: foo +Priority: optional +Section: others +Installed-Size: 29 +Maintainer: Joe Sixpack +Architecture: i386 +Version: 1 +Filename: pool/main/foo_1_i386.deb" head -n8 ./aptarchive/dists/test/main/binary-i386/Packages +} + +ensure_correct_contents_file() { + testequal "usr/bin/foo-i386 others/foo +usr/share/doc/foo/FEATURES others/foo +usr/share/doc/foo/changelog others/foo +usr/share/doc/foo/copyright others/foo" cat ./aptarchive/dists/test/Contents-i386 +} + +# +# main() +# +TESTDIR=$(readlink -f $(dirname $0)) +. $TESTDIR/framework +setupenvironment +configarchitecture "i386" + +mkdir -p aptarchive/dists/test/main/i18n/ +mkdir -p aptarchive/dists/test/main/source/ +mkdir -p aptarchive/dists/test/main/binary-i386 +mkdir -p aptarchive/pool/main + +mkdir aptarchive-overrides +mkdir aptarchive-cache +cat > ftparchive.conf <<"EOF" +Dir { + ArchiveDir "./aptarchive"; + OverrideDir "./aptarchive-overrides"; + CacheDir "./aptarchive-cache"; +}; + +Default { + Packages::Compress ". gzip bzip2"; + Contents::Compress ". 
gzip bzip2"; + LongDescription "false"; +}; + +TreeDefault { + BinCacheDB "packages-$(SECTION)-$(ARCH).db"; + + Directory "pool/$(SECTION)"; + SrcDirectory "pool/$(SECTION)"; + + Packages "$(DIST)/$(SECTION)/binary-$(ARCH)/Packages"; + Contents "$(DIST)/Contents-$(ARCH)"; +}; + +Tree "dists/test" { + Sections "main"; + Architectures "i386"; + +}; +EOF + +# build one pacakge +buildsimplenativepackage 'foo' 'i386' '1' 'test' +mv incoming/* aptarchive/pool/main/ + +# generate (empty cachedb) +aptftparchive generate ftparchive.conf -o APT::FTPArchive::ShowCacheMisses=1 2> stats-out.txt +ensure_correct_packages_file +ensure_correct_contents_file +testequal " Misses in Cache: 2 + dists/test/Contents-i386: New 402 B Misses in Cache: 0" grep Misses stats-out.txt + +# generate again +aptftparchive generate ftparchive.conf -o APT::FTPArchive::ShowCacheMisses=1 2> stats-out.txt +ensure_correct_packages_file +ensure_correct_contents_file +testequal " Misses in Cache: 0 + dists/test/Contents-i386: Misses in Cache: 0" grep Misses stats-out.txt + +# and again (with removing the Packages file) +rm -f ./aptarchive/dists/test/main/binary-i386/* +rm -f ./aptarchive/dists/test/Contents-i386 +aptftparchive generate ftparchive.conf -o APT::FTPArchive::ShowCacheMisses=1 2> stats-out.txt +ensure_correct_packages_file +ensure_correct_contents_file +testequal " Misses in Cache: 0 + dists/test/Contents-i386: New 402 B Misses in Cache: 0" grep Misses stats-out.txt + diff --git a/test/integration/test-apt-ftparchive-src-cachedb b/test/integration/test-apt-ftparchive-src-cachedb new file mode 100755 index 000000000..3a5507c21 --- /dev/null +++ b/test/integration/test-apt-ftparchive-src-cachedb @@ -0,0 +1,162 @@ +#!/bin/sh +set -e + +assert_correct_sources_file() { + testequal "Package: bar +Binary: bar +Version: 1.0 +Architecture: all +Format: 3.0 (native) +Directory: pool/main +Files: + 7b57dd065e51de5905288a5104d4bef5 406 bar_1.0.dsc + d41d8cd98f00b204e9800998ecf8427e 0 bar_1.0.tar.gz +Package-List: + bar deb admin extra +Checksums-Sha1: + 17a40b76715f393ab7fd6485c9392a02f1adf903 406 bar_1.0.dsc + da39a3ee5e6b4b0d3255bfef95601890afd80709 0 bar_1.0.tar.gz +Checksums-Sha256: + d9d7507f66a89258b6920aca47747d7a30e0e64b09ecabbf02b2efbdabf840a9 406 bar_1.0.dsc + e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 bar_1.0.tar.gz +Checksums-Sha512: + ee0a9bfb6614159b45203fc29487d4f37387993ca0e6d6f27b80010498f3731d75753188ece307508ae9af0259bd11a6af15a1a38f0b87dbd5ea1273b7a7d53e 406 bar_1.0.dsc + cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e 0 bar_1.0.tar.gz + +Package: foo +Binary: foo +Version: 1.0 +Architecture: all +Format: 3.0 (native) +Directory: pool/main +Files: + d144826e6f02831c1933e910c92cd7e0 171 foo_1.0.dsc + d41d8cd98f00b204e9800998ecf8427e 0 foo_1.0.tar.gz +Package-List: + foo deb admin extra +Checksums-Sha1: + 979306aa3ccff3d61bba062bb6977e2493c6f907 171 foo_1.0.dsc + da39a3ee5e6b4b0d3255bfef95601890afd80709 0 foo_1.0.tar.gz +Checksums-Sha256: + 8c780af8b5a6d5b3c2e2f9518940beebea52ac6d6ad7b52c082dc925cfe5b532 171 foo_1.0.dsc + e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 0 foo_1.0.tar.gz +Checksums-Sha512: + 3da0240fd764657c2f3661b4d750578a9a99b0580591b133756379d48117ebda87a5ed2467f513200d6e7eaf51422cbe91c15720eef7fb4bba2cc8ff81ebc547 171 foo_1.0.dsc + cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e 0 foo_1.0.tar.gz 
+" cat ./aptarchive/dists/test/main/source/Sources +} + +create_source_files() { + NAME="$1" + REQUEST_CLEARSIGN="$2" + + TARFILE="aptarchive/pool/main/${NAME}_1.0.tar.gz" + DSC_FILE="aptarchive/pool/main/${NAME}_1.0.dsc" + touch $TARFILE + if [ "$REQUEST_CLEARSIGN" = "CLEARSIGN" ]; then + printf -- "-----BEGIN PGP SIGNED MESSAGE-----\n\n" > $DSC_FILE + fi + cat >> $DSC_FILE << EOF +Format: 3.0 (native) +Source: $NAME +Binary: $NAME +Architecture: all +Version: 1.0 +Package-List: + $NAME deb admin extra +Files: + $(md5sum $TARFILE|cut -f1 -d' ') $(stat --print="%s" $TARFILE) ${NAME}_1.0.tar.gz +EOF + if [ "$REQUEST_CLEARSIGN" = "CLEARSIGN" ]; then + cat >> $DSC_FILE < apt-ftparchive.conf <<"EOF" +Dir { + ArchiveDir "./aptarchive"; + OverrideDir "./aptarchive-overrides"; + CacheDir "./aptarchive-cache"; +}; + +Default { + Packages::Compress ". gzip bzip2"; + Contents::Compress ". gzip bzip2"; + LongDescription "false"; +}; + +TreeDefault { + BinCacheDB "packages-$(SECTION)-$(ARCH).db"; + SrcCacheDB "sources-$(SECTION).db"; + + Directory "pool/$(SECTION)"; + SrcDirectory "pool/$(SECTION)"; + + Sources "$(DIST)/$(SECTION)/source/Sources"; +}; + +Tree "dists/test" { + Sections "main"; + Architectures "source"; + +}; +EOF + + +# generate (empty cachedb) +aptftparchive generate apt-ftparchive.conf -o APT::FTPArchive::ShowCacheMisses=1 2> stats-out.txt +testequal " Misses in Cache: 2" grep Misses stats-out.txt +assert_correct_sources_file + + +# generate again out of the cache +rm -f ./aptarchive/dists/test/main/source/Sources +aptftparchive generate apt-ftparchive.conf -o APT::FTPArchive::ShowCacheMisses=1 2> stats-out.txt +testequal " Misses in Cache: 0" grep Misses stats-out.txt +assert_correct_sources_file + +# generate invalid files +mkdir aptarchive/pool/invalid +printf "meep" > aptarchive/pool/invalid/invalid_1.0.dsc +testequal " +E: Could not find a Source entry in the DSC 'aptarchive/pool/invalid/invalid_1.0.dsc'" aptftparchive sources aptarchive/pool/invalid +rm -f aptarchive/pool/invalid/invalid_1.0.dsc + +dd if=/dev/zero of="aptarchive/pool/invalid/toobig_1.0.dsc" bs=1k count=129 2>/dev/null +testequal " +E: DSC file 'aptarchive/pool/invalid/toobig_1.0.dsc' is too large!" 
aptftparchive sources aptarchive/pool/invalid + + -- cgit v1.2.3 From 215b0faf7b00f836e54f9903c4fe7398c0927e0f Mon Sep 17 00:00:00 2001 From: Michael Vogt Date: Fri, 4 Apr 2014 16:15:26 +0200 Subject: refactor to make OpenFile/OpenDebFile more robust --- ftparchive/cachedb.cc | 230 ++++++++++++++++++++++++-------------------------- 1 file changed, 110 insertions(+), 120 deletions(-) diff --git a/ftparchive/cachedb.cc b/ftparchive/cachedb.cc index f6cbad668..539ed671b 100644 --- a/ftparchive/cachedb.cc +++ b/ftparchive/cachedb.cc @@ -97,64 +97,92 @@ bool CacheDB::ReadyDB(std::string const &DB) /* */ bool CacheDB::OpenFile() { - Fd = new FileFd(FileName,FileFd::ReadOnly); - if (_error->PendingError() == true) - { - CloseFile(); - return false; - } - return true; + // its open already + if(Fd && Fd->Name() == this->FileName) + return true; + + // a different file is open, close it first + if(Fd && Fd->Name() != this->FileName) + CloseFile(); + + // open a new file + Fd = new FileFd(FileName,FileFd::ReadOnly); + if (_error->PendingError() == true) + { + CloseFile(); + return false; + } + return true; } /*}}}*/ +// CacheDB::CloseFile - Close the file /*{{{*/ void CacheDB::CloseFile() { - delete Fd; - Fd = NULL; + if(Fd != NULL) + { + delete Fd; + Fd = NULL; + } } - + /*}}}*/ +// CacheDB::OpenDebFile - Open a debfile /*{{{*/ bool CacheDB::OpenDebFile() { + // debfile is already open + if(DebFile && &DebFile->GetFile() == Fd) + return true; + + // a different debfile is open, close it first + if(DebFile && &DebFile->GetFile() != Fd) + CloseDebFile(); + + // first open the fd, then pass it to the debDebFile + if(OpenFile() == false) + return false; DebFile = new debDebFile(*Fd); if (_error->PendingError() == true) return false; return true; } - + /*}}}*/ +// CacheDB::CloseDebFile - Close a debfile again /*{{{*/ void CacheDB::CloseDebFile() { - delete DebFile; - DebFile = NULL; -} + CloseFile(); + if(DebFile != NULL) + { + delete DebFile; + DebFile = NULL; + } +} + /*}}}*/ // CacheDB::GetFileStat - Get stats from the file /*{{{*/ // --------------------------------------------------------------------- /* This gets the size from the database if it's there. If we need * to look at the file, also get the mtime from the file. */ bool CacheDB::GetFileStat(bool const &doStat) { - if ((CurStat.Flags & FlSize) == FlSize && doStat == false) - { - /* Already worked out the file size */ - } - else - { - /* Get it from the file. */ - if (Fd == NULL && OpenFile() == false) - { - return false; - } - // Stat the file - struct stat St; - if (fstat(Fd->Fd(),&St) != 0) - { - return _error->Errno("fstat", - _("Failed to stat %s"),FileName.c_str()); - } - CurStat.FileSize = St.st_size; - CurStat.mtime = htonl(St.st_mtime); - CurStat.Flags |= FlSize; - } - return true; + if ((CurStat.Flags & FlSize) == FlSize && doStat == false) + return true; + + /* Get it from the file. */ + if (OpenFile() == false) + return false; + + // Stat the file + struct stat St; + if (fstat(Fd->Fd(),&St) != 0) + { + CloseFile(); + return _error->Errno("fstat", + _("Failed to stat %s"),FileName.c_str()); + } + CurStat.FileSize = St.st_size; + CurStat.mtime = htonl(St.st_mtime); + CurStat.Flags |= FlSize; + + return true; } /*}}}*/ // CacheDB::GetCurStat - Set the CurStat variable. 
/*{{{*/ @@ -165,25 +193,25 @@ bool CacheDB::GetCurStat() { memset(&CurStat,0,sizeof(CurStat)); - if (DBLoaded) - { - /* First see if there is anything about it - in the database */ - - /* Get the flags (and mtime) */ - InitQuery("st"); - // Ensure alignment of the returned structure - Data.data = &CurStat; - Data.ulen = sizeof(CurStat); - Data.flags = DB_DBT_USERMEM; - if (Get() == false) + if (DBLoaded) + { + /* First see if there is anything about it + in the database */ + + /* Get the flags (and mtime) */ + InitQuery("st"); + // Ensure alignment of the returned structure + Data.data = &CurStat; + Data.ulen = sizeof(CurStat); + Data.flags = DB_DBT_USERMEM; + if (Get() == false) { CurStat.Flags = 0; } - CurStat.Flags = ntohl(CurStat.Flags); - CurStat.FileSize = ntohl(CurStat.FileSize); + CurStat.Flags = ntohl(CurStat.Flags); + CurStat.FileSize = ntohl(CurStat.FileSize); } - return true; + return true; } /*}}}*/ // CacheDB::GetFileInfo - Get all the info about the file /*{{{*/ @@ -200,40 +228,31 @@ bool CacheDB::GetFileInfo(std::string const &FileName, bool const &DoControl, this->FileName = FileName; if (GetCurStat() == false) - { return false; - } OldStat = CurStat; if (GetFileStat(checkMtime) == false) - { - CloseFile(); return false; - } - /* if mtime changed, update CurStat from disk */ - if (checkMtime == true && OldStat.mtime != CurStat.mtime) - CurStat.Flags = FlSize; - - Stats.Bytes += CurStat.FileSize; - Stats.Packages++; - - if ((DoControl && LoadControl() == false) - || (DoContents && LoadContents(GenContentsOnly) == false) - || (DoSource && LoadSource() == false) - || (DoMD5 && GetMD5(false) == false) - || (DoSHA1 && GetSHA1(false) == false) - || (DoSHA256 && GetSHA256(false) == false) - || (DoSHA512 && GetSHA512(false) == false) - ) - { - result = false; - } - - CloseFile(); - CloseDebFile(); + /* if mtime changed, update CurStat from disk */ + if (checkMtime == true && OldStat.mtime != CurStat.mtime) + CurStat.Flags = FlSize; + + Stats.Bytes += CurStat.FileSize; + Stats.Packages++; + + if ((DoControl && LoadControl() == false) + || (DoContents && LoadContents(GenContentsOnly) == false) + || (DoSource && LoadSource() == false) + || (DoMD5 && GetMD5(false) == false) + || (DoSHA1 && GetSHA1(false) == false) + || (DoSHA256 && GetSHA256(false) == false) + || (DoSHA512 && GetSHA512(false) == false) ) + { + result = false; + } - return result; + return result; } /*}}}*/ @@ -249,18 +268,9 @@ bool CacheDB::LoadSource() CurStat.Flags &= ~FlSource; } - if (Fd == NULL && OpenFile() == false) - { + if (OpenFile() == false) return false; - } - // Read the .dsc file - if (Fd == NULL) - { - if(OpenFile() == false) - return false; - } - Stats.Misses++; if (Dsc.Read(FileName) == false) return false; @@ -291,16 +301,8 @@ bool CacheDB::LoadControl() CurStat.Flags &= ~FlControl; } - if (Fd == NULL && OpenFile() == false) - { + if(OpenDebFile() == false) return false; - } - // Create a deb instance to read the archive - if (DebFile == NULL) - { - if(OpenDebFile() == false) - return false; - } Stats.Misses++; if (Control.Read(*DebFile) == false) @@ -338,16 +340,8 @@ bool CacheDB::LoadContents(bool const &GenOnly) CurStat.Flags &= ~FlContents; } - if (Fd == NULL && OpenFile() == false) - { + if(OpenDebFile() == false) return false; - } - // Create a deb instance to read the archive - if (DebFile == 0) - { - if(OpenDebFile() == false) - return false; - } Stats.Misses++; if (Contents.Read(*DebFile) == false) @@ -404,14 +398,13 @@ bool CacheDB::GetMD5(bool const &GenOnly) MD5Res = 
bytes2hex(CurStat.MD5, sizeof(CurStat.MD5)); return true; - } + } Stats.MD5Bytes += CurStat.FileSize; - if (Fd == NULL && OpenFile() == false) - { + if (OpenFile() == false) return false; - } + MD5Summation MD5; if (Fd->Seek(0) == false || MD5.AddFD(*Fd, CurStat.FileSize) == false) return false; @@ -439,10 +432,9 @@ bool CacheDB::GetSHA1(bool const &GenOnly) Stats.SHA1Bytes += CurStat.FileSize; - if (Fd == NULL && OpenFile() == false) - { + if (OpenFile() == false) return false; - } + SHA1Summation SHA1; if (Fd->Seek(0) == false || SHA1.AddFD(*Fd, CurStat.FileSize) == false) return false; @@ -470,10 +462,9 @@ bool CacheDB::GetSHA256(bool const &GenOnly) Stats.SHA256Bytes += CurStat.FileSize; - if (Fd == NULL && OpenFile() == false) - { + if (OpenFile() == false) return false; - } + SHA256Summation SHA256; if (Fd->Seek(0) == false || SHA256.AddFD(*Fd, CurStat.FileSize) == false) return false; @@ -501,10 +492,9 @@ bool CacheDB::GetSHA512(bool const &GenOnly) Stats.SHA512Bytes += CurStat.FileSize; - if (Fd == NULL && OpenFile() == false) - { + if (OpenFile() == false) return false; - } + SHA512Summation SHA512; if (Fd->Seek(0) == false || SHA512.AddFD(*Fd, CurStat.FileSize) == false) return false; -- cgit v1.2.3 From 53ba4e2c2dd29758be0a911489ca5c23e5107513 Mon Sep 17 00:00:00 2001 From: Michael Vogt Date: Fri, 4 Apr 2014 17:09:43 +0200 Subject: ensure clean works --- ftparchive/apt-ftparchive.cc | 20 +++++++++++++++----- ftparchive/cachedb.cc | 19 ++++++++++++++----- test/integration/test-apt-ftparchive-cachedb | 7 +++++++ test/integration/test-apt-ftparchive-src-cachedb | 6 ++++++ 4 files changed, 42 insertions(+), 10 deletions(-) diff --git a/ftparchive/apt-ftparchive.cc b/ftparchive/apt-ftparchive.cc index c1614398b..74fc7681f 100644 --- a/ftparchive/apt-ftparchive.cc +++ b/ftparchive/apt-ftparchive.cc @@ -957,23 +957,33 @@ static bool Clean(CommandLine &CmdL) // Sort by cache DB to improve IO locality. 
stable_sort(PkgList.begin(),PkgList.end(),PackageMap::DBCompare()); + stable_sort(PkgList.begin(),PkgList.end(),PackageMap::SrcDBCompare()); string CacheDir = Setup.FindDir("Dir::CacheDir"); for (vector::iterator I = PkgList.begin(); I != PkgList.end(); ) { - c0out << I->BinCacheDB << endl; + if(I->BinCacheDB != "") + c0out << I->BinCacheDB << endl; + if(I->SrcCacheDB != "") + c0out << I->SrcCacheDB << endl; CacheDB DB(flCombine(CacheDir,I->BinCacheDB)); + CacheDB DB_SRC(flCombine(CacheDir,I->SrcCacheDB)); if (DB.Clean() == false) _error->DumpErrors(); + if (DB_SRC.Clean() == false) + _error->DumpErrors(); string CacheDB = I->BinCacheDB; - for (; I != PkgList.end() && I->BinCacheDB == CacheDB; ++I) - ; + string SrcCacheDB = I->SrcCacheDB; + while(I != PkgList.end() && + I->BinCacheDB == CacheDB && + I->SrcCacheDB == SrcCacheDB) + ++I; + } - // FIXME: clean for the SourcesDB - + return true; } /*}}}*/ diff --git a/ftparchive/cachedb.cc b/ftparchive/cachedb.cc index 539ed671b..f63aa88ab 100644 --- a/ftparchive/cachedb.cc +++ b/ftparchive/cachedb.cc @@ -264,10 +264,11 @@ bool CacheDB::LoadSource() // Lookup the control information InitQuery("cs"); if (Get() == true && Dsc.TakeDsc(Data.data, Data.size) == true) + { return true; + } CurStat.Flags &= ~FlSource; } - if (OpenFile() == false) return false; @@ -551,16 +552,24 @@ bool CacheDB::Clean() { if (stringcmp(Colon + 1, (char *)Key.data+Key.size,"st") == 0 || stringcmp(Colon + 1, (char *)Key.data+Key.size,"cl") == 0 || + stringcmp(Colon + 1, (char *)Key.data+Key.size,"cs") == 0 || stringcmp(Colon + 1, (char *)Key.data+Key.size,"cn") == 0) { - if (FileExists(std::string((const char *)Key.data,Colon)) == true) - continue; + std::string FileName = std::string((const char *)Key.data,Colon); + if (FileExists(FileName) == true) { + continue; + } } } - Cursor->c_del(Cursor,0); } - Dbp->compact(Dbp, NULL, NULL, NULL, NULL, DB_FREE_SPACE, NULL); + int res = Dbp->compact(Dbp, NULL, NULL, NULL, NULL, DB_FREE_SPACE, NULL); + if (res < 0) + _error->Warning("compact failed with result %i", res); + + if(_config->FindB("Debug::APT::FTPArchive::Clean", false) == true) + Dbp->stat_print(Dbp, 0); + return true; } diff --git a/test/integration/test-apt-ftparchive-cachedb b/test/integration/test-apt-ftparchive-cachedb index 2a3bfce99..147272a2c 100755 --- a/test/integration/test-apt-ftparchive-cachedb +++ b/test/integration/test-apt-ftparchive-cachedb @@ -91,3 +91,10 @@ ensure_correct_contents_file testequal " Misses in Cache: 0 dists/test/Contents-i386: New 402 B Misses in Cache: 0" grep Misses stats-out.txt +# and clean +rm -rf aptarchive/pool/main/* +testequal "packages-main-i386.db" aptftparchive clean ftparchive.conf +aptftparchive clean ftparchive.conf -o Debug::APT::FTPArchive::Clean=1 > clean-out.txt 2>&1 +testequal "0 Number of unique keys in the tree" grep unique clean-out.txt +testequal "packages-main-i386.db" grep packages-main-i386.db clean-out.txt + diff --git a/test/integration/test-apt-ftparchive-src-cachedb b/test/integration/test-apt-ftparchive-src-cachedb index 3a5507c21..9cc0a98de 100755 --- a/test/integration/test-apt-ftparchive-src-cachedb +++ b/test/integration/test-apt-ftparchive-src-cachedb @@ -159,4 +159,10 @@ dd if=/dev/zero of="aptarchive/pool/invalid/toobig_1.0.dsc" bs=1k count=129 2>/d testequal " E: DSC file 'aptarchive/pool/invalid/toobig_1.0.dsc' is too large!" 
aptftparchive sources aptarchive/pool/invalid +# ensure clean works +rm -f aptarchive/pool/main/* +aptftparchive clean apt-ftparchive.conf -o Debug::APT::FTPArchive::Clean=1 > clean-out.txt 2>&1 +testequal "0 Number of unique keys in the tree" grep unique clean-out.txt +testequal "sources-main.db" grep sources-main.db clean-out.txt + -- cgit v1.2.3 From 37497bd5fa0f070e12c1c28d849aef1af8f369b5 Mon Sep 17 00:00:00 2001 From: Michael Vogt Date: Fri, 4 Apr 2014 17:21:40 +0200 Subject: refactor _InitQuery() --- ftparchive/cachedb.cc | 16 ++++++++-------- ftparchive/cachedb.h | 15 ++++++++++++++- 2 files changed, 22 insertions(+), 9 deletions(-) diff --git a/ftparchive/cachedb.cc b/ftparchive/cachedb.cc index f63aa88ab..4feb7bbfb 100644 --- a/ftparchive/cachedb.cc +++ b/ftparchive/cachedb.cc @@ -199,7 +199,7 @@ bool CacheDB::GetCurStat() in the database */ /* Get the flags (and mtime) */ - InitQuery("st"); + InitQueryStats(); // Ensure alignment of the returned structure Data.data = &CurStat; Data.ulen = sizeof(CurStat); @@ -262,7 +262,7 @@ bool CacheDB::LoadSource() if ((CurStat.Flags & FlSource) == FlSource) { // Lookup the control information - InitQuery("cs"); + InitQuerySource(); if (Get() == true && Dsc.TakeDsc(Data.data, Data.size) == true) { return true; @@ -280,7 +280,7 @@ bool CacheDB::LoadSource() return _error->Error(_("Failed to read .dsc")); // Write back the control information - InitQuery("cs"); + InitQuerySource(); if (Put(Dsc.Data, Dsc.Length) == true) CurStat.Flags |= FlSource; @@ -296,7 +296,7 @@ bool CacheDB::LoadControl() if ((CurStat.Flags & FlControl) == FlControl) { // Lookup the control information - InitQuery("cl"); + InitQueryControl(); if (Get() == true && Control.TakeControl(Data.data,Data.size) == true) return true; CurStat.Flags &= ~FlControl; @@ -313,7 +313,7 @@ bool CacheDB::LoadControl() return _error->Error(_("Archive has no control record")); // Write back the control information - InitQuery("cl"); + InitQueryControl(); if (Put(Control.Control,Control.Length) == true) CurStat.Flags |= FlControl; return true; @@ -331,7 +331,7 @@ bool CacheDB::LoadContents(bool const &GenOnly) return true; // Lookup the contents information - InitQuery("cn"); + InitQueryContent(); if (Get() == true) { if (Contents.TakeContents(Data.data,Data.size) == true) @@ -349,7 +349,7 @@ bool CacheDB::LoadContents(bool const &GenOnly) return false; // Write back the control information - InitQuery("cn"); + InitQueryContent(); if (Put(Contents.Data,Contents.CurSize) == true) CurStat.Flags |= FlContents; return true; @@ -519,7 +519,7 @@ bool CacheDB::Finish() // Write the stat information CurStat.Flags = htonl(CurStat.Flags); CurStat.FileSize = htonl(CurStat.FileSize); - InitQuery("st"); + InitQueryStats(); Put(&CurStat,sizeof(CurStat)); CurStat.Flags = ntohl(CurStat.Flags); CurStat.FileSize = ntohl(CurStat.FileSize); diff --git a/ftparchive/cachedb.h b/ftparchive/cachedb.h index 4e33c8635..54a274944 100644 --- a/ftparchive/cachedb.h +++ b/ftparchive/cachedb.h @@ -41,7 +41,7 @@ class CacheDB std::string DBFile; // Generate a key for the DB of a given type - inline void InitQuery(const char *Type) + void _InitQuery(const char *Type) { memset(&Key,0,sizeof(Key)); memset(&Data,0,sizeof(Data)); @@ -49,6 +49,19 @@ class CacheDB Key.size = snprintf(TmpKey,sizeof(TmpKey),"%s:%s",FileName.c_str(), Type); } + void InitQueryStats() { + _InitQuery("st"); + } + void InitQuerySource() { + _InitQuery("cs"); + } + void InitQueryControl() { + _InitQuery("cl"); + } + void InitQueryContent() { + 
_InitQuery("cn"); + } + inline bool Get() { return Dbp->get(Dbp,0,&Key,&Data,0) == 0; -- cgit v1.2.3 From cf6bbca0a93b21ab7d3378f26dd9b57951a1d987 Mon Sep 17 00:00:00 2001 From: Michael Vogt Date: Mon, 7 Apr 2014 09:41:20 +0200 Subject: ensure "--db" also works with the new srcpkgdb --- ftparchive/apt-ftparchive.cc | 8 +++++++ ftparchive/cachedb.cc | 2 +- test/integration/test-apt-ftparchive-src-cachedb | 27 +++++++++++++++++++++++- 3 files changed, 35 insertions(+), 2 deletions(-) diff --git a/ftparchive/apt-ftparchive.cc b/ftparchive/apt-ftparchive.cc index 74fc7681f..ba71ee225 100644 --- a/ftparchive/apt-ftparchive.cc +++ b/ftparchive/apt-ftparchive.cc @@ -690,6 +690,10 @@ static bool SimpleGenPackages(CommandLine &CmdL) if (Packages.RecursiveScan(CmdL.FileList[1]) == false) return false; + // Give some stats if asked for + if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true) + c0out << " Misses in Cache: " << Packages.Stats.Misses<< endl; + return true; } /*}}}*/ @@ -746,6 +750,10 @@ static bool SimpleGenSources(CommandLine &CmdL) if (Sources.RecursiveScan(CmdL.FileList[1]) == false) return false; + // Give some stats if asked for + if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true) + c0out << " Misses in Cache: " << Sources.Stats.Misses<< endl; + return true; } /*}}}*/ diff --git a/ftparchive/cachedb.cc b/ftparchive/cachedb.cc index 4feb7bbfb..d589c4c5a 100644 --- a/ftparchive/cachedb.cc +++ b/ftparchive/cachedb.cc @@ -515,7 +515,7 @@ bool CacheDB::Finish() if (CurStat.Flags == OldStat.Flags && CurStat.mtime == OldStat.mtime) return true; - + // Write the stat information CurStat.Flags = htonl(CurStat.Flags); CurStat.FileSize = htonl(CurStat.FileSize); diff --git a/test/integration/test-apt-ftparchive-src-cachedb b/test/integration/test-apt-ftparchive-src-cachedb index 9cc0a98de..1af193632 100755 --- a/test/integration/test-apt-ftparchive-src-cachedb +++ b/test/integration/test-apt-ftparchive-src-cachedb @@ -105,6 +105,30 @@ mkdir -p aptarchive/dists/test/main/source/ mkdir aptarchive-overrides mkdir aptarchive-cache + + + +# generate with --db option +(cd aptarchive && aptftparchive --db ./test.db sources pool/main/ \ + -o APT::FTPArchive::ShowCacheMisses=1 \ + > dists/test/main/source/Sources \ + 2> stats-out.txt + testequal " Misses in Cache: 2" grep Misses stats-out.txt +) +assert_correct_sources_file + +# generate with --db option (again to ensure its in the cache) +(cd aptarchive && aptftparchive --db ./test.db sources pool/main/ \ + -o APT::FTPArchive::ShowCacheMisses=1 \ + > dists/test/main/source/Sources \ + 2> stats-out.txt + testequal " Misses in Cache: 0" grep Misses stats-out.txt +) +assert_correct_sources_file + + + +# get ready for the "apt-ftparchive generate" command cat > apt-ftparchive.conf <<"EOF" Dir { ArchiveDir "./aptarchive"; @@ -135,7 +159,6 @@ Tree "dists/test" { }; EOF - # generate (empty cachedb) aptftparchive generate apt-ftparchive.conf -o APT::FTPArchive::ShowCacheMisses=1 2> stats-out.txt testequal " Misses in Cache: 2" grep Misses stats-out.txt @@ -148,6 +171,8 @@ aptftparchive generate apt-ftparchive.conf -o APT::FTPArchive::ShowCacheMisses=1 testequal " Misses in Cache: 0" grep Misses stats-out.txt assert_correct_sources_file + + # generate invalid files mkdir aptarchive/pool/invalid printf "meep" > aptarchive/pool/invalid/invalid_1.0.dsc -- cgit v1.2.3 From acea28d0a3a55c4df1390c42288043002610fbc9 Mon Sep 17 00:00:00 2001 From: Michael Vogt Date: Thu, 8 May 2014 11:46:29 +0200 Subject: fix regression from commit 
215b0faf --- ftparchive/cachedb.cc | 18 ++++-------------- 1 file changed, 4 insertions(+), 14 deletions(-) diff --git a/ftparchive/cachedb.cc b/ftparchive/cachedb.cc index d589c4c5a..e56deae1e 100644 --- a/ftparchive/cachedb.cc +++ b/ftparchive/cachedb.cc @@ -97,13 +97,8 @@ bool CacheDB::ReadyDB(std::string const &DB) /* */ bool CacheDB::OpenFile() { - // its open already - if(Fd && Fd->Name() == this->FileName) - return true; - - // a different file is open, close it first - if(Fd && Fd->Name() != this->FileName) - CloseFile(); + // always close existing file first + CloseFile(); // open a new file Fd = new FileFd(FileName,FileFd::ReadOnly); @@ -128,13 +123,8 @@ void CacheDB::CloseFile() // CacheDB::OpenDebFile - Open a debfile /*{{{*/ bool CacheDB::OpenDebFile() { - // debfile is already open - if(DebFile && &DebFile->GetFile() == Fd) - return true; - - // a different debfile is open, close it first - if(DebFile && &DebFile->GetFile() != Fd) - CloseDebFile(); + // always close existing file first + CloseDebFile(); // first open the fd, then pass it to the debDebFile if(OpenFile() == false) -- cgit v1.2.3
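For reference, the configuration surface this series introduces can be summarised as: a per-tree source cache database (TreeDefault::SrcCacheDB, overridable per Tree or BinDirectory block via SrcCacheDB), the APT::FTPArchive::ShowCacheMisses option used by the integration tests to report cache hits/misses, and Debug::APT::FTPArchive::Clean to print database statistics during clean. A minimal tree stanza using the new source cache, mirroring the defaults and the test-apt-ftparchive-src-cachedb setup above (section and path names follow the test and are illustrative):

    TreeDefault {
       BinCacheDB "packages-$(SECTION)-$(ARCH).db";
       SrcCacheDB "sources-$(SECTION).db";

       Directory "pool/$(SECTION)";
       SrcDirectory "pool/$(SECTION)";

       Sources "$(DIST)/$(SECTION)/source/Sources";
    };

With this in place, a first "apt-ftparchive generate" run (optionally with -o APT::FTPArchive::ShowCacheMisses=1) populates sources-main.db while writing Sources, and subsequent runs are served from the cache, as exercised by the tests in this series.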