author     Michael Vogt <mvo@debian.org>    2015-08-18 11:54:05 +0200
committer  Michael Vogt <mvo@debian.org>    2015-08-18 11:54:05 +0200
commit     21248c0f00ee71412dbadc6ebf84011cf974346d
tree       7dc1f5904399482d2128765b5b86d57a4ac5b3e1 /ftparchive
parent     e5f34ad3b043abf033c1626eb8449b75955d6760
parent     4fc6b7570c3e97b65c118b58cdf6729fa94c9b03
Merge branch 'debian/experimental' into feature/srv-records
Conflicts:
	cmdline/apt-helper.cc
	cmdline/makefile
Diffstat (limited to 'ftparchive')
-rw-r--r--  ftparchive/apt-ftparchive.cc  | 239
-rw-r--r--  ftparchive/cachedb.cc         | 514
-rw-r--r--  ftparchive/cachedb.h          |  82
-rw-r--r--  ftparchive/contents.cc        |  47
-rw-r--r--  ftparchive/contents.h         |  11
-rw-r--r--  ftparchive/makefile           |   6
-rw-r--r--  ftparchive/multicompress.cc   |  19
-rw-r--r--  ftparchive/multicompress.h    |   2
-rw-r--r--  ftparchive/sources.cc         |  60
-rw-r--r--  ftparchive/sources.h          |  24
-rw-r--r--  ftparchive/writer.cc          | 456
-rw-r--r--  ftparchive/writer.h           |  65
12 files changed, 857 insertions(+), 668 deletions(-)
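
Before the patch body, a minimal sketch of the reworked CacheDB interface this merge brings in (see cachedb.cc and cachedb.h below): hash generation is now selected with a single Hashes bitmask instead of the old DoMD5/DoSHA1/DoSHA256/DoSHA512 booleans, and the computed hashes are read back from the public HashStringList. The free-standing function, the database path and the hash selection here are illustrative assumptions, not code from the patch.

// Sketch only: a caller feeding one .deb through the new CacheDB API.
// Hash types are chosen with a bitmask; results land in Db.HashesList.
#include <apt-pkg/hashes.h>
#include <iostream>
#include <string>
#include "cachedb.h"

static bool ShowHashes(std::string const &DebFileName)
{
   CacheDB Db("packages.db");                       // path chosen for the example
   unsigned int const DoHashes = Hashes::MD5SUM | Hashes::SHA256SUM;

   if (Db.GetFileInfo(DebFileName,
                      true,  /* DoControl */
                      false, /* DoContents */
                      false, /* GenContentsOnly */
                      false, /* DoSource */
                      DoHashes,
                      false  /* checkMtime */) == false)
      return false;

   // GetFileInfo() fills HashesList via GetHashes(); print what was collected.
   for (HashStringList::const_iterator hs = Db.HashesList.begin();
        hs != Db.HashesList.end(); ++hs)
      std::cout << hs->HashType() << ": " << hs->HashValue() << std::endl;

   return Db.Finish();                              // write the cache record back
}

PackagesWriter::DoPackage in writer.cc below follows the same pattern, additionally requesting contents extraction and rewriting the Packages stanza from Db.HashesList.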
diff --git a/ftparchive/apt-ftparchive.cc b/ftparchive/apt-ftparchive.cc index 692f19e25..62108f7ca 100644 --- a/ftparchive/apt-ftparchive.cc +++ b/ftparchive/apt-ftparchive.cc @@ -19,6 +19,9 @@ #include <apt-pkg/init.h> #include <apt-pkg/fileutl.h> +#include <apt-private/private-cmndline.h> +#include <apt-private/private-output.h> + #include <algorithm> #include <climits> #include <sys/time.h> @@ -40,11 +43,7 @@ #include <apti18n.h> /*}}}*/ -using namespace std; -ostream c0out(0); -ostream c1out(0); -ostream c2out(0); -ofstream devnull("/dev/null"); +using namespace std; unsigned Quiet = 0; // struct PackageMap - List of all package files in the config file /*{{{*/ @@ -62,6 +61,7 @@ struct PackageMap // Stuff for the Package File string PkgFile; string BinCacheDB; + string SrcCacheDB; string BinOverride; string ExtraOverride; @@ -106,6 +106,12 @@ struct PackageMap inline bool operator() (const PackageMap &x,const PackageMap &y) {return x.BinCacheDB < y.BinCacheDB;}; }; + + struct SrcDBCompare : public binary_function<PackageMap,PackageMap,bool> + { + inline bool operator() (const PackageMap &x,const PackageMap &y) + {return x.SrcCacheDB < y.SrcCacheDB;}; + }; void GetGeneral(Configuration &Setup,Configuration &Block); bool GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats); @@ -172,7 +178,9 @@ bool PackageMap::GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats) PkgDone = true; // Create a package writer object. - PackagesWriter Packages(flCombine(CacheDir,BinCacheDB), + MultiCompress Comp(flCombine(ArchiveDir,PkgFile), + PkgCompress,Permissions); + PackagesWriter Packages(&Comp.Input, flCombine(CacheDir,BinCacheDB), flCombine(OverrideDir,BinOverride), flCombine(OverrideDir,ExtraOverride), Arch); @@ -191,10 +199,6 @@ bool PackageMap::GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats) Packages.Stats.DeLinkBytes = Stats.DeLinkBytes; Packages.DeLinkLimit = DeLinkLimit; - // Create a compressor object - MultiCompress Comp(flCombine(ArchiveDir,PkgFile), - PkgCompress,Permissions); - Packages.Output = Comp.Input; if (_error->PendingError() == true) return _error->Error(_("Error processing directory %s"),BaseDir.c_str()); @@ -232,11 +236,14 @@ bool PackageMap::GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats) gettimeofday(&NewTime,0); double Delta = NewTime.tv_sec - StartTime.tv_sec + (NewTime.tv_usec - StartTime.tv_usec)/1000000.0; - + c0out << Packages.Stats.Packages << " files " << /* SizeToStr(Packages.Stats.MD5Bytes) << "B/" << */ SizeToStr(Packages.Stats.Bytes) << "B " << TimeToStr((long)Delta) << endl; + + if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true) + c0out << " Misses in Cache: " << Packages.Stats.Misses<< endl; Stats.Add(Packages.Stats); Stats.DeLinkBytes = Packages.Stats.DeLinkBytes; @@ -263,7 +270,9 @@ bool PackageMap::GenSources(Configuration &Setup,struct CacheDB::Stats &Stats) SrcDone = true; // Create a package writer object. 
- SourcesWriter Sources(_config->Find("APT::FTPArchive::DB"), + MultiCompress Comp(flCombine(ArchiveDir,SrcFile), + SrcCompress,Permissions); + SourcesWriter Sources(&Comp.Input, flCombine(CacheDir, SrcCacheDB), flCombine(OverrideDir,BinOverride), flCombine(OverrideDir,SrcOverride), flCombine(OverrideDir,SrcExtraOverride)); @@ -278,11 +287,7 @@ bool PackageMap::GenSources(Configuration &Setup,struct CacheDB::Stats &Stats) Sources.DeLinkLimit = DeLinkLimit; Sources.Stats.DeLinkBytes = Stats.DeLinkBytes; - - // Create a compressor object - MultiCompress Comp(flCombine(ArchiveDir,SrcFile), - SrcCompress,Permissions); - Sources.Output = Comp.Input; + if (_error->PendingError() == true) return _error->Error(_("Error processing directory %s"),BaseDir.c_str()); @@ -323,6 +328,9 @@ bool PackageMap::GenSources(Configuration &Setup,struct CacheDB::Stats &Stats) c0out << Sources.Stats.Packages << " pkgs in " << TimeToStr((long)Delta) << endl; + if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true) + c0out << " Misses in Cache: " << Sources.Stats.Misses << endl; + Stats.Add(Sources.Stats); Stats.DeLinkBytes = Sources.Stats.DeLinkBytes; @@ -353,16 +361,15 @@ bool PackageMap::GenContents(Configuration &Setup, gettimeofday(&StartTime,0); // Create a package writer object. - ContentsWriter Contents("", Arch); + MultiCompress Comp(flCombine(ArchiveDir,this->Contents), + CntCompress,Permissions); + Comp.UpdateMTime = Setup.FindI("Default::ContentsAge",10)*24*60*60; + ContentsWriter Contents(&Comp.Input, "", Arch); if (PkgExt.empty() == false && Contents.SetExts(PkgExt) == false) return _error->Error(_("Package extension list is too long")); if (_error->PendingError() == true) return false; - MultiCompress Comp(flCombine(ArchiveDir,this->Contents), - CntCompress,Permissions); - Comp.UpdateMTime = Setup.FindI("Default::ContentsAge",10)*24*60*60; - Contents.Output = Comp.Input; if (_error->PendingError() == true) return false; @@ -372,7 +379,7 @@ bool PackageMap::GenContents(Configuration &Setup, FileFd Head(flCombine(OverrideDir,ContentsHead),FileFd::ReadOnly); if (_error->PendingError() == true) return false; - + unsigned long long Size = Head.Size(); unsigned char Buf[4096]; while (Size != 0) @@ -380,17 +387,17 @@ bool PackageMap::GenContents(Configuration &Setup, unsigned long long ToRead = Size; if (Size > sizeof(Buf)) ToRead = sizeof(Buf); - + if (Head.Read(Buf,ToRead) == false) return false; - - if (fwrite(Buf,1,ToRead,Comp.Input) != ToRead) + + if (Comp.Input.Write(Buf, ToRead) == false) return _error->Errno("fwrite",_("Error writing header to contents file")); - + Size -= ToRead; - } - } - + } + } + /* Go over all the package file records and parse all the package files associated with this contents file into one great big honking memory structure, then dump the sorted version */ @@ -435,6 +442,9 @@ bool PackageMap::GenContents(Configuration &Setup, double Delta = NewTime.tv_sec - StartTime.tv_sec + (NewTime.tv_usec - StartTime.tv_usec)/1000000.0; + if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true) + c0out << " Misses in Cache: " << Contents.Stats.Misses<< endl; + c0out << Contents.Stats.Packages << " files " << SizeToStr(Contents.Stats.Bytes) << "B " << TimeToStr((long)Delta) << endl; @@ -465,6 +475,8 @@ static void LoadTree(vector<PackageMap> &PkgList,Configuration &Setup) string DContentsH = Setup.Find("TreeDefault::Contents::Header",""); string DBCache = Setup.Find("TreeDefault::BinCacheDB", "packages-$(ARCH).db"); + string SrcDBCache = 
Setup.Find("TreeDefault::SrcCacheDB", + "sources-$(SECTION).db"); string DSources = Setup.Find("TreeDefault::Sources", "$(DIST)/$(SECTION)/source/Sources"); string DFLFile = Setup.Find("TreeDefault::FileList", ""); @@ -524,6 +536,7 @@ static void LoadTree(vector<PackageMap> &PkgList,Configuration &Setup) Itm.Tag = SubstVar("$(DIST)/$(SECTION)/source",Vars); Itm.FLFile = SubstVar(Block.Find("SourceFileList",DSFLFile.c_str()),Vars); Itm.SrcExtraOverride = SubstVar(Block.Find("SrcExtraOverride"),Vars); + Itm.SrcCacheDB = SubstVar(Block.Find("SrcCacheDB",SrcDBCache.c_str()),Vars); } else { @@ -573,6 +586,7 @@ static void LoadBinDir(vector<PackageMap> &PkgList,Configuration &Setup) Itm.PkgFile = Block.Find("Packages"); Itm.SrcFile = Block.Find("Sources"); Itm.BinCacheDB = Block.Find("BinCacheDB"); + Itm.SrcCacheDB = Block.Find("SrcCacheDB"); Itm.BinOverride = Block.Find("BinOverride"); Itm.ExtraOverride = Block.Find("ExtraOverride"); Itm.SrcExtraOverride = Block.Find("SrcExtraOverride"); @@ -597,8 +611,7 @@ static void LoadBinDir(vector<PackageMap> &PkgList,Configuration &Setup) /* */ static bool ShowHelp(CommandLine &) { - ioprintf(cout,_("%s %s for %s compiled on %s %s\n"),PACKAGE,PACKAGE_VERSION, - COMMON_ARCH,__DATE__,__TIME__); + ioprintf(cout, "%s %s (%s)\n", PACKAGE, PACKAGE_VERSION, COMMON_ARCH); if (_config->FindB("version") == true) return true; @@ -658,7 +671,7 @@ static bool SimpleGenPackages(CommandLine &CmdL) Override = CmdL.FileList[2]; // Create a package writer object. - PackagesWriter Packages(_config->Find("APT::FTPArchive::DB"), + PackagesWriter Packages(NULL, _config->Find("APT::FTPArchive::DB"), Override, "", _config->Find("APT::FTPArchive::Architecture")); if (_error->PendingError() == true) return false; @@ -670,6 +683,10 @@ static bool SimpleGenPackages(CommandLine &CmdL) if (Packages.RecursiveScan(CmdL.FileList[1]) == false) return false; + // Give some stats if asked for + if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true) + c0out << " Misses in Cache: " << Packages.Stats.Misses<< endl; + return true; } /*}}}*/ @@ -682,7 +699,7 @@ static bool SimpleGenContents(CommandLine &CmdL) return ShowHelp(CmdL); // Create a package writer object. - ContentsWriter Contents(_config->Find("APT::FTPArchive::DB"), _config->Find("APT::FTPArchive::Architecture")); + ContentsWriter Contents(NULL, _config->Find("APT::FTPArchive::DB"), _config->Find("APT::FTPArchive::Architecture")); if (_error->PendingError() == true) return false; @@ -715,7 +732,7 @@ static bool SimpleGenSources(CommandLine &CmdL) SOverride.c_str()); // Create a package writer object. 
- SourcesWriter Sources(_config->Find("APT::FTPArchive::DB"),Override,SOverride); + SourcesWriter Sources(NULL, _config->Find("APT::FTPArchive::DB"),Override,SOverride); if (_error->PendingError() == true) return false; @@ -726,6 +743,10 @@ static bool SimpleGenSources(CommandLine &CmdL) if (Sources.RecursiveScan(CmdL.FileList[1]) == false) return false; + // Give some stats if asked for + if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true) + c0out << " Misses in Cache: " << Sources.Stats.Misses<< endl; + return true; } /*}}}*/ @@ -738,7 +759,7 @@ static bool SimpleGenRelease(CommandLine &CmdL) string Dir = CmdL.FileList[1]; - ReleaseWriter Release(""); + ReleaseWriter Release(NULL, ""); Release.DirStrip = Dir; if (_error->PendingError() == true) @@ -753,32 +774,14 @@ static bool SimpleGenRelease(CommandLine &CmdL) } /*}}}*/ -// Generate - Full generate, using a config file /*{{{*/ +// DoGeneratePackagesAndSources - Helper for Generate /*{{{*/ // --------------------------------------------------------------------- -/* */ -static bool Generate(CommandLine &CmdL) +static bool DoGeneratePackagesAndSources(Configuration &Setup, + vector<PackageMap> &PkgList, + struct CacheDB::Stats &SrcStats, + struct CacheDB::Stats &Stats, + CommandLine &CmdL) { - struct CacheDB::Stats SrcStats; - if (CmdL.FileSize() < 2) - return ShowHelp(CmdL); - - struct timeval StartTime; - gettimeofday(&StartTime,0); - struct CacheDB::Stats Stats; - - // Read the configuration file. - Configuration Setup; - if (ReadConfigFile(Setup,CmdL.FileList[1],true) == false) - return false; - - vector<PackageMap> PkgList; - LoadTree(PkgList,Setup); - LoadBinDir(PkgList,Setup); - - // Sort by cache DB to improve IO locality. - stable_sort(PkgList.begin(),PkgList.end(),PackageMap::DBCompare()); - - // Generate packages if (CmdL.FileSize() <= 2) { for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); ++I) @@ -847,9 +850,16 @@ static bool Generate(CommandLine &CmdL) if (I->TransWriter != NULL && I->TransWriter->DecreaseRefCounter() == 0) delete I->TransWriter; - if (_config->FindB("APT::FTPArchive::Contents",true) == false) - return true; - + return true; +} + + /*}}}*/ +// DoGenerateContents - Helper for Generate to generate the Contents /*{{{*/ +// --------------------------------------------------------------------- +static bool DoGenerateContents(Configuration &Setup, + vector<PackageMap> &PkgList, + CommandLine &CmdL) +{ c1out << "Packages done, Starting contents." << endl; // Sort the contents file list by date @@ -906,17 +916,62 @@ static bool Generate(CommandLine &CmdL) break; } } + + return true; +} + + /*}}}*/ +// Generate - Full generate, using a config file /*{{{*/ +// --------------------------------------------------------------------- +/* */ +static bool Generate(CommandLine &CmdL) +{ + struct CacheDB::Stats SrcStats; + if (CmdL.FileSize() < 2) + return ShowHelp(CmdL); + + struct timeval StartTime; + gettimeofday(&StartTime,0); + struct CacheDB::Stats Stats; + // Read the configuration file. + Configuration Setup; + if (ReadConfigFile(Setup,CmdL.FileList[1],true) == false) + return false; + + vector<PackageMap> PkgList; + LoadTree(PkgList,Setup); + LoadBinDir(PkgList,Setup); + + // Sort by cache DB to improve IO locality. 
+ stable_sort(PkgList.begin(),PkgList.end(),PackageMap::DBCompare()); + stable_sort(PkgList.begin(),PkgList.end(),PackageMap::SrcDBCompare()); + + // Generate packages + if (_config->FindB("APT::FTPArchive::ContentsOnly", false) == false) + { + if(DoGeneratePackagesAndSources(Setup, PkgList, SrcStats, Stats, CmdL) == false) + return false; + } else { + c1out << "Skipping Packages/Sources generation" << endl; + } + + // do Contents if needed + if (_config->FindB("APT::FTPArchive::Contents", true) == true) + if (DoGenerateContents(Setup, PkgList, CmdL) == false) + return false; + struct timeval NewTime; - gettimeofday(&NewTime,0); - double Delta = NewTime.tv_sec - StartTime.tv_sec + + gettimeofday(&NewTime,0); + double Delta = NewTime.tv_sec - StartTime.tv_sec + (NewTime.tv_usec - StartTime.tv_usec)/1000000.0; - c1out << "Done. " << SizeToStr(Stats.Bytes) << "B in " << Stats.Packages + c1out << "Done. " << SizeToStr(Stats.Bytes) << "B in " << Stats.Packages << " archives. Took " << TimeToStr((long)Delta) << endl; - + return true; } - /*}}}*/ + + /*}}}*/ // Clean - Clean out the databases /*{{{*/ // --------------------------------------------------------------------- /* */ @@ -936,20 +991,33 @@ static bool Clean(CommandLine &CmdL) // Sort by cache DB to improve IO locality. stable_sort(PkgList.begin(),PkgList.end(),PackageMap::DBCompare()); + stable_sort(PkgList.begin(),PkgList.end(),PackageMap::SrcDBCompare()); string CacheDir = Setup.FindDir("Dir::CacheDir"); for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); ) { - c0out << I->BinCacheDB << endl; + if(I->BinCacheDB != "") + c0out << I->BinCacheDB << endl; + if(I->SrcCacheDB != "") + c0out << I->SrcCacheDB << endl; CacheDB DB(flCombine(CacheDir,I->BinCacheDB)); + CacheDB DB_SRC(flCombine(CacheDir,I->SrcCacheDB)); if (DB.Clean() == false) _error->DumpErrors(); + if (DB_SRC.Clean() == false) + _error->DumpErrors(); string CacheDB = I->BinCacheDB; - for (; I != PkgList.end() && I->BinCacheDB == CacheDB; ++I); + string SrcCacheDB = I->SrcCacheDB; + while(I != PkgList.end() && + I->BinCacheDB == CacheDB && + I->SrcCacheDB == SrcCacheDB) + ++I; + } - + + return true; } /*}}}*/ @@ -985,31 +1053,12 @@ int main(int argc, const char *argv[]) // Parse the command line and initialize the package library CommandLine CmdL(Args,_config); - if (pkgInitConfig(*_config) == false || CmdL.Parse(argc,argv) == false) - { - _error->DumpErrors(); - return 100; - } - - // See if the help should be shown - if (_config->FindB("help") == true || - _config->FindB("version") == true || - CmdL.FileSize() == 0) - { - ShowHelp(CmdL); - return 0; - } - - // Setup the output streams - c0out.rdbuf(clog.rdbuf()); - c1out.rdbuf(clog.rdbuf()); - c2out.rdbuf(clog.rdbuf()); + ParseCommandLine(CmdL, Cmds, Args, &_config, NULL, argc, argv, ShowHelp); + + _config->CndSet("quiet",0); Quiet = _config->FindI("quiet",0); - if (Quiet > 0) - c0out.rdbuf(devnull.rdbuf()); - if (Quiet > 1) - c1out.rdbuf(devnull.rdbuf()); - + InitOutput(clog.rdbuf()); + // Match the operation CmdL.DispatchArg(Cmds); diff --git a/ftparchive/cachedb.cc b/ftparchive/cachedb.cc index 523c6b5fa..cc3527ea4 100644 --- a/ftparchive/cachedb.cc +++ b/ftparchive/cachedb.cc @@ -20,17 +20,33 @@ #include <apt-pkg/configuration.h> #include <apt-pkg/fileutl.h> #include <apt-pkg/debfile.h> +#include <apt-pkg/gpgv.h> +#include <apt-pkg/hashes.h> #include <netinet/in.h> // htonl, etc #include <ctype.h> #include <stddef.h> #include <sys/stat.h> +#include <strings.h> #include "cachedb.h" #include 
<apti18n.h> /*}}}*/ +CacheDB::CacheDB(std::string const &DB) + : Dbp(0), Fd(NULL), DebFile(0) +{ + TmpKey[0]='\0'; + ReadyDB(DB); +} + +CacheDB::~CacheDB() +{ + ReadyDB(); + delete DebFile; +} + // CacheDB::ReadyDB - Ready the DB2 /*{{{*/ // --------------------------------------------------------------------- /* This opens the DB2 file for caching package information */ @@ -85,7 +101,7 @@ bool CacheDB::ReadyDB(std::string const &DB) return _error->Error(_("Unable to open DB file %s: %s"),DB.c_str(), db_strerror(err)); } } - + DBFile = DB; DBLoaded = true; return true; @@ -96,14 +112,54 @@ bool CacheDB::ReadyDB(std::string const &DB) /* */ bool CacheDB::OpenFile() { - Fd = new FileFd(FileName,FileFd::ReadOnly); - if (_error->PendingError() == true) - { - delete Fd; - Fd = NULL; - return false; - } - return true; + // always close existing file first + CloseFile(); + + // open a new file + Fd = new FileFd(FileName,FileFd::ReadOnly); + if (_error->PendingError() == true) + { + CloseFile(); + return false; + } + return true; +} + /*}}}*/ +// CacheDB::CloseFile - Close the file /*{{{*/ +void CacheDB::CloseFile() +{ + if(Fd != NULL) + { + delete Fd; + Fd = NULL; + } +} + /*}}}*/ +// CacheDB::OpenDebFile - Open a debfile /*{{{*/ +bool CacheDB::OpenDebFile() +{ + // always close existing file first + CloseDebFile(); + + // first open the fd, then pass it to the debDebFile + if(OpenFile() == false) + return false; + DebFile = new debDebFile(*Fd); + if (_error->PendingError() == true) + return false; + return true; +} + /*}}}*/ +// CacheDB::CloseDebFile - Close a debfile again /*{{{*/ +void CacheDB::CloseDebFile() +{ + CloseFile(); + + if(DebFile != NULL) + { + delete DebFile; + DebFile = NULL; + } } /*}}}*/ // CacheDB::GetFileStat - Get stats from the file /*{{{*/ @@ -112,29 +168,65 @@ bool CacheDB::OpenFile() * to look at the file, also get the mtime from the file. */ bool CacheDB::GetFileStat(bool const &doStat) { - if ((CurStat.Flags & FlSize) == FlSize && doStat == false) - { - /* Already worked out the file size */ - } - else - { - /* Get it from the file. */ - if (Fd == NULL && OpenFile() == false) - { - return false; - } - // Stat the file - struct stat St; - if (fstat(Fd->Fd(),&St) != 0) - { - return _error->Errno("fstat", - _("Failed to stat %s"),FileName.c_str()); - } - CurStat.FileSize = St.st_size; - CurStat.mtime = htonl(St.st_mtime); - CurStat.Flags |= FlSize; - } - return true; + if ((CurStat.Flags & FlSize) == FlSize && doStat == false) + return true; + + /* Get it from the file. 
*/ + if (OpenFile() == false) + return false; + + // Stat the file + struct stat St; + if (fstat(Fd->Fd(),&St) != 0) + { + CloseFile(); + return _error->Errno("fstat", + _("Failed to stat %s"),FileName.c_str()); + } + CurStat.FileSize = St.st_size; + CurStat.mtime = htonl(St.st_mtime); + CurStat.Flags |= FlSize; + + return true; +} + /*}}}*/ +// CacheDB::GetCurStatCompatOldFormat /*{{{*/ +// --------------------------------------------------------------------- +/* Read the old (32bit FileSize) StateStore format from disk */ +bool CacheDB::GetCurStatCompatOldFormat() +{ + InitQueryStats(); + Data.data = &CurStatOldFormat; + Data.flags = DB_DBT_USERMEM; + Data.ulen = sizeof(CurStatOldFormat); + if (Get() == false) + { + CurStat.Flags = 0; + } else { + CurStat.Flags = CurStatOldFormat.Flags; + CurStat.mtime = CurStatOldFormat.mtime; + CurStat.FileSize = CurStatOldFormat.FileSize; + memcpy(CurStat.MD5, CurStatOldFormat.MD5, sizeof(CurStat.MD5)); + memcpy(CurStat.SHA1, CurStatOldFormat.SHA1, sizeof(CurStat.SHA1)); + memcpy(CurStat.SHA256, CurStatOldFormat.SHA256, sizeof(CurStat.SHA256)); + } + return true; +} + /*}}}*/ +// CacheDB::GetCurStatCompatOldFormat /*{{{*/ +// --------------------------------------------------------------------- +/* Read the new (64bit FileSize) StateStore format from disk */ +bool CacheDB::GetCurStatCompatNewFormat() +{ + InitQueryStats(); + Data.data = &CurStat; + Data.flags = DB_DBT_USERMEM; + Data.ulen = sizeof(CurStat); + if (Get() == false) + { + CurStat.Flags = 0; + } + return true; } /*}}}*/ // CacheDB::GetCurStat - Set the CurStat variable. /*{{{*/ @@ -145,75 +237,98 @@ bool CacheDB::GetCurStat() { memset(&CurStat,0,sizeof(CurStat)); - if (DBLoaded) - { - /* First see if there is anything about it - in the database */ - - /* Get the flags (and mtime) */ - InitQuery("st"); - // Ensure alignment of the returned structure - Data.data = &CurStat; - Data.ulen = sizeof(CurStat); - Data.flags = DB_DBT_USERMEM; - if (Get() == false) + if (DBLoaded) + { + // do a first query to just get the size of the data on disk + InitQueryStats(); + Data.data = &CurStat; + Data.flags = DB_DBT_USERMEM; + Data.ulen = 0; + Get(); + + if (Data.size == 0) { - CurStat.Flags = 0; - } - CurStat.Flags = ntohl(CurStat.Flags); - CurStat.FileSize = ntohl(CurStat.FileSize); + // nothing needs to be done, we just have not data for this deb + } + // check if the record is written in the old format (32bit filesize) + else if(Data.size == sizeof(CurStatOldFormat)) + { + GetCurStatCompatOldFormat(); + } + else if(Data.size == sizeof(CurStat)) + { + GetCurStatCompatNewFormat(); + } else { + return _error->Error("Cache record size mismatch (%ul)", Data.size); + } + + CurStat.Flags = ntohl(CurStat.Flags); + CurStat.FileSize = ntohl(CurStat.FileSize); } - return true; + return true; } /*}}}*/ // CacheDB::GetFileInfo - Get all the info about the file /*{{{*/ // --------------------------------------------------------------------- bool CacheDB::GetFileInfo(std::string const &FileName, bool const &DoControl, bool const &DoContents, - bool const &GenContentsOnly, bool const &DoMD5, bool const &DoSHA1, - bool const &DoSHA256, bool const &DoSHA512, + bool const &GenContentsOnly, bool const DoSource, unsigned int const DoHashes, bool const &checkMtime) { - this->FileName = FileName; + this->FileName = FileName; - if (GetCurStat() == false) - { - return false; - } + if (GetCurStat() == false) + return false; OldStat = CurStat; - if (GetFileStat(checkMtime) == false) - { - delete Fd; - Fd = NULL; - return false; 
- } - - /* if mtime changed, update CurStat from disk */ - if (checkMtime == true && OldStat.mtime != CurStat.mtime) - CurStat.Flags = FlSize; - - Stats.Bytes += CurStat.FileSize; - Stats.Packages++; - - if ((DoControl && LoadControl() == false) - || (DoContents && LoadContents(GenContentsOnly) == false) - || (DoMD5 && GetMD5(false) == false) - || (DoSHA1 && GetSHA1(false) == false) - || (DoSHA256 && GetSHA256(false) == false) - || (DoSHA512 && GetSHA512(false) == false) - ) - { - delete Fd; - Fd = NULL; - delete DebFile; - DebFile = NULL; - return false; - } - - delete Fd; - Fd = NULL; - delete DebFile; - DebFile = NULL; + if (GetFileStat(checkMtime) == false) + return false; + + /* if mtime changed, update CurStat from disk */ + if (checkMtime == true && OldStat.mtime != CurStat.mtime) + CurStat.Flags = FlSize; + + Stats.Bytes += CurStat.FileSize; + ++Stats.Packages; + + if ((DoControl && LoadControl() == false) + || (DoContents && LoadContents(GenContentsOnly) == false) + || (DoSource && LoadSource() == false) + || (DoHashes != 0 && GetHashes(false, DoHashes) == false) + ) + { + return false; + } + + return true; +} + /*}}}*/ +bool CacheDB::LoadSource() /*{{{*/ +{ + // Try to read the control information out of the DB. + if ((CurStat.Flags & FlSource) == FlSource) + { + // Lookup the control information + InitQuerySource(); + if (Get() == true && Dsc.TakeDsc(Data.data, Data.size) == true) + { + return true; + } + CurStat.Flags &= ~FlSource; + } + if (OpenFile() == false) + return false; + + Stats.Misses++; + if (Dsc.Read(FileName) == false) + return false; + + if (Dsc.Length == 0) + return _error->Error(_("Failed to read .dsc")); + + // Write back the control information + InitQuerySource(); + if (Put(Dsc.Data.c_str(), Dsc.Length) == true) + CurStat.Flags |= FlSource; return true; } @@ -227,23 +342,14 @@ bool CacheDB::LoadControl() if ((CurStat.Flags & FlControl) == FlControl) { // Lookup the control information - InitQuery("cl"); + InitQueryControl(); if (Get() == true && Control.TakeControl(Data.data,Data.size) == true) return true; CurStat.Flags &= ~FlControl; } - if (Fd == NULL && OpenFile() == false) - { + if(OpenDebFile() == false) return false; - } - // Create a deb instance to read the archive - if (DebFile == 0) - { - DebFile = new debDebFile(*Fd); - if (_error->PendingError() == true) - return false; - } Stats.Misses++; if (Control.Read(*DebFile) == false) @@ -253,7 +359,7 @@ bool CacheDB::LoadControl() return _error->Error(_("Archive has no control record")); // Write back the control information - InitQuery("cl"); + InitQueryControl(); if (Put(Control.Control,Control.Length) == true) CurStat.Flags |= FlControl; return true; @@ -271,7 +377,7 @@ bool CacheDB::LoadContents(bool const &GenOnly) return true; // Lookup the contents information - InitQuery("cn"); + InitQueryContent(); if (Get() == true) { if (Contents.TakeContents(Data.data,Data.size) == true) @@ -281,29 +387,21 @@ bool CacheDB::LoadContents(bool const &GenOnly) CurStat.Flags &= ~FlContents; } - if (Fd == NULL && OpenFile() == false) - { + if(OpenDebFile() == false) return false; - } - // Create a deb instance to read the archive - if (DebFile == 0) - { - DebFile = new debDebFile(*Fd); - if (_error->PendingError() == true) - return false; - } + Stats.Misses++; if (Contents.Read(*DebFile) == false) return false; // Write back the control information - InitQuery("cn"); + InitQueryContent(); if (Put(Contents.Data,Contents.CurSize) == true) CurStat.Flags |= FlContents; return true; } /*}}}*/ - +// CacheDB::GetHashes - 
Get the hashs /*{{{*/ static std::string bytes2hex(uint8_t *bytes, size_t length) { char buf[3]; std::string space; @@ -333,129 +431,63 @@ static void hex2bytes(uint8_t *bytes, const char *hex, int length) { bytes++; } } - -// CacheDB::GetMD5 - Get the MD5 hash /*{{{*/ -// --------------------------------------------------------------------- -/* */ -bool CacheDB::GetMD5(bool const &GenOnly) -{ - // Try to read the control information out of the DB. - if ((CurStat.Flags & FlMD5) == FlMD5) - { - if (GenOnly == true) - return true; - - MD5Res = bytes2hex(CurStat.MD5, sizeof(CurStat.MD5)); - return true; - } - - Stats.MD5Bytes += CurStat.FileSize; - - if (Fd == NULL && OpenFile() == false) - { - return false; - } - MD5Summation MD5; - if (Fd->Seek(0) == false || MD5.AddFD(*Fd, CurStat.FileSize) == false) - return false; - - MD5Res = MD5.Result(); - hex2bytes(CurStat.MD5, MD5Res.data(), sizeof(CurStat.MD5)); - CurStat.Flags |= FlMD5; - return true; -} - /*}}}*/ -// CacheDB::GetSHA1 - Get the SHA1 hash /*{{{*/ -// --------------------------------------------------------------------- -/* */ -bool CacheDB::GetSHA1(bool const &GenOnly) +bool CacheDB::GetHashes(bool const GenOnly, unsigned int const DoHashes) { - // Try to read the control information out of the DB. - if ((CurStat.Flags & FlSHA1) == FlSHA1) - { - if (GenOnly == true) - return true; + unsigned int FlHashes = DoHashes & (Hashes::MD5SUM | Hashes::SHA1SUM | Hashes::SHA256SUM | Hashes::SHA512SUM); + HashesList.clear(); - SHA1Res = bytes2hex(CurStat.SHA1, sizeof(CurStat.SHA1)); - return true; - } - - Stats.SHA1Bytes += CurStat.FileSize; - - if (Fd == NULL && OpenFile() == false) - { - return false; - } - SHA1Summation SHA1; - if (Fd->Seek(0) == false || SHA1.AddFD(*Fd, CurStat.FileSize) == false) - return false; - - SHA1Res = SHA1.Result(); - hex2bytes(CurStat.SHA1, SHA1Res.data(), sizeof(CurStat.SHA1)); - CurStat.Flags |= FlSHA1; - return true; -} - /*}}}*/ -// CacheDB::GetSHA256 - Get the SHA256 hash /*{{{*/ -// --------------------------------------------------------------------- -/* */ -bool CacheDB::GetSHA256(bool const &GenOnly) -{ - // Try to read the control information out of the DB. - if ((CurStat.Flags & FlSHA256) == FlSHA256) + if (FlHashes != 0) { - if (GenOnly == true) - return true; + if (OpenFile() == false) + return false; - SHA256Res = bytes2hex(CurStat.SHA256, sizeof(CurStat.SHA256)); - return true; - } - - Stats.SHA256Bytes += CurStat.FileSize; - - if (Fd == NULL && OpenFile() == false) - { - return false; - } - SHA256Summation SHA256; - if (Fd->Seek(0) == false || SHA256.AddFD(*Fd, CurStat.FileSize) == false) - return false; - - SHA256Res = SHA256.Result(); - hex2bytes(CurStat.SHA256, SHA256Res.data(), sizeof(CurStat.SHA256)); - CurStat.Flags |= FlSHA256; - return true; -} - /*}}}*/ -// CacheDB::GetSHA256 - Get the SHA256 hash /*{{{*/ -// --------------------------------------------------------------------- -/* */ -bool CacheDB::GetSHA512(bool const &GenOnly) -{ - // Try to read the control information out of the DB. 
- if ((CurStat.Flags & FlSHA512) == FlSHA512) - { - if (GenOnly == true) - return true; + Hashes hashes(FlHashes); + if (Fd->Seek(0) == false || hashes.AddFD(*Fd, CurStat.FileSize) == false) + return false; - SHA512Res = bytes2hex(CurStat.SHA512, sizeof(CurStat.SHA512)); - return true; - } - - Stats.SHA512Bytes += CurStat.FileSize; - - if (Fd == NULL && OpenFile() == false) - { - return false; + HashStringList hl = hashes.GetHashStringList(); + for (HashStringList::const_iterator hs = hl.begin(); hs != hl.end(); ++hs) + { + HashesList.push_back(*hs); + if (strcasecmp(hs->HashType().c_str(), "SHA512") == 0) + { + Stats.SHA512Bytes += CurStat.FileSize; + hex2bytes(CurStat.SHA512, hs->HashValue().data(), sizeof(CurStat.SHA512)); + CurStat.Flags |= FlSHA512; + } + else if (strcasecmp(hs->HashType().c_str(), "SHA256") == 0) + { + Stats.SHA256Bytes += CurStat.FileSize; + hex2bytes(CurStat.SHA256, hs->HashValue().data(), sizeof(CurStat.SHA256)); + CurStat.Flags |= FlSHA256; + } + else if (strcasecmp(hs->HashType().c_str(), "SHA1") == 0) + { + Stats.SHA1Bytes += CurStat.FileSize; + hex2bytes(CurStat.SHA1, hs->HashValue().data(), sizeof(CurStat.SHA1)); + CurStat.Flags |= FlSHA1; + } + else if (strcasecmp(hs->HashType().c_str(), "MD5Sum") == 0) + { + Stats.MD5Bytes += CurStat.FileSize; + hex2bytes(CurStat.MD5, hs->HashValue().data(), sizeof(CurStat.MD5)); + CurStat.Flags |= FlMD5; + } + else if (strcasecmp(hs->HashType().c_str(), "Checksum-FileSize") == 0) + { + // we store it in a different field already + } + else + return _error->Error("Got unknown unrequested hashtype %s", hs->HashType().c_str()); + } } - SHA512Summation SHA512; - if (Fd->Seek(0) == false || SHA512.AddFD(*Fd, CurStat.FileSize) == false) - return false; - - SHA512Res = SHA512.Result(); - hex2bytes(CurStat.SHA512, SHA512Res.data(), sizeof(CurStat.SHA512)); - CurStat.Flags |= FlSHA512; - return true; + if (GenOnly == true) + return true; + + return HashesList.push_back(HashString("MD5Sum", bytes2hex(CurStat.MD5, sizeof(CurStat.MD5)))) && + HashesList.push_back(HashString("SHA1", bytes2hex(CurStat.SHA1, sizeof(CurStat.SHA1)))) && + HashesList.push_back(HashString("SHA256", bytes2hex(CurStat.SHA256, sizeof(CurStat.SHA256)))) && + HashesList.push_back(HashString("SHA512", bytes2hex(CurStat.SHA512, sizeof(CurStat.SHA512)))); } /*}}}*/ // CacheDB::Finish - Write back the cache structure /*{{{*/ @@ -467,11 +499,11 @@ bool CacheDB::Finish() if (CurStat.Flags == OldStat.Flags && CurStat.mtime == OldStat.mtime) return true; - + // Write the stat information CurStat.Flags = htonl(CurStat.Flags); CurStat.FileSize = htonl(CurStat.FileSize); - InitQuery("st"); + InitQueryStats(); Put(&CurStat,sizeof(CurStat)); CurStat.Flags = ntohl(CurStat.Flags); CurStat.FileSize = ntohl(CurStat.FileSize); @@ -504,16 +536,24 @@ bool CacheDB::Clean() { if (stringcmp(Colon + 1, (char *)Key.data+Key.size,"st") == 0 || stringcmp(Colon + 1, (char *)Key.data+Key.size,"cl") == 0 || + stringcmp(Colon + 1, (char *)Key.data+Key.size,"cs") == 0 || stringcmp(Colon + 1, (char *)Key.data+Key.size,"cn") == 0) { - if (FileExists(std::string((const char *)Key.data,Colon)) == true) - continue; + std::string FileName = std::string((const char *)Key.data,Colon); + if (FileExists(FileName) == true) { + continue; + } } } - Cursor->c_del(Cursor,0); } - Dbp->compact(Dbp, NULL, NULL, NULL, NULL, DB_FREE_SPACE, NULL); + int res = Dbp->compact(Dbp, NULL, NULL, NULL, NULL, DB_FREE_SPACE, NULL); + if (res < 0) + _error->Warning("compact failed with result %i", res); + + 
if(_config->FindB("Debug::APT::FTPArchive::Clean", false) == true) + Dbp->stat_print(Dbp, 0); + return true; } diff --git a/ftparchive/cachedb.h b/ftparchive/cachedb.h index 49b9a0ef5..613963f6f 100644 --- a/ftparchive/cachedb.h +++ b/ftparchive/cachedb.h @@ -12,6 +12,7 @@ #ifndef CACHEDB_H #define CACHEDB_H +#include <apt-pkg/hashes.h> #include <apt-pkg/debfile.h> #include <db.h> @@ -22,9 +23,11 @@ #include <stdio.h> #include "contents.h" +#include "sources.h" class FileFd; + class CacheDB { protected: @@ -39,7 +42,7 @@ class CacheDB std::string DBFile; // Generate a key for the DB of a given type - inline void InitQuery(const char *Type) + void _InitQuery(const char *Type) { memset(&Key,0,sizeof(Key)); memset(&Data,0,sizeof(Data)); @@ -47,6 +50,19 @@ class CacheDB Key.size = snprintf(TmpKey,sizeof(TmpKey),"%s:%s",FileName.c_str(), Type); } + void InitQueryStats() { + _InitQuery("st"); + } + void InitQuerySource() { + _InitQuery("cs"); + } + void InitQueryControl() { + _InitQuery("cl"); + } + void InitQueryContent() { + _InitQuery("cn"); + } + inline bool Get() { return Dbp->get(Dbp,0,&Key,&Data,0) == 0; @@ -65,20 +81,42 @@ class CacheDB return true; } bool OpenFile(); - bool GetFileStat(bool const &doStat = false); + void CloseFile(); + + bool OpenDebFile(); + void CloseDebFile(); + + // GetCurStat needs some compat code, see lp #1274466) + bool GetCurStatCompatOldFormat(); + bool GetCurStatCompatNewFormat(); bool GetCurStat(); + + bool GetFileStat(bool const &doStat = false); bool LoadControl(); bool LoadContents(bool const &GenOnly); - bool GetMD5(bool const &GenOnly); - bool GetSHA1(bool const &GenOnly); - bool GetSHA256(bool const &GenOnly); - bool GetSHA512(bool const &GenOnly); - + bool LoadSource(); + bool GetHashes(bool const GenOnly, unsigned int const DoHashes); + // Stat info stored in the DB, Fixed types since it is written to disk. 
enum FlagList {FlControl = (1<<0),FlMD5=(1<<1),FlContents=(1<<2), - FlSize=(1<<3), FlSHA1=(1<<4), FlSHA256=(1<<5), - FlSHA512=(1<<6)}; + FlSize=(1<<3), FlSHA1=(1<<4), FlSHA256=(1<<5), + FlSHA512=(1<<6), FlSource=(1<<7) + }; + + // the on-disk format changed (FileSize increased to 64bit) in + // commit 650faab0 which will lead to corruption with old caches + struct StatStoreOldFormat + { + uint32_t Flags; + uint32_t mtime; + uint32_t FileSize; + uint8_t MD5[16]; + uint8_t SHA1[20]; + uint8_t SHA256[32]; + } CurStatOldFormat; + // WARNING: this struct is read/written to the DB so do not change the + // layout of the fields (see lp #1274466), only append to it struct StatStore { uint32_t Flags; @@ -101,11 +139,9 @@ class CacheDB // Data collection helpers debDebFile::MemControlExtract Control; ContentsExtract Contents; - std::string MD5Res; - std::string SHA1Res; - std::string SHA256Res; - std::string SHA512Res; - + DscExtract Dsc; + HashStringList HashesList; + // Runtime statistics struct Stats { @@ -132,21 +168,29 @@ class CacheDB SHA512Bytes(0),Packages(0), Misses(0), DeLinkBytes(0) {}; } Stats; - bool ReadyDB(std::string const &DB); + bool ReadyDB(std::string const &DB = ""); inline bool DBFailed() {return Dbp != 0 && DBLoaded == false;}; inline bool Loaded() {return DBLoaded == true;}; inline unsigned long long GetFileSize(void) {return CurStat.FileSize;} bool SetFile(std::string const &FileName,struct stat St,FileFd *Fd); - bool GetFileInfo(std::string const &FileName, bool const &DoControl, bool const &DoContents, bool const &GenContentsOnly, - bool const &DoMD5, bool const &DoSHA1, bool const &DoSHA256, bool const &DoSHA512, bool const &checkMtime = false); + + // terrible old overloaded interface + bool GetFileInfo(std::string const &FileName, + bool const &DoControl, + bool const &DoContents, + bool const &GenContentsOnly, + bool const DoSource, + unsigned int const DoHashes, + bool const &checkMtime = false); + bool Finish(); bool Clean(); - CacheDB(std::string const &DB) : Dbp(0), Fd(NULL), DebFile(0) {TmpKey[0]='\0'; ReadyDB(DB);}; - ~CacheDB() {ReadyDB(std::string()); delete DebFile;}; + CacheDB(std::string const &DB); + ~CacheDB(); }; #endif diff --git a/ftparchive/contents.cc b/ftparchive/contents.cc index 7a1fb779e..145f3910e 100644 --- a/ftparchive/contents.cc +++ b/ftparchive/contents.cc @@ -38,6 +38,7 @@ #include <apt-pkg/debfile.h> #include <apt-pkg/dirstream.h> #include <apt-pkg/error.h> +#include <apt-pkg/fileutl.h> #include <stdio.h> #include <stdlib.h> @@ -238,19 +239,19 @@ void GenContents::Add(const char *Dir,const char *Package) // GenContents::WriteSpace - Write a given number of white space chars /*{{{*/ // --------------------------------------------------------------------- /* We mod 8 it and write tabs where possible. 
*/ -void GenContents::WriteSpace(FILE *Out,unsigned int Current,unsigned int Target) +void GenContents::WriteSpace(std::string &out, size_t Current, size_t Target) { if (Target <= Current) Target = Current + 1; - + /* Now we write tabs so long as the next tab stop would not pass the target */ for (; (Current/8 + 1)*8 < Target; Current = (Current/8 + 1)*8) - fputc('\t',Out); + out.append("\t"); // Fill the last bit with spaces for (; Current < Target; Current++) - fputc(' ',Out); + out.append(" "); } /*}}}*/ // GenContents::Print - Display the tree /*{{{*/ @@ -259,13 +260,13 @@ void GenContents::WriteSpace(FILE *Out,unsigned int Current,unsigned int Target) calls itself and runs over each section of the tree printing out the pathname and the hit packages. We use Buf to build the pathname summed over all the directory parents of this node. */ -void GenContents::Print(FILE *Out) +void GenContents::Print(FileFd &Out) { char Buffer[1024]; Buffer[0] = 0; DoPrint(Out,&Root,Buffer); } -void GenContents::DoPrint(FILE *Out,GenContents::Node *Top, char *Buf) +void GenContents::DoPrint(FileFd &Out,GenContents::Node *Top, char *Buf) { if (Top == 0) return; @@ -278,31 +279,43 @@ void GenContents::DoPrint(FILE *Out,GenContents::Node *Top, char *Buf) if (Top->Path != 0) { strcat(Buf,Top->Path); - + // Do not show the item if it is a directory with dups if (Top->Path[strlen(Top->Path)-1] != '/' /*|| Top->Dups == 0*/) { - fputs(Buf,Out); - WriteSpace(Out,strlen(Buf),60); + std::string out = Buf; + WriteSpace(out, out.length(), 60); for (Node *I = Top; I != 0; I = I->Dups) { if (I != Top) - fputc(',',Out); - fputs(I->Package,Out); + out.append(","); + out.append(I->Package); } - fputc('\n',Out); - } - } - + out.append("\n"); + Out.Write(out.c_str(), out.length()); + } + } + // Go along the directory link DoPrint(Out,Top->DirDown,Buf); *OldEnd = 0; - + // Go right DoPrint(Out,Top->BTreeRight,Buf); } /*}}}*/ - +// ContentsExtract Constructor /*{{{*/ +ContentsExtract::ContentsExtract() + : Data(0), MaxSize(0), CurSize(0) +{ +} + /*}}}*/ +// ContentsExtract Destructor /*{{{*/ +ContentsExtract::~ContentsExtract() +{ + free(Data); +} + /*}}}*/ // ContentsExtract::Read - Read the archive /*{{{*/ // --------------------------------------------------------------------- /* */ diff --git a/ftparchive/contents.h b/ftparchive/contents.h index dbbb83350..953d0d54b 100644 --- a/ftparchive/contents.h +++ b/ftparchive/contents.h @@ -17,6 +17,7 @@ #include <string> class debDebFile; +class FileFd; class GenContents { @@ -54,14 +55,14 @@ class GenContents unsigned long NodeLeft; Node *Grab(Node *Top,const char *Name,const char *Package); - void WriteSpace(FILE *Out,unsigned int Current,unsigned int Target); - void DoPrint(FILE *Out,Node *Top, char *Buf); + void WriteSpace(std::string &out, size_t Current, size_t Target); + void DoPrint(FileFd &Out,Node *Top, char *Buf); public: char *Mystrdup(const char *From); void Add(const char *Dir,const char *Package); - void Print(FILE *Out); + void Print(FileFd &Out); GenContents() : BlockList(0), StrPool(0), StrLeft(0), NodePool(0), NodeLeft(0) {}; @@ -85,8 +86,8 @@ class ContentsExtract : public pkgDirStream bool TakeContents(const void *Data,unsigned long long Length); void Add(GenContents &Contents,std::string const &Package); - ContentsExtract() : Data(0), MaxSize(0), CurSize(0) {}; - virtual ~ContentsExtract() {delete [] Data;}; + ContentsExtract(); + virtual ~ContentsExtract(); }; #endif diff --git a/ftparchive/makefile b/ftparchive/makefile index c53ecff72..e67272e1e 100644 --- 
a/ftparchive/makefile +++ b/ftparchive/makefile @@ -9,10 +9,10 @@ include ../buildlib/defaults.mak ifdef BDBLIB APT_DOMAIN:=apt-utils PROGRAM=apt-ftparchive -SLIBS = -lapt-pkg -lapt-inst $(BDBLIB) $(INTLLIBS) -LIB_MAKES = apt-pkg/makefile apt-inst/makefile +SLIBS = -lapt-pkg -lapt-inst -lapt-private $(BDBLIB) $(INTLLIBS) +LIB_MAKES = apt-pkg/makefile apt-inst/makefile apt-private/makefile SOURCE = apt-ftparchive.cc cachedb.cc writer.cc contents.cc override.cc \ - multicompress.cc + multicompress.cc sources.cc include $(PROGRAM_H) else PROGRAM=apt-ftparchive diff --git a/ftparchive/multicompress.cc b/ftparchive/multicompress.cc index f35d5304a..08a3cff5a 100644 --- a/ftparchive/multicompress.cc +++ b/ftparchive/multicompress.cc @@ -46,7 +46,6 @@ MultiCompress::MultiCompress(string const &Output,string const &Compress, { Outputs = 0; Outputter = -1; - Input = 0; UpdateMTime = 0; /* Parse the compression string, a space separated lists of compresison @@ -187,12 +186,11 @@ bool MultiCompress::Start() }; close(Pipe[0]); - Input = fdopen(Pipe[1],"w"); - if (Input == 0) - return _error->Errno("fdopen",_("Failed to create FILE*")); - + if (Input.OpenDescriptor(Pipe[1], FileFd::WriteOnly, true) == false) + return false; + if (Outputter == -1) - return _error->Errno("fork",_("Failed to fork")); + return _error->Errno("fork",_("Failed to fork")); return true; } /*}}}*/ @@ -201,11 +199,10 @@ bool MultiCompress::Start() /* */ bool MultiCompress::Die() { - if (Input == 0) + if (Input.IsOpen() == false) return true; - - fclose(Input); - Input = 0; + + Input.Close(); bool Res = ExecWait(Outputter,_("Compress child"),false); Outputter = -1; return Res; @@ -217,7 +214,7 @@ bool MultiCompress::Die() bool MultiCompress::Finalize(unsigned long long &OutSize) { OutSize = 0; - if (Input == 0 || Die() == false) + if (Input.IsOpen() == false || Die() == false) return false; time_t Now; diff --git a/ftparchive/multicompress.h b/ftparchive/multicompress.h index ddd1815a3..161716b86 100644 --- a/ftparchive/multicompress.h +++ b/ftparchive/multicompress.h @@ -48,7 +48,7 @@ class MultiCompress public: // The FD to write to for compression. 
- FILE *Input; + FileFd Input; unsigned long UpdateMTime; bool Finalize(unsigned long long &OutSize); diff --git a/ftparchive/sources.cc b/ftparchive/sources.cc new file mode 100644 index 000000000..ab976b490 --- /dev/null +++ b/ftparchive/sources.cc @@ -0,0 +1,60 @@ +#include <string> +#include <sstream> + +// for memcpy +#include <cstring> + +#include <apt-pkg/error.h> +#include <apt-pkg/gpgv.h> + +#include "sources.h" + +bool DscExtract::TakeDsc(const void *newData, unsigned long long newSize) +{ + if (newSize == 0) + { + // adding two newlines 'off record' for pkgTagSection.Scan() calls + Data = "\n\n"; + Length = 0; + return true; + } + + Data = std::string((const char*)newData, newSize); + // adding two newlines 'off record' for pkgTagSection.Scan() calls + Data.append("\n\n"); + Length = newSize; + + return true; +} + +bool DscExtract::Read(std::string FileName) +{ + Data.clear(); + Length = 0; + + FileFd F; + if (OpenMaybeClearSignedFile(FileName, F) == false) + return false; + + IsClearSigned = (FileName != F.Name()); + + std::ostringstream data; + char buffer[1024]; + do { + unsigned long long actual = 0; + if (F.Read(buffer, sizeof(buffer)-1, &actual) == false) + return _error->Errno("read", "Failed to read dsc file %s", FileName.c_str()); + if (actual == 0) + break; + Length += actual; + buffer[actual] = '\0'; + data << buffer; + } while(true); + + // adding two newlines 'off record' for pkgTagSection.Scan() calls + data << "\n\n"; + Data = data.str(); + return true; +} + + diff --git a/ftparchive/sources.h b/ftparchive/sources.h new file mode 100644 index 000000000..a125ec6a4 --- /dev/null +++ b/ftparchive/sources.h @@ -0,0 +1,24 @@ +#ifndef SOURCES_H +#define SOURCES_H + +#include <apt-pkg/tagfile.h> + +#include <string> + +class DscExtract +{ + public: + std::string Data; + pkgTagSection Section; + unsigned long long Length; + bool IsClearSigned; + + bool TakeDsc(const void *Data, unsigned long long Size); + bool Read(std::string FileName); + + DscExtract() : Length(0), IsClearSigned(false) {}; + ~DscExtract() {}; +}; + + +#endif diff --git a/ftparchive/writer.cc b/ftparchive/writer.cc index 153c4fb42..7cf7e6efc 100644 --- a/ftparchive/writer.cc +++ b/ftparchive/writer.cc @@ -51,32 +51,38 @@ using namespace std; FTWScanner *FTWScanner::Owner; -// SetTFRewriteData - Helper for setting rewrite lists /*{{{*/ -// --------------------------------------------------------------------- -/* */ -inline void SetTFRewriteData(struct TFRewriteData &tfrd, - const char *tag, - const char *rewrite, - const char *newtag = 0) +// ConfigToDoHashes - which hashes to generate /*{{{*/ +static void SingleConfigToDoHashes(unsigned int &DoHashes, std::string const &Conf, unsigned int const Flag) +{ + if (_config->FindB(Conf, true) == true) + DoHashes |= Flag; + else + DoHashes &= ~Flag; +} +static void ConfigToDoHashes(unsigned int &DoHashes, std::string const &Conf) { - tfrd.Tag = tag; - tfrd.Rewrite = rewrite; - tfrd.NewTag = newtag; + SingleConfigToDoHashes(DoHashes, Conf + "::MD5", Hashes::MD5SUM); + SingleConfigToDoHashes(DoHashes, Conf + "::SHA1", Hashes::SHA1SUM); + SingleConfigToDoHashes(DoHashes, Conf + "::SHA256", Hashes::SHA256SUM); + SingleConfigToDoHashes(DoHashes, Conf + "::SHA512", Hashes::SHA512SUM); } /*}}}*/ // FTWScanner::FTWScanner - Constructor /*{{{*/ // --------------------------------------------------------------------- /* */ -FTWScanner::FTWScanner(string const &Arch): Arch(Arch) +FTWScanner::FTWScanner(FileFd * const GivenOutput, string const &Arch): Arch(Arch), 
DoHashes(~0) { + if (GivenOutput == NULL) + { + Output = new FileFd; + Output->OpenDescriptor(STDOUT_FILENO, FileFd::WriteOnly, false); + } + else + Output = GivenOutput; ErrorPrinted = false; NoLinkAct = !_config->FindB("APT::FTPArchive::DeLinkAct",true); - - DoMD5 = _config->FindB("APT::FTPArchive::MD5",true); - DoSHA1 = _config->FindB("APT::FTPArchive::SHA1",true); - DoSHA256 = _config->FindB("APT::FTPArchive::SHA256",true); - DoSHA512 = _config->FindB("APT::FTPArchive::SHA512",true); + ConfigToDoHashes(DoHashes, "APT::FTPArchive"); } /*}}}*/ // FTWScanner::Scanner - FTW Scanner /*{{{*/ @@ -318,19 +324,15 @@ bool FTWScanner::Delink(string &FileName,const char *OriginalPath, // PackagesWriter::PackagesWriter - Constructor /*{{{*/ // --------------------------------------------------------------------- /* */ -PackagesWriter::PackagesWriter(string const &DB,string const &Overrides,string const &ExtOverrides, +PackagesWriter::PackagesWriter(FileFd * const GivenOutput, string const &DB,string const &Overrides,string const &ExtOverrides, string const &Arch) : - FTWScanner(Arch), Db(DB), Stats(Db.Stats), TransWriter(NULL) + FTWScanner(GivenOutput, Arch), Db(DB), Stats(Db.Stats), TransWriter(NULL) { - Output = stdout; SetExts(".deb .udeb"); DeLinkLimit = 0; // Process the command line options - DoMD5 = _config->FindB("APT::FTPArchive::Packages::MD5",DoMD5); - DoSHA1 = _config->FindB("APT::FTPArchive::Packages::SHA1",DoSHA1); - DoSHA256 = _config->FindB("APT::FTPArchive::Packages::SHA256",DoSHA256); - DoSHA512 = _config->FindB("APT::FTPArchive::Packages::SHA512",DoSHA512); + ConfigToDoHashes(DoHashes, "APT::FTPArchive::Packages"); DoAlwaysStat = _config->FindB("APT::FTPArchive::AlwaysStat", false); DoContents = _config->FindB("APT::FTPArchive::Contents",true); NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false); @@ -385,10 +387,14 @@ bool FTWScanner::SetExts(string const &Vals) bool PackagesWriter::DoPackage(string FileName) { // Pull all the data we need form the DB - if (Db.GetFileInfo(FileName, true, DoContents, true, DoMD5, DoSHA1, DoSHA256, DoSHA512, DoAlwaysStat) - == false) + if (Db.GetFileInfo(FileName, + true, /* DoControl */ + DoContents, + true, /* GenContentsOnly */ + false, /* DoSource */ + DoHashes, DoAlwaysStat) == false) { - return false; + return false; } unsigned long long FileSize = Db.GetFileSize(); @@ -426,9 +432,6 @@ bool PackagesWriter::DoPackage(string FileName) OverItem->Priority = Tags.FindS("Priority"); } - char Size[40]; - sprintf(Size,"%llu", (unsigned long long) FileSize); - // Strip the DirStrip prefix from the FileName and add the PathPrefix string NewFileName; if (DirStrip.empty() == false && @@ -450,30 +453,32 @@ bool PackagesWriter::DoPackage(string FileName) } // This lists all the changes to the fields we are going to make. 
- // (7 hardcoded + maintainer + suggests + end marker) - TFRewriteData Changes[6+2+OverItem->FieldOverride.size()+1+1]; - - unsigned int End = 0; - SetTFRewriteData(Changes[End++], "Size", Size); - if (DoMD5 == true) - SetTFRewriteData(Changes[End++], "MD5sum", Db.MD5Res.c_str()); - if (DoSHA1 == true) - SetTFRewriteData(Changes[End++], "SHA1", Db.SHA1Res.c_str()); - if (DoSHA256 == true) - SetTFRewriteData(Changes[End++], "SHA256", Db.SHA256Res.c_str()); - if (DoSHA512 == true) - SetTFRewriteData(Changes[End++], "SHA512", Db.SHA512Res.c_str()); - SetTFRewriteData(Changes[End++], "Filename", NewFileName.c_str()); - SetTFRewriteData(Changes[End++], "Priority", OverItem->Priority.c_str()); - SetTFRewriteData(Changes[End++], "Status", 0); - SetTFRewriteData(Changes[End++], "Optional", 0); + std::vector<pkgTagSection::Tag> Changes; + + std::string Size; + strprintf(Size, "%llu", (unsigned long long) FileSize); + Changes.push_back(pkgTagSection::Tag::Rewrite("Size", Size)); + + for (HashStringList::const_iterator hs = Db.HashesList.begin(); hs != Db.HashesList.end(); ++hs) + { + if (hs->HashType() == "MD5Sum") + Changes.push_back(pkgTagSection::Tag::Rewrite("MD5sum", hs->HashValue())); + else if (hs->HashType() == "Checksum-FileSize") + continue; + else + Changes.push_back(pkgTagSection::Tag::Rewrite(hs->HashType(), hs->HashValue())); + } + Changes.push_back(pkgTagSection::Tag::Rewrite("Filename", NewFileName)); + Changes.push_back(pkgTagSection::Tag::Rewrite("Priority", OverItem->Priority)); + Changes.push_back(pkgTagSection::Tag::Remove("Status")); + Changes.push_back(pkgTagSection::Tag::Remove("Optional")); string DescriptionMd5; if (LongDescription == false) { MD5Summation descmd5; descmd5.Add(desc.c_str()); DescriptionMd5 = descmd5.Result().Value(); - SetTFRewriteData(Changes[End++], "Description-md5", DescriptionMd5.c_str()); + Changes.push_back(pkgTagSection::Tag::Rewrite("Description-md5", DescriptionMd5)); if (TransWriter != NULL) TransWriter->DoPackage(Package, desc, DescriptionMd5); } @@ -488,12 +493,12 @@ bool PackagesWriter::DoPackage(string FileName) NewLine(1); ioprintf(c1out, _(" %s maintainer is %s not %s\n"), Package.c_str(), Tags.FindS("Maintainer").c_str(), OverItem->OldMaint.c_str()); - } + } } - + if (NewMaint.empty() == false) - SetTFRewriteData(Changes[End++], "Maintainer", NewMaint.c_str()); - + Changes.push_back(pkgTagSection::Tag::Rewrite("Maintainer", NewMaint)); + /* Get rid of the Optional tag. This is an ugly, ugly, ugly hack that dpkg-scanpackages does. Well sort of. dpkg-scanpackages just does renaming but dpkg does this append bit. So we do the append bit, at least that way the @@ -504,19 +509,17 @@ bool PackagesWriter::DoPackage(string FileName) { if (Tags.FindS("Suggests").empty() == false) OptionalStr = Tags.FindS("Suggests") + ", " + OptionalStr; - SetTFRewriteData(Changes[End++], "Suggests", OptionalStr.c_str()); + Changes.push_back(pkgTagSection::Tag::Rewrite("Suggests", OptionalStr)); } - for (map<string,string>::const_iterator I = OverItem->FieldOverride.begin(); + for (map<string,string>::const_iterator I = OverItem->FieldOverride.begin(); I != OverItem->FieldOverride.end(); ++I) - SetTFRewriteData(Changes[End++],I->first.c_str(),I->second.c_str()); - - SetTFRewriteData(Changes[End++], 0, 0); + Changes.push_back(pkgTagSection::Tag::Rewrite(I->first, I->second)); // Rewrite and store the fields. 
- if (TFRewrite(Output,Tags,TFRewritePackageOrder,Changes) == false) + if (Tags.Write(*Output, TFRewritePackageOrder, Changes) == false || + Output->Write("\n", 1) == false) return false; - fprintf(Output,"\n"); return Db.Finish(); } @@ -526,14 +529,13 @@ bool PackagesWriter::DoPackage(string FileName) // --------------------------------------------------------------------- /* Create a Translation-Master file for this Packages file */ TranslationWriter::TranslationWriter(string const &File, string const &TransCompress, - mode_t const &Permissions) : Output(NULL), - RefCounter(0) + mode_t const &Permissions) : RefCounter(0) { if (File.empty() == true) return; Comp = new MultiCompress(File, TransCompress, Permissions); - Output = Comp->Input; + Output = &Comp->Input; } /*}}}*/ // TranslationWriter::DoPackage - Process a single package /*{{{*/ @@ -552,8 +554,10 @@ bool TranslationWriter::DoPackage(string const &Pkg, string const &Desc, if (Included.find(Record) != Included.end()) return true; - fprintf(Output, "Package: %s\nDescription-md5: %s\nDescription-en: %s\n", + std::string out; + strprintf(out, "Package: %s\nDescription-md5: %s\nDescription-en: %s\n", Pkg.c_str(), MD5.c_str(), Desc.c_str()); + Output->Write(out.c_str(), out.length()); Included.insert(Record); return true; @@ -574,21 +578,17 @@ TranslationWriter::~TranslationWriter() // SourcesWriter::SourcesWriter - Constructor /*{{{*/ // --------------------------------------------------------------------- /* */ -SourcesWriter::SourcesWriter(string const &DB, string const &BOverrides,string const &SOverrides, +SourcesWriter::SourcesWriter(FileFd * const GivenOutput, string const &DB, string const &BOverrides,string const &SOverrides, string const &ExtOverrides) : - Db(DB), Stats(Db.Stats) + FTWScanner(GivenOutput), Db(DB), Stats(Db.Stats) { - Output = stdout; AddPattern("*.dsc"); DeLinkLimit = 0; Buffer = 0; BufSize = 0; // Process the command line options - DoMD5 = _config->FindB("APT::FTPArchive::Sources::MD5",DoMD5); - DoSHA1 = _config->FindB("APT::FTPArchive::Sources::SHA1",DoSHA1); - DoSHA256 = _config->FindB("APT::FTPArchive::Sources::SHA256",DoSHA256); - DoSHA512 = _config->FindB("APT::FTPArchive::Sources::SHA512",DoSHA512); + ConfigToDoHashes(DoHashes, "APT::FTPArchive::Sources"); NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false); DoAlwaysStat = _config->FindB("APT::FTPArchive::AlwaysStat", false); @@ -610,63 +610,41 @@ SourcesWriter::SourcesWriter(string const &DB, string const &BOverrides,string c } /*}}}*/ // SourcesWriter::DoPackage - Process a single package /*{{{*/ -// --------------------------------------------------------------------- -/* */ +static std::string getDscHash(unsigned int const DoHashes, + Hashes::SupportedHashes const DoIt, pkgTagSection &Tags, char const * const FieldName, + HashString const * const Hash, unsigned long long Size, std::string FileName) +{ + if ((DoHashes & DoIt) != DoIt || Tags.Exists(FieldName) == false || Hash == NULL) + return ""; + std::ostringstream out; + out << "\n " << Hash->HashValue() << " " << Size << " " << FileName + << "\n " << Tags.FindS(FieldName); + return out.str(); +} bool SourcesWriter::DoPackage(string FileName) { - // Open the archive - FileFd F; - if (OpenMaybeClearSignedFile(FileName, F) == false) - return false; - - unsigned long long const FSize = F.FileSize(); - //FIXME: do we really need to enforce a maximum size of the dsc file? 
- if (FSize > 128*1024) - return _error->Error("DSC file '%s' is too large!",FileName.c_str()); - - if (BufSize < FSize + 2) + // Pull all the data we need form the DB + if (Db.GetFileInfo(FileName, + false, /* DoControl */ + false, /* DoContents */ + false, /* GenContentsOnly */ + true, /* DoSource */ + DoHashes, DoAlwaysStat) == false) { - BufSize = FSize + 2; - Buffer = (char *)realloc(Buffer , BufSize); - } - - if (F.Read(Buffer, FSize) == false) return false; - - // Stat the file for later (F might be clearsigned, so not F.FileSize()) - struct stat St; - if (stat(FileName.c_str(), &St) != 0) - return _error->Errno("fstat","Failed to stat %s",FileName.c_str()); - - // Hash the file - char *Start = Buffer; - char *BlkEnd = Buffer + FSize; - - Hashes DscHashes; - if (FSize == (unsigned long long) St.st_size) - { - if (DoMD5 == true) - DscHashes.MD5.Add((unsigned char *)Start,BlkEnd - Start); - if (DoSHA1 == true) - DscHashes.SHA1.Add((unsigned char *)Start,BlkEnd - Start); - if (DoSHA256 == true) - DscHashes.SHA256.Add((unsigned char *)Start,BlkEnd - Start); - if (DoSHA512 == true) - DscHashes.SHA512.Add((unsigned char *)Start,BlkEnd - Start); - } - else - { - FileFd DscFile(FileName, FileFd::ReadOnly); - DscHashes.AddFD(DscFile, St.st_size, DoMD5, DoSHA1, DoSHA256, DoSHA512); } - // Add extra \n to the end, just in case (as in clearsigned they are missing) - *BlkEnd++ = '\n'; - *BlkEnd++ = '\n'; + // we need to perform a "write" here (this is what finish is doing) + // because the call to Db.GetFileInfo() in the loop will change + // the "db cursor" + Db.Finish(); pkgTagSection Tags; - if (Tags.Scan(Start,BlkEnd - Start) == false || Tags.Exists("Source") == false) + if (Tags.Scan(Db.Dsc.Data.c_str(), Db.Dsc.Data.length()) == false) return _error->Error("Could not find a record in the DSC '%s'",FileName.c_str()); + + if (Tags.Exists("Source") == false) + return _error->Error("Could not find a Source entry in the DSC '%s'",FileName.c_str()); Tags.Trim(); // Lookup the overide information, finding first the best priority. 
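The hunk above drops the hand-rolled .dsc reading and the four per-digest booleans in favour of a single CacheDB lookup plus a DoHashes bit-field. As a rough illustration of that flag style — a minimal sketch against the apt-pkg calls visible in this patch, not part of the commit; the helper name PrintRequestedHashes and the standalone main() are invented for the example — requested digests are selected once and then read back through a HashStringList:

#include <apt-pkg/fileutl.h>
#include <apt-pkg/hashes.h>
#include <iostream>

// Compute only the digests requested via the DoHashes bit-field and walk the
// resulting HashStringList, the same pattern the patched writers use instead
// of the old per-algorithm MD5Res/SHA1Res/... members.
static void PrintRequestedHashes(FileFd &fd, unsigned int const DoHashes)
{
   Hashes hs(DoHashes);                  // restrict hashing to the wanted digests
   hs.AddFD(fd);                         // hash the whole file descriptor
   HashStringList const list = hs.GetHashStringList();
   for (HashStringList::const_iterator h = list.begin(); h != list.end(); ++h)
   {
      if (h->HashType() == "Checksum-FileSize")
         continue;                       // size pseudo-entry, skipped by the writers too
      std::cout << h->HashType() << ": " << h->HashValue() << std::endl;
   }
}

int main(int, char **argv)
{
   // e.g. "everything except MD5", checked with the same flag arithmetic as the patch:
   unsigned int const DoHashes = Hashes::SHA1SUM | Hashes::SHA256SUM | Hashes::SHA512SUM;
   FileFd fd(argv[1], FileFd::ReadOnly);
   if ((DoHashes & Hashes::MD5SUM) != Hashes::MD5SUM)
      PrintRequestedHashes(fd, DoHashes);
   return 0;
}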
@@ -714,6 +692,10 @@ bool SourcesWriter::DoPackage(string FileName) OverItem = auto_ptr<Override::Item>(new Override::Item); } + struct stat St; + if (stat(FileName.c_str(), &St) != 0) + return _error->Errno("fstat","Failed to stat %s",FileName.c_str()); + auto_ptr<Override::Item> SOverItem(SOver.GetItem(Tags.FindS("Source"))); // const auto_ptr<Override::Item> autoSOverItem(SOverItem); if (SOverItem.get() == 0) @@ -727,29 +709,13 @@ bool SourcesWriter::DoPackage(string FileName) *SOverItem = *OverItem; } } - + // Add the dsc to the files hash list string const strippedName = flNotDir(FileName); - std::ostringstream ostreamFiles; - if (DoMD5 == true && Tags.Exists("Files")) - ostreamFiles << "\n " << string(DscHashes.MD5.Result()) << " " << St.st_size << " " - << strippedName << "\n " << Tags.FindS("Files"); - string const Files = ostreamFiles.str(); - - std::ostringstream ostreamSha1; - if (DoSHA1 == true && Tags.Exists("Checksums-Sha1")) - ostreamSha1 << "\n " << string(DscHashes.SHA1.Result()) << " " << St.st_size << " " - << strippedName << "\n " << Tags.FindS("Checksums-Sha1"); - - std::ostringstream ostreamSha256; - if (DoSHA256 == true && Tags.Exists("Checksums-Sha256")) - ostreamSha256 << "\n " << string(DscHashes.SHA256.Result()) << " " << St.st_size << " " - << strippedName << "\n " << Tags.FindS("Checksums-Sha256"); - - std::ostringstream ostreamSha512; - if (DoSHA512 == true && Tags.Exists("Checksums-Sha512")) - ostreamSha512 << "\n " << string(DscHashes.SHA512.Result()) << " " << St.st_size << " " - << strippedName << "\n " << Tags.FindS("Checksums-Sha512"); + std::string const Files = getDscHash(DoHashes, Hashes::MD5SUM, Tags, "Files", Db.HashesList.find("MD5Sum"), St.st_size, strippedName); + std::string ChecksumsSha1 = getDscHash(DoHashes, Hashes::SHA1SUM, Tags, "Checksums-Sha1", Db.HashesList.find("SHA1"), St.st_size, strippedName); + std::string ChecksumsSha256 = getDscHash(DoHashes, Hashes::SHA256SUM, Tags, "Checksums-Sha256", Db.HashesList.find("SHA256"), St.st_size, strippedName); + std::string ChecksumsSha512 = getDscHash(DoHashes, Hashes::SHA512SUM, Tags, "Checksums-Sha512", Db.HashesList.find("SHA512"), St.st_size, strippedName); // Strip the DirStrip prefix from the FileName and add the PathPrefix string NewFileName; @@ -771,7 +737,7 @@ bool SourcesWriter::DoPackage(string FileName) char *RealPath = NULL; for (;isspace(*C); C++); while (*C != 0) - { + { // Parse each of the elements if (ParseQuoteWord(C,ParseJnk) == false || ParseQuoteWord(C,ParseJnk) == false || @@ -781,27 +747,56 @@ bool SourcesWriter::DoPackage(string FileName) string OriginalPath = Directory + ParseJnk; // Add missing hashes to source files - if ((DoSHA1 == true && !Tags.Exists("Checksums-Sha1")) || - (DoSHA256 == true && !Tags.Exists("Checksums-Sha256")) || - (DoSHA512 == true && !Tags.Exists("Checksums-Sha512"))) + if (((DoHashes & Hashes::SHA1SUM) == Hashes::SHA1SUM && !Tags.Exists("Checksums-Sha1")) || + ((DoHashes & Hashes::SHA256SUM) == Hashes::SHA256SUM && !Tags.Exists("Checksums-Sha256")) || + ((DoHashes & Hashes::SHA512SUM) == Hashes::SHA512SUM && !Tags.Exists("Checksums-Sha512"))) { - if (Db.GetFileInfo(OriginalPath, false, false, false, DoMD5, DoSHA1, DoSHA256, DoSHA512, DoAlwaysStat) - == false) + if (Db.GetFileInfo(OriginalPath, + false, /* DoControl */ + false, /* DoContents */ + false, /* GenContentsOnly */ + false, /* DoSource */ + DoHashes, + DoAlwaysStat) == false) { return _error->Error("Error getting file info"); } - if (DoSHA1 == true && !Tags.Exists("Checksums-Sha1")) - 
ostreamSha1 << "\n " << string(Db.SHA1Res) << " " - << Db.GetFileSize() << " " << ParseJnk; - - if (DoSHA256 == true && !Tags.Exists("Checksums-Sha256")) - ostreamSha256 << "\n " << string(Db.SHA256Res) << " " - << Db.GetFileSize() << " " << ParseJnk; + for (HashStringList::const_iterator hs = Db.HashesList.begin(); hs != Db.HashesList.end(); ++hs) + { + if (hs->HashType() == "MD5Sum" || hs->HashType() == "Checksum-FileSize") + continue; + char const * fieldname; + std::string * out; + if (hs->HashType() == "SHA1") + { + fieldname = "Checksums-Sha1"; + out = &ChecksumsSha1; + } + else if (hs->HashType() == "SHA256") + { + fieldname = "Checksums-Sha256"; + out = &ChecksumsSha256; + } + else if (hs->HashType() == "SHA512") + { + fieldname = "Checksums-Sha512"; + out = &ChecksumsSha512; + } + else + { + _error->Warning("Ignoring unknown Checksumtype %s in SourcesWriter::DoPackages", hs->HashType().c_str()); + continue; + } + if (Tags.Exists(fieldname) == true) + continue; + std::ostringstream streamout; + streamout << "\n " << hs->HashValue() << " " << Db.GetFileSize() << " " << ParseJnk; + out->append(streamout.str()); + } - if (DoSHA512 == true && !Tags.Exists("Checksums-Sha512")) - ostreamSha512 << "\n " << string(Db.SHA512Res) << " " - << Db.GetFileSize() << " " << ParseJnk; + // write back the GetFileInfo() stats data + Db.Finish(); } // Perform the delinking operation @@ -821,70 +816,63 @@ bool SourcesWriter::DoPackage(string FileName) if (Directory.length() > 2) Directory.erase(Directory.end()-1); - string const ChecksumsSha1 = ostreamSha1.str(); - string const ChecksumsSha256 = ostreamSha256.str(); - string const ChecksumsSha512 = ostreamSha512.str(); - // This lists all the changes to the fields we are going to make. // (5 hardcoded + checksums + maintainer + end marker) - TFRewriteData Changes[5+2+1+SOverItem->FieldOverride.size()+1]; + std::vector<pkgTagSection::Tag> Changes; - unsigned int End = 0; - SetTFRewriteData(Changes[End++],"Source",Package.c_str(),"Package"); + Changes.push_back(pkgTagSection::Tag::Remove("Source")); + Changes.push_back(pkgTagSection::Tag::Rewrite("Package", Package)); if (Files.empty() == false) - SetTFRewriteData(Changes[End++],"Files",Files.c_str()); + Changes.push_back(pkgTagSection::Tag::Rewrite("Files", Files)); if (ChecksumsSha1.empty() == false) - SetTFRewriteData(Changes[End++],"Checksums-Sha1",ChecksumsSha1.c_str()); + Changes.push_back(pkgTagSection::Tag::Rewrite("Checksums-Sha1", ChecksumsSha1)); if (ChecksumsSha256.empty() == false) - SetTFRewriteData(Changes[End++],"Checksums-Sha256",ChecksumsSha256.c_str()); + Changes.push_back(pkgTagSection::Tag::Rewrite("Checksums-Sha256", ChecksumsSha256)); if (ChecksumsSha512.empty() == false) - SetTFRewriteData(Changes[End++],"Checksums-Sha512",ChecksumsSha512.c_str()); + Changes.push_back(pkgTagSection::Tag::Rewrite("Checksums-Sha512", ChecksumsSha512)); if (Directory != "./") - SetTFRewriteData(Changes[End++],"Directory",Directory.c_str()); - SetTFRewriteData(Changes[End++],"Priority",BestPrio.c_str()); - SetTFRewriteData(Changes[End++],"Status",0); + Changes.push_back(pkgTagSection::Tag::Rewrite("Directory", Directory)); + Changes.push_back(pkgTagSection::Tag::Rewrite("Priority", BestPrio)); + Changes.push_back(pkgTagSection::Tag::Remove("Status")); // Rewrite the maintainer field if necessary bool MaintFailed; - string NewMaint = OverItem->SwapMaint(Tags.FindS("Maintainer"),MaintFailed); + string NewMaint = OverItem->SwapMaint(Tags.FindS("Maintainer"), MaintFailed); if (MaintFailed == true) { if 
(NoOverride == false) { - NewLine(1); + NewLine(1); ioprintf(c1out, _(" %s maintainer is %s not %s\n"), Package.c_str(), Tags.FindS("Maintainer").c_str(), OverItem->OldMaint.c_str()); - } + } } if (NewMaint.empty() == false) - SetTFRewriteData(Changes[End++], "Maintainer", NewMaint.c_str()); - - for (map<string,string>::const_iterator I = SOverItem->FieldOverride.begin(); + Changes.push_back(pkgTagSection::Tag::Rewrite("Maintainer", NewMaint.c_str())); + + for (map<string,string>::const_iterator I = SOverItem->FieldOverride.begin(); I != SOverItem->FieldOverride.end(); ++I) - SetTFRewriteData(Changes[End++],I->first.c_str(),I->second.c_str()); + Changes.push_back(pkgTagSection::Tag::Rewrite(I->first, I->second)); - SetTFRewriteData(Changes[End++], 0, 0); - // Rewrite and store the fields. - if (TFRewrite(Output,Tags,TFRewriteSourceOrder,Changes) == false) + if (Tags.Write(*Output, TFRewriteSourceOrder, Changes) == false || + Output->Write("\n", 1) == false) return false; - fprintf(Output,"\n"); Stats.Packages++; - return Db.Finish(); + return true; } /*}}}*/ // ContentsWriter::ContentsWriter - Constructor /*{{{*/ // --------------------------------------------------------------------- /* */ -ContentsWriter::ContentsWriter(string const &DB, string const &Arch) : - FTWScanner(Arch), Db(DB), Stats(Db.Stats) +ContentsWriter::ContentsWriter(FileFd * const GivenOutput, string const &DB, string const &Arch) : + FTWScanner(GivenOutput, Arch), Db(DB), Stats(Db.Stats) { SetExts(".deb"); - Output = stdout; } /*}}}*/ // ContentsWriter::DoPackage - Process a single package /*{{{*/ @@ -893,7 +881,13 @@ ContentsWriter::ContentsWriter(string const &DB, string const &Arch) : determine what the package name is. */ bool ContentsWriter::DoPackage(string FileName, string Package) { - if (!Db.GetFileInfo(FileName, Package.empty(), true, false, false, false, false, false)) + if (!Db.GetFileInfo(FileName, + Package.empty(), /* DoControl */ + true, /* DoContents */ + false, /* GenContentsOnly */ + false, /* DoSource */ + 0, /* DoHashes */ + false /* checkMtime */)) { return false; } @@ -960,7 +954,7 @@ bool ContentsWriter::ReadFromPkgs(string const &PkgFile,string const &PkgCompres // ReleaseWriter::ReleaseWriter - Constructor /*{{{*/ // --------------------------------------------------------------------- /* */ -ReleaseWriter::ReleaseWriter(string const &/*DB*/) +ReleaseWriter::ReleaseWriter(FileFd * const GivenOutput, string const &/*DB*/) : FTWScanner(GivenOutput) { if (_config->FindB("APT::FTPArchive::Release::Default-Patterns", true) == true) { @@ -982,7 +976,6 @@ ReleaseWriter::ReleaseWriter(string const &/*DB*/) } AddPatterns(_config->FindVector("APT::FTPArchive::Release::Patterns")); - Output = stdout; time_t const now = time(NULL); setlocale(LC_TIME, "C"); @@ -1026,12 +1019,11 @@ ReleaseWriter::ReleaseWriter(string const &/*DB*/) if (Value == "") continue; - fprintf(Output, "%s: %s\n", (*I).first.c_str(), Value.c_str()); + std::string const out = I->first + ": " + Value + "\n"; + Output->Write(out.c_str(), out.length()); } - DoMD5 = _config->FindB("APT::FTPArchive::Release::MD5",DoMD5); - DoSHA1 = _config->FindB("APT::FTPArchive::Release::SHA1",DoSHA1); - DoSHA256 = _config->FindB("APT::FTPArchive::Release::SHA256",DoSHA256); + ConfigToDoHashes(DoHashes, "APT::FTPArchive::Release"); } /*}}}*/ // ReleaseWriter::DoPackage - Process a single package /*{{{*/ @@ -1064,16 +1056,9 @@ bool ReleaseWriter::DoPackage(string FileName) CheckSums[NewFileName].size = fd.Size(); - Hashes hs; - hs.AddFD(fd, 0, DoMD5, 
DoSHA1, DoSHA256, DoSHA512); - if (DoMD5 == true) - CheckSums[NewFileName].MD5 = hs.MD5.Result(); - if (DoSHA1 == true) - CheckSums[NewFileName].SHA1 = hs.SHA1.Result(); - if (DoSHA256 == true) - CheckSums[NewFileName].SHA256 = hs.SHA256.Result(); - if (DoSHA512 == true) - CheckSums[NewFileName].SHA512 = hs.SHA512.Result(); + Hashes hs(DoHashes); + hs.AddFD(fd); + CheckSums[NewFileName].Hashes = hs.GetHashStringList(); fd.Close(); return true; @@ -1082,54 +1067,35 @@ bool ReleaseWriter::DoPackage(string FileName) /*}}}*/ // ReleaseWriter::Finish - Output the checksums /*{{{*/ // --------------------------------------------------------------------- -void ReleaseWriter::Finish() +static void printChecksumTypeRecord(FileFd &Output, char const * const Type, map<string, ReleaseWriter::CheckSum> const &CheckSums) { - if (DoMD5 == true) - { - fprintf(Output, "MD5Sum:\n"); - for(map<string,struct CheckSum>::const_iterator I = CheckSums.begin(); - I != CheckSums.end(); ++I) - { - fprintf(Output, " %s %16llu %s\n", - (*I).second.MD5.c_str(), - (*I).second.size, - (*I).first.c_str()); - } - } - if (DoSHA1 == true) - { - fprintf(Output, "SHA1:\n"); - for(map<string,struct CheckSum>::const_iterator I = CheckSums.begin(); - I != CheckSums.end(); ++I) - { - fprintf(Output, " %s %16llu %s\n", - (*I).second.SHA1.c_str(), - (*I).second.size, - (*I).first.c_str()); - } - } - if (DoSHA256 == true) { - fprintf(Output, "SHA256:\n"); - for(map<string,struct CheckSum>::const_iterator I = CheckSums.begin(); - I != CheckSums.end(); ++I) - { - fprintf(Output, " %s %16llu %s\n", - (*I).second.SHA256.c_str(), - (*I).second.size, - (*I).first.c_str()); - } + std::string out; + strprintf(out, "%s:\n", Type); + Output.Write(out.c_str(), out.length()); } - - fprintf(Output, "SHA512:\n"); - for(map<string,struct CheckSum>::const_iterator I = CheckSums.begin(); - I != CheckSums.end(); - ++I) + for(map<string,ReleaseWriter::CheckSum>::const_iterator I = CheckSums.begin(); + I != CheckSums.end(); ++I) { - fprintf(Output, " %s %16llu %s\n", - (*I).second.SHA512.c_str(), - (*I).second.size, - (*I).first.c_str()); + HashString const * const hs = I->second.Hashes.find(Type); + if (hs == NULL) + continue; + std::string out; + strprintf(out, " %s %16llu %s\n", + hs->HashValue().c_str(), + (*I).second.size, + (*I).first.c_str()); + Output.Write(out.c_str(), out.length()); } - +} +void ReleaseWriter::Finish() +{ + if ((DoHashes & Hashes::MD5SUM) == Hashes::MD5SUM) + printChecksumTypeRecord(*Output, "MD5Sum", CheckSums); + if ((DoHashes & Hashes::SHA1SUM) == Hashes::SHA1SUM) + printChecksumTypeRecord(*Output, "SHA1", CheckSums); + if ((DoHashes & Hashes::SHA256SUM) == Hashes::SHA256SUM) + printChecksumTypeRecord(*Output, "SHA256", CheckSums); + if ((DoHashes & Hashes::SHA512SUM) == Hashes::SHA512SUM) + printChecksumTypeRecord(*Output, "SHA512", CheckSums); } diff --git a/ftparchive/writer.h b/ftparchive/writer.h index 86884dcfc..0ba60db5e 100644 --- a/ftparchive/writer.h +++ b/ftparchive/writer.h @@ -13,6 +13,8 @@ #ifndef WRITER_H #define WRITER_H +#include <apt-pkg/hashes.h> + #include <string> #include <stdio.h> #include <iostream> @@ -40,10 +42,10 @@ class FTWScanner string Arch; const char *OriginalPath; bool ErrorPrinted; - + // Stuff for the delinker bool NoLinkAct; - + static FTWScanner *Owner; static int ScannerFTW(const char *File,const struct stat *sb,int Flag); static int ScannerFile(const char *File, bool const &ReadLink); @@ -57,14 +59,12 @@ class FTWScanner { c1out << endl; ErrorPrinted = true; - } + } } - + public: - 
bool DoMD5; - bool DoSHA1; - bool DoSHA256; - bool DoSHA512; + FileFd *Output; + unsigned int DoHashes; unsigned long DeLinkLimit; string InternalPrefix; @@ -77,8 +77,8 @@ class FTWScanner void AddPattern(char const *Pattern) { Patterns.push_back(Pattern); }; void AddPatterns(std::vector<std::string> const &patterns) { Patterns.insert(Patterns.end(), patterns.begin(), patterns.end()); }; bool SetExts(string const &Vals); - - FTWScanner(string const &Arch = string()); + + FTWScanner(FileFd * const Output, string const &Arch = string()); virtual ~FTWScanner() {}; }; @@ -87,9 +87,9 @@ class MultiCompress; class TranslationWriter { MultiCompress *Comp; - FILE *Output; std::set<string> Included; unsigned short RefCounter; + FileFd *Output; public: void IncreaseRefCounter() { ++RefCounter; }; @@ -98,7 +98,7 @@ class TranslationWriter bool DoPackage(string const &Pkg, string const &Desc, string const &MD5); TranslationWriter(string const &File, string const &TransCompress, mode_t const &Permissions); - TranslationWriter() : Comp(NULL), Output(NULL), RefCounter(0) {}; + TranslationWriter() : Comp(NULL), RefCounter(0) {}; ~TranslationWriter(); }; @@ -106,7 +106,7 @@ class PackagesWriter : public FTWScanner { Override Over; CacheDB Db; - + public: // Some flags @@ -118,7 +118,6 @@ class PackagesWriter : public FTWScanner // General options string PathPrefix; string DirStrip; - FILE *Output; struct CacheDB::Stats &Stats; TranslationWriter *TransWriter; @@ -127,33 +126,34 @@ class PackagesWriter : public FTWScanner {return Over.ReadExtraOverride(File);}; virtual bool DoPackage(string FileName); - PackagesWriter(string const &DB,string const &Overrides,string const &ExtOverrides=string(), - string const &Arch=string()); + PackagesWriter(FileFd * const Output, string const &DB, + string const &Overrides, + string const &ExtOverrides = "", + string const &Arch = ""); virtual ~PackagesWriter() {}; }; class ContentsWriter : public FTWScanner { CacheDB Db; - + GenContents Gen; - + public: // General options - FILE *Output; struct CacheDB::Stats &Stats; string Prefix; - + bool DoPackage(string FileName,string Package); virtual bool DoPackage(string FileName) {return DoPackage(FileName,string());}; bool ReadFromPkgs(string const &PkgFile,string const &PkgCompress); - void Finish() {Gen.Print(Output);}; + void Finish() {Gen.Print(*Output);}; inline bool ReadyDB(string const &DB) {return Db.ReadyDB(DB);}; - - ContentsWriter(string const &DB, string const &Arch = string()); + + ContentsWriter(FileFd * const Output, string const &DB, string const &Arch = string()); virtual ~ContentsWriter() {}; }; @@ -164,21 +164,20 @@ class SourcesWriter : public FTWScanner Override SOver; char *Buffer; unsigned long long BufSize; - + public: bool NoOverride; bool DoAlwaysStat; - + // General options string PathPrefix; string DirStrip; - FILE *Output; - struct CacheDB::Stats Stats; + struct CacheDB::Stats &Stats; virtual bool DoPackage(string FileName); - SourcesWriter(string const &DB,string const &BOverrides,string const &SOverrides, + SourcesWriter(FileFd * const Output, string const &DB,string const &BOverrides,string const &SOverrides, string const &ExtOverrides=string()); virtual ~SourcesWriter() {free(Buffer);}; }; @@ -186,26 +185,22 @@ class SourcesWriter : public FTWScanner class ReleaseWriter : public FTWScanner { public: - ReleaseWriter(string const &DB); + ReleaseWriter(FileFd * const Output, string const &DB); virtual bool DoPackage(string FileName); void Finish(); - FILE *Output; // General options string 
PathPrefix;
    string DirStrip;
-protected:
    struct CheckSum
    {
-      string MD5;
-      string SHA1;
-      string SHA256;
-      string SHA512;
+      HashStringList Hashes;
       // Limited by FileFd::Size()
       unsigned long long size;
       ~CheckSum() {};
    };
+protected:
    map<string,struct CheckSum> CheckSums;
 };
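Across both writer.cc and writer.h the recurring change is the output path: the writers now hold a FileFd *Output and express stanza edits as pkgTagSection::Tag actions written via pkgTagSection::Write(), replacing the removed TFRewrite()/SetTFRewriteData() arrays and fprintf(Output, ...) calls. A minimal sketch of that pattern follows — it is not part of the commit; the function name RewriteStanza and the fields it touches are placeholders, and a NULL field order is used where the writers pass TFRewritePackageOrder or TFRewriteSourceOrder:

#include <apt-pkg/fileutl.h>
#include <apt-pkg/tagfile.h>
#include <string>
#include <vector>

// Rewrite one already-scanned stanza: queue Rewrite/Remove actions in a
// vector and let pkgTagSection::Write() emit the edited section into the
// FileFd, followed by the blank line that separates stanzas.
static bool RewriteStanza(pkgTagSection &Tags, FileFd &Output,
                          std::string const &NewFileName)
{
   std::vector<pkgTagSection::Tag> Changes;
   Changes.push_back(pkgTagSection::Tag::Rewrite("Filename", NewFileName));
   Changes.push_back(pkgTagSection::Tag::Remove("Status"));
   return Tags.Write(Output, NULL, Changes) &&
          Output.Write("\n", 1);
}

In the patch itself the section being rewritten comes from Tags.Scan() over the stanza data (for sources, over Db.Dsc.Data), and the Changes vector is filled from the override files before this final Write().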