author     Julian Andres Klode <juliank@ubuntu.com>           2018-04-09 15:32:09 +0200
committer  Julian Andres Klode <julian.klode@canonical.com>   2019-01-18 16:32:45 +0100
commit     03af77d4ca60a21f3dca1ab10ef2ba17ec2f96c9 (patch)
tree       7867cfa7a2ead40aeb5f9020d0e0f1b8c56719b1 /ftparchive
parent     e4ad2101c39020f18ccd8bb522eeb6b5dead0e5d (diff)
Import Debian version 1.0.1ubuntu2.18
apt (1.0.1ubuntu2.18) trusty; urgency=medium
* ExecFork: Use /proc/self/fd to determine which files to close
(Closes: #764204) (LP: #1332440).
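
The idea behind this fix: rather than blindly close()ing every descriptor up to the getdtablesize() limit after fork(), list /proc/self/fd and close only the descriptors that are actually open. A minimal sketch of the technique (illustrative helper names, not apt's actual ExecFork code; assumes Linux's /proc is mounted):

    #include <dirent.h>
    #include <unistd.h>
    #include <cstdlib>
    #include <set>

    // Close every inherited fd except stdin/stdout/stderr and KeepFds.
    static void CloseInheritedFds(std::set<int> const &KeepFds)
    {
       DIR *dir = opendir("/proc/self/fd");
       if (dir == nullptr)
          return; // caller should fall back to the close-everything loop
       int const listfd = dirfd(dir);
       for (struct dirent *ent = readdir(dir); ent != nullptr; ent = readdir(dir))
       {
          int const fd = atoi(ent->d_name); // "." and ".." parse to 0
          if (fd < 3 || fd == listfd || KeepFds.count(fd) != 0)
             continue;
          close(fd);
       }
       closedir(dir);
    }

On systems with a large fd limit this turns millions of pointless close() calls into a handful.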
apt (1.0.1ubuntu2.17) trusty-security; urgency=high
* SECURITY UPDATE: gpgv: Check for errors when splitting files (CVE-2016-1252)
Thanks to Jann Horn of Google Project Zero for reporting the issue
(LP: #1647467)
apt (1.0.1ubuntu2.15) trusty; urgency=medium
* Fixes failure to download the Package index file when using
mirror:// URL in sources.list and the archive fails to provide
a file. APT would try the next archive in the list for .deb
packages but did not retry when the index file failed to download.
(LP: #1625667)
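
The fix amounts to treating index files like .debs: on a failed download, fall through to the next mirror in the list instead of aborting. A rough sketch of that retry shape (Download() and the surrounding names are stand-ins, not apt's acquire internals):

    #include <string>
    #include <vector>

    bool Download(std::string const &Uri); // stand-in for the real fetcher

    // Try each mirror in turn until one provides the index file.
    bool FetchIndex(std::string const &Path, std::vector<std::string> const &Mirrors)
    {
       for (std::string const &M : Mirrors)
       {
          if (Download(M + "/" + Path))
             return true;
          // pre-fix behaviour: give up here after the first mirror failed
       }
       return false;
    }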
apt (1.0.1ubuntu2.14) trusty; urgency=medium
* When using the https transport mechanism, $no_proxy is ignored if apt is
getting its proxy information from $https_proxy (as opposed to
Acquire::https::Proxy somewhere in apt config). If the source of proxy
information is Acquire::https::Proxy set in apt.conf (or apt.conf.d),
then $no_proxy is honored. This patch makes the behavior similar for
both methods of setting the proxy. (LP: #1575877)
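
In other words, the $no_proxy check now runs regardless of where the proxy value came from. A simplified sketch of the intended behaviour (LookupConfig() and MatchesNoProxy() are hypothetical stand-ins for the https method's internals; a real matcher must also cope with a null $no_proxy):

    #include <cstdlib>
    #include <string>

    std::string LookupConfig(std::string const &Key);                   // stand-in
    bool MatchesNoProxy(std::string const &Host, char const *NoProxy);  // stand-in

    std::string GetHttpsProxy(std::string const &Host)
    {
       std::string Proxy = LookupConfig("Acquire::https::Proxy");
       if (Proxy.empty())
       {
          char const *Env = getenv("https_proxy");
          if (Env != nullptr)
             Proxy = Env;
       }
       // The fix: honour $no_proxy on the environment path too,
       // not only when the proxy came from apt.conf.
       if (MatchesNoProxy(Host, getenv("no_proxy")))
          return "";
       return Proxy;
    }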
apt (1.0.1ubuntu2.13) trusty; urgency=medium
* Recheck Pre-Depends satisfaction in SmartConfigure, to avoid unconfigured
Pre-Depends (which dpkg later fails on). Fixes upgrade failures of
systemd, util-linux, and other packages with Pre-Depends. Many thanks to
David Kalnischkies for figuring out the patch and Winfried Plappert for
testing! Patch taken from Debian git. (LP: #1560797)
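
The shape of the fix, reduced to a toy model (the real code is in apt-pkg/packagemanager.cc and also handles unpack states, loops and virtual packages; this sketch only shows the re-check idea and assumes every dependency is present in the map):

    #include <map>
    #include <string>
    #include <vector>

    struct Pkg { std::vector<std::string> PreDepends; bool Configured = false; };

    // Configure Name only after every Pre-Depends is itself configured,
    // re-checking at the moment of configuration rather than trusting an
    // earlier plan that intermediate steps may have invalidated.
    bool SmartConfigure(std::map<std::string, Pkg> &Cache, std::string const &Name)
    {
       Pkg &P = Cache.at(Name);
       if (P.Configured)
          return true;
       for (std::string const &Dep : P.PreDepends)
          if (SmartConfigure(Cache, Dep) == false)
             return false;
       P.Configured = true; // stands in for the dpkg --configure call
       return true;
    }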
apt (1.0.1ubuntu2.12) trusty; urgency=medium
[ Colin Watson ]
* Fix lzma write support to handle "try again" case (closes: #751688,
LP: #1553770).
[ David Kalnischkies ]
* Handle moved mmap after UniqFindTagWrite call (closes: #753941,
LP: #1445436).
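
Colin Watson's lzma fix above concerns the classic "try again" pattern of streaming compressors: lzma_code() may accept only part of the input whenever the output buffer fills, so a write path has to loop until all input is consumed. A self-contained sketch with liblzma (simplified relative to apt's actual fileutl code):

    #include <cstdint>
    #include <cstdio>
    #include <lzma.h>

    // Feed len bytes into an initialized encoder stream, draining the
    // output buffer and retrying until all input has been consumed.
    bool LzmaWrite(lzma_stream *strm, uint8_t const *buf, size_t len, FILE *out)
    {
       uint8_t obuf[4096];
       strm->next_in = buf;
       strm->avail_in = len;
       while (strm->avail_in != 0) // the buggy code stopped after one pass
       {
          strm->next_out = obuf;
          strm->avail_out = sizeof(obuf);
          if (lzma_code(strm, LZMA_RUN) != LZMA_OK)
             return false;
          size_t const n = sizeof(obuf) - strm->avail_out;
          if (n != 0 && fwrite(obuf, 1, n, out) != n)
             return false;
       }
       return true;
    }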
apt (1.0.1ubuntu2.11) trusty; urgency=medium
* apt-pkg/packagemanager.cc:
- fix incorrect configure ordering in the SmartConfigure step by skipping
packages that do not need immediate action. (LP: #1347721, #1497688)
apt (1.0.1ubuntu2.10) trusty; urgency=medium
* Fix regression from the previous upload by ensuring we're actually
testing for the right member before iterating on it (LP: #1480592)
apt (1.0.1ubuntu2.9) trusty; urgency=medium
* Fix regression in the Never-MarkAuto-Sections feature caused by the
previous auto-removal fix, with inspiration drawn from the patches
and conversation from http://bugs.debian.org/793360 (LP: #1479207)
apt (1.0.1ubuntu2.8) trusty-proposed; urgency=low
* fix crash for packages that have no section in their instVersion
(LP: #1449394)
apt (1.0.1ubuntu2.7) trusty-proposed; urgency=low
* fix auto-removal behavior (thanks to Adam Conrad)
LP: #1429041
apt (1.0.1ubuntu2.6) trusty-proposed; urgency=medium
* apt-pkg/deb/dpkgpm.cc:
- update string matching for dpkg I/O errors. (LP: #1363257)
- properly parse the dpkg status line so that the package name is properly set
and an apport report is created. Thanks to Anders Kaseorg for the patch.
(LP: #1353171)
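
For the status-line fix, the relevant input is dpkg's --status-fd protocol: lines of colon-separated fields such as "status: <package>: <state-or-error>: <message>". A rough illustration of tolerant field splitting (illustrative only; the real parser lives in apt-pkg/deb/dpkgpm.cc and caps the field count so colons in the message survive):

    #include <sstream>
    #include <string>
    #include <vector>

    // Split "status: libfoo1 : error : message" into trimmed fields so the
    // package name is recovered even with stray whitespace around it.
    std::vector<std::string> SplitStatusLine(std::string const &Line)
    {
       std::vector<std::string> Fields;
       std::istringstream ss(Line);
       std::string Field;
       while (std::getline(ss, Field, ':'))
       {
          size_t const b = Field.find_first_not_of(' ');
          size_t const e = Field.find_last_not_of(' ');
          Fields.push_back(b == std::string::npos ? std::string()
                                                  : Field.substr(b, e - b + 1));
       }
       return Fields;
    }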
apt (1.0.1ubuntu2.5) trusty-security; urgency=low
* SECURITY UPDATE:
- cmdline/apt-get.cc: fix insecure tempfile handling in
apt-get changelog (CVE-2014-7206). Thanks to Guillem Jover
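
The pattern underlying this class of tempfile fix: never open a predictable path in /tmp, where an attacker can pre-create a symlink; let mkstemp() create and open a fresh file atomically with mode 0600. A minimal sketch (hypothetical helper, not the literal patch):

    #include <cstdlib>
    #include <string>

    // Returns an fd to a freshly created private tempfile, or -1 on error.
    int OpenChangelogTempFile(std::string &Path)
    {
       char tmpl[] = "/tmp/apt-changelog-XXXXXX";
       int const fd = mkstemp(tmpl); // atomic create+open, mode 0600
       if (fd == -1)
          return -1;
       Path = tmpl; // caller must close(fd) and unlink(Path) when done
       return fd;
    }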
apt (1.0.1ubuntu2.4.1) trusty-security; urgency=low
* SECURITY UPDATE:
- fix potential buffer overflow, thanks to the
Google Security Team (CVE-2014-6273)
* Fix regression from the previous upload when file:/// sources
are used and those are on a different partition than
the apt state directory
* Fix regression when Dir::state::lists is set to a relative path
* Fix regression when cdrom: sources got rewritten by apt-cdrom add
apt (1.0.1ubuntu2.3) trusty-security; urgency=low
* SECURITY UPDATE:
- incorrect invalidating of unauthenticated data (CVE-2014-0488)
- incorrect verification of 304 reply (CVE-2014-0487)
- incorrect verification of Acquire::Gzip indexes (CVE-2014-0489)
Diffstat (limited to 'ftparchive')
-rw-r--r--   ftparchive/apt-ftparchive.cc    52
-rw-r--r--   ftparchive/cachedb.cc          363
-rw-r--r--   ftparchive/cachedb.h            63
-rw-r--r--   ftparchive/makefile              2
-rw-r--r--   ftparchive/sources.cc           47
-rw-r--r--   ftparchive/sources.h            32
-rw-r--r--   ftparchive/writer.cc           107
-rw-r--r--   ftparchive/writer.h              2
8 files changed, 199 insertions, 469 deletions
diff --git a/ftparchive/apt-ftparchive.cc b/ftparchive/apt-ftparchive.cc
index ba71ee225..692f19e25 100644
--- a/ftparchive/apt-ftparchive.cc
+++ b/ftparchive/apt-ftparchive.cc
@@ -62,7 +62,6 @@ struct PackageMap
    // Stuff for the Package File
    string PkgFile;
    string BinCacheDB;
-   string SrcCacheDB;
    string BinOverride;
    string ExtraOverride;
@@ -107,12 +106,6 @@ struct PackageMap
       inline bool operator() (const PackageMap &x,const PackageMap &y)
       {return x.BinCacheDB < y.BinCacheDB;};
    };
-
-   struct SrcDBCompare : public binary_function<PackageMap,PackageMap,bool>
-   {
-      inline bool operator() (const PackageMap &x,const PackageMap &y)
-      {return x.SrcCacheDB < y.SrcCacheDB;};
-   };
 
    void GetGeneral(Configuration &Setup,Configuration &Block);
    bool GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats);
@@ -239,14 +232,11 @@ bool PackageMap::GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats)
    gettimeofday(&NewTime,0);
    double Delta = NewTime.tv_sec - StartTime.tv_sec +
                   (NewTime.tv_usec - StartTime.tv_usec)/1000000.0;
-
+   
    c0out << Packages.Stats.Packages << " files " <<
 /*           SizeToStr(Packages.Stats.MD5Bytes) << "B/" << */
            SizeToStr(Packages.Stats.Bytes) << "B " <<
            TimeToStr((long)Delta) << endl;
-
-   if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true)
-      c0out << " Misses in Cache: " << Packages.Stats.Misses<< endl;
 
    Stats.Add(Packages.Stats);
    Stats.DeLinkBytes = Packages.Stats.DeLinkBytes;
@@ -273,7 +263,7 @@ bool PackageMap::GenSources(Configuration &Setup,struct CacheDB::Stats &Stats)
    SrcDone = true;
 
    // Create a package writer object.
-   SourcesWriter Sources(flCombine(CacheDir, SrcCacheDB),
+   SourcesWriter Sources(_config->Find("APT::FTPArchive::DB"),
                          flCombine(OverrideDir,BinOverride),
                          flCombine(OverrideDir,SrcOverride),
                          flCombine(OverrideDir,SrcExtraOverride));
@@ -333,9 +323,6 @@ bool PackageMap::GenSources(Configuration &Setup,struct CacheDB::Stats &Stats)
    c0out << Sources.Stats.Packages << " pkgs in " <<
            TimeToStr((long)Delta) << endl;
 
-   if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true)
-      c0out << " Misses in Cache: " << Sources.Stats.Misses << endl;
-
    Stats.Add(Sources.Stats);
    Stats.DeLinkBytes = Sources.Stats.DeLinkBytes;
@@ -448,9 +435,6 @@ bool PackageMap::GenContents(Configuration &Setup,
    double Delta = NewTime.tv_sec - StartTime.tv_sec +
                   (NewTime.tv_usec - StartTime.tv_usec)/1000000.0;
 
-   if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true)
-      c0out << " Misses in Cache: " << Contents.Stats.Misses<< endl;
-
    c0out << Contents.Stats.Packages << " files " <<
            SizeToStr(Contents.Stats.Bytes) << "B " <<
            TimeToStr((long)Delta) << endl;
@@ -481,8 +465,6 @@ static void LoadTree(vector<PackageMap> &PkgList,Configuration &Setup)
    string DContentsH = Setup.Find("TreeDefault::Contents::Header","");
    string DBCache = Setup.Find("TreeDefault::BinCacheDB",
                                "packages-$(ARCH).db");
-   string SrcDBCache = Setup.Find("TreeDefault::SrcCacheDB",
-                               "sources-$(SECTION).db");
    string DSources = Setup.Find("TreeDefault::Sources",
                                "$(DIST)/$(SECTION)/source/Sources");
    string DFLFile = Setup.Find("TreeDefault::FileList", "");
@@ -542,7 +524,6 @@ static void LoadTree(vector<PackageMap> &PkgList,Configuration &Setup)
            Itm.Tag = SubstVar("$(DIST)/$(SECTION)/source",Vars);
            Itm.FLFile = SubstVar(Block.Find("SourceFileList",DSFLFile.c_str()),Vars);
            Itm.SrcExtraOverride = SubstVar(Block.Find("SrcExtraOverride"),Vars);
-           Itm.SrcCacheDB = SubstVar(Block.Find("SrcCacheDB",SrcDBCache.c_str()),Vars);
         }
         else
         {
@@ -592,7 +573,6 @@ static void LoadBinDir(vector<PackageMap> &PkgList,Configuration &Setup)
       Itm.PkgFile = Block.Find("Packages");
       Itm.SrcFile = Block.Find("Sources");
       Itm.BinCacheDB = Block.Find("BinCacheDB");
-      Itm.SrcCacheDB = Block.Find("SrcCacheDB");
       Itm.BinOverride = Block.Find("BinOverride");
       Itm.ExtraOverride = Block.Find("ExtraOverride");
       Itm.SrcExtraOverride = Block.Find("SrcExtraOverride");
@@ -690,10 +670,6 @@ static bool SimpleGenPackages(CommandLine &CmdL)
    if (Packages.RecursiveScan(CmdL.FileList[1]) == false)
       return false;
 
-   // Give some stats if asked for
-   if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true)
-      c0out << " Misses in Cache: " << Packages.Stats.Misses<< endl;
-
    return true;
 }
									/*}}}*/
@@ -750,10 +726,6 @@ static bool SimpleGenSources(CommandLine &CmdL)
    if (Sources.RecursiveScan(CmdL.FileList[1]) == false)
       return false;
 
-   // Give some stats if asked for
-   if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true)
-      c0out << " Misses in Cache: " << Sources.Stats.Misses<< endl;
-
    return true;
 }
									/*}}}*/
@@ -805,7 +777,6 @@ static bool Generate(CommandLine &CmdL)
 
    // Sort by cache DB to improve IO locality.
    stable_sort(PkgList.begin(),PkgList.end(),PackageMap::DBCompare());
-   stable_sort(PkgList.begin(),PkgList.end(),PackageMap::SrcDBCompare());
 
    // Generate packages
    if (CmdL.FileSize() <= 2)
@@ -965,33 +936,20 @@ static bool Clean(CommandLine &CmdL)
 
    // Sort by cache DB to improve IO locality.
    stable_sort(PkgList.begin(),PkgList.end(),PackageMap::DBCompare());
-   stable_sort(PkgList.begin(),PkgList.end(),PackageMap::SrcDBCompare());
 
    string CacheDir = Setup.FindDir("Dir::CacheDir");
 
    for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); )
    {
-      if(I->BinCacheDB != "")
-         c0out << I->BinCacheDB << endl;
-      if(I->SrcCacheDB != "")
-         c0out << I->SrcCacheDB << endl;
+      c0out << I->BinCacheDB << endl;
       CacheDB DB(flCombine(CacheDir,I->BinCacheDB));
-      CacheDB DB_SRC(flCombine(CacheDir,I->SrcCacheDB));
       if (DB.Clean() == false)
         _error->DumpErrors();
-      if (DB_SRC.Clean() == false)
-        _error->DumpErrors();
 
      string CacheDB = I->BinCacheDB;
-     string SrcCacheDB = I->SrcCacheDB;
-     while(I != PkgList.end() &&
-           I->BinCacheDB == CacheDB &&
-           I->SrcCacheDB == SrcCacheDB)
-        ++I;
-
+     for (; I != PkgList.end() && I->BinCacheDB == CacheDB; ++I);
    }
-
-
+  
    return true;
 }
									/*}}}*/
diff --git a/ftparchive/cachedb.cc b/ftparchive/cachedb.cc
index c3a4adcbc..523c6b5fa 100644
--- a/ftparchive/cachedb.cc
+++ b/ftparchive/cachedb.cc
@@ -20,7 +20,6 @@
 #include <apt-pkg/configuration.h>
 #include <apt-pkg/fileutl.h>
 #include <apt-pkg/debfile.h>
-#include <apt-pkg/gpgv.h>
 
 #include <netinet/in.h>       // htonl, etc
 #include <ctype.h>
@@ -86,7 +85,7 @@ bool CacheDB::ReadyDB(std::string const &DB)
         return _error->Error(_("Unable to open DB file %s: %s"),DB.c_str(),
                              db_strerror(err));
      }
   }
-   
+
   DBFile = DB;
   DBLoaded = true;
   return true;
@@ -97,54 +96,14 @@ bool CacheDB::ReadyDB(std::string const &DB)
 /* */
 bool CacheDB::OpenFile()
 {
-   // always close existing file first
-   CloseFile();
-
-   // open a new file
-   Fd = new FileFd(FileName,FileFd::ReadOnly);
-   if (_error->PendingError() == true)
-   {
-      CloseFile();
-      return false;
-   }
-   return true;
-}
-									/*}}}*/
-// CacheDB::CloseFile - Close the file					/*{{{*/
-void CacheDB::CloseFile()
-{
-   if(Fd != NULL)
-   {
-      delete Fd;
-      Fd = NULL;
-   }
-}
-									/*}}}*/
-// CacheDB::OpenDebFile - Open a debfile				/*{{{*/
-bool CacheDB::OpenDebFile()
-{
-   // always close existing file first
-   CloseDebFile();
-
-   // first open the fd, then pass it to the debDebFile
-   if(OpenFile() == false)
-      return false;
-   DebFile = new debDebFile(*Fd);
-   if (_error->PendingError() == true)
-      return false;
-   return true;
-}
-									/*}}}*/
-// CacheDB::CloseDebFile - Close a debfile again			/*{{{*/
-void CacheDB::CloseDebFile()
-{
-   CloseFile();
-
-   if(DebFile != NULL)
-   {
-      delete DebFile;
-      DebFile = NULL;
-   }
+   Fd = new FileFd(FileName,FileFd::ReadOnly);
+   if (_error->PendingError() == true)
+   {
+      delete Fd;
+      Fd = NULL;
+      return false;
+   }
+   return true;
 }
									/*}}}*/
// CacheDB::GetFileStat - Get stats from the file			/*{{{*/
@@ -153,65 +112,29 @@ void CacheDB::CloseDebFile()
  * to look at the file, also get the mtime from the file. */
 bool CacheDB::GetFileStat(bool const &doStat)
 {
-   if ((CurStat.Flags & FlSize) == FlSize && doStat == false)
-      return true;
-
-   /* Get it from the file. */
-   if (OpenFile() == false)
-      return false;
-
-   // Stat the file
-   struct stat St;
-   if (fstat(Fd->Fd(),&St) != 0)
-   {
-      CloseFile();
-      return _error->Errno("fstat",
-                           _("Failed to stat %s"),FileName.c_str());
-   }
-   CurStat.FileSize = St.st_size;
-   CurStat.mtime = htonl(St.st_mtime);
-   CurStat.Flags |= FlSize;
-
-   return true;
-}
-									/*}}}*/
-// CacheDB::GetCurStatCompatOldFormat					/*{{{*/
-// ---------------------------------------------------------------------
-/* Read the old (32bit FileSize) StateStore format from disk */
-bool CacheDB::GetCurStatCompatOldFormat()
-{
-   InitQueryStats();
-   Data.data = &CurStatOldFormat;
-   Data.flags = DB_DBT_USERMEM;
-   Data.ulen = sizeof(CurStatOldFormat);
-   if (Get() == false)
-   {
-      CurStat.Flags = 0;
-   } else {
-      CurStat.Flags = CurStatOldFormat.Flags;
-      CurStat.mtime = CurStatOldFormat.mtime;
-      CurStat.FileSize = CurStatOldFormat.FileSize;
-      memcpy(CurStat.MD5, CurStatOldFormat.MD5, sizeof(CurStat.MD5));
-      memcpy(CurStat.SHA1, CurStatOldFormat.SHA1, sizeof(CurStat.SHA1));
-      memcpy(CurStat.SHA256, CurStatOldFormat.SHA256, sizeof(CurStat.SHA256));
-   }
-   return true;
-}
-									/*}}}*/
-// CacheDB::GetCurStatCompatOldFormat					/*{{{*/
-// ---------------------------------------------------------------------
-/* Read the new (64bit FileSize) StateStore format from disk */
-bool CacheDB::GetCurStatCompatNewFormat()
-{
-   InitQueryStats();
-   Data.data = &CurStat;
-   Data.flags = DB_DBT_USERMEM;
-   Data.ulen = sizeof(CurStat);
-   if (Get() == false)
-   {
-      CurStat.Flags = 0;
-   }
-   return true;
+   if ((CurStat.Flags & FlSize) == FlSize && doStat == false)
+   {
+      /* Already worked out the file size */
+   }
+   else
+   {
+      /* Get it from the file. */
+      if (Fd == NULL && OpenFile() == false)
+      {
+         return false;
+      }
+      // Stat the file
+      struct stat St;
+      if (fstat(Fd->Fd(),&St) != 0)
+      {
+         return _error->Errno("fstat",
+                              _("Failed to stat %s"),FileName.c_str());
+      }
+      CurStat.FileSize = St.st_size;
+      CurStat.mtime = htonl(St.st_mtime);
+      CurStat.Flags |= FlSize;
+   }
+   return true;
 }
									/*}}}*/
// CacheDB::GetCurStat - Set the CurStat variable.			/*{{{*/
@@ -222,110 +145,79 @@ bool CacheDB::GetCurStat()
 {
    memset(&CurStat,0,sizeof(CurStat));
 
-   if (DBLoaded)
-   {
-      // do a first query to just get the size of the data on disk
-      InitQueryStats();
-      Data.data = &CurStat;
-      Data.flags = DB_DBT_USERMEM;
-      Data.ulen = 0;
-      Get();
-
-      if (Data.size == 0)
-      {
-         // nothing needs to be done, we just have not data for this deb
-      }
-      // check if the record is written in the old format (32bit filesize)
-      else if(Data.size == sizeof(CurStatOldFormat))
-      {
-         GetCurStatCompatOldFormat();
-      }
-      else if(Data.size == sizeof(CurStat))
+   if (DBLoaded)
+   {
+      /* First see if there is anything about it
+         in the database */
+
+      /* Get the flags (and mtime) */
+      InitQuery("st");
+      // Ensure alignment of the returned structure
+      Data.data = &CurStat;
+      Data.ulen = sizeof(CurStat);
+      Data.flags = DB_DBT_USERMEM;
+      if (Get() == false)
      {
-         GetCurStatCompatNewFormat();
-      } else {
-         return _error->Error("Cache record size mismatch (%ul)", Data.size);
-      }
-
-      CurStat.Flags = ntohl(CurStat.Flags);
-      CurStat.FileSize = ntohl(CurStat.FileSize);
+         CurStat.Flags = 0;
+      }
+      CurStat.Flags = ntohl(CurStat.Flags);
+      CurStat.FileSize = ntohl(CurStat.FileSize);
   }
-   return true;
+
+   return true;
 }
									/*}}}*/
// CacheDB::GetFileInfo - Get all the info about the file		/*{{{*/
// ---------------------------------------------------------------------
-bool CacheDB::GetFileInfo(std::string const &FileName, bool const &DoControl,
-                          bool const &DoContents,
-                          bool const &GenContentsOnly,
-                          bool const &DoSource,
-                          bool const &DoMD5, bool const &DoSHA1,
-                          bool const &DoSHA256, bool const &DoSHA512,
+bool CacheDB::GetFileInfo(std::string const &FileName, bool const &DoControl, bool const &DoContents,
+				bool const &GenContentsOnly, bool const &DoMD5, bool const &DoSHA1,
+				bool const &DoSHA256, bool const &DoSHA512,
                           bool const &checkMtime)
 {
-   bool result = true;
-   this->FileName = FileName;
-
-   if (GetCurStat() == false)
-      return false;
-   OldStat = CurStat;
-
-   if (GetFileStat(checkMtime) == false)
-      return false;
-
-   /* if mtime changed, update CurStat from disk */
-   if (checkMtime == true && OldStat.mtime != CurStat.mtime)
-      CurStat.Flags = FlSize;
-
-   Stats.Bytes += CurStat.FileSize;
-   Stats.Packages++;
-
-   if ((DoControl && LoadControl() == false)
-       || (DoContents && LoadContents(GenContentsOnly) == false)
-       || (DoSource && LoadSource() == false)
-       || (DoMD5 && GetMD5(false) == false)
-       || (DoSHA1 && GetSHA1(false) == false)
-       || (DoSHA256 && GetSHA256(false) == false)
-       || (DoSHA512 && GetSHA512(false) == false) )
-   {
-      result = false;
-   }
-
-   return result;
-}
-									/*}}}*/
+   this->FileName = FileName;
 
-bool CacheDB::LoadSource()
-{
-   // Try to read the control information out of the DB.
-   if ((CurStat.Flags & FlSource) == FlSource
+   if (GetCurStat() == false)
    {
-      // Lookup the control information
-      InitQuerySource();
-      if (Get() == true && Dsc.TakeDsc(Data.data, Data.size) == true)
-      {
-            return true;
-      }
-      CurStat.Flags &= ~FlSource;
-   }
-   if (OpenFile() == false)
-      return false;
+      return false;
+   }
+   OldStat = CurStat;
 
-   Stats.Misses++;
-   if (Dsc.Read(FileName) == false)
-      return false;
-
-   if (Dsc.Data == 0)
-      return _error->Error(_("Failed to read .dsc"));
-
-   // Write back the control information
-   InitQuerySource();
-   if (Put(Dsc.Data, Dsc.Length) == true)
-      CurStat.Flags |= FlSource;
+   if (GetFileStat(checkMtime) == false)
+   {
+      delete Fd;
+      Fd = NULL;
+      return false;
+   }
+
+   /* if mtime changed, update CurStat from disk */
+   if (checkMtime == true && OldStat.mtime != CurStat.mtime)
+      CurStat.Flags = FlSize;
+
+   Stats.Bytes += CurStat.FileSize;
+   Stats.Packages++;
+
+   if ((DoControl && LoadControl() == false)
+	|| (DoContents && LoadContents(GenContentsOnly) == false)
+	|| (DoMD5 && GetMD5(false) == false)
+	|| (DoSHA1 && GetSHA1(false) == false)
+	|| (DoSHA256 && GetSHA256(false) == false)
+	|| (DoSHA512 && GetSHA512(false) == false)
+	)
+   {
+      delete Fd;
+      Fd = NULL;
+      delete DebFile;
+      DebFile = NULL;
+      return false;
+   }
+
+   delete Fd;
+   Fd = NULL;
+   delete DebFile;
+   DebFile = NULL;
 
    return true;
 }
-
+									/*}}}*/
// CacheDB::LoadControl - Load Control information			/*{{{*/
// ---------------------------------------------------------------------
/* */
@@ -335,14 +227,23 @@ bool CacheDB::LoadControl()
   if ((CurStat.Flags & FlControl) == FlControl)
   {
      // Lookup the control information
-      InitQueryControl();
+      InitQuery("cl");
      if (Get() == true && Control.TakeControl(Data.data,Data.size) == true)
            return true;
      CurStat.Flags &= ~FlControl;
   }
 
-   if(OpenDebFile() == false)
+   if (Fd == NULL && OpenFile() == false)
+   {
      return false;
+   }
+   // Create a deb instance to read the archive
+   if (DebFile == 0)
+   {
+      DebFile = new debDebFile(*Fd);
+      if (_error->PendingError() == true)
+         return false;
+   }
 
   Stats.Misses++;
   if (Control.Read(*DebFile) == false)
@@ -352,7 +253,7 @@ bool CacheDB::LoadControl()
      return _error->Error(_("Archive has no control record"));
 
   // Write back the control information
-   InitQueryControl();
+   InitQuery("cl");
   if (Put(Control.Control,Control.Length) == true)
      CurStat.Flags |= FlControl;
   return true;
@@ -370,7 +271,7 @@ bool CacheDB::LoadContents(bool const &GenOnly)
        return true;
 
     // Lookup the contents information
-      InitQueryContent();
+      InitQuery("cn");
     if (Get() == true)
     {
        if (Contents.TakeContents(Data.data,Data.size) == true)
@@ -380,15 +281,23 @@ bool CacheDB::LoadContents(bool const &GenOnly)
      CurStat.Flags &= ~FlContents;
   }
 
-   if(OpenDebFile() == false)
+   if (Fd == NULL && OpenFile() == false)
+   {
      return false;
+   }
+   // Create a deb instance to read the archive
+   if (DebFile == 0)
+   {
+      DebFile = new debDebFile(*Fd);
+      if (_error->PendingError() == true)
+         return false;
+   }
 
-   Stats.Misses++;
   if (Contents.Read(*DebFile) == false)
      return false;
 
   // Write back the control information
-   InitQueryContent();
+   InitQuery("cn");
   if (Put(Contents.Data,Contents.CurSize) == true)
      CurStat.Flags |= FlContents;
   return true;
@@ -438,13 +347,14 @@ bool CacheDB::GetMD5(bool const &GenOnly)
        MD5Res = bytes2hex(CurStat.MD5, sizeof(CurStat.MD5));
      return true;
-   } 
+   }
 
   Stats.MD5Bytes += CurStat.FileSize;
 
-   if (OpenFile() == false)
+   if (Fd == NULL && OpenFile() == false)
+   {
      return false;
-
+   }
   MD5Summation MD5;
   if (Fd->Seek(0) == false || MD5.AddFD(*Fd, CurStat.FileSize) == false)
      return false;
@@ -472,9 +382,10 @@ bool CacheDB::GetSHA1(bool const &GenOnly)
 
   Stats.SHA1Bytes += CurStat.FileSize;
 
-   if (OpenFile() == false)
+   if (Fd == NULL && OpenFile() == false)
+   {
      return false;
-
+   }
   SHA1Summation SHA1;
   if (Fd->Seek(0) == false || SHA1.AddFD(*Fd, CurStat.FileSize) == false)
      return false;
@@ -502,9 +413,10 @@ bool CacheDB::GetSHA256(bool const &GenOnly)
 
   Stats.SHA256Bytes += CurStat.FileSize;
 
-   if (OpenFile() == false)
+   if (Fd == NULL && OpenFile() == false)
+   {
      return false;
-
+   }
   SHA256Summation SHA256;
   if (Fd->Seek(0) == false || SHA256.AddFD(*Fd, CurStat.FileSize) == false)
      return false;
@@ -532,9 +444,10 @@ bool CacheDB::GetSHA512(bool const &GenOnly)
 
   Stats.SHA512Bytes += CurStat.FileSize;
 
-   if (OpenFile() == false)
+   if (Fd == NULL && OpenFile() == false)
+   {
      return false;
-
+   }
   SHA512Summation SHA512;
   if (Fd->Seek(0) == false || SHA512.AddFD(*Fd, CurStat.FileSize) == false)
      return false;
@@ -554,11 +467,11 @@ bool CacheDB::Finish()
   if (CurStat.Flags == OldStat.Flags &&
       CurStat.mtime == OldStat.mtime)
      return true;
-
+   
   // Write the stat information
   CurStat.Flags = htonl(CurStat.Flags);
   CurStat.FileSize = htonl(CurStat.FileSize);
-   InitQueryStats();
+   InitQuery("st");
   Put(&CurStat,sizeof(CurStat));
   CurStat.Flags = ntohl(CurStat.Flags);
   CurStat.FileSize = ntohl(CurStat.FileSize);
@@ -591,24 +504,16 @@ bool CacheDB::Clean()
      {
        if (stringcmp(Colon + 1, (char *)Key.data+Key.size,"st") == 0 ||
            stringcmp(Colon + 1, (char *)Key.data+Key.size,"cl") == 0 ||
-           stringcmp(Colon + 1, (char *)Key.data+Key.size,"cs") == 0 ||
           stringcmp(Colon + 1, (char *)Key.data+Key.size,"cn") == 0)
        {
-           std::string FileName = std::string((const char *)Key.data,Colon);
-           if (FileExists(FileName) == true) {
-              continue;
-           }
+           if (FileExists(std::string((const char *)Key.data,Colon)) == true)
+              continue;
        }
     }
+     Cursor->c_del(Cursor,0);
   }
 
-   int res = Dbp->compact(Dbp, NULL, NULL, NULL, NULL, DB_FREE_SPACE, NULL);
-   if (res < 0)
-      _error->Warning("compact failed with result %i", res);
-
-   if(_config->FindB("Debug::APT::FTPArchive::Clean", false) == true)
-      Dbp->stat_print(Dbp, 0);
-
+   Dbp->compact(Dbp, NULL, NULL, NULL, NULL, DB_FREE_SPACE, NULL);
   return true;
 }
diff --git a/ftparchive/cachedb.h b/ftparchive/cachedb.h
index 169a46b2c..49b9a0ef5 100644
--- a/ftparchive/cachedb.h
+++ b/ftparchive/cachedb.h
@@ -22,11 +22,9 @@
 #include <stdio.h>
 
 #include "contents.h"
-#include "sources.h"
 
 class FileFd;
 
-
 class CacheDB
 {
    protected:
@@ -41,7 +39,7 @@ class CacheDB
   std::string DBFile;
 
   // Generate a key for the DB of a given type
-   void _InitQuery(const char *Type)
+   inline void InitQuery(const char *Type)
   {
      memset(&Key,0,sizeof(Key));
      memset(&Data,0,sizeof(Data));
@@ -49,19 +47,6 @@ class CacheDB
      Key.size = snprintf(TmpKey,sizeof(TmpKey),"%s:%s",FileName.c_str(), Type);
   }
 
-   void InitQueryStats() {
-      _InitQuery("st");
-   }
-   void InitQuerySource() {
-      _InitQuery("cs");
-   }
-   void InitQueryControl() {
-      _InitQuery("cl");
-   }
-   void InitQueryContent() {
-      _InitQuery("cn");
-   }
-
   inline bool Get()
   {
      return Dbp->get(Dbp,0,&Key,&Data,0) == 0;
@@ -80,20 +65,10 @@ class CacheDB
      return true;
   }
 
   bool OpenFile();
-   void CloseFile();
-
-   bool OpenDebFile();
-   void CloseDebFile();
-
-   // GetCurStat needs some compat code, see lp #1274466)
-   bool GetCurStatCompatOldFormat();
-   bool GetCurStatCompatNewFormat();
-
-   bool GetCurStat();
   bool GetFileStat(bool const &doStat = false);
+   bool GetCurStat();
   bool LoadControl();
   bool LoadContents(bool const &GenOnly);
-   bool LoadSource();
   bool GetMD5(bool const &GenOnly);
   bool GetSHA1(bool const &GenOnly);
   bool GetSHA256(bool const &GenOnly);
@@ -102,23 +77,8 @@ class CacheDB
   // Stat info stored in the DB, Fixed types since it is written to disk.
   enum FlagList {FlControl = (1<<0),FlMD5=(1<<1),FlContents=(1<<2),
                  FlSize=(1<<3), FlSHA1=(1<<4), FlSHA256=(1<<5),
-                 FlSHA512=(1<<6), FlSource=(1<<7),
-   };
-
-   // the on-disk format changed (FileSize increased to 64bit) in
-   // commit 650faab0 which will lead to corruption with old caches
-   struct StatStoreOldFormat
-   {
-      uint32_t Flags;
-      uint32_t mtime;
-      uint32_t FileSize;
-      uint8_t MD5[16];
-      uint8_t SHA1[20];
-      uint8_t SHA256[32];
-   } CurStatOldFormat;
+                 FlSHA512=(1<<6)};
 
-   // WARNING: this struct is read/written to the DB so do not change the
-   // layout of the fields (see lp #1274466), only append to it
   struct StatStore
   {
      uint32_t Flags;
@@ -141,8 +101,6 @@ class CacheDB
   // Data collection helpers
   debDebFile::MemControlExtract Control;
   ContentsExtract Contents;
-   DscExtract Dsc;
-
   std::string MD5Res;
   std::string SHA1Res;
   std::string SHA256Res;
@@ -181,19 +139,8 @@ class CacheDB
   inline unsigned long long GetFileSize(void) {return CurStat.FileSize;}
 
   bool SetFile(std::string const &FileName,struct stat St,FileFd *Fd);
-
-   // terrible old overloaded interface
-   bool GetFileInfo(std::string const &FileName,
-                    bool const &DoControl,
-                    bool const &DoContents,
-                    bool const &GenContentsOnly,
-                    bool const &DoSource,
-                    bool const &DoMD5,
-                    bool const &DoSHA1,
-                    bool const &DoSHA256,
-                    bool const &DoSHA512,
-                    bool const &checkMtime = false);
-
+   bool GetFileInfo(std::string const &FileName, bool const &DoControl, bool const &DoContents, bool const &GenContentsOnly,
+			bool const &DoMD5, bool const &DoSHA1, bool const &DoSHA256, bool const &DoSHA512, bool const &checkMtime = false);
   bool Finish();
 
   bool Clean();
diff --git a/ftparchive/makefile b/ftparchive/makefile
index d1ffe182a..c53ecff72 100644
--- a/ftparchive/makefile
+++ b/ftparchive/makefile
@@ -12,7 +12,7 @@ PROGRAM=apt-ftparchive
 SLIBS = -lapt-pkg -lapt-inst $(BDBLIB) $(INTLLIBS)
 LIB_MAKES = apt-pkg/makefile apt-inst/makefile
 SOURCE = apt-ftparchive.cc cachedb.cc writer.cc contents.cc override.cc \
-         multicompress.cc sources.cc
+         multicompress.cc
 include $(PROGRAM_H)
 else
 PROGRAM=apt-ftparchive
diff --git a/ftparchive/sources.cc b/ftparchive/sources.cc
deleted file mode 100644
index d0878a70a..000000000
--- a/ftparchive/sources.cc
+++ /dev/null
@@ -1,47 +0,0 @@
-#include <string>
-#include <iostream>
-
-// for memcpy
-#include <cstring>
-
-#include <apt-pkg/error.h>
-#include <apt-pkg/gpgv.h>
-
-#include "sources.h"
-
-bool DscExtract::TakeDsc(const void *newData, unsigned long newSize)
-{
-   if(newSize > maxSize)
-     return _error->Error("DSC data is too large %lu!", newSize);
-
-   if (newSize == 0)
-   {
-      Length = 0;
-      return true;
-   }
-   memcpy(Data, newData, newSize);
-   Length = newSize;
-
-   return true;
-}
-
-bool DscExtract::Read(std::string FileName)
-{
-   FileFd F;
-   if (OpenMaybeClearSignedFile(FileName, F) == false)
-      return false;
-
-   unsigned long long const FSize = F.FileSize();
-   if(FSize > maxSize)
-     return _error->Error("DSC file '%s' is too large!",FileName.c_str());
-
-   if (F.Read(Data, FSize) == false)
-      return false;
-   Length = FSize;
-
-   IsClearSigned = (FileName != F.Name());
-
-   return true;
-}
-
-
diff --git a/ftparchive/sources.h b/ftparchive/sources.h
deleted file mode 100644
index 91e0b1376..000000000
--- a/ftparchive/sources.h
+++ /dev/null
@@ -1,32 +0,0 @@
-#ifndef SOURCES_H
-#define SOURCES_H
-
-#include <apt-pkg/tagfile.h>
-
-class DscExtract
-{
- public:
-   //FIXME: do we really need to enforce a maximum size of the dsc file?
-   static const int maxSize = 128*1024;
-
-   char *Data;
-   pkgTagSection Section;
-   unsigned long Length;
-   bool IsClearSigned;
-
-   bool TakeDsc(const void *Data, unsigned long Size);
-   bool Read(std::string FileName);
-
-   DscExtract() : Data(0), Length(0) {
-     Data = new char[maxSize];
-   };
-   ~DscExtract() {
-      if(Data != NULL) {
-         delete [] Data;
-         Data = NULL;
-      }
-   };
-};
-
-
-#endif
diff --git a/ftparchive/writer.cc b/ftparchive/writer.cc
index 7c1c9cc03..153c4fb42 100644
--- a/ftparchive/writer.cc
+++ b/ftparchive/writer.cc
@@ -385,14 +385,10 @@ bool FTWScanner::SetExts(string const &Vals)
 bool PackagesWriter::DoPackage(string FileName)
 {
   // Pull all the data we need form the DB
-   if (Db.GetFileInfo(FileName,
-                      true,   /* DoControl */
-                      DoContents,
-                      true,   /* GenContentsOnly */
-                      false,  /* DoSource */
-                      DoMD5, DoSHA1, DoSHA256, DoSHA512, DoAlwaysStat) == false)
+   if (Db.GetFileInfo(FileName, true, DoContents, true, DoMD5, DoSHA1, DoSHA256, DoSHA512, DoAlwaysStat)
+		  == false)
   {
-      return false;
+     return false;
   }
 
   unsigned long long FileSize = Db.GetFileSize();
@@ -618,36 +614,59 @@ SourcesWriter::SourcesWriter(string const &DB, string const &BOverrides,string c
 /* */
 bool SourcesWriter::DoPackage(string FileName)
 {
-   // Pull all the data we need form the DB
-   if (Db.GetFileInfo(FileName,
-                      false, /* DoControl */
-                      false, /* DoContents */
-                      false, /* GenContentsOnly */
-                      true,  /* DoSource */
-                      DoMD5, DoSHA1, DoSHA256, DoSHA512, DoAlwaysStat) == false)
-   {
+   // Open the archive
+   FileFd F;
+   if (OpenMaybeClearSignedFile(FileName, F) == false)
      return false;
+
+   unsigned long long const FSize = F.FileSize();
+   //FIXME: do we really need to enforce a maximum size of the dsc file?
+   if (FSize > 128*1024)
+      return _error->Error("DSC file '%s' is too large!",FileName.c_str());
+
+   if (BufSize < FSize + 2)
+   {
+      BufSize = FSize + 2;
+      Buffer = (char *)realloc(Buffer , BufSize);
   }
 
-   // we need to perform a "write" here (this is what finish is doing)
-   // because the call to Db.GetFileInfo() in the loop will change
-   // the "db cursor"
-   Db.Finish();
+   if (F.Read(Buffer, FSize) == false)
+      return false;
 
-   // read stuff
-   char *Start = Db.Dsc.Data;
-   char *BlkEnd = Db.Dsc.Data + Db.Dsc.Length;
+   // Stat the file for later (F might be clearsigned, so not F.FileSize())
+   struct stat St;
+   if (stat(FileName.c_str(), &St) != 0)
+      return _error->Errno("fstat","Failed to stat %s",FileName.c_str());
+
+   // Hash the file
+   char *Start = Buffer;
+   char *BlkEnd = Buffer + FSize;
+
+   Hashes DscHashes;
+   if (FSize == (unsigned long long) St.st_size)
+   {
+      if (DoMD5 == true)
+         DscHashes.MD5.Add((unsigned char *)Start,BlkEnd - Start);
+      if (DoSHA1 == true)
+         DscHashes.SHA1.Add((unsigned char *)Start,BlkEnd - Start);
+      if (DoSHA256 == true)
+         DscHashes.SHA256.Add((unsigned char *)Start,BlkEnd - Start);
+      if (DoSHA512 == true)
+         DscHashes.SHA512.Add((unsigned char *)Start,BlkEnd - Start);
+   }
+   else
+   {
+      FileFd DscFile(FileName, FileFd::ReadOnly);
+      DscHashes.AddFD(DscFile, St.st_size, DoMD5, DoSHA1, DoSHA256, DoSHA512);
+   }
 
   // Add extra \n to the end, just in case (as in clearsigned they are missing)
   *BlkEnd++ = '\n';
   *BlkEnd++ = '\n';
 
   pkgTagSection Tags;
-   if (Tags.Scan(Start,BlkEnd - Start) == false)
+   if (Tags.Scan(Start,BlkEnd - Start) == false || Tags.Exists("Source") == false)
      return _error->Error("Could not find a record in the DSC '%s'",FileName.c_str());
-
-   if (Tags.Exists("Source") == false)
-      return _error->Error("Could not find a Source entry in the DSC '%s'",FileName.c_str());
   Tags.Trim();
 
   // Lookup the overide information, finding first the best priority.
@@ -695,10 +714,6 @@ bool SourcesWriter::DoPackage(string FileName)
      OverItem = auto_ptr<Override::Item>(new Override::Item);
   }
 
-   struct stat St;
-   if (stat(FileName.c_str(), &St) != 0)
-      return _error->Errno("fstat","Failed to stat %s",FileName.c_str());
-
   auto_ptr<Override::Item> SOverItem(SOver.GetItem(Tags.FindS("Source")));
   // const auto_ptr<Override::Item> autoSOverItem(SOverItem);
   if (SOverItem.get() == 0)
@@ -717,23 +732,23 @@ bool SourcesWriter::DoPackage(string FileName)
   string const strippedName = flNotDir(FileName);
   std::ostringstream ostreamFiles;
   if (DoMD5 == true && Tags.Exists("Files"))
-      ostreamFiles << "\n " << Db.MD5Res.c_str() << " " << St.st_size << " "
+      ostreamFiles << "\n " << string(DscHashes.MD5.Result()) << " " << St.st_size << " "
                   << strippedName << "\n " << Tags.FindS("Files");
   string const Files = ostreamFiles.str();
 
   std::ostringstream ostreamSha1;
   if (DoSHA1 == true && Tags.Exists("Checksums-Sha1"))
-      ostreamSha1 << "\n " << string(Db.SHA1Res.c_str()) << " " << St.st_size << " "
+      ostreamSha1 << "\n " << string(DscHashes.SHA1.Result()) << " " << St.st_size << " "
                  << strippedName << "\n " << Tags.FindS("Checksums-Sha1");
 
   std::ostringstream ostreamSha256;
   if (DoSHA256 == true && Tags.Exists("Checksums-Sha256"))
-      ostreamSha256 << "\n " << string(Db.SHA256Res.c_str()) << " " << St.st_size << " "
+      ostreamSha256 << "\n " << string(DscHashes.SHA256.Result()) << " " << St.st_size << " "
                    << strippedName << "\n " << Tags.FindS("Checksums-Sha256");
 
   std::ostringstream ostreamSha512;
   if (DoSHA512 == true && Tags.Exists("Checksums-Sha512"))
-      ostreamSha512 << "\n " << string(Db.SHA512Res.c_str()) << " " << St.st_size << " "
+      ostreamSha512 << "\n " << string(DscHashes.SHA512.Result()) << " " << St.st_size << " "
                    << strippedName << "\n " << Tags.FindS("Checksums-Sha512");
 
   // Strip the DirStrip prefix from the FileName and add the PathPrefix
@@ -770,13 +785,8 @@ bool SourcesWriter::DoPackage(string FileName)
         (DoSHA256 == true && !Tags.Exists("Checksums-Sha256")) ||
         (DoSHA512 == true && !Tags.Exists("Checksums-Sha512")))
      {
-         if (Db.GetFileInfo(OriginalPath,
-                            false, /* DoControl */
-                            false, /* DoContents */
-                            false, /* GenContentsOnly */
-                            false, /* DoSource */
-                            DoMD5, DoSHA1, DoSHA256, DoSHA512,
-                            DoAlwaysStat) == false)
+         if (Db.GetFileInfo(OriginalPath, false, false, false, DoMD5, DoSHA1, DoSHA256, DoSHA512, DoAlwaysStat)
+		  == false)
        {
           return _error->Error("Error getting file info");
        }
@@ -792,9 +802,6 @@ bool SourcesWriter::DoPackage(string FileName)
 
        if (DoSHA512 == true && !Tags.Exists("Checksums-Sha512"))
           ostreamSha512 << "\n " << string(Db.SHA512Res) << " "
             << Db.GetFileSize() << " " << ParseJnk;
-
-         // write back the GetFileInfo() stats data
-         Db.Finish();
     }
 
     // Perform the delinking operation
@@ -865,7 +872,7 @@ bool SourcesWriter::DoPackage(string FileName)
 
   Stats.Packages++;
 
-   return true;
+   return Db.Finish();
 }
									/*}}}*/
@@ -886,15 +893,7 @@ ContentsWriter::ContentsWriter(string const &DB, string const &Arch) :
   determine what the package name is. */
 bool ContentsWriter::DoPackage(string FileName, string Package)
 {
-   if (!Db.GetFileInfo(FileName,
-                       Package.empty(), /* DoControl */
-                       true,            /* DoContents */
-                       false,           /* GenContentsOnly */
-                       false,           /* DoSource */
-                       false,           /* DoMD5 */
-                       false,           /* DoSHA1 */
-                       false,           /* DoSHA256 */
-                       false))          /* DoSHA512 */
+   if (!Db.GetFileInfo(FileName, Package.empty(), true, false, false, false, false, false))
   {
      return false;
   }
diff --git a/ftparchive/writer.h b/ftparchive/writer.h
index b1a653e7d..86884dcfc 100644
--- a/ftparchive/writer.h
+++ b/ftparchive/writer.h
@@ -174,7 +174,7 @@ class SourcesWriter : public FTWScanner
   string PathPrefix;
   string DirStrip;
   FILE *Output;
-   struct CacheDB::Stats &Stats;
+   struct CacheDB::Stats Stats;
 
   virtual bool DoPackage(string FileName);