-rw-r--r--  apt-pkg/acquire-item.cc                                          |  4
-rw-r--r--  apt-pkg/acquire.cc                                               | 15
-rw-r--r--  apt-pkg/cachefile.cc                                             | 17
-rw-r--r--  apt-pkg/contrib/error.cc                                         | 28
-rw-r--r--  apt-pkg/contrib/error.h                                          |  2
-rw-r--r--  apt-pkg/contrib/fileutl.cc                                       |  2
-rw-r--r--  apt-pkg/contrib/mmap.cc                                          | 16
-rw-r--r--  apt-pkg/deb/debmetaindex.cc                                      |  9
-rw-r--r--  apt-pkg/deb/dpkgpm.cc                                            |  2
-rw-r--r--  apt-pkg/indexcopy.cc                                             |  8
-rw-r--r--  apt-pkg/indexfile.cc                                             |  1
-rw-r--r--  apt-pkg/packagemanager.cc                                        |  2
-rw-r--r--  apt-pkg/pkgcachegen.cc                                           | 51
-rw-r--r--  apt-pkg/policy.cc                                                |  9
-rw-r--r--  apt-pkg/sourcelist.cc                                            |  2
-rw-r--r--  apt-pkg/srcrecords.cc                                            | 19
-rw-r--r--  apt-private/private-sources.cc                                   |  2
-rw-r--r--  methods/rred.cc                                                  |  2
-rwxr-xr-x  test/integration/test-apt-get-update-unauth-warning             |  1
-rwxr-xr-x  test/integration/test-apt-update-ims                            |  3
-rwxr-xr-x  test/integration/test-apt-update-not-modified                   |  2
-rwxr-xr-x  test/integration/test-bug-595691-empty-and-broken-archive-files |  2
22 files changed, 111 insertions(+), 88 deletions(-)
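
The hunks below apply one idiom over and over: push a fresh frame with _error->PushToStack(), run the sub-operation, record with _error->PendingError() whether that operation itself raised an error, and finally _error->MergeWithStack() so the messages reach the caller without a stale pending flag deciding the outcome. A minimal sketch of that idiom; TryRawAllocate is a hypothetical helper, not part of the commit:

#include <apt-pkg/error.h>
#include <apt-pkg/mmap.h>

// Hypothetical helper showing the scoped error check used throughout this
// commit: only an error raised by *this* allocation counts as a failure.
static unsigned long TryRawAllocate(DynamicMMap &Map, unsigned long const Size)
{
   _error->PushToStack();                         // isolate new messages
   unsigned long const Result = Map.RawAllocate(Size);
   bool const newError = _error->PendingError();  // raised inside this scope?
   _error->MergeWithStack();                      // keep the messages for the caller
   return (Result == 0 && newError) ? 0 : Result;
}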
diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc
index b6b6d8e48..c5b701fdc 100644
--- a/apt-pkg/acquire-item.cc
+++ b/apt-pkg/acquire-item.cc
@@ -1699,7 +1699,7 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string const &IndexDiffFile) /*{{{*/
FileFd Fd(IndexDiffFile,FileFd::ReadOnly);
pkgTagFile TF(&Fd);
- if (_error->PendingError() == true)
+ if (Fd.IsOpen() == false || Fd.Failed())
return false;
pkgTagSection Tags;
@@ -3236,7 +3236,7 @@ void pkgAcqFile::Done(string const &Message,HashStringList const &CalcHashes,
_error->PushToStack();
_error->Errno("pkgAcqFile::Done", "Symlinking file %s failed", DestFile.c_str());
std::stringstream msg;
- _error->DumpErrors(msg);
+ _error->DumpErrors(msg, GlobalError::DEBUG, false);
_error->RevertToStack();
ErrorText = msg.str();
Status = StatError;
diff --git a/apt-pkg/acquire.cc b/apt-pkg/acquire.cc
index c7bc00e0b..433a2a6fa 100644
--- a/apt-pkg/acquire.cc
+++ b/apt-pkg/acquire.cc
@@ -511,6 +511,7 @@ static void CheckDropPrivsMustBeDisabled(pkgAcquire const &Fetcher)
}
pkgAcquire::RunResult pkgAcquire::Run(int PulseIntervall)
{
+ _error->PushToStack();
CheckDropPrivsMustBeDisabled(*this);
Running = true;
@@ -548,11 +549,9 @@ pkgAcquire::RunResult pkgAcquire::Run(int PulseIntervall)
_error->Errno("select","Select has failed");
break;
}
-
+
RunFds(&RFds,&WFds);
- if (_error->PendingError() == true)
- break;
-
+
// Timeout, notify the log class
if (Res == 0 || (Log != 0 && Log->Update == true))
{
@@ -577,9 +576,11 @@ pkgAcquire::RunResult pkgAcquire::Run(int PulseIntervall)
// Shut down the items
for (ItemIterator I = Items.begin(); I != Items.end(); ++I)
- (*I)->Finished();
-
- if (_error->PendingError())
+ (*I)->Finished();
+
+ bool const newError = _error->PendingError();
+ _error->MergeWithStack();
+ if (newError)
return Failed;
if (WasCancelled)
return Cancelled;
diff --git a/apt-pkg/cachefile.cc b/apt-pkg/cachefile.cc
index 567e0ea9a..1c9bc694b 100644
--- a/apt-pkg/cachefile.cc
+++ b/apt-pkg/cachefile.cc
@@ -63,24 +63,27 @@ pkgCacheFile::~pkgCacheFile()
}
/*}}}*/
// CacheFile::BuildCaches - Open and build the cache files /*{{{*/
-// ---------------------------------------------------------------------
-/* */
+class APT_HIDDEN ScopedErrorMerge {
+public:
+ ScopedErrorMerge() { _error->PushToStack(); }
+ ~ScopedErrorMerge() { _error->MergeWithStack(); }
+};
bool pkgCacheFile::BuildCaches(OpProgress *Progress, bool WithLock)
{
if (Cache != NULL)
return true;
+ ScopedErrorMerge sem;
if (_config->FindB("pkgCacheFile::Generate", true) == false)
{
FileFd file(_config->FindFile("Dir::Cache::pkgcache"), FileFd::ReadOnly);
+ if (file.IsOpen() == false || file.Failed())
+ return false;
Map = new MMap(file, MMap::Public|MMap::ReadOnly);
Cache = new pkgCache(Map);
- if (_error->PendingError() == true)
- return false;
- return true;
+ return _error->PendingError() == false;
}
- const bool ErrorWasEmpty = _error->empty();
if (WithLock == true)
if (_system->Lock() == false)
return false;
@@ -98,7 +101,7 @@ bool pkgCacheFile::BuildCaches(OpProgress *Progress, bool WithLock)
return _error->Error(_("The package lists or status file could not be parsed or opened."));
/* This sux, remove it someday */
- if (ErrorWasEmpty == true && _error->empty() == false)
+ if (_error->PendingError() == true)
_error->Warning(_("You may want to run apt-get update to correct these problems"));
Cache = new pkgCache(Map);
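
The ScopedErrorMerge helper above exists so the PushToStack()/MergeWithStack() pair stays balanced on every return path of BuildCaches. A hedged sketch of the effect; DoWork, StepOne and StepTwo are hypothetical callers, and APT_HIDDEN is omitted:

#include <apt-pkg/error.h>

bool StepOne();
bool StepTwo();

class ScopedErrorMerge {
public:
   ScopedErrorMerge() { _error->PushToStack(); }
   ~ScopedErrorMerge() { _error->MergeWithStack(); }
};

bool DoWork()
{
   ScopedErrorMerge sem;   // PushToStack() happens here ...
   if (StepOne() == false)
      return false;        // ... MergeWithStack() runs via the destructor here
   return StepTwo();       // ... and on this return path as well
}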
diff --git a/apt-pkg/contrib/error.cc b/apt-pkg/contrib/error.cc
index 892cd4874..8a87e16e9 100644
--- a/apt-pkg/contrib/error.cc
+++ b/apt-pkg/contrib/error.cc
@@ -27,6 +27,7 @@
#include <unistd.h>
#include <string>
#include <cstring>
+#include <algorithm>
/*}}}*/
@@ -212,12 +213,13 @@ void GlobalError::DumpErrors(std::ostream &out, MsgType const &threshold,
if (mergeStack == true)
for (std::list<MsgStack>::const_reverse_iterator s = Stacks.rbegin();
s != Stacks.rend(); ++s)
- Messages.insert(Messages.begin(), s->Messages.begin(), s->Messages.end());
+ std::copy(s->Messages.begin(), s->Messages.end(), std::front_inserter(Messages));
+
+ std::for_each(Messages.begin(), Messages.end(), [&threshold, &out](Item const &m) {
+ if (m.Type >= threshold)
+ out << m << std::endl;
+ });
- for (std::list<Item>::const_iterator m = Messages.begin();
- m != Messages.end(); ++m)
- if (m->Type >= threshold)
- out << (*m) << std::endl;
Discard();
}
/*}}}*/
@@ -228,25 +230,21 @@ void GlobalError::Discard() {
}
/*}}}*/
// GlobalError::empty - does our error list include anything? /*{{{*/
-bool GlobalError::empty(MsgType const &trashhold) const {
+bool GlobalError::empty(MsgType const &threshold) const {
if (PendingFlag == true)
return false;
if (Messages.empty() == true)
return true;
- for (std::list<Item>::const_iterator m = Messages.begin();
- m != Messages.end(); ++m)
- if (m->Type >= trashhold)
- return false;
-
- return true;
+ return std::find_if(Messages.begin(), Messages.end(), [&threshold](Item const &m) {
+ return m.Type >= threshold;
+ }) == Messages.end();
}
/*}}}*/
// GlobalError::PushToStack /*{{{*/
void GlobalError::PushToStack() {
- MsgStack pack(Messages, PendingFlag);
- Stacks.push_back(pack);
+ Stacks.emplace_back(Messages, PendingFlag);
Discard();
}
/*}}}*/
@@ -262,7 +260,7 @@ void GlobalError::RevertToStack() {
// GlobalError::MergeWithStack /*{{{*/
void GlobalError::MergeWithStack() {
MsgStack pack = Stacks.back();
- Messages.insert(Messages.begin(), pack.Messages.begin(), pack.Messages.end());
+ Messages.splice(Messages.begin(), pack.Messages);
PendingFlag = PendingFlag || pack.PendingFlag;
Stacks.pop_back();
}
diff --git a/apt-pkg/contrib/error.h b/apt-pkg/contrib/error.h
index ed8c19153..1fb0ede4a 100644
--- a/apt-pkg/contrib/error.h
+++ b/apt-pkg/contrib/error.h
@@ -333,7 +333,7 @@ private: /*{{{*/
bool PendingFlag;
struct MsgStack {
- std::list<Item> const Messages;
+ std::list<Item> Messages;
bool const PendingFlag;
MsgStack(std::list<Item> const &Messages, bool const &Pending) :
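
MsgStack::Messages loses its const qualifier because MergeWithStack() now splices from it, and std::list::splice moves nodes out of the donor list rather than copying them. A small self-contained illustration of that behaviour:

#include <iostream>
#include <list>

int main()
{
   std::list<int> dst, src = {1, 2, 3};
   // splice transfers the nodes: no copies are made and src is left empty,
   // which is why the donor list cannot be const.
   dst.splice(dst.begin(), src);
   std::cout << dst.size() << ' ' << src.size() << '\n';   // prints "3 0"
   return 0;
}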
diff --git a/apt-pkg/contrib/fileutl.cc b/apt-pkg/contrib/fileutl.cc
index 52fedce8f..02b27f5cf 100644
--- a/apt-pkg/contrib/fileutl.cc
+++ b/apt-pkg/contrib/fileutl.cc
@@ -665,7 +665,7 @@ string flAbsPath(string File)
char *p = realpath(File.c_str(), NULL);
if (p == NULL)
{
- _error->Errno("realpath", "flAbsPath failed");
+ _error->Errno("realpath", "flAbsPath on %s failed", File.c_str());
return "";
}
std::string AbsPath(p);
diff --git a/apt-pkg/contrib/mmap.cc b/apt-pkg/contrib/mmap.cc
index b2a53a6cb..8e169027e 100644
--- a/apt-pkg/contrib/mmap.cc
+++ b/apt-pkg/contrib/mmap.cc
@@ -215,9 +215,6 @@ DynamicMMap::DynamicMMap(FileFd &F,unsigned long Flags,unsigned long const &Work
MMap(F,Flags | NoImmMap), Fd(&F), WorkSpace(Workspace),
GrowFactor(Grow), Limit(Limit)
{
- if (_error->PendingError() == true)
- return;
-
// disable Moveable if we don't grow
if (Grow == 0)
this->Flags &= ~Moveable;
@@ -252,9 +249,6 @@ DynamicMMap::DynamicMMap(unsigned long Flags,unsigned long const &WorkSpace,
MMap(Flags | NoImmMap | UnMapped), Fd(0), WorkSpace(WorkSpace),
GrowFactor(Grow), Limit(Limit)
{
- if (_error->PendingError() == true)
- return;
-
// disable Moveable if we don't grow
if (Grow == 0)
this->Flags &= ~Moveable;
@@ -390,12 +384,15 @@ unsigned long DynamicMMap::Allocate(unsigned long ItemSize)
const unsigned long size = 20*1024;
I->Count = size/ItemSize;
Pool* oldPools = Pools;
+ _error->PushToStack();
Result = RawAllocate(size,ItemSize);
+ bool const newError = _error->PendingError();
+ _error->MergeWithStack();
if (Pools != oldPools)
I += Pools - oldPools;
// Does the allocation failed ?
- if (Result == 0 && _error->PendingError())
+ if (Result == 0 && newError)
return 0;
I->Start = Result;
}
@@ -416,9 +413,12 @@ unsigned long DynamicMMap::WriteString(const char *String,
if (Len == (unsigned long)-1)
Len = strlen(String);
+ _error->PushToStack();
unsigned long const Result = RawAllocate(Len+1,0);
+ bool const newError = _error->PendingError();
+ _error->MergeWithStack();
- if (Base == NULL || (Result == 0 && _error->PendingError()))
+ if (Base == NULL || (Result == 0 && newError))
return 0;
memcpy((char *)Base + Result,String,Len);
diff --git a/apt-pkg/deb/debmetaindex.cc b/apt-pkg/deb/debmetaindex.cc
index b381f5f85..6ed722e68 100644
--- a/apt-pkg/deb/debmetaindex.cc
+++ b/apt-pkg/deb/debmetaindex.cc
@@ -308,7 +308,7 @@ bool debReleaseIndex::Load(std::string const &Filename, std::string * const Erro
return false;
pkgTagFile TagFile(&Fd, Fd.Size());
- if (_error->PendingError() == true)
+ if (Fd.IsOpen() == false || Fd.Failed())
{
if (ErrorText != NULL)
strprintf(*ErrorText, _("Unable to parse Release file %s"),Filename.c_str());
@@ -641,8 +641,6 @@ bool debReleaseIndex::Merge(pkgCacheGenerator &Gen,OpProgress * /*Prog*/) const/
// signature for an 'InRelease' file couldn't be checked
if (OpenMaybeClearSignedFile(ReleaseFile, Rel) == false)
return false;
- if (_error->PendingError() == true)
- return false;
// Store the IMS information
pkgCache::RlsFileIterator File = Gen.GetCurRlsFile();
@@ -656,7 +654,7 @@ bool debReleaseIndex::Merge(pkgCacheGenerator &Gen,OpProgress * /*Prog*/) const/
pkgTagFile TagFile(&Rel, Rel.Size());
pkgTagSection Section;
- if (_error->PendingError() == true || TagFile.Step(Section) == false)
+ if (Rel.IsOpen() == false || Rel.Failed() || TagFile.Step(Section) == false)
return false;
std::string data;
@@ -665,6 +663,7 @@ bool debReleaseIndex::Merge(pkgCacheGenerator &Gen,OpProgress * /*Prog*/) const/
if (data.empty() == false) \
{ \
map_stringitem_t const storage = Gen.StoreString(pkgCacheGenerator::TYPE, data); \
+ if (storage == 0) return false; \
STORE = storage; \
}
APT_INRELEASE(MIXED, "Suite", File->Archive)
@@ -676,7 +675,7 @@ bool debReleaseIndex::Merge(pkgCacheGenerator &Gen,OpProgress * /*Prog*/) const/
Section.FindFlag("NotAutomatic", File->Flags, pkgCache::Flag::NotAutomatic);
Section.FindFlag("ButAutomaticUpgrades", File->Flags, pkgCache::Flag::ButAutomaticUpgrades);
- return !_error->PendingError();
+ return true;
}
/*}}}*/
// ReleaseIndex::FindInCache - Find this index /*{{{*/
diff --git a/apt-pkg/deb/dpkgpm.cc b/apt-pkg/deb/dpkgpm.cc
index 6ae85d2ea..8f652b3df 100644
--- a/apt-pkg/deb/dpkgpm.cc
+++ b/apt-pkg/deb/dpkgpm.cc
@@ -1148,7 +1148,7 @@ void pkgDPkgPM::StartPtyMagic()
free(d->slave);
d->slave = NULL;
}
- _error->DumpErrors(std::cerr);
+ _error->DumpErrors(std::cerr, GlobalError::DEBUG, false);
}
_error->RevertToStack();
}
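
This and the similar DumpErrors() changes in packagemanager.cc, private-sources.cc and rred.cc below pass an explicit threshold and mergeStack=false, so dumping the messages gathered since the last PushToStack() no longer pulls in (and then discards) the caller's own stack frames. A sketch of that local dump-and-revert pattern; RunAndReport and DoRiskyThing are hypothetical:

#include <apt-pkg/error.h>
#include <iostream>

static bool RunAndReport(bool (*DoRiskyThing)())
{
   _error->PushToStack();
   bool const res = DoRiskyThing();
   if (res == false)
      // Print only what was raised since PushToStack(): GlobalError::DEBUG
      // keeps every severity, and the trailing 'false' stops DumpErrors()
      // from merging the outer stack frames into the output.
      _error->DumpErrors(std::cerr, GlobalError::DEBUG, false);
   _error->RevertToStack();
   return res;
}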
diff --git a/apt-pkg/indexcopy.cc b/apt-pkg/indexcopy.cc
index 8a7df2eb3..c54b365dc 100644
--- a/apt-pkg/indexcopy.cc
+++ b/apt-pkg/indexcopy.cc
@@ -90,7 +90,7 @@ bool IndexCopy::CopyPackages(string CDROM,string Name,vector<string> &List,
off_t const FileSize = Pkg.Size();
pkgTagFile Parser(&Pkg);
- if (_error->PendingError() == true)
+ if (Pkg.IsOpen() == false || Pkg.Failed())
return false;
// Open the output file
@@ -107,7 +107,7 @@ bool IndexCopy::CopyPackages(string CDROM,string Name,vector<string> &List,
} else {
Target.Open(TargetF,FileFd::WriteAtomic);
}
- if (_error->PendingError() == true)
+ if (Target.IsOpen() == false || Target.Failed())
return false;
// Setup the progress meter
@@ -683,7 +683,7 @@ bool TranslationsCopy::CopyTranslations(string CDROM,string Name, /*{{{*/
off_t const FileSize = Pkg.Size();
pkgTagFile Parser(&Pkg);
- if (_error->PendingError() == true)
+ if (Pkg.IsOpen() == false || Pkg.Failed())
return false;
// Open the output file
@@ -700,7 +700,7 @@ bool TranslationsCopy::CopyTranslations(string CDROM,string Name, /*{{{*/
} else {
Target.Open(TargetF,FileFd::WriteAtomic);
}
- if (_error->PendingError() == true)
+ if (Target.IsOpen() == false || Target.Failed())
return false;
// Setup the progress meter
diff --git a/apt-pkg/indexfile.cc b/apt-pkg/indexfile.cc
index c3c0e74ae..fad339197 100644
--- a/apt-pkg/indexfile.cc
+++ b/apt-pkg/indexfile.cc
@@ -342,6 +342,7 @@ bool pkgDebianIndexFile::Merge(pkgCacheGenerator &Gen,OpProgress * const Prog)
_error->PushToStack();
std::unique_ptr<pkgCacheListParser> Parser(CreateListParser(Pkg));
bool const newError = _error->PendingError();
+ _error->MergeWithStack();
if (newError == false && Parser == nullptr)
return true;
if (Parser == NULL)
diff --git a/apt-pkg/packagemanager.cc b/apt-pkg/packagemanager.cc
index ceeb60a03..de63c1aa8 100644
--- a/apt-pkg/packagemanager.cc
+++ b/apt-pkg/packagemanager.cc
@@ -904,7 +904,7 @@ bool pkgPackageManager::SmartUnPack(PkgIterator Pkg, bool const Immediate, int c
if (Debug)
{
clog << OutputInDepth(Depth) << "Avoidance unpack of " << ConflictPkg.FullName() << " failed for " << End << " ignoring:" << std::endl;
- _error->DumpErrors(std::clog);
+ _error->DumpErrors(std::clog, GlobalError::DEBUG, false);
}
_error->RevertToStack();
// ignorance can only happen if a) one of the offenders is already gone
diff --git a/apt-pkg/pkgcachegen.cc b/apt-pkg/pkgcachegen.cc
index 10d3fcf21..89b4c4bae 100644
--- a/apt-pkg/pkgcachegen.cc
+++ b/apt-pkg/pkgcachegen.cc
@@ -57,14 +57,15 @@ pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) :
Map(*pMap), Cache(pMap,false), Progress(Prog),
CurrentRlsFile(NULL), CurrentFile(NULL), d(NULL)
{
- if (_error->PendingError() == true)
- return;
-
if (Map.Size() == 0)
{
// Setup the map interface..
Cache.HeaderP = (pkgCache::Header *)Map.Data();
- if (Map.RawAllocate(sizeof(pkgCache::Header)) == 0 && _error->PendingError() == true)
+ _error->PushToStack();
+ Map.RawAllocate(sizeof(pkgCache::Header));
+ bool const newError = _error->PendingError();
+ _error->MergeWithStack();
+ if (newError)
return;
Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
@@ -382,7 +383,7 @@ bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator
// Add a new version
map_pointer_t const verindex = NewVersion(Ver, Version, Pkg.Index(), Hash, *LastVer);
- if (verindex == 0 && _error->PendingError())
+ if (unlikely(verindex == 0))
return _error->Error(_("Error occurred while processing %s (%s%d)"),
Pkg.Name(), "NewVersion", 1);
@@ -470,7 +471,7 @@ bool pkgCacheGenerator::AddNewDescription(ListParser &List, pkgCache::VerIterato
Dynamic<pkgCache::DescIterator> DynDesc(Desc);
map_pointer_t const descindex = NewDescription(Desc, lang, CurMd5, md5idx);
- if (unlikely(descindex == 0 && _error->PendingError()))
+ if (unlikely(descindex == 0))
return _error->Error(_("Error occurred while processing %s (%s%d)"),
Ver.ParentPkg().Name(), "NewDescription", 1);
@@ -1274,12 +1275,18 @@ map_stringitem_t pkgCacheGenerator::StoreString(enum StringType const type, cons
/* This just verifies that each file in the list of index files exists,
has matching attributes with the cache and the cache does not have
any extra files. */
+class APT_HIDDEN ScopedErrorRevert {
+public:
+ ScopedErrorRevert() { _error->PushToStack(); }
+ ~ScopedErrorRevert() { _error->RevertToStack(); }
+};
static bool CheckValidity(const string &CacheFile,
pkgSourceList &List,
FileIterator const Start,
FileIterator const End,
MMap **OutMap = 0)
{
+ ScopedErrorRevert ser;
bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
// No file, certainly invalid
if (CacheFile.empty() == true || FileExists(CacheFile) == false)
@@ -1300,11 +1307,10 @@ static bool CheckValidity(const string &CacheFile,
FileFd CacheF(CacheFile,FileFd::ReadOnly);
std::unique_ptr<MMap> Map(new MMap(CacheF,0));
pkgCache Cache(Map.get());
- if (_error->PendingError() == true || Map->Size() == 0)
+ if (_error->PendingError() || Map->Size() == 0)
{
if (Debug == true)
std::clog << "Errors are pending or Map is empty() for " << CacheFile << std::endl;
- _error->Discard();
return false;
}
@@ -1383,12 +1389,11 @@ static bool CheckValidity(const string &CacheFile,
if (Debug == true)
{
std::clog << "Validity failed because of pending errors:" << std::endl;
- _error->DumpErrors();
+ _error->DumpErrors(std::clog, GlobalError::DEBUG, false);
}
- _error->Discard();
return false;
}
-
+
if (OutMap != 0)
*OutMap = Map.release();
return true;
@@ -1511,7 +1516,7 @@ static bool writeBackMMapToFile(pkgCacheGenerator * const Gen, DynamicMMap * con
std::string const &FileName)
{
FileFd SCacheF(FileName, FileFd::WriteAtomic);
- if (_error->PendingError() == true)
+ if (SCacheF.IsOpen() == false || SCacheF.Failed())
return false;
fchmod(SCacheF.Fd(),0644);
@@ -1535,10 +1540,15 @@ static bool loadBackMMapFromFile(std::unique_ptr<pkgCacheGenerator> &Gen,
{
Map.reset(CreateDynamicMMap(NULL, 0));
FileFd CacheF(FileName, FileFd::ReadOnly);
+ if (CacheF.IsOpen() == false || CacheF.Failed())
+ return false;
+ _error->PushToStack();
map_pointer_t const alloc = Map->RawAllocate(CacheF.Size());
- if ((alloc == 0 && _error->PendingError())
- || CacheF.Read((unsigned char *)Map->Data() + alloc,
- CacheF.Size()) == false)
+ bool const newError = _error->PendingError();
+ _error->MergeWithStack();
+ if (alloc == 0 && newError)
+ return false;
+ if (CacheF.Read((unsigned char *)Map->Data() + alloc, CacheF.Size()) == false)
return false;
Gen.reset(new pkgCacheGenerator(Map.get(),Progress));
return true;
@@ -1691,8 +1701,11 @@ bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress
}
/*}}}*/
// CacheGenerator::MakeOnlyStatusCache - Build only a status files cache/*{{{*/
-// ---------------------------------------------------------------------
-/* */
+class APT_HIDDEN ScopedErrorMerge {
+public:
+ ScopedErrorMerge() { _error->PushToStack(); }
+ ~ScopedErrorMerge() { _error->MergeWithStack(); }
+};
APT_DEPRECATED bool pkgMakeOnlyStatusCache(OpProgress &Progress,DynamicMMap **OutMap)
{ return pkgCacheGenerator::MakeOnlyStatusCache(&Progress, OutMap); }
bool pkgCacheGenerator::MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **OutMap)
@@ -1701,12 +1714,12 @@ bool pkgCacheGenerator::MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **O
if (_system->AddStatusFiles(Files) == false)
return false;
+ ScopedErrorMerge sem;
std::unique_ptr<DynamicMMap> Map(CreateDynamicMMap(NULL, 0));
map_filesize_t CurrentSize = 0;
map_filesize_t TotalSize = 0;
-
TotalSize = ComputeSize(NULL, Files.begin(), Files.end());
-
+
// Build the status cache
if (Progress != NULL)
Progress->OverallProgress(0,1,1,_("Reading package lists"));
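
pkgcachegen.cc gains two look-alike guards with different destructors: CheckValidity() only probes an existing cache, so ScopedErrorRevert discards whatever the probe complained about, while MakeOnlyStatusCache() uses ScopedErrorMerge to hand its messages back to the caller. An illustrative comparison assuming the two classes defined in the diff above; ProbeCache and BuildStatusCache are hypothetical:

static void ProbeCache()
{
   ScopedErrorRevert ser;   // destructor calls _error->RevertToStack():
                            // errors raised in here vanish with the scope
}

static void BuildStatusCache()
{
   ScopedErrorMerge sem;    // destructor calls _error->MergeWithStack():
                            // errors raised in here stay visible to the caller
}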
diff --git a/apt-pkg/policy.cc b/apt-pkg/policy.cc
index 8441bc465..bea4bec89 100644
--- a/apt-pkg/policy.cc
+++ b/apt-pkg/policy.cc
@@ -439,7 +439,7 @@ bool ReadPinFile(pkgPolicy &Plcy,string File)
FileFd Fd(File,FileFd::ReadOnly);
pkgTagFile TF(&Fd);
- if (_error->PendingError() == true)
+ if (Fd.IsOpen() == false || Fd.Failed())
return false;
pkgUserTagSection Tags;
@@ -478,10 +478,13 @@ bool ReadPinFile(pkgPolicy &Plcy,string File)
}
for (; Word != End && isspace(*Word) != 0; Word++);
- int priority = Tags.FindI("Pin-Priority", 0);
+ _error->PushToStack();
+ int const priority = Tags.FindI("Pin-Priority", 0);
+ bool const newError = _error->PendingError();
+ _error->MergeWithStack();
if (priority < std::numeric_limits<short>::min() ||
priority > std::numeric_limits<short>::max() ||
- _error->PendingError()) {
+ newError) {
return _error->Error(_("%s: Value %s is outside the range of valid pin priorities (%d to %d)"),
File.c_str(), Tags.FindS("Pin-Priority").c_str(),
std::numeric_limits<short>::min(),
diff --git a/apt-pkg/sourcelist.cc b/apt-pkg/sourcelist.cc
index 31d87a403..4e5ff0578 100644
--- a/apt-pkg/sourcelist.cc
+++ b/apt-pkg/sourcelist.cc
@@ -421,7 +421,7 @@ bool pkgSourceList::ParseFileDeb822(string const &File)
// see if we can read the file
FileFd Fd(File, FileFd::ReadOnly);
pkgTagFile Sources(&Fd);
- if (_error->PendingError() == true)
+ if (Fd.IsOpen() == false || Fd.Failed())
return _error->Error(_("Malformed stanza %u in source list %s (type)"),i,File.c_str());
// read step by step
diff --git a/apt-pkg/srcrecords.cc b/apt-pkg/srcrecords.cc
index 942f11569..53d7e604d 100644
--- a/apt-pkg/srcrecords.cc
+++ b/apt-pkg/srcrecords.cc
@@ -39,11 +39,14 @@ pkgSrcRecords::pkgSrcRecords(pkgSourceList &List) : d(NULL), Files(0)
for (std::vector<pkgIndexFile *>::const_iterator J = Indexes->begin();
J != Indexes->end(); ++J)
{
- Parser* P = (*J)->CreateSrcParser();
- if (_error->PendingError() == true)
- return;
- if (P != 0)
- Files.push_back(P);
+ _error->PushToStack();
+ Parser* P = (*J)->CreateSrcParser();
+ bool const newError = _error->PendingError();
+ _error->MergeWithStack();
+ if (newError)
+ return;
+ if (P != 0)
+ Files.push_back(P);
}
}
@@ -93,8 +96,6 @@ const pkgSrcRecords::Parser* pkgSrcRecords::Step()
// Step to the next record, possibly switching files
while ((*Current)->Step() == false)
{
- if (_error->PendingError() == true)
- return 0;
++Current;
if (Current == Files.end())
return 0;
@@ -115,10 +116,6 @@ pkgSrcRecords::Parser *pkgSrcRecords::Find(const char *Package,bool const &SrcOn
if(Step() == 0)
return 0;
- // IO error somehow
- if (_error->PendingError() == true)
- return 0;
-
// Source name hit
if ((*Current)->Package() == Package)
return *Current;
diff --git a/apt-private/private-sources.cc b/apt-private/private-sources.cc
index 301936b9d..6a32931ba 100644
--- a/apt-private/private-sources.cc
+++ b/apt-private/private-sources.cc
@@ -54,7 +54,7 @@ bool EditSources(CommandLine &CmdL)
_error->PushToStack();
res = sl.Read(sourceslist);
if (!res) {
- _error->DumpErrors();
+ _error->DumpErrors(std::cerr, GlobalError::DEBUG, false);
strprintf(outs, _("Failed to parse %s. Edit again? "),
sourceslist.c_str());
std::cout << outs;
diff --git a/methods/rred.cc b/methods/rred.cc
index 7c2ccd98e..d2cefc943 100644
--- a/methods/rred.cc
+++ b/methods/rred.cc
@@ -621,7 +621,7 @@ class RredMethod : public pkgAcqMethod {
if (p.Open(patch_name, FileFd::ReadOnly, FileFd::Gzip) == false ||
patch.read_diff(p, &patch_hash) == false)
{
- _error->DumpErrors(std::cerr);
+ _error->DumpErrors(std::cerr, GlobalError::DEBUG, false);
return false;
}
p.Close();
diff --git a/test/integration/test-apt-get-update-unauth-warning b/test/integration/test-apt-get-update-unauth-warning
index 4c45f8f26..35825ee24 100755
--- a/test/integration/test-apt-get-update-unauth-warning
+++ b/test/integration/test-apt-get-update-unauth-warning
@@ -26,6 +26,7 @@ Ign:1 file:$APTARCHIVE unstable InRelease
Get:2 file:$APTARCHIVE unstable Release
Err:2 file:$APTARCHIVE unstable Release
File not found
+Reading package lists...
W: The repository 'file:$APTARCHIVE unstable Release' does not have a Release file. This is deprecated, please contact the owner of the repository.
E: Use --allow-insecure-repositories to force the update" aptget update --no-allow-insecure-repositories
diff --git a/test/integration/test-apt-update-ims b/test/integration/test-apt-update-ims
index 33b4ed1b9..ed89cd342 100755
--- a/test/integration/test-apt-update-ims
+++ b/test/integration/test-apt-update-ims
@@ -97,6 +97,7 @@ signreleasefiles
msgmsg 'expired InRelease'
EXPECT='Hit:1 http://localhost:8080 unstable InRelease
+Reading package lists...
E: Release file for http://localhost:8080/dists/unstable/InRelease is expired (invalid since). Updates for this repository will not be applied.'
echo 'Acquire::GzipIndexes "0";' > rootdir/etc/apt/apt.conf.d/02compressindex
runtest 'failure'
@@ -107,6 +108,7 @@ msgmsg 'expired Release/Release.gpg'
EXPECT='Ign:1 http://localhost:8080 unstable InRelease
404 Not Found
Hit:2 http://localhost:8080 unstable Release
+Reading package lists...
E: Release file for http://localhost:8080/dists/unstable/Release is expired (invalid since). Updates for this repository will not be applied.'
find aptarchive -name 'InRelease' -delete
echo 'Acquire::GzipIndexes "0";' > rootdir/etc/apt/apt.conf.d/02compressindex
@@ -120,6 +122,7 @@ EXPECT="Ign:1 http://localhost:8080 unstable InRelease
Hit:2 http://localhost:8080 unstable Release
Ign:3 http://localhost:8080 unstable Release.gpg
404 Not Found
+Reading package lists...
W: The data from 'http://localhost:8080 unstable Release' is not signed. Packages from that repository can not be authenticated.
E: Release file for http://localhost:8080/dists/unstable/Release is expired (invalid since). Updates for this repository will not be applied."
find aptarchive -name 'Release.gpg' -delete
diff --git a/test/integration/test-apt-update-not-modified b/test/integration/test-apt-update-not-modified
index 3f822586a..8c580245a 100755
--- a/test/integration/test-apt-update-not-modified
+++ b/test/integration/test-apt-update-not-modified
@@ -46,6 +46,7 @@ EOF
Get:2 $1 unstable/main amd64 Packages [$(stat -c '%s' 'aptarchive/dists/unstable/main/binary-amd64/Packages.gz') B]
Err:2 $1 unstable/main amd64 Packages
Hash Sum mismatch
+Reading package lists...
W: Failed to fetch $1/dists/unstable/main/binary-amd64/Packages.gz Hash Sum mismatch
E: Some index files failed to download. They have been ignored, or old ones used instead." aptget update
testfileequal 'listsdir-without-amd64.lst' "$(listcurrentlistsdirectory)"
@@ -105,6 +106,7 @@ Hit:2 $1 unstable Release
Get:4 $1 unstable/main amd64 Packages [$(stat -c '%s' 'aptarchive/dists/unstable/main/binary-amd64/Packages.gz') B]
Err:4 $1 unstable/main amd64 Packages
Hash Sum mismatch
+Reading package lists...
W: Failed to fetch $1/dists/unstable/main/binary-amd64/Packages.gz Hash Sum mismatch
E: Some index files failed to download. They have been ignored, or old ones used instead." aptget update
testfileequal 'listsdir-without-amd64.lst' "$(listcurrentlistsdirectory)"
diff --git a/test/integration/test-bug-595691-empty-and-broken-archive-files b/test/integration/test-bug-595691-empty-and-broken-archive-files
index 0c02aee30..f73dfdd68 100755
--- a/test/integration/test-bug-595691-empty-and-broken-archive-files
+++ b/test/integration/test-bug-595691-empty-and-broken-archive-files
@@ -54,6 +54,7 @@ Reading package lists..." "empty archive Packages.$COMPRESS over file"
testaptgetupdate "Get:2 file:$APTARCHIVE Packages
Err:2 file:$APTARCHIVE Packages
Empty files can't be valid archives
+Reading package lists...
W: Failed to fetch ${COMPRESSOR}:${APTARCHIVE}/Packages.$COMPRESS Empty files can't be valid archives
E: Some index files failed to download. They have been ignored, or old ones used instead." "empty file Packages.$COMPRESS over file"
}
@@ -70,6 +71,7 @@ Reading package lists..." "empty archive Packages.$COMPRESS over http"
testaptgetupdate "Get:2 http://localhost:8080 Packages
Err:2 http://localhost:8080 Packages
Empty files can't be valid archives
+Reading package lists...
W: Failed to fetch ${COMPRESSOR}:$(readlink -f rootdir/var/lib/apt/lists/partial/localhost:8080_Packages.${COMPRESS}) Empty files can't be valid archives
E: Some index files failed to download. They have been ignored, or old ones used instead." "empty file Packages.$COMPRESS over http"
}