Diffstat (limited to 'apt-pkg')
-rw-r--r--  apt-pkg/acquire-item.cc           |  13
-rw-r--r--  apt-pkg/acquire-method.cc         |  41
-rw-r--r--  apt-pkg/acquire-method.h          |   3
-rw-r--r--  apt-pkg/acquire-worker.cc         |  67
-rw-r--r--  apt-pkg/acquire.cc                |  39
-rw-r--r--  apt-pkg/algorithms.cc             |  78
-rw-r--r--  apt-pkg/algorithms.h              |   5
-rw-r--r--  apt-pkg/aptconfiguration.cc       |  63
-rw-r--r--  apt-pkg/cachefilter.cc            |  55
-rw-r--r--  apt-pkg/cachefilter.h             |  30
-rw-r--r--  apt-pkg/cacheiterators.h          |   4
-rw-r--r--  apt-pkg/cacheset.cc               |  61
-rw-r--r--  apt-pkg/cacheset.h                |   5
-rw-r--r--  apt-pkg/cdrom.cc                  |  21
-rw-r--r--  apt-pkg/contrib/cmndline.cc       |   8
-rw-r--r--  apt-pkg/contrib/configuration.cc  | 149
-rw-r--r--  apt-pkg/contrib/configuration.h   |   2
-rw-r--r--  apt-pkg/contrib/fileutl.cc        | 280
-rw-r--r--  apt-pkg/contrib/fileutl.h         |   5
-rw-r--r--  apt-pkg/contrib/mmap.cc           |  21
-rw-r--r--  apt-pkg/contrib/sha2_internal.cc  |   8
-rw-r--r--  apt-pkg/contrib/strutl.cc         |  62
-rw-r--r--  apt-pkg/deb/debindexfile.cc       |   9
-rw-r--r--  apt-pkg/deb/deblistparser.cc      | 151
-rw-r--r--  apt-pkg/deb/dpkgpm.cc             |   6
-rw-r--r--  apt-pkg/depcache.cc               |  27
-rw-r--r--  apt-pkg/edsp.cc                   |  12
-rw-r--r--  apt-pkg/edsp/edspindexfile.cc     |   3
-rw-r--r--  apt-pkg/edsp/edspsystem.cc        |   2
-rw-r--r--  apt-pkg/indexcopy.cc              |   6
-rw-r--r--  apt-pkg/packagemanager.cc         |  11
-rw-r--r--  apt-pkg/pkgcache.cc               |  29
-rw-r--r--  apt-pkg/pkgcachegen.cc            |  19
-rw-r--r--  apt-pkg/srcrecords.h              |   1
34 files changed, 889 insertions(+), 407 deletions(-)
diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc
index a30e98858..665dd427e 100644
--- a/apt-pkg/acquire-item.cc
+++ b/apt-pkg/acquire-item.cc
@@ -1235,9 +1235,20 @@ void pkgAcqMetaIndex::Done(string Message,unsigned long long Size,string Hash, /
}
else
{
+ // FIXME: move this into pkgAcqMetaClearSig::Done on the next
+ // ABI break
+
+ // if we expect a ClearTextSignature (InRelease), ensure that
+ // this is what we get and if not fail to queue a
+ // Release/Release.gpg, see #346386
+ if (SigFile == DestFile && !StartsWithGPGClearTextSignature(DestFile))
+ {
+ Failed(Message, Cfg);
+ return;
+ }
+
// There was a signature file, so pass it to gpgv for
// verification
-
if (_config->FindB("Debug::pkgAcquire::Auth", false))
std::cerr << "Metaindex acquired, queueing gpg verification ("
<< SigFile << "," << DestFile << ")\n";
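
The guard above relies on StartsWithGPGClearTextSignature(), which is added to contrib/fileutl.cc further down in this diff. A minimal sketch of the same check used by a caller deciding whether a downloaded file really is an InRelease-style clearsigned file; the file path and fallback message are illustrative only:

    #include <cstdio>
    #include <cstring>

    // Sketch: true only if the file literally starts with the OpenPGP
    // cleartext header, mirroring the guard added above.
    static bool LooksClearsigned(const char *path)
    {
       static const char SIGMSG[] = "-----BEGIN PGP SIGNED MESSAGE-----\n";
       char buf[sizeof(SIGMSG)];
       FILE *f = std::fopen(path, "r");
       if (f == NULL)
          return false;
       bool const ok = std::fgets(buf, sizeof(buf), f) != NULL &&
                       std::strcmp(buf, SIGMSG) == 0;
       std::fclose(f);
       return ok;
    }

    int main()
    {
       // hypothetical destination file of an InRelease download
       const char *DestFile = "/var/lib/apt/lists/partial/InRelease";
       if (!LooksClearsigned(DestFile))
          std::puts("not clearsigned - fall back to Release/Release.gpg");
       return 0;
    }
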
diff --git a/apt-pkg/acquire-method.cc b/apt-pkg/acquire-method.cc
index 2041fd9e9..5bc1c159a 100644
--- a/apt-pkg/acquire-method.cc
+++ b/apt-pkg/acquire-method.cc
@@ -95,12 +95,7 @@ void pkgAcqMethod::Fail(string Err,bool Transient)
{
std::cout << "400 URI Failure\nURI: " << Queue->Uri << "\n"
<< "Message: " << Err << " " << IP << "\n";
- // Dequeue
- FetchItem *Tmp = Queue;
- Queue = Queue->Next;
- delete Tmp;
- if (Tmp == QueueBack)
- QueueBack = Queue;
+ Dequeue();
}
else
std::cout << "400 URI Failure\nURI: <UNKNOWN>\nMessage: " << Err << "\n";
@@ -211,13 +206,7 @@ void pkgAcqMethod::URIDone(FetchResult &Res, FetchResult *Alt)
}
std::cout << "\n" << std::flush;
-
- // Dequeue
- FetchItem *Tmp = Queue;
- Queue = Queue->Next;
- delete Tmp;
- if (Tmp == QueueBack)
- QueueBack = Queue;
+ Dequeue();
}
/*}}}*/
// AcqMethod::MediaFail - Syncronous request for new media /*{{{*/
@@ -423,26 +412,14 @@ void pkgAcqMethod::Status(const char *Format,...)
/*}}}*/
// AcqMethod::Redirect - Send a redirect message /*{{{*/
// ---------------------------------------------------------------------
-/* This method sends the redirect message and also manipulates the queue
- to keep the pipeline synchronized. */
+/* This method sends the redirect message and dequeues the item as
+ * the worker will enqueue again later on to the right queue */
void pkgAcqMethod::Redirect(const string &NewURI)
{
std::cout << "103 Redirect\nURI: " << Queue->Uri << "\n"
<< "New-URI: " << NewURI << "\n"
<< "\n" << std::flush;
-
- // Change the URI for the request.
- Queue->Uri = NewURI;
-
- /* To keep the pipeline synchronized, move the current request to
- the end of the queue, past the end of the current pipeline. */
- FetchItem *I;
- for (I = Queue; I->Next != 0; I = I->Next) ;
- I->Next = Queue;
- Queue = Queue->Next;
- I->Next->Next = 0;
- if (QueueBack == 0)
- QueueBack = I->Next;
+ Dequeue();
}
/*}}}*/
// AcqMethod::FetchResult::FetchResult - Constructor /*{{{*/
@@ -465,3 +442,11 @@ void pkgAcqMethod::FetchResult::TakeHashes(Hashes &Hash)
SHA512Sum = Hash.SHA512.Result();
}
/*}}}*/
+void pkgAcqMethod::Dequeue() { /*{{{*/
+ FetchItem const * const Tmp = Queue;
+ Queue = Queue->Next;
+ if (Tmp == QueueBack)
+ QueueBack = Queue;
+ delete Tmp;
+}
+ /*}}}*/
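
The new pkgAcqMethod::Dequeue() centralizes the list surgery that was previously duplicated in Fail(), URIDone() and Redirect(). As a standalone illustration (plain C++ stand-ins, not the apt-pkg types), popping the head of a singly linked queue while keeping the "back" marker consistent looks like this:

    #include <cassert>
    #include <cstddef>

    struct FetchItem { FetchItem *Next; };

    struct Method
    {
       FetchItem *Queue;       // head of the pipeline
       FetchItem *QueueBack;   // last item already handed to the method

       Method() : Queue(NULL), QueueBack(NULL) {}

       void Dequeue()
       {
          FetchItem const * const Tmp = Queue;
          Queue = Queue->Next;
          if (Tmp == QueueBack)   // keep the back marker valid when it
             QueueBack = Queue;   // pointed at the element just removed
          delete Tmp;
       }
    };

    int main()
    {
       Method m;
       FetchItem *second = new FetchItem(); second->Next = NULL;
       FetchItem *first = new FetchItem();  first->Next = second;
       m.Queue = m.QueueBack = first;   // only 'first' has been sent so far
       m.Dequeue();                     // drops 'first'
       assert(m.Queue == second && m.QueueBack == second);
       delete second;
       return 0;
    }
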
diff --git a/apt-pkg/acquire-method.h b/apt-pkg/acquire-method.h
index 2dd9ad685..00f99e0a0 100644
--- a/apt-pkg/acquire-method.h
+++ b/apt-pkg/acquire-method.h
@@ -104,6 +104,9 @@ class pkgAcqMethod
pkgAcqMethod(const char *Ver,unsigned long Flags = 0);
virtual ~pkgAcqMethod() {};
+
+ private:
+ void Dequeue();
};
/** @} */
diff --git a/apt-pkg/acquire-worker.cc b/apt-pkg/acquire-worker.cc
index 77e2fc311..9d90b08bc 100644
--- a/apt-pkg/acquire-worker.cc
+++ b/apt-pkg/acquire-worker.cc
@@ -244,6 +244,21 @@ bool pkgAcquire::Worker::RunMessages()
string NewURI = LookupTag(Message,"New-URI",URI.c_str());
Itm->URI = NewURI;
+
+ ItemDone();
+
+ pkgAcquire::Item *Owner = Itm->Owner;
+ pkgAcquire::ItemDesc Desc = *Itm;
+
+ // Change the status so that it can be dequeued
+ Owner->Status = pkgAcquire::Item::StatIdle;
+ // Mark the item as done (taking care of all queues)
+ // and then put it in the main queue again
+ OwnerQ->ItemDone(Itm);
+ OwnerQ->Owner->Enqueue(Desc);
+
+ if (Log != 0)
+ Log->Done(Desc);
break;
}
@@ -467,40 +482,19 @@ bool pkgAcquire::Worker::SendConfiguration()
if (OutFd == -1)
return false;
-
- string Message = "601 Configuration\n";
- Message.reserve(2000);
- /* Write out all of the configuration directives by walking the
+ /* Write out all of the configuration directives by walking the
configuration tree */
- const Configuration::Item *Top = _config->Tree(0);
- for (; Top != 0;)
- {
- if (Top->Value.empty() == false)
- {
- string Line = "Config-Item: " + QuoteString(Top->FullTag(),"=\"\n") + "=";
- Line += QuoteString(Top->Value,"\n") + '\n';
- Message += Line;
- }
-
- if (Top->Child != 0)
- {
- Top = Top->Child;
- continue;
- }
-
- while (Top != 0 && Top->Next == 0)
- Top = Top->Parent;
- if (Top != 0)
- Top = Top->Next;
- }
- Message += '\n';
+ std::ostringstream Message;
+ Message << "601 Configuration\n";
+ _config->Dump(Message, NULL, "Config-Item: %F=%V\n", false);
+ Message << '\n';
if (Debug == true)
- clog << " -> " << Access << ':' << QuoteString(Message,"\n") << endl;
- OutQueue += Message;
- OutReady = true;
-
+ clog << " -> " << Access << ':' << QuoteString(Message.str(),"\n") << endl;
+ OutQueue += Message.str();
+ OutReady = true;
+
return true;
}
/*}}}*/
@@ -532,10 +526,17 @@ bool pkgAcquire::Worker::QueueItem(pkgAcquire::Queue::QItem *Item)
/* */
bool pkgAcquire::Worker::OutFdReady()
{
- if (FileFd::Write(OutFd,OutQueue.c_str(),OutQueue.length()) == false)
+ int Res;
+ do
+ {
+ Res = write(OutFd,OutQueue.c_str(),OutQueue.length());
+ }
+ while (Res < 0 && errno == EINTR);
+
+ if (Res <= 0)
return MethodFailure();
-
- OutQueue.clear();
+
+ OutQueue.erase(0,Res);
if (OutQueue.empty() == true)
OutReady = false;
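
OutFdReady() now keeps whatever write(2) could not take and retries on EINTR, instead of assuming the whole buffer was flushed in one call. The same partial-write pattern reduced to standard C++ (the descriptor and buffer are placeholders):

    #include <cerrno>
    #include <string>
    #include <unistd.h>

    // Sketch: push as much of 'out' to 'fd' as the kernel accepts right now,
    // erasing only the bytes that were actually written.
    static bool FlushSome(int fd, std::string &out)
    {
       ssize_t res;
       do
          res = write(fd, out.c_str(), out.length());
       while (res < 0 && errno == EINTR);   // retry interrupted writes

       if (res <= 0)
          return false;                     // real error (or nothing written)

       out.erase(0, res);                   // keep the unwritten tail for later
       return true;
    }
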
diff --git a/apt-pkg/acquire.cc b/apt-pkg/acquire.cc
index 5e1419056..a8a5abd34 100644
--- a/apt-pkg/acquire.cc
+++ b/apt-pkg/acquire.cc
@@ -244,11 +244,19 @@ void pkgAcquire::Dequeue(Item *Itm)
{
Queue *I = Queues;
bool Res = false;
- for (; I != 0; I = I->Next)
- Res |= I->Dequeue(Itm);
-
if (Debug == true)
clog << "Dequeuing " << Itm->DestFile << endl;
+
+ for (; I != 0; I = I->Next)
+ {
+ if (I->Dequeue(Itm))
+ {
+ Res = true;
+ if (Debug == true)
+ clog << "Dequeued from " << I->Name << endl;
+ }
+ }
+
if (Res == true)
ToFetch--;
}
@@ -269,9 +277,30 @@ string pkgAcquire::QueueName(string Uri,MethodConfig const *&Config)
/* Single-Instance methods get exactly one queue per URI. This is
also used for the Access queue method */
if (Config->SingleInstance == true || QueueMode == QueueAccess)
- return U.Access;
+ return U.Access;
+
+ string AccessSchema = U.Access + ':',
+ FullQueueName = AccessSchema + U.Host;
+ unsigned int Instances = 0, SchemaLength = AccessSchema.length();
+
+ Queue *I = Queues;
+ for (; I != 0; I = I->Next) {
+ // if the queue already exists, re-use it
+ if (I->Name == FullQueueName)
+ return FullQueueName;
+
+ if (I->Name.compare(0, SchemaLength, AccessSchema) == 0)
+ Instances++;
+ }
+
+ if (Debug) {
+ clog << "Found " << Instances << " instances of " << U.Access << endl;
+ }
+
+ if (Instances >= (unsigned int)_config->FindI("Acquire::QueueHost::Limit",10))
+ return U.Access;
- return U.Access + ':' + U.Host;
+ return FullQueueName;
}
/*}}}*/
// Acquire::GetConfig - Fetch the configuration information /*{{{*/
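
QueueName() now reuses an existing "access:host" queue when one exists and otherwise caps the number of per-host queues for one access method at Acquire::QueueHost::Limit (10 by default); beyond that, new URIs fall back to the single per-method queue. A hedged sketch of the same decision outside of pkgAcquire, with the queue bookkeeping replaced by a plain vector of queue names:

    #include <string>
    #include <vector>

    // Sketch: pick a queue name for a URI with access method 'access' and
    // host 'host', given the names of the queues that already exist.
    static std::string PickQueueName(std::string const &access, std::string const &host,
                                     std::vector<std::string> const &existing,
                                     unsigned int const limit = 10) // Acquire::QueueHost::Limit
    {
       std::string const schema = access + ':';
       std::string const full = schema + host;
       unsigned int instances = 0;
       for (std::vector<std::string>::const_iterator I = existing.begin(); I != existing.end(); ++I)
       {
          if (*I == full)
             return full;                   // reuse the existing host queue
          if (I->compare(0, schema.length(), schema) == 0)
             ++instances;                   // another host queue of this method
       }
       return (instances >= limit) ? access : full;
    }
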
diff --git a/apt-pkg/algorithms.cc b/apt-pkg/algorithms.cc
index 8beb2d51c..2f5fcc7ab 100644
--- a/apt-pkg/algorithms.cc
+++ b/apt-pkg/algorithms.cc
@@ -58,6 +58,12 @@ pkgSimulate::pkgSimulate(pkgDepCache *Cache) : pkgPackageManager(Cache),
FileNames[I] = Jnk;
}
/*}}}*/
+// Simulate::~Simulate - Destructor /*{{{*/
+pkgSimulate::~pkgSimulate()
+{
+ delete[] Flags;
+}
+ /*}}}*/
// Simulate::Describe - Describe a package /*{{{*/
// ---------------------------------------------------------------------
/* Parameter Current == true displays the current package version,
@@ -356,11 +362,36 @@ bool pkgDistUpgrade(pkgDepCache &Cache)
if (I->CurrentVer != 0)
Cache.MarkInstall(I, true, 0, false);
- /* Now, auto upgrade all essential packages - this ensures that
- the essential packages are present and working */
- for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I)
- if ((I->Flags & pkgCache::Flag::Essential) == pkgCache::Flag::Essential)
- Cache.MarkInstall(I, true, 0, false);
+ /* Now, install each essential package which is not installed
+ (and not provided by another package in the same name group) */
+ std::string essential = _config->Find("pkgCacheGen::Essential", "all");
+ if (essential == "all")
+ {
+ for (pkgCache::GrpIterator G = Cache.GrpBegin(); G.end() == false; ++G)
+ {
+ bool isEssential = false;
+ bool instEssential = false;
+ for (pkgCache::PkgIterator P = G.PackageList(); P.end() == false; P = G.NextPkg(P))
+ {
+ if ((P->Flags & pkgCache::Flag::Essential) != pkgCache::Flag::Essential)
+ continue;
+ isEssential = true;
+ if (Cache[P].Install() == true)
+ {
+ instEssential = true;
+ break;
+ }
+ }
+ if (isEssential == false || instEssential == true)
+ continue;
+ pkgCache::PkgIterator P = G.FindPreferredPkg();
+ Cache.MarkInstall(P, true, 0, false);
+ }
+ }
+ else if (essential != "none")
+ for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I)
+ if ((I->Flags & pkgCache::Flag::Essential) == pkgCache::Flag::Essential)
+ Cache.MarkInstall(I, true, 0, false);
/* We do it again over all previously installed packages to force
conflict resolution on them all. */
@@ -1453,7 +1484,7 @@ void pkgPrioSortList(pkgCache &Cache,pkgCache::Version **List)
qsort(List,Count,sizeof(*List),PrioComp);
}
/*}}}*/
-// ListUpdate - update the cache files /*{{{*/
+// ListUpdate - construct Fetcher and update the cache files /*{{{*/
// ---------------------------------------------------------------------
/* This is a simple wrapper to update the cache. it will fetch stuff
* from the network (or any other sources defined in sources.list)
@@ -1462,7 +1493,6 @@ bool ListUpdate(pkgAcquireStatus &Stat,
pkgSourceList &List,
int PulseInterval)
{
- pkgAcquire::RunResult res;
pkgAcquire Fetcher;
if (Fetcher.Setup(&Stat, _config->FindDir("Dir::State::Lists")) == false)
return false;
@@ -1471,11 +1501,24 @@ bool ListUpdate(pkgAcquireStatus &Stat,
if (List.GetIndexes(&Fetcher) == false)
return false;
+ return AcquireUpdate(Fetcher, PulseInterval, true);
+}
+ /*}}}*/
+// AcquireUpdate - take Fetcher and update the cache files /*{{{*/
+// ---------------------------------------------------------------------
+/* This is a simple wrapper to update the cache with a provided acquire
+ * If you only need control over Status and the used SourcesList use
+ * ListUpdate method instead.
+ */
+bool AcquireUpdate(pkgAcquire &Fetcher, int const PulseInterval,
+ bool const RunUpdateScripts, bool const ListCleanup)
+{
// Run scripts
- RunScripts("APT::Update::Pre-Invoke");
-
- // check arguments
- if(PulseInterval>0)
+ if (RunUpdateScripts == true)
+ RunScripts("APT::Update::Pre-Invoke");
+
+ pkgAcquire::RunResult res;
+ if(PulseInterval > 0)
res = Fetcher.Run(PulseInterval);
else
res = Fetcher.Run();
@@ -1512,7 +1555,7 @@ bool ListUpdate(pkgAcquireStatus &Stat,
// Clean out any old list files
// Keep "APT::Get::List-Cleanup" name for compatibility, but
// this is really a global option for the APT library now
- if (!TransientNetworkFailure && !Failed &&
+ if (!TransientNetworkFailure && !Failed && ListCleanup == true &&
(_config->FindB("APT::Get::List-Cleanup",true) == true &&
_config->FindB("APT::List-Cleanup",true) == true))
{
@@ -1529,11 +1572,14 @@ bool ListUpdate(pkgAcquireStatus &Stat,
// Run the success scripts if all was fine
- if(!TransientNetworkFailure && !Failed)
- RunScripts("APT::Update::Post-Invoke-Success");
+ if (RunUpdateScripts == true)
+ {
+ if(!TransientNetworkFailure && !Failed)
+ RunScripts("APT::Update::Post-Invoke-Success");
- // Run the other scripts
- RunScripts("APT::Update::Post-Invoke");
+ // Run the other scripts
+ RunScripts("APT::Update::Post-Invoke");
+ }
return true;
}
/*}}}*/
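
The new AcquireUpdate() lets a caller drive an update with its own pkgAcquire, for example to skip the Pre/Post-Invoke hooks or the list cleanup. A minimal usage sketch, assuming the usual apt-pkg setup around it (status object, source list and error handling abbreviated):

    #include <apt-pkg/acquire.h>
    #include <apt-pkg/algorithms.h>
    #include <apt-pkg/configuration.h>
    #include <apt-pkg/sourcelist.h>

    bool UpdateWithoutHooks(pkgAcquireStatus &Stat, pkgSourceList &List)
    {
       pkgAcquire Fetcher;
       if (Fetcher.Setup(&Stat, _config->FindDir("Dir::State::Lists")) == false)
          return false;
       if (List.GetIndexes(&Fetcher) == false)
          return false;
       // 0 => default pulse interval; false/false => skip the
       // APT::Update::{Pre,Post}-Invoke hooks and the list cleanup
       return AcquireUpdate(Fetcher, 0, false, false);
    }
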
diff --git a/apt-pkg/algorithms.h b/apt-pkg/algorithms.h
index 37eacf1f8..aff8a68f2 100644
--- a/apt-pkg/algorithms.h
+++ b/apt-pkg/algorithms.h
@@ -78,6 +78,7 @@ private:
public:
pkgSimulate(pkgDepCache *Cache);
+ ~pkgSimulate();
};
/*}}}*/
class pkgProblemResolver /*{{{*/
@@ -147,5 +148,7 @@ bool pkgMinimizeUpgrade(pkgDepCache &Cache);
void pkgPrioSortList(pkgCache &Cache,pkgCache::Version **List);
bool ListUpdate(pkgAcquireStatus &progress, pkgSourceList &List, int PulseInterval=0);
-
+bool AcquireUpdate(pkgAcquire &Fetcher, int const PulseInterval = 0,
+ bool const RunUpdateScripts = true, bool const ListCleanup = true);
+
#endif
diff --git a/apt-pkg/aptconfiguration.cc b/apt-pkg/aptconfiguration.cc
index d72b0c5ae..d31ccb642 100644
--- a/apt-pkg/aptconfiguration.cc
+++ b/apt-pkg/aptconfiguration.cc
@@ -47,6 +47,7 @@ const Configuration::getCompressionTypes(bool const &Cached) {
_config->CndSet("Acquire::CompressionTypes::gz","gzip");
setDefaultConfigurationForCompressors();
+ std::vector<APT::Configuration::Compressor> const compressors = getCompressors();
// accept non-list order as override setting for config settings on commandline
std::string const overrideOrder = _config->Find("Acquire::CompressionTypes::Order","");
@@ -60,15 +61,17 @@ const Configuration::getCompressionTypes(bool const &Cached) {
if ((*o).empty() == true)
continue;
// ignore types we have no method ready to use
- if (_config->Exists(std::string("Acquire::CompressionTypes::").append(*o)) == false)
+ std::string const method = std::string("Acquire::CompressionTypes::").append(*o);
+ if (_config->Exists(method) == false)
continue;
// ignore types we have no app ready to use
- std::string const appsetting = std::string("Dir::Bin::").append(*o);
- if (_config->Exists(appsetting) == true) {
- std::string const app = _config->FindFile(appsetting.c_str(), "");
- if (app.empty() == false && FileExists(app) == false)
- continue;
- }
+ std::string const app = _config->Find(method);
+ std::vector<APT::Configuration::Compressor>::const_iterator c = compressors.begin();
+ for (; c != compressors.end(); ++c)
+ if (c->Name == app)
+ break;
+ if (c == compressors.end())
+ continue;
types.push_back(*o);
}
@@ -84,12 +87,12 @@ const Configuration::getCompressionTypes(bool const &Cached) {
if (std::find(types.begin(),types.end(),Types->Tag) != types.end())
continue;
// ignore types we have no app ready to use
- std::string const appsetting = std::string("Dir::Bin::").append(Types->Value);
- if (appsetting.empty() == false && _config->Exists(appsetting) == true) {
- std::string const app = _config->FindFile(appsetting.c_str(), "");
- if (app.empty() == false && FileExists(app) == false)
- continue;
- }
+ std::vector<APT::Configuration::Compressor>::const_iterator c = compressors.begin();
+ for (; c != compressors.end(); ++c)
+ if (c->Name == Types->Value)
+ break;
+ if (c == compressors.end())
+ continue;
types.push_back(Types->Tag);
}
@@ -141,7 +144,7 @@ std::vector<std::string> const Configuration::getLanguages(bool const &All,
if (D != 0) {
builtin.push_back("none");
for (struct dirent *Ent = readdir(D); Ent != 0; Ent = readdir(D)) {
- string const name = Ent->d_name;
+ string const name = SubstVar(Ent->d_name, "%5f", "_");
size_t const foundDash = name.rfind("-");
size_t const foundUnderscore = name.rfind("_", foundDash);
if (foundDash == string::npos || foundUnderscore == string::npos ||
@@ -231,17 +234,21 @@ std::vector<std::string> const Configuration::getLanguages(bool const &All,
// override the configuration settings vector of languages.
string const forceLang = _config->Find("Acquire::Languages","");
if (forceLang.empty() == false) {
- if (forceLang == "environment") {
- codes = environment;
- } else if (forceLang != "none")
- codes.push_back(forceLang);
- else //if (forceLang == "none")
- builtin.clear();
- allCodes = codes;
- for (std::vector<string>::const_iterator b = builtin.begin();
- b != builtin.end(); ++b)
- if (std::find(allCodes.begin(), allCodes.end(), *b) == allCodes.end())
- allCodes.push_back(*b);
+ if (forceLang == "none") {
+ codes.clear();
+ allCodes.clear();
+ allCodes.push_back("none");
+ } else {
+ if (forceLang == "environment")
+ codes = environment;
+ else
+ codes.push_back(forceLang);
+ allCodes = codes;
+ for (std::vector<string>::const_iterator b = builtin.begin();
+ b != builtin.end(); ++b)
+ if (std::find(allCodes.begin(), allCodes.end(), *b) == allCodes.end())
+ allCodes.push_back(*b);
+ }
if (All == true)
return allCodes;
else
@@ -474,18 +481,18 @@ const Configuration::getCompressors(bool const Cached) {
setDefaultConfigurationForCompressors();
- compressors.push_back(Compressor(".", "", "", "", "", 1));
+ compressors.push_back(Compressor(".", "", "", NULL, NULL, 1));
if (_config->Exists("Dir::Bin::gzip") == false || FileExists(_config->FindFile("Dir::Bin::gzip")) == true)
compressors.push_back(Compressor("gzip",".gz","gzip","-9n","-d",2));
#ifdef HAVE_ZLIB
else
- compressors.push_back(Compressor("gzip",".gz","false", "", "", 2));
+ compressors.push_back(Compressor("gzip",".gz","false", NULL, NULL, 2));
#endif
if (_config->Exists("Dir::Bin::bzip2") == false || FileExists(_config->FindFile("Dir::Bin::bzip2")) == true)
compressors.push_back(Compressor("bzip2",".bz2","bzip2","-9","-d",3));
#ifdef HAVE_BZ2
else
- compressors.push_back(Compressor("bzip2",".bz2","false", "", "", 3));
+ compressors.push_back(Compressor("bzip2",".bz2","false", NULL, NULL, 3));
#endif
if (_config->Exists("Dir::Bin::xz") == false || FileExists(_config->FindFile("Dir::Bin::xz")) == true)
compressors.push_back(Compressor("xz",".xz","xz","-6","-d",4));
diff --git a/apt-pkg/cachefilter.cc b/apt-pkg/cachefilter.cc
index 9ec3fa699..35f95fe22 100644
--- a/apt-pkg/cachefilter.cc
+++ b/apt-pkg/cachefilter.cc
@@ -9,12 +9,14 @@
#include <apt-pkg/cachefilter.h>
#include <apt-pkg/error.h>
#include <apt-pkg/pkgcache.h>
-
-#include <apti18n.h>
+#include <apt-pkg/strutl.h>
#include <string>
#include <regex.h>
+#include <fnmatch.h>
+
+#include <apti18n.h>
/*}}}*/
namespace APT {
namespace CacheFilter {
@@ -52,5 +54,54 @@ PackageNameMatchesRegEx::~PackageNameMatchesRegEx() { /*{{{*/
delete pattern;
}
/*}}}*/
+
+// CompleteArch to <kernel>-<cpu> tuple /*{{{*/
+//----------------------------------------------------------------------
+/* The complete architecture, consisting of <kernel>-<cpu>. */
+static std::string CompleteArch(std::string const &arch) {
+ if (arch.find('-') != std::string::npos) {
+ // ensure that only -any- is replaced and not something like company-
+ std::string complete = std::string("-").append(arch).append("-");
+ complete = SubstVar(complete, "-any-", "-*-");
+ complete = complete.substr(1, complete.size()-2);
+ return complete;
+ }
+ else if (arch == "armel") return "linux-arm";
+ else if (arch == "armhf") return "linux-arm";
+ else if (arch == "lpia") return "linux-i386";
+ else if (arch == "powerpcspe") return "linux-powerpc";
+ else if (arch == "uclibc-linux-armel") return "linux-arm";
+ else if (arch == "uclinux-armel") return "uclinux-arm";
+ else if (arch == "any") return "*-*";
+ else return "linux-" + arch;
+}
+ /*}}}*/
+PackageArchitectureMatchesSpecification::PackageArchitectureMatchesSpecification(std::string const &pattern, bool const isPattern) :/*{{{*/
+ literal(pattern), isPattern(isPattern), d(NULL) {
+ complete = CompleteArch(pattern);
+}
+ /*}}}*/
+bool PackageArchitectureMatchesSpecification::operator() (char const * const &arch) {/*{{{*/
+ if (strcmp(literal.c_str(), arch) == 0 ||
+ strcmp(complete.c_str(), arch) == 0)
+ return true;
+ std::string const pkgarch = CompleteArch(arch);
+ if (isPattern == true)
+ return fnmatch(complete.c_str(), pkgarch.c_str(), 0) == 0;
+ return fnmatch(pkgarch.c_str(), complete.c_str(), 0) == 0;
+}
+ /*}}}*/
+bool PackageArchitectureMatchesSpecification::operator() (pkgCache::PkgIterator const &Pkg) {/*{{{*/
+ return (*this)(Pkg.Arch());
+}
+ /*}}}*/
+bool PackageArchitectureMatchesSpecification::operator() (pkgCache::VerIterator const &Ver) {/*{{{*/
+ return (*this)(Ver.ParentPkg());
+}
+ /*}}}*/
+PackageArchitectureMatchesSpecification::~PackageArchitectureMatchesSpecification() { /*{{{*/
+}
+ /*}}}*/
+
}
}
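
The new PackageArchitectureMatchesSpecification filter implements the Debian policy §11.1 wildcards by expanding both sides to a <kernel>-<cpu> tuple and comparing with fnmatch(3). A hedged usage sketch exercising the plain-string operator shown above (outside of any package cache):

    #include <apt-pkg/cachefilter.h>
    #include <iostream>

    int main()
    {
       // "linux-any" is the pattern; match concrete architectures against it
       APT::CacheFilter::PackageArchitectureMatchesSpecification linuxAny("linux-any", true);
       std::cout << linuxAny("amd64") << std::endl;          // 1: expands to linux-amd64
       std::cout << linuxAny("armel") << std::endl;          // 1: CompleteArch maps it to linux-arm
       std::cout << linuxAny("kfreebsd-amd64") << std::endl; // 0: different kernel
       return 0;
    }
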
diff --git a/apt-pkg/cachefilter.h b/apt-pkg/cachefilter.h
index 5d426008b..25cd43f47 100644
--- a/apt-pkg/cachefilter.h
+++ b/apt-pkg/cachefilter.h
@@ -26,6 +26,36 @@ public:
~PackageNameMatchesRegEx();
};
/*}}}*/
+// PackageArchitectureMatchesSpecification /*{{{*/
+/** \class PackageArchitectureMatchesSpecification
+ \brief matching against architecture specification strings
+
+ The strings are of the format <kernel>-<cpu> where either component,
+ or the whole string, can be the wildcard "any" as defined in
+ debian-policy §11.1 "Architecture specification strings".
+
+ Examples: i386, mipsel, linux-any, any-amd64, any */
+class PackageArchitectureMatchesSpecification {
+ std::string literal;
+ std::string complete;
+ bool isPattern;
+ /** \brief dpointer placeholder (for later in case we need it) */
+ void *d;
+public:
+ /** \brief matching against architecture specification strings
+ *
+ * @param pattern is the architecture specification string
+ * @param isPattern defines if the given \b pattern is an
+ * architecture specification pattern to match others against
+ * or if it is the fixed string and matched against patterns
+ */
+ PackageArchitectureMatchesSpecification(std::string const &pattern, bool const isPattern = true);
+ bool operator() (char const * const &arch);
+ bool operator() (pkgCache::PkgIterator const &Pkg);
+ bool operator() (pkgCache::VerIterator const &Ver);
+ ~PackageArchitectureMatchesSpecification();
+};
+ /*}}}*/
}
}
#endif
diff --git a/apt-pkg/cacheiterators.h b/apt-pkg/cacheiterators.h
index d5e018be9..dcd353119 100644
--- a/apt-pkg/cacheiterators.h
+++ b/apt-pkg/cacheiterators.h
@@ -285,6 +285,7 @@ class pkgCache::DepIterator : public Iterator<Dependency, DepIterator> {
bool IsNegative() const;
bool IsIgnorable(PrvIterator const &Prv) const;
bool IsIgnorable(PkgIterator const &Pkg) const;
+ bool IsMultiArchImplicit() const;
void GlobOr(DepIterator &Start,DepIterator &End);
Version **AllTargets() const;
bool SmartTargetPkg(PkgIterator &Result) const;
@@ -329,8 +330,9 @@ class pkgCache::PrvIterator : public Iterator<Provides, PrvIterator> {
inline VerIterator OwnerVer() const {return VerIterator(*Owner,Owner->VerP + S->Version);};
inline PkgIterator OwnerPkg() const {return PkgIterator(*Owner,Owner->PkgP + Owner->VerP[S->Version].ParentPkg);};
- inline PrvIterator() : Iterator<Provides, PrvIterator>(), Type(PrvVer) {};
+ bool IsMultiArchImplicit() const;
+ inline PrvIterator() : Iterator<Provides, PrvIterator>(), Type(PrvVer) {};
inline PrvIterator(pkgCache &Owner, Provides *Trg, Version*) :
Iterator<Provides, PrvIterator>(Owner, Trg), Type(PrvVer) {
if (S == 0)
diff --git a/apt-pkg/cacheset.cc b/apt-pkg/cacheset.cc
index b892ab4bf..784d1f0bf 100644
--- a/apt-pkg/cacheset.cc
+++ b/apt-pkg/cacheset.cc
@@ -182,15 +182,61 @@ pkgCache::PkgIterator PackageContainerInterface::FromName(pkgCacheFile &Cache,
return Pkg;
}
/*}}}*/
+// FromGroup - Returns the package defined by this string /*{{{*/
+bool PackageContainerInterface::FromGroup(PackageContainerInterface * const pci, pkgCacheFile &Cache,
+ std::string pkg, CacheSetHelper &helper) {
+ if (unlikely(Cache.GetPkgCache() == 0))
+ return false;
+
+ size_t const archfound = pkg.find_last_of(':');
+ std::string arch;
+ if (archfound != std::string::npos) {
+ arch = pkg.substr(archfound+1);
+ pkg.erase(archfound);
+ }
+
+ pkgCache::GrpIterator Grp = Cache.GetPkgCache()->FindGrp(pkg);
+ if (Grp.end() == false) {
+ if (arch.empty() == true) {
+ pkgCache::PkgIterator Pkg = Grp.FindPreferredPkg();
+ if (Pkg.end() == false)
+ {
+ pci->insert(Pkg);
+ return true;
+ }
+ } else {
+ bool found = false;
+ // for 'linux-any' return the first package matching, for 'linux-*' return all matches
+ bool const isGlobal = arch.find('*') != std::string::npos;
+ APT::CacheFilter::PackageArchitectureMatchesSpecification pams(arch);
+ for (pkgCache::PkgIterator Pkg = Grp.PackageList(); Pkg.end() == false; Pkg = Grp.NextPkg(Pkg)) {
+ if (pams(Pkg) == false)
+ continue;
+ pci->insert(Pkg);
+ found = true;
+ if (isGlobal == false)
+ break;
+ }
+ if (found == true)
+ return true;
+ }
+ }
+
+ pkgCache::PkgIterator Pkg = helper.canNotFindPkgName(Cache, pkg);
+ if (Pkg.end() == true)
+ return false;
+
+ pci->insert(Pkg);
+ return true;
+}
+ /*}}}*/
// FromString - Return all packages matching a specific string /*{{{*/
bool PackageContainerInterface::FromString(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string const &str, CacheSetHelper &helper) {
bool found = true;
_error->PushToStack();
- pkgCache::PkgIterator Pkg = FromName(Cache, str, helper);
- if (Pkg.end() == false)
- pci->insert(Pkg);
- else if (FromTask(pci, Cache, str, helper) == false &&
+ if (FromGroup(pci, Cache, str, helper) == false &&
+ FromTask(pci, Cache, str, helper) == false &&
FromRegEx(pci, Cache, str, helper) == false)
{
helper.canNotFindPackage(pci, Cache, str);
@@ -217,6 +263,7 @@ bool PackageContainerInterface::FromModifierCommandLine(unsigned short &modID, P
pkgCacheFile &Cache, const char * cmdline,
std::list<Modifier> const &mods, CacheSetHelper &helper) {
std::string str = cmdline;
+ unsigned short fallback = modID;
bool modifierPresent = false;
for (std::list<Modifier>::const_iterator mod = mods.begin();
mod != mods.end(); ++mod) {
@@ -243,6 +290,7 @@ bool PackageContainerInterface::FromModifierCommandLine(unsigned short &modID, P
helper.showErrors(errors);
if (Pkg.end() == false) {
pci->insert(Pkg);
+ modID = fallback;
return true;
}
}
@@ -281,13 +329,14 @@ bool VersionContainerInterface::FromModifierCommandLine(unsigned short &modID,
modifierPresent = true;
break;
}
-
if (modifierPresent == true) {
bool const errors = helper.showErrors(false);
bool const found = VersionContainerInterface::FromString(vci, Cache, cmdline, select, helper, true);
helper.showErrors(errors);
- if (found == true)
+ if (found == true) {
+ modID = fallback;
return true;
+ }
}
return FromString(vci, Cache, str, select, helper);
}
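
FromGroup() lets the command-line layer resolve "name:arch" specifications including the wildcard forms; per the comment above, "name:linux-*" inserts every matching architecture while "name:linux-any" stops at the first match. A hedged sketch of a front end using it through the container interface (APT::PackageSet and the cache/helper setup are assumed from cacheset.h of this era; error handling omitted):

    #include <apt-pkg/cachefile.h>
    #include <apt-pkg/cacheset.h>

    // Sketch: collect the packages of the 'libc6' group whose architecture
    // matches the wildcard, using the new FromGroup() entry point.
    bool CollectLibc(pkgCacheFile &Cache)
    {
       APT::CacheSetHelper helper;
       APT::PackageSet pkgs;
       // "libc6:linux-*" inserts all matches, "libc6:linux-any" only the first
       return APT::PackageContainerInterface::FromGroup(&pkgs, Cache, "libc6:linux-*", helper);
    }
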
diff --git a/apt-pkg/cacheset.h b/apt-pkg/cacheset.h
index 6f0a0e358..2a45910ba 100644
--- a/apt-pkg/cacheset.h
+++ b/apt-pkg/cacheset.h
@@ -139,6 +139,7 @@ public:
static bool FromTask(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern, CacheSetHelper &helper);
static bool FromRegEx(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern, CacheSetHelper &helper);
static pkgCache::PkgIterator FromName(pkgCacheFile &Cache, std::string const &pattern, CacheSetHelper &helper);
+ static bool FromGroup(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern, CacheSetHelper &helper);
static bool FromString(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string const &pattern, CacheSetHelper &helper);
static bool FromCommandLine(PackageContainerInterface * const pci, pkgCacheFile &Cache, const char **cmdline, CacheSetHelper &helper);
@@ -186,7 +187,7 @@ public: /*{{{*/
pkgCache::PkgIterator getPkg(void) const { return *_iter; }
inline pkgCache::PkgIterator operator*(void) const { return *_iter; };
operator typename Container::iterator(void) const { return _iter; }
- operator typename PackageContainer<Container>::const_iterator() { return PackageContainer<Container>::const_iterator(_iter); }
+ operator typename PackageContainer<Container>::const_iterator() { return typename PackageContainer<Container>::const_iterator(_iter); }
inline iterator& operator++() { ++_iter; return *this; }
inline iterator operator++(int) { iterator tmp(*this); operator++(); return tmp; }
inline bool operator!=(iterator const &i) const { return _iter != i._iter; };
@@ -506,7 +507,7 @@ public: /*{{{*/
pkgCache::VerIterator getVer(void) const { return *_iter; }
inline pkgCache::VerIterator operator*(void) const { return *_iter; };
operator typename Container::iterator(void) const { return _iter; }
- operator typename VersionContainer<Container>::const_iterator() { return VersionContainer<Container>::const_iterator(_iter); }
+ operator typename VersionContainer<Container>::const_iterator() { return typename VersionContainer<Container>::const_iterator(_iter); }
inline iterator& operator++() { ++_iter; return *this; }
inline iterator operator++(int) { iterator tmp(*this); operator++(); return tmp; }
inline bool operator!=(iterator const &i) const { return _iter != i._iter; };
diff --git a/apt-pkg/cdrom.cc b/apt-pkg/cdrom.cc
index 50c204371..8462e8286 100644
--- a/apt-pkg/cdrom.cc
+++ b/apt-pkg/cdrom.cc
@@ -409,27 +409,10 @@ bool pkgCdrom::WriteDatabase(Configuration &Cnf)
/* Write out all of the configuration directives by walking the
configuration tree */
- const Configuration::Item *Top = Cnf.Tree(0);
- for (; Top != 0;)
- {
- // Print the config entry
- if (Top->Value.empty() == false)
- Out << Top->FullTag() + " \"" << Top->Value << "\";" << endl;
-
- if (Top->Child != 0)
- {
- Top = Top->Child;
- continue;
- }
-
- while (Top != 0 && Top->Next == 0)
- Top = Top->Parent;
- if (Top != 0)
- Top = Top->Next;
- }
+ Cnf.Dump(Out, NULL, "%f \"%v\";\n", false);
Out.close();
-
+
if (FileExists(DFile) == true && link(DFile.c_str(),string(DFile + '~').c_str()) != 0)
return _error->Errno("link", "Failed to link %s to %s~", DFile.c_str(), DFile.c_str());
if (rename(NewFile.c_str(),DFile.c_str()) != 0)
diff --git a/apt-pkg/contrib/cmndline.cc b/apt-pkg/contrib/cmndline.cc
index 159f330a1..75d02cad4 100644
--- a/apt-pkg/contrib/cmndline.cc
+++ b/apt-pkg/contrib/cmndline.cc
@@ -92,8 +92,9 @@ bool CommandLine::Parse(int argc,const char **argv)
// Match up to a = against the list
Args *A;
const char *OptEnd = strchrnul(Opt, '=');
- for (A = ArgList; A->end() == false &&
- stringcasecmp(Opt,OptEnd,A->LongOpt) != 0; A++);
+ for (A = ArgList; A->end() == false &&
+ (A->LongOpt == 0 || stringcasecmp(Opt,OptEnd,A->LongOpt) != 0);
+ ++A);
// Failed, look for a word after the first - (no-foo)
bool PreceedMatch = false;
@@ -105,7 +106,8 @@ bool CommandLine::Parse(int argc,const char **argv)
Opt++;
for (A = ArgList; A->end() == false &&
- stringcasecmp(Opt,OptEnd,A->LongOpt) != 0; A++);
+ (A->LongOpt == 0 || stringcasecmp(Opt,OptEnd,A->LongOpt) != 0);
+ ++A);
// Failed again..
if (A->end() == true && OptEnd - Opt != 1)
diff --git a/apt-pkg/contrib/configuration.cc b/apt-pkg/contrib/configuration.cc
index 36866a35a..4de17e3e1 100644
--- a/apt-pkg/contrib/configuration.cc
+++ b/apt-pkg/contrib/configuration.cc
@@ -171,48 +171,60 @@ string Configuration::Find(const char *Name,const char *Default) const
string Configuration::FindFile(const char *Name,const char *Default) const
{
const Item *RootItem = Lookup("RootDir");
- std::string rootDir = (RootItem == 0) ? "" : RootItem->Value;
- if(rootDir.size() > 0 && rootDir[rootDir.size() - 1] != '/')
- rootDir.push_back('/');
+ std::string result = (RootItem == 0) ? "" : RootItem->Value;
+ if(result.empty() == false && result[result.size() - 1] != '/')
+ result.push_back('/');
const Item *Itm = Lookup(Name);
if (Itm == 0 || Itm->Value.empty() == true)
{
- if (Default == 0)
- return rootDir;
- else
- return rootDir + Default;
+ if (Default != 0)
+ result.append(Default);
}
-
- string val = Itm->Value;
- while (Itm->Parent != 0)
+ else
{
- if (Itm->Parent->Value.empty() == true)
+ string val = Itm->Value;
+ while (Itm->Parent != 0)
{
- Itm = Itm->Parent;
- continue;
- }
+ if (Itm->Parent->Value.empty() == true)
+ {
+ Itm = Itm->Parent;
+ continue;
+ }
+
+ // Absolute
+ if (val.length() >= 1 && val[0] == '/')
+ {
+ if (val.compare(0, 9, "/dev/null") == 0)
+ val.erase(9);
+ break;
+ }
- // Absolute
- if (val.length() >= 1 && val[0] == '/')
- break;
+ // ~/foo or ./foo
+ if (val.length() >= 2 && (val[0] == '~' || val[0] == '.') && val[1] == '/')
+ break;
- // ~/foo or ./foo
- if (val.length() >= 2 && (val[0] == '~' || val[0] == '.') && val[1] == '/')
- break;
-
- // ../foo
- if (val.length() >= 3 && val[0] == '.' && val[1] == '.' && val[2] == '/')
- break;
-
- if (Itm->Parent->Value.end()[-1] != '/')
- val.insert(0, "/");
+ // ../foo
+ if (val.length() >= 3 && val[0] == '.' && val[1] == '.' && val[2] == '/')
+ break;
+
+ if (Itm->Parent->Value.end()[-1] != '/')
+ val.insert(0, "/");
- val.insert(0, Itm->Parent->Value);
- Itm = Itm->Parent;
+ val.insert(0, Itm->Parent->Value);
+ Itm = Itm->Parent;
+ }
+ result.append(val);
}
- return rootDir + val;
+ // do some normalisation by removing // and /./ from the path
+ size_t found = string::npos;
+ while ((found = result.find("/./")) != string::npos)
+ result.replace(found, 3, "/");
+ while ((found = result.find("//")) != string::npos)
+ result.replace(found, 2, "/");
+
+ return result;
}
/*}}}*/
// Configuration::FindDir - Find a directory name /*{{{*/
@@ -222,7 +234,12 @@ string Configuration::FindDir(const char *Name,const char *Default) const
{
string Res = FindFile(Name,Default);
if (Res.end()[-1] != '/')
+ {
+ size_t const found = Res.rfind("/dev/null");
+ if (found != string::npos && found == Res.size() - 9)
+ return Res; // /dev/null returning
return Res + '/';
+ }
return Res;
}
/*}}}*/
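
The rewritten FindFile()/FindDir() now treat /dev/null specially (nothing is appended to it, not even the trailing slash) and normalise "//" and "/./" out of the composed RootDir + parent + value path. A hedged sketch of the expected behaviour; the keys and values are illustrative only:

    #include <apt-pkg/configuration.h>
    #include <iostream>

    int main()
    {
       Configuration Cnf;
       Cnf.Set("Dir", "/");
       Cnf.Set("Dir::Etc", "etc/apt//");   // note the stray double slash
       Cnf.Set("Dir::Etc::main", "apt.conf");
       // composed from the parent directories, with // and /./ squashed
       std::cout << Cnf.FindFile("Dir::Etc::main") << std::endl;  // /etc/apt/apt.conf

       Cnf.Set("Dir::Etc::parts", "/dev/null");
       std::cout << Cnf.FindDir("Dir::Etc::parts") << std::endl;  // /dev/null (no '/' appended)
       return 0;
    }
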
@@ -482,24 +499,80 @@ bool Configuration::ExistsAny(const char *Name) const
/* Dump the entire configuration space */
void Configuration::Dump(ostream& str)
{
- /* Write out all of the configuration directives by walking the
+ Dump(str, NULL, "%f \"%v\";\n", true);
+}
+void Configuration::Dump(ostream& str, char const * const root,
+ char const * const formatstr, bool const emptyValue)
+{
+ const Configuration::Item* Top = Tree(root);
+ if (Top == 0)
+ return;
+ const Configuration::Item* const Root = (root == NULL) ? NULL : Top;
+ std::vector<std::string> const format = VectorizeString(formatstr, '%');
+
+ /* Write out all of the configuration directives by walking the
configuration tree */
- const Configuration::Item *Top = Tree(0);
- for (; Top != 0;)
- {
- str << Top->FullTag() << " \"" << Top->Value << "\";" << endl;
-
+ do {
+ if (emptyValue == true || Top->Value.empty() == emptyValue)
+ {
+ std::vector<std::string>::const_iterator f = format.begin();
+ str << *f;
+ for (++f; f != format.end(); ++f)
+ {
+ if (f->empty() == true)
+ {
+ ++f;
+ str << '%' << *f;
+ continue;
+ }
+ char const type = (*f)[0];
+ if (type == 'f')
+ str << Top->FullTag();
+ else if (type == 't')
+ str << Top->Tag;
+ else if (type == 'v')
+ str << Top->Value;
+ else if (type == 'F')
+ str << QuoteString(Top->FullTag(), "=\"\n");
+ else if (type == 'T')
+ str << QuoteString(Top->Tag, "=\"\n");
+ else if (type == 'V')
+ str << QuoteString(Top->Value, "=\"\n");
+ else if (type == 'n')
+ str << "\n";
+ else if (type == 'N')
+ str << "\t";
+ else
+ str << '%' << type;
+ str << f->c_str() + 1;
+ }
+ }
+
if (Top->Child != 0)
{
Top = Top->Child;
continue;
}
-
+
while (Top != 0 && Top->Next == 0)
Top = Top->Parent;
if (Top != 0)
Top = Top->Next;
- }
+
+ if (Root != NULL)
+ {
+ const Configuration::Item* I = Top;
+ while(I != 0)
+ {
+ if (I == Root)
+ break;
+ else
+ I = I->Parent;
+ }
+ if (I == 0)
+ break;
+ }
+ } while (Top != 0);
}
/*}}}*/
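
The new Dump(ostream&, root, format, emptyValue) overload drives both the classic full dump ("%f \"%v\";\n") and the "Config-Item: %F=%V\n" lines sent to acquire workers above. A hedged sketch of the format specifiers: %f/%t/%v are the full tag, tag and value, the uppercase variants are quoted, %n/%N emit newline/tab:

    #include <apt-pkg/configuration.h>
    #include <iostream>

    int main()
    {
       Configuration Cnf;
       Cnf.Set("APT::Architecture", "amd64");
       Cnf.Set("APT::Color", "");

       // classic dump of everything, including empty values
       Cnf.Dump(std::cout, NULL, "%f \"%v\";\n", true);

       // only the APT subtree, skipping empty values, quoted like the
       // 601 Configuration message handed to acquire workers
       Cnf.Dump(std::cout, "APT", "Config-Item: %F=%V\n", false);
       return 0;
    }
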
diff --git a/apt-pkg/contrib/configuration.h b/apt-pkg/contrib/configuration.h
index 4c2e75041..ea94c2fe6 100644
--- a/apt-pkg/contrib/configuration.h
+++ b/apt-pkg/contrib/configuration.h
@@ -103,6 +103,8 @@ class Configuration
inline void Dump() { Dump(std::clog); };
void Dump(std::ostream& str);
+ void Dump(std::ostream& str, char const * const root,
+ char const * const format, bool const emptyValue);
Configuration(const Item *Root);
Configuration();
diff --git a/apt-pkg/contrib/fileutl.cc b/apt-pkg/contrib/fileutl.cc
index 9e3611b26..960616f33 100644
--- a/apt-pkg/contrib/fileutl.cc
+++ b/apt-pkg/contrib/fileutl.cc
@@ -81,6 +81,31 @@ class FileFdPrivate {
FileFdPrivate() : gz(NULL), bz2(NULL),
compressed_fd(-1), compressor_pid(-1), pipe(false),
openmode(0), seekpos(0) {};
+ bool CloseDown(std::string const &FileName)
+ {
+ bool Res = true;
+#ifdef HAVE_ZLIB
+ if (gz != NULL) {
+ int const e = gzclose(gz);
+ gz = NULL;
+ // gzclose() on empty files always fails with "buffer error" here, ignore that
+ if (e != 0 && e != Z_BUF_ERROR)
+ Res &= _error->Errno("close",_("Problem closing the gzip file %s"), FileName.c_str());
+ }
+#endif
+#ifdef HAVE_BZ2
+ if (bz2 != NULL) {
+ BZ2_bzclose(bz2);
+ bz2 = NULL;
+ }
+#endif
+ if (compressor_pid > 0)
+ ExecWait(compressor_pid, "FileFdCompressor", true);
+ compressor_pid = -1;
+
+ return Res;
+ }
+ ~FileFdPrivate() { CloseDown(""); }
};
// RunScripts - Run a set of scripts from a configuration subtree /*{{{*/
@@ -827,6 +852,26 @@ bool ExecWait(pid_t Pid,const char *Name,bool Reap)
}
/*}}}*/
+// StartsWithGPGClearTextSignature - Check if a file is Pgp/GPG clearsigned /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool StartsWithGPGClearTextSignature(string const &FileName)
+{
+ static const char* SIGMSG = "-----BEGIN PGP SIGNED MESSAGE-----\n";
+ char buffer[strlen(SIGMSG)+1];
+ FILE* gpg = fopen(FileName.c_str(), "r");
+ if (gpg == NULL)
+ return false;
+
+ char const * const test = fgets(buffer, sizeof(buffer), gpg);
+ fclose(gpg);
+ if (test == NULL || strcmp(buffer, SIGMSG) != 0)
+ return false;
+
+ return true;
+}
+
+
// FileFd::Open - Open a file /*{{{*/
// ---------------------------------------------------------------------
/* The most commonly used open mode combinations are given with Mode */
@@ -905,8 +950,6 @@ bool FileFd::Open(string FileName,unsigned int const Mode,CompressMode Compress,
bool FileFd::Open(string FileName,unsigned int const Mode,APT::Configuration::Compressor const &compressor, unsigned long const Perms)
{
Close();
- d = new FileFdPrivate;
- d->openmode = Mode;
Flags = AutoClose;
if ((Mode & WriteOnly) != WriteOnly && (Mode & (Atomic | Create | Empty | Exclusive)) != 0)
@@ -1000,10 +1043,21 @@ bool FileFd::OpenDescriptor(int Fd, unsigned int const Mode, CompressMode Compre
bool FileFd::OpenDescriptor(int Fd, unsigned int const Mode, APT::Configuration::Compressor const &compressor, bool AutoClose)
{
Close();
- d = new FileFdPrivate;
- d->openmode = Mode;
Flags = (AutoClose) ? FileFd::AutoClose : 0;
- iFd = Fd;
+ if (AutoClose == false && (
+#ifdef HAVE_ZLIB
+ compressor.Name == "gzip" ||
+#endif
+#ifdef HAVE_BZ2
+ compressor.Name == "bzip2" ||
+#endif
+ false))
+ {
+ // Need to duplicate fd here or gzclose for cleanup will close the fd as well
+ iFd = dup(Fd);
+ }
+ else
+ iFd = Fd;
this->FileName = "";
if (OpenInternDescriptor(Mode, compressor) == false)
{
@@ -1015,12 +1069,24 @@ bool FileFd::OpenDescriptor(int Fd, unsigned int const Mode, APT::Configuration:
}
bool FileFd::OpenInternDescriptor(unsigned int const Mode, APT::Configuration::Compressor const &compressor)
{
- d->compressor = compressor;
if (compressor.Name == "." || compressor.Binary.empty() == true)
return true;
+
+ if (d == NULL)
+ {
+ d = new FileFdPrivate();
+ d->openmode = Mode;
+ d->compressor = compressor;
+ }
+
#ifdef HAVE_ZLIB
- else if (compressor.Name == "gzip")
+ if (compressor.Name == "gzip")
{
+ if (d->gz != NULL)
+ {
+ gzclose(d->gz);
+ d->gz = NULL;
+ }
if ((Mode & ReadWrite) == ReadWrite)
d->gz = gzdopen(iFd, "r+");
else if ((Mode & WriteOnly) == WriteOnly)
@@ -1034,8 +1100,13 @@ bool FileFd::OpenInternDescriptor(unsigned int const Mode, APT::Configuration::C
}
#endif
#ifdef HAVE_BZ2
- else if (compressor.Name == "bzip2")
+ if (compressor.Name == "bzip2")
{
+ if (d->bz2 != NULL)
+ {
+ BZ2_bzclose(d->bz2);
+ d->bz2 = NULL;
+ }
if ((Mode & ReadWrite) == ReadWrite)
d->bz2 = BZ2_bzdopen(iFd, "r+");
else if ((Mode & WriteOnly) == WriteOnly)
@@ -1049,14 +1120,20 @@ bool FileFd::OpenInternDescriptor(unsigned int const Mode, APT::Configuration::C
}
#endif
+ // collect zombies here in case we reopen
+ if (d->compressor_pid > 0)
+ ExecWait(d->compressor_pid, "FileFdCompressor", true);
if ((Mode & ReadWrite) == ReadWrite)
+ {
+ Flags |= Fail;
return _error->Error("ReadWrite mode is not supported for file %s", FileName.c_str());
+ }
bool const Comp = (Mode & WriteOnly) == WriteOnly;
- // Handle 'decompression' of empty files
if (Comp == false)
{
+ // Handle 'decompression' of empty files
struct stat Buf;
fstat(iFd, &Buf);
if (Buf.st_size == 0 && S_ISFIFO(Buf.st_mode) == false)
@@ -1065,13 +1142,19 @@ bool FileFd::OpenInternDescriptor(unsigned int const Mode, APT::Configuration::C
// We don't need the file open - instead let the compressor open it
// as he properly knows better how to efficiently read from 'his' file
if (FileName.empty() == false)
+ {
close(iFd);
+ iFd = -1;
+ }
}
// Create a data pipe
int Pipe[2] = {-1,-1};
if (pipe(Pipe) != 0)
+ {
+ Flags |= Fail;
return _error->Errno("pipe",_("Failed to create subprocess IPC"));
+ }
for (int J = 0; J != 2; J++)
SetCloseExec(Pipe[J],true);
@@ -1098,6 +1181,12 @@ bool FileFd::OpenInternDescriptor(unsigned int const Mode, APT::Configuration::C
dup2(d->compressed_fd,STDIN_FILENO);
dup2(Pipe[1],STDOUT_FILENO);
}
+ int const nullfd = open("/dev/null", O_WRONLY);
+ if (nullfd != -1)
+ {
+ dup2(nullfd,STDERR_FILENO);
+ close(nullfd);
+ }
SetCloseExec(STDOUT_FILENO,false);
SetCloseExec(STDIN_FILENO,false);
@@ -1127,8 +1216,6 @@ bool FileFd::OpenInternDescriptor(unsigned int const Mode, APT::Configuration::C
close(Pipe[0]);
else
close(Pipe[1]);
- if (Comp == true || FileName.empty() == true)
- close(d->compressed_fd);
return true;
}
@@ -1140,6 +1227,12 @@ bool FileFd::OpenInternDescriptor(unsigned int const Mode, APT::Configuration::C
FileFd::~FileFd()
{
Close();
+ if (d != NULL)
+ {
+ d->CloseDown(FileName);
+ delete d;
+ d = NULL;
+ }
}
/*}}}*/
// FileFd::Read - Read a bit of the file /*{{{*/
@@ -1156,12 +1249,12 @@ bool FileFd::Read(void *To,unsigned long long Size,unsigned long long *Actual)
do
{
#ifdef HAVE_ZLIB
- if (d->gz != NULL)
+ if (d != NULL && d->gz != NULL)
Res = gzread(d->gz,To,Size);
else
#endif
#ifdef HAVE_BZ2
- if (d->bz2 != NULL)
+ if (d != NULL && d->bz2 != NULL)
Res = BZ2_bzread(d->bz2,To,Size);
else
#endif
@@ -1173,7 +1266,7 @@ bool FileFd::Read(void *To,unsigned long long Size,unsigned long long *Actual)
continue;
Flags |= Fail;
#ifdef HAVE_ZLIB
- if (d->gz != NULL)
+ if (d != NULL && d->gz != NULL)
{
int err;
char const * const errmsg = gzerror(d->gz, &err);
@@ -1182,7 +1275,7 @@ bool FileFd::Read(void *To,unsigned long long Size,unsigned long long *Actual)
}
#endif
#ifdef HAVE_BZ2
- if (d->bz2 != NULL)
+ if (d != NULL && d->bz2 != NULL)
{
int err;
char const * const errmsg = BZ2_bzerror(d->bz2, &err);
@@ -1195,7 +1288,8 @@ bool FileFd::Read(void *To,unsigned long long Size,unsigned long long *Actual)
To = (char *)To + Res;
Size -= Res;
- d->seekpos += Res;
+ if (d != NULL)
+ d->seekpos += Res;
if (Actual != 0)
*Actual += Res;
}
@@ -1223,7 +1317,7 @@ char* FileFd::ReadLine(char *To, unsigned long long const Size)
{
*To = '\0';
#ifdef HAVE_ZLIB
- if (d->gz != NULL)
+ if (d != NULL && d->gz != NULL)
return gzgets(d->gz, To, Size);
#endif
@@ -1254,12 +1348,12 @@ bool FileFd::Write(const void *From,unsigned long long Size)
do
{
#ifdef HAVE_ZLIB
- if (d->gz != NULL)
+ if (d != NULL && d->gz != NULL)
Res = gzwrite(d->gz,From,Size);
else
#endif
#ifdef HAVE_BZ2
- if (d->bz2 != NULL)
+ if (d != NULL && d->bz2 != NULL)
Res = BZ2_bzwrite(d->bz2,(void*)From,Size);
else
#endif
@@ -1270,7 +1364,7 @@ bool FileFd::Write(const void *From,unsigned long long Size)
{
Flags |= Fail;
#ifdef HAVE_ZLIB
- if (d->gz != NULL)
+ if (d != NULL && d->gz != NULL)
{
int err;
char const * const errmsg = gzerror(d->gz, &err);
@@ -1279,7 +1373,7 @@ bool FileFd::Write(const void *From,unsigned long long Size)
}
#endif
#ifdef HAVE_BZ2
- if (d->bz2 != NULL)
+ if (d != NULL && d->bz2 != NULL)
{
int err;
char const * const errmsg = BZ2_bzerror(d->bz2, &err);
@@ -1292,7 +1386,8 @@ bool FileFd::Write(const void *From,unsigned long long Size)
From = (char *)From + Res;
Size -= Res;
- d->seekpos += Res;
+ if (d != NULL)
+ d->seekpos += Res;
}
while (Res > 0 && Size > 0);
@@ -1330,11 +1425,11 @@ bool FileFd::Write(int Fd, const void *From, unsigned long long Size)
/* */
bool FileFd::Seek(unsigned long long To)
{
- if (d->pipe == true
+ if (d != NULL && (d->pipe == true
#ifdef HAVE_BZ2
- || d->bz2 != NULL
+ || d->bz2 != NULL
#endif
- )
+ ))
{
// Our poor man seeking in pipes is costly, so try to avoid it
unsigned long long seekpos = Tell();
@@ -1344,13 +1439,17 @@ bool FileFd::Seek(unsigned long long To)
return Skip(To - seekpos);
if ((d->openmode & ReadOnly) != ReadOnly)
+ {
+ Flags |= Fail;
return _error->Error("Reopen is only implemented for read-only files!");
+ }
#ifdef HAVE_BZ2
if (d->bz2 != NULL)
BZ2_bzclose(d->bz2);
#endif
- close(iFd);
- iFd = 0;
+ if (iFd != -1)
+ close(iFd);
+ iFd = -1;
if (TemporaryFileName.empty() == false)
iFd = open(TemporaryFileName.c_str(), O_RDONLY);
else if (FileName.empty() == false)
@@ -1360,12 +1459,18 @@ bool FileFd::Seek(unsigned long long To)
if (d->compressed_fd > 0)
if (lseek(d->compressed_fd, 0, SEEK_SET) != 0)
iFd = d->compressed_fd;
- if (iFd <= 0)
+ if (iFd < 0)
+ {
+ Flags |= Fail;
return _error->Error("Reopen is not implemented for pipes opened with FileFd::OpenDescriptor()!");
+ }
}
if (OpenInternDescriptor(d->openmode, d->compressor) == false)
+ {
+ Flags |= Fail;
return _error->Error("Seek on file %s because it couldn't be reopened", FileName.c_str());
+ }
if (To != 0)
return Skip(To);
@@ -1375,7 +1480,7 @@ bool FileFd::Seek(unsigned long long To)
}
int res;
#ifdef HAVE_ZLIB
- if (d->gz)
+ if (d != NULL && d->gz)
res = gzseek(d->gz,To,SEEK_SET);
else
#endif
@@ -1386,7 +1491,8 @@ bool FileFd::Seek(unsigned long long To)
return _error->Error("Unable to seek to %llu", To);
}
- d->seekpos = To;
+ if (d != NULL)
+ d->seekpos = To;
return true;
}
/*}}}*/
@@ -1395,11 +1501,11 @@ bool FileFd::Seek(unsigned long long To)
/* */
bool FileFd::Skip(unsigned long long Over)
{
- if (d->pipe == true
+ if (d != NULL && (d->pipe == true
#ifdef HAVE_BZ2
- || d->bz2 != NULL
+ || d->bz2 != NULL
#endif
- )
+ ))
{
d->seekpos += Over;
char buffer[1024];
@@ -1407,7 +1513,10 @@ bool FileFd::Skip(unsigned long long Over)
{
unsigned long long toread = std::min((unsigned long long) sizeof(buffer), Over);
if (Read(buffer, toread) == false)
+ {
+ Flags |= Fail;
return _error->Error("Unable to seek ahead %llu",Over);
+ }
Over -= toread;
}
return true;
@@ -1415,7 +1524,7 @@ bool FileFd::Skip(unsigned long long Over)
int res;
#ifdef HAVE_ZLIB
- if (d->gz != NULL)
+ if (d != NULL && d->gz != NULL)
res = gzseek(d->gz,Over,SEEK_CUR);
else
#endif
@@ -1425,7 +1534,8 @@ bool FileFd::Skip(unsigned long long Over)
Flags |= Fail;
return _error->Error("Unable to seek ahead %llu",Over);
}
- d->seekpos = res;
+ if (d != NULL)
+ d->seekpos = res;
return true;
}
@@ -1436,7 +1546,7 @@ bool FileFd::Skip(unsigned long long Over)
bool FileFd::Truncate(unsigned long long To)
{
#if defined HAVE_ZLIB || defined HAVE_BZ2
- if (d->gz != NULL || d->bz2 != NULL)
+ if (d != NULL && (d->gz != NULL || d->bz2 != NULL))
{
Flags |= Fail;
return _error->Error("Truncating compressed files is not implemented (%s)", FileName.c_str());
@@ -1460,23 +1570,27 @@ unsigned long long FileFd::Tell()
// seeking around, but not all users of FileFd use always Seek() and co
// so d->seekpos isn't always true and we can just use it as a hint if
// we have nothing else, but not always as an authority…
- if (d->pipe == true
+ if (d != NULL && (d->pipe == true
#ifdef HAVE_BZ2
- || d->bz2 != NULL
+ || d->bz2 != NULL
#endif
- )
+ ))
return d->seekpos;
off_t Res;
#ifdef HAVE_ZLIB
- if (d->gz != NULL)
+ if (d != NULL && d->gz != NULL)
Res = gztell(d->gz);
else
#endif
Res = lseek(iFd,0,SEEK_CUR);
if (Res == (off_t)-1)
+ {
+ Flags |= Fail;
_error->Errno("lseek","Failed to determine the current file position");
- d->seekpos = Res;
+ }
+ if (d != NULL)
+ d->seekpos = Res;
return Res;
}
/*}}}*/
@@ -1486,17 +1600,24 @@ unsigned long long FileFd::Tell()
unsigned long long FileFd::FileSize()
{
struct stat Buf;
- if (d->pipe == false && fstat(iFd,&Buf) != 0)
+ if ((d == NULL || d->pipe == false) && fstat(iFd,&Buf) != 0)
+ {
+ Flags |= Fail;
return _error->Errno("fstat","Unable to determine the file size");
+ }
// for compressor pipes st_size is undefined and at 'best' zero
- if (d->pipe == true || S_ISFIFO(Buf.st_mode))
+ if ((d != NULL && d->pipe == true) || S_ISFIFO(Buf.st_mode))
{
// we set it here, too, as we get the info here for free
// in theory the Open-methods should take care of it already
- d->pipe = true;
+ if (d != NULL)
+ d->pipe = true;
if (stat(FileName.c_str(), &Buf) != 0)
+ {
+ Flags |= Fail;
return _error->Errno("stat","Unable to determine the file size");
+ }
}
return Buf.st_size;
@@ -1511,11 +1632,11 @@ unsigned long long FileFd::Size()
// for compressor pipes st_size is undefined and at 'best' zero,
// so we 'read' the content and 'seek' back - see there
- if (d->pipe == true
+ if (d != NULL && (d->pipe == true
#ifdef HAVE_BZ2
- || (d->bz2 && size > 0)
+ || (d->bz2 && size > 0)
#endif
- )
+ ))
{
unsigned long long const oldSeek = Tell();
char ignore[1000];
@@ -1530,7 +1651,7 @@ unsigned long long FileFd::Size()
// only check gzsize if we are actually a gzip file, just checking for
// "gz" is not sufficient as uncompressed files could be opened with
// gzopen in "direct" mode as well
- else if (d->gz && !gzdirect(d->gz) && size > 0)
+ else if (d != NULL && d->gz && !gzdirect(d->gz) && size > 0)
{
off_t const oldPos = lseek(iFd,0,SEEK_CUR);
/* unfortunately zlib.h doesn't provide a gzsize(), so we have to do
@@ -1538,10 +1659,16 @@ unsigned long long FileFd::Size()
* bits of the file */
// FIXME: Size for gz-files is limited by 32bit… no largefile support
if (lseek(iFd, -4, SEEK_END) < 0)
- return _error->Errno("lseek","Unable to seek to end of gzipped file");
+ {
+ Flags |= Fail;
+ return _error->Errno("lseek","Unable to seek to end of gzipped file");
+ }
size = 0L;
if (read(iFd, &size, 4) != 4)
- return _error->Errno("read","Unable to read original size of gzipped file");
+ {
+ Flags |= Fail;
+ return _error->Errno("read","Unable to read original size of gzipped file");
+ }
#ifdef WORDS_BIGENDIAN
uint32_t tmp_size = size;
@@ -1551,7 +1678,10 @@ unsigned long long FileFd::Size()
#endif
if (lseek(iFd, oldPos, SEEK_SET) < 0)
- return _error->Errno("lseek","Unable to seek in gzipped file");
+ {
+ Flags |= Fail;
+ return _error->Errno("lseek","Unable to seek in gzipped file");
+ }
return size;
}
@@ -1566,20 +1696,23 @@ unsigned long long FileFd::Size()
time_t FileFd::ModificationTime()
{
struct stat Buf;
- if (d->pipe == false && fstat(iFd,&Buf) != 0)
+ if ((d == NULL || d->pipe == false) && fstat(iFd,&Buf) != 0)
{
+ Flags |= Fail;
_error->Errno("fstat","Unable to determine the modification time of file %s", FileName.c_str());
return 0;
}
// for compressor pipes st_size is undefined and at 'best' zero
- if (d->pipe == true || S_ISFIFO(Buf.st_mode))
+ if ((d != NULL && d->pipe == true) || S_ISFIFO(Buf.st_mode))
{
// we set it here, too, as we get the info here for free
// in theory the Open-methods should take care of it already
- d->pipe = true;
+ if (d != NULL)
+ d->pipe = true;
if (stat(FileName.c_str(), &Buf) != 0)
{
+ Flags |= Fail;
_error->Errno("fstat","Unable to determine the modification time of file %s", FileName.c_str());
return 0;
}
@@ -1599,24 +1732,18 @@ bool FileFd::Close()
bool Res = true;
if ((Flags & AutoClose) == AutoClose)
{
-#ifdef HAVE_ZLIB
- if (d != NULL && d->gz != NULL) {
- int const e = gzclose(d->gz);
- // gzdclose() on empty files always fails with "buffer error" here, ignore that
- if (e != 0 && e != Z_BUF_ERROR)
- Res &= _error->Errno("close",_("Problem closing the gzip file %s"), FileName.c_str());
- } else
-#endif
-#ifdef HAVE_BZ2
- if (d != NULL && d->bz2 != NULL)
- BZ2_bzclose(d->bz2);
- else
-#endif
- if (iFd > 0 && close(iFd) != 0)
- Res &= _error->Errno("close",_("Problem closing the file %s"), FileName.c_str());
+ if ((Flags & Compressed) != Compressed && iFd > 0 && close(iFd) != 0)
+ Res &= _error->Errno("close",_("Problem closing the file %s"), FileName.c_str());
+
+ if (d != NULL)
+ {
+ Res &= d->CloseDown(FileName);
+ delete d;
+ d = NULL;
+ }
}
- if ((Flags & Replace) == Replace && iFd >= 0) {
+ if ((Flags & Replace) == Replace) {
if (rename(TemporaryFileName.c_str(), FileName.c_str()) != 0)
Res &= _error->Errno("rename",_("Problem renaming the file %s to %s"), TemporaryFileName.c_str(), FileName.c_str());
@@ -1631,14 +1758,8 @@ bool FileFd::Close()
if (unlink(FileName.c_str()) != 0)
Res &= _error->WarningE("unlnk",_("Problem unlinking the file %s"), FileName.c_str());
- if (d != NULL)
- {
- if (d->compressor_pid > 0)
- ExecWait(d->compressor_pid, "FileFdCompressor", true);
- delete d;
- d = NULL;
- }
-
+ if (Res == false)
+ Flags |= Fail;
return Res;
}
/*}}}*/
@@ -1649,7 +1770,10 @@ bool FileFd::Sync()
{
#ifdef _POSIX_SYNCHRONIZED_IO
if (fsync(iFd) != 0)
+ {
+ Flags |= Fail;
return _error->Errno("sync",_("Problem syncing the file"));
+ }
#endif
return true;
}
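
Most of the FileFd churn above makes the private data (d) optional and moves the gz/bz2 teardown into CloseDown(); from the outside the class is used as before. A hedged sketch of reading a possibly compressed file through FileFd, assuming the CompressMode/OpenMode enums from fileutl.h of this era (the file name is illustrative; Extension picks the compressor from the suffix):

    #include <apt-pkg/fileutl.h>
    #include <iostream>

    int main()
    {
       FileFd Fd;
       if (Fd.Open("Packages.gz", FileFd::ReadOnly, FileFd::Extension) == false)
          return 1;

       char Buffer[4096];
       unsigned long long Actual = 0;
       while (Fd.Read(Buffer, sizeof(Buffer), &Actual) == true && Actual != 0)
          std::cout.write(Buffer, Actual);

       // Close() now also reaps the compressor child and releases d
       return Fd.Close() ? 0 : 1;
    }
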
diff --git a/apt-pkg/contrib/fileutl.h b/apt-pkg/contrib/fileutl.h
index 426664d3a..510b1c984 100644
--- a/apt-pkg/contrib/fileutl.h
+++ b/apt-pkg/contrib/fileutl.h
@@ -180,6 +180,9 @@ bool WaitFd(int Fd,bool write = false,unsigned long timeout = 0);
pid_t ExecFork();
bool ExecWait(pid_t Pid,const char *Name,bool Reap = false);
+// check if the given file starts with a PGP cleartext signature
+bool StartsWithGPGClearTextSignature(std::string const &FileName);
+
// File string manipulators
std::string flNotDir(std::string File);
std::string flNotFile(std::string File);
@@ -187,4 +190,6 @@ std::string flNoLink(std::string File);
std::string flExtension(std::string File);
std::string flCombine(std::string Dir,std::string File);
+
+
#endif
diff --git a/apt-pkg/contrib/mmap.cc b/apt-pkg/contrib/mmap.cc
index 160718ea5..593bb063b 100644
--- a/apt-pkg/contrib/mmap.cc
+++ b/apt-pkg/contrib/mmap.cc
@@ -83,24 +83,25 @@ bool MMap::Map(FileFd &Fd)
{
if ((Flags & ReadOnly) != ReadOnly)
return _error->Error("Compressed file %s can only be mapped readonly", Fd.Name().c_str());
- Base = new unsigned char[iSize];
+ Base = malloc(iSize);
+ SyncToFd = new FileFd();
if (Fd.Seek(0L) == false || Fd.Read(Base, iSize) == false)
return _error->Error("Compressed file %s can't be read into mmap", Fd.Name().c_str());
return true;
}
// Map it.
- Base = mmap(0,iSize,Prot,Map,Fd.Fd(),0);
+ Base = (Flags & Fallback) ? MAP_FAILED : mmap(0,iSize,Prot,Map,Fd.Fd(),0);
if (Base == (void *)-1)
{
- if (errno == ENODEV || errno == EINVAL)
+ if (errno == ENODEV || errno == EINVAL || (Flags & Fallback))
{
// The filesystem doesn't support this particular kind of mmap.
// So we allocate a buffer and read the whole file into it.
if ((Flags & ReadOnly) == ReadOnly)
{
// for readonly, we don't need sync, so make it simple
- Base = new unsigned char[iSize];
+ Base = malloc(iSize);
return Fd.Read(Base, iSize);
}
// FIXME: Writing to compressed fd's ?
@@ -108,7 +109,7 @@ bool MMap::Map(FileFd &Fd)
if (dupped_fd == -1)
return _error->Errno("mmap", _("Couldn't duplicate file descriptor %i"), Fd.Fd());
- Base = new unsigned char[iSize];
+ Base = calloc(iSize, 1);
SyncToFd = new FileFd (dupped_fd);
if (!SyncToFd->Seek(0L) || !SyncToFd->Read(Base, iSize))
return false;
@@ -134,7 +135,7 @@ bool MMap::Close(bool DoSync)
if (SyncToFd != NULL)
{
- delete[] (char *)Base;
+ free(Base);
delete SyncToFd;
SyncToFd = NULL;
}
@@ -282,8 +283,7 @@ DynamicMMap::DynamicMMap(unsigned long Flags,unsigned long const &WorkSpace,
}
#endif
// fallback to a static allocated space
- Base = new unsigned char[WorkSpace];
- memset(Base,0,WorkSpace);
+ Base = calloc(WorkSpace, 1);
iSize = 0;
}
/*}}}*/
@@ -299,7 +299,7 @@ DynamicMMap::~DynamicMMap()
#ifdef _POSIX_MAPPED_FILES
munmap(Base, WorkSpace);
#else
- delete [] (unsigned char *)Base;
+ free(Base);
#endif
return;
}
@@ -463,6 +463,9 @@ bool DynamicMMap::Grow() {
Base = realloc(Base, newSize);
if (Base == NULL)
return false;
+ else
+ /* Set new memory to 0 */
+ memset((char*)Base + WorkSpace, 0, newSize - WorkSpace);
}
Pools =(Pool*) Base + poolOffset;
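The mmap.cc changes switch the fallback buffers from new[]/delete[] to the malloc family because DynamicMMap::Grow() resizes them with realloc(); mixing operator new[] with realloc/free is undefined behaviour. A small self-contained sketch of the grow-and-zero step used in Grow() (the function name and parameters here are illustrative):

#include <cstdlib>
#include <cstring>

// Grow a malloc()/calloc()ed workspace and zero the newly added tail,
// as the patched DynamicMMap::Grow() does after realloc().
unsigned char *GrowWorkspace(unsigned char *base, size_t oldSize, size_t newSize)
{
   void *grown = std::realloc(base, newSize);
   if (grown == NULL)
      return NULL;                       // on failure the old block is untouched
   std::memset(static_cast<char *>(grown) + oldSize, 0, newSize - oldSize);
   return static_cast<unsigned char *>(grown);
}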
diff --git a/apt-pkg/contrib/sha2_internal.cc b/apt-pkg/contrib/sha2_internal.cc
index 6d27e8f2b..83b5a98d3 100644
--- a/apt-pkg/contrib/sha2_internal.cc
+++ b/apt-pkg/contrib/sha2_internal.cc
@@ -552,7 +552,9 @@ void SHA256_Update(SHA256_CTX* context, const sha2_byte *data, size_t len) {
}
while (len >= SHA256_BLOCK_LENGTH) {
/* Process as many complete blocks as we can */
- SHA256_Transform(context, (sha2_word32*)data);
+ sha2_byte buffer[SHA256_BLOCK_LENGTH];
+ MEMCPY_BCOPY(buffer, data, SHA256_BLOCK_LENGTH);
+ SHA256_Transform(context, (sha2_word32*)buffer);
context->bitcount += SHA256_BLOCK_LENGTH << 3;
len -= SHA256_BLOCK_LENGTH;
data += SHA256_BLOCK_LENGTH;
@@ -879,7 +881,9 @@ void SHA512_Update(SHA512_CTX* context, const sha2_byte *data, size_t len) {
}
while (len >= SHA512_BLOCK_LENGTH) {
/* Process as many complete blocks as we can */
- SHA512_Transform(context, (sha2_word64*)data);
+ sha2_byte buffer[SHA512_BLOCK_LENGTH];
+ MEMCPY_BCOPY(buffer, data, SHA512_BLOCK_LENGTH);
+ SHA512_Transform(context, (sha2_word64*)buffer);
ADDINC128(context->bitcount, SHA512_BLOCK_LENGTH << 3);
len -= SHA512_BLOCK_LENGTH;
data += SHA512_BLOCK_LENGTH;
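The SHA2 hunks copy each input block into a local buffer before the word-sized transform reads it: casting an arbitrary byte pointer to sha2_word32*/sha2_word64* can fault on strict-alignment architectures and violates aliasing rules. The same idea in isolation, with hypothetical names:

#include <cstdint>
#include <cstring>

// Read a 64-byte block as 16 words without assuming the input pointer is
// 4-byte aligned: memcpy into an aligned local buffer first, then use it.
uint32_t SumBlockWords(const unsigned char *data)
{
   uint32_t words[16];                      // naturally aligned for uint32_t
   std::memcpy(words, data, sizeof(words)); // legal for any source alignment
   uint32_t sum = 0;
   for (int i = 0; i < 16; ++i)
      sum += words[i];                      // stand-in for the real round function
   return sum;
}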
diff --git a/apt-pkg/contrib/strutl.cc b/apt-pkg/contrib/strutl.cc
index 99efa8d98..df11a80ad 100644
--- a/apt-pkg/contrib/strutl.cc
+++ b/apt-pkg/contrib/strutl.cc
@@ -23,6 +23,7 @@
#include <ctype.h>
#include <string.h>
+#include <sstream>
#include <stdio.h>
#include <algorithm>
#include <unistd.h>
@@ -751,7 +752,8 @@ bool ReadMessages(int Fd, vector<string> &List)
// Look for the end of the message
for (char *I = Buffer; I + 1 < End; I++)
{
- if (I[0] != '\n' || I[1] != '\n')
+ if (I[1] != '\n' ||
+ (strncmp(I, "\n\n", 2) != 0 && strncmp(I, "\r\n\r\n", 4) != 0))
continue;
// Pull the message out
@@ -759,7 +761,7 @@ bool ReadMessages(int Fd, vector<string> &List)
PartialMessage += Message;
// Fix up the buffer
- for (; I < End && *I == '\n'; I++);
+ for (; I < End && (*I == '\r' || *I == '\n'); ++I);
End -= I-Buffer;
memmove(Buffer,I,End-Buffer);
I = Buffer;
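These ReadMessages() hunks let the scanner accept both "\n\n" and "\r\n\r\n" as the blank line that terminates a message on the status pipe, and then skip the whole CR/LF run. Reduced to its core, the terminator test amounts to the following sketch (standalone, without apt's buffer management):

#include <cstring>

// True when the text at I ends a message, i.e. an empty line encoded
// either as "\n\n" or as "\r\n\r\n".
static bool IsMessageEnd(const char *I)
{
   return std::strncmp(I, "\n\n", 2) == 0 || std::strncmp(I, "\r\n\r\n", 4) == 0;
}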
@@ -1168,34 +1170,50 @@ unsigned long RegexChoice(RxChoiceList *Rxs,const char **ListBegin,
return Hits;
}
/*}}}*/
-// ioprintf - C format string outputter to C++ iostreams /*{{{*/
+// {str,io}printf - C format string outputter to C++ strings/iostreams /*{{{*/
// ---------------------------------------------------------------------
/* This is used to make the internationalization strings easier to translate
and to allow reordering of parameters */
-void ioprintf(ostream &out,const char *format,...)
+static bool iovprintf(ostream &out, const char *format,
+ va_list &args, ssize_t &size) {
+ char *S = (char*)malloc(size);
+ ssize_t const n = vsnprintf(S, size, format, args);
+ if (n > -1 && n < size) {
+ out << S;
+ free(S);
+ return true;
+ } else {
+ if (n > -1)
+ size = n + 1;
+ else
+ size *= 2;
+ }
+ free(S);
+ return false;
+}
+void ioprintf(ostream &out,const char *format,...)
{
va_list args;
- va_start(args,format);
-
- // sprintf the description
- char S[4096];
- vsnprintf(S,sizeof(S),format,args);
- out << S;
+ ssize_t size = 400;
+ while (true) {
+ va_start(args,format);
+ if (iovprintf(out, format, args, size) == true)
+ return;
+ va_end(args);
+ }
}
- /*}}}*/
-// strprintf - C format string outputter to C++ strings /*{{{*/
-// ---------------------------------------------------------------------
-/* This is used to make the internationalization strings easier to translate
- and to allow reordering of parameters */
-void strprintf(string &out,const char *format,...)
+void strprintf(string &out,const char *format,...)
{
va_list args;
- va_start(args,format);
-
- // sprintf the description
- char S[4096];
- vsnprintf(S,sizeof(S),format,args);
- out = string(S);
+ ssize_t size = 400;
+ std::ostringstream outstr;
+ while (true) {
+ va_start(args,format);
+ if (iovprintf(outstr, format, args, size) == true)
+ break;
+ va_end(args);
+ }
+ out = outstr.str();
}
/*}}}*/
// safe_snprintf - Safer snprintf /*{{{*/
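ioprintf()/strprintf() no longer truncate at a fixed 4096-byte buffer: vsnprintf reports the required length, the buffer is enlarged, and the format is retried with a fresh va_start/va_end per attempt. A self-contained sketch of that retry pattern (the function name is illustrative, not apt's API):

#include <cstdarg>
#include <cstdio>
#include <cstdlib>
#include <string>

// Format into a std::string, growing the buffer until vsnprintf fits.
std::string FormatString(const char *format, ...)
{
   size_t size = 400;                          // small initial guess, like apt's
   for (;;) {
      char *buf = static_cast<char *>(std::malloc(size));
      if (buf == NULL)
         return std::string();
      va_list args;
      va_start(args, format);                  // restart the va_list on every retry
      int const n = std::vsnprintf(buf, size, format, args);
      va_end(args);
      if (n > -1 && static_cast<size_t>(n) < size) {
         std::string out(buf, n);
         std::free(buf);
         return out;
      }
      // n >= size means vsnprintf told us the exact length it needs.
      size = (n > -1) ? static_cast<size_t>(n) + 1 : size * 2;
      std::free(buf);
   }
}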
diff --git a/apt-pkg/deb/debindexfile.cc b/apt-pkg/deb/debindexfile.cc
index 5dc2a2ac2..de645bb6e 100644
--- a/apt-pkg/deb/debindexfile.cc
+++ b/apt-pkg/deb/debindexfile.cc
@@ -161,7 +161,7 @@ unsigned long debSourcesIndex::Size() const
/* we need to ignore errors here; if the lists are absent, just return 0 */
_error->PushToStack();
- FileFd f = FileFd (IndexFile("Sources"), FileFd::ReadOnly, FileFd::Extension);
+ FileFd f(IndexFile("Sources"), FileFd::ReadOnly, FileFd::Extension);
if (!f.Failed())
size = f.Size();
@@ -290,7 +290,7 @@ unsigned long debPackagesIndex::Size() const
/* we need to ignore errors here; if the lists are absent, just return 0 */
_error->PushToStack();
- FileFd f = FileFd (IndexFile("Packages"), FileFd::ReadOnly, FileFd::Extension);
+ FileFd f(IndexFile("Packages"), FileFd::ReadOnly, FileFd::Extension);
if (!f.Failed())
size = f.Size();
@@ -488,7 +488,7 @@ unsigned long debTranslationsIndex::Size() const
/* we need to ignore errors here; if the lists are absent, just return 0 */
_error->PushToStack();
- FileFd f = FileFd (IndexFile(Language), FileFd::ReadOnly, FileFd::Extension);
+ FileFd f(IndexFile(Language), FileFd::ReadOnly, FileFd::Extension);
if (!f.Failed())
size = f.Size();
@@ -602,7 +602,8 @@ bool debStatusIndex::Merge(pkgCacheGenerator &Gen,OpProgress *Prog) const
pkgCache::PkgFileIterator CFile = Gen.GetCurFile();
CFile->Size = Pkg.FileSize();
CFile->mtime = Pkg.ModificationTime();
- CFile->Archive = Gen.WriteUniqString("now");
+ map_ptrloc const storage = Gen.WriteUniqString("now");
+ CFile->Archive = storage;
if (Gen.MergeList(Parser) == false)
return _error->Error("Problem with MergeList %s",File.c_str());
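This hunk (and the matching ones in deblistparser.cc and edspindexfile.cc below) evaluates WriteUniqString() into a local before assigning through the file iterator. WriteUniqString can grow and remap the cache MMap, which would invalidate the address computed for the left-hand side; evaluating the call first and assigning afterwards avoids writing through a stale pointer. A toy illustration of the hazard and the fix, with made-up types standing in for the growable cache:

#include <cstddef>
#include <vector>

struct Record { std::size_t Archive; };

std::vector<Record> Cache;            // stand-in for the growable cache mmap

// May reallocate the cache, invalidating any pointer or reference into it.
std::size_t WriteUniqString(const char *) {
   Cache.push_back(Record());
   return Cache.size() - 1;
}

void Update(std::size_t idx) {
   // Risky: the address of Cache[idx] may be formed before WriteUniqString()
   // runs and then dangle once the storage moves:
   //   Cache[idx].Archive = WriteUniqString("now");

   // Safe: evaluate first, then form the reference and assign.
   std::size_t const storage = WriteUniqString("now");
   Cache[idx].Archive = storage;
}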
diff --git a/apt-pkg/deb/deblistparser.cc b/apt-pkg/deb/deblistparser.cc
index 84e6c38c5..e93e51af3 100644
--- a/apt-pkg/deb/deblistparser.cc
+++ b/apt-pkg/deb/deblistparser.cc
@@ -15,6 +15,7 @@
#include <apt-pkg/deblistparser.h>
#include <apt-pkg/error.h>
#include <apt-pkg/configuration.h>
+#include <apt-pkg/cachefilter.h>
#include <apt-pkg/aptconfiguration.h>
#include <apt-pkg/strutl.h>
#include <apt-pkg/fileutl.h>
@@ -22,7 +23,6 @@
#include <apt-pkg/md5.h>
#include <apt-pkg/macros.h>
-#include <fnmatch.h>
#include <ctype.h>
/*}}}*/
@@ -215,15 +215,22 @@ string debListParser::DescriptionLanguage()
*/
MD5SumValue debListParser::Description_md5()
{
- string value = Section.FindS("Description-md5");
-
- if (value.empty())
+ string const value = Section.FindS("Description-md5");
+ if (value.empty() == true)
{
MD5Summation md5;
md5.Add((Description() + "\n").c_str());
return md5.Result();
- } else
- return MD5SumValue(value);
+ }
+ else if (likely(value.size() == 32))
+ {
+ if (likely(value.find_first_not_of("0123456789abcdefABCDEF") == string::npos))
+ return MD5SumValue(value);
+ _error->Error("Malformed Description-md5 line; includes invalid character '%s'", value.c_str());
+ return MD5SumValue();
+ }
+ _error->Error("Malformed Description-md5 line; doesn't have the required length (32 != %d) '%s'", (int)value.size(), value.c_str());
+ return MD5SumValue();
}
/*}}}*/
// ListParser::UsePackage - Update a package structure /*{{{*/
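Description_md5() now validates the field instead of trusting it: an empty value falls back to hashing the description text, a 32-character hex string is accepted, and anything else raises an error and yields an empty checksum. A standalone sketch of the validation step, assuming a plain std::string result instead of apt's MD5SumValue:

#include <string>

// Return the checksum if well-formed, or an empty string for a malformed field.
std::string ValidateDescriptionMd5(std::string const &value)
{
   if (value.empty())
      return std::string();                 // caller computes the hash itself
   if (value.size() != 32)
      return std::string();                 // wrong length
   if (value.find_first_not_of("0123456789abcdefABCDEF") != std::string::npos)
      return std::string();                 // non-hex character
   return value;
}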
@@ -236,21 +243,26 @@ bool debListParser::UsePackage(pkgCache::PkgIterator &Pkg,
if (Pkg->Section == 0)
Pkg->Section = UniqFindTagWrite("Section");
- // Packages which are not from the "native" arch doesn't get the essential flag
- // in the default "native" mode - it is also possible to mark "all" or "none".
- // The "installed" mode is handled by ParseStatus(), See #544481 and friends.
string const static myArch = _config->Find("APT::Architecture");
- string const static essential = _config->Find("pkgCacheGen::Essential", "native");
- if ((essential == "native" && Pkg->Arch != 0 && myArch == Pkg.Arch()) ||
- essential == "all")
+ // Possible values are: "all", "native", "installed" and "none"
+ // The "installed" mode is handled by ParseStatus(), See #544481 and friends.
+ string const static essential = _config->Find("pkgCacheGen::Essential", "all");
+ if (essential == "all" ||
+ (essential == "native" && Pkg->Arch != 0 && myArch == Pkg.Arch()))
if (Section.FindFlag("Essential",Pkg->Flags,pkgCache::Flag::Essential) == false)
return false;
if (Section.FindFlag("Important",Pkg->Flags,pkgCache::Flag::Important) == false)
return false;
if (strcmp(Pkg.Name(),"apt") == 0)
- Pkg->Flags |= pkgCache::Flag::Essential | pkgCache::Flag::Important;
-
+ {
+ if ((essential == "native" && Pkg->Arch != 0 && myArch == Pkg.Arch()) ||
+ essential == "all")
+ Pkg->Flags |= pkgCache::Flag::Essential | pkgCache::Flag::Important;
+ else
+ Pkg->Flags |= pkgCache::Flag::Important;
+ }
+
if (ParseStatus(Pkg,Ver) == false)
return false;
return true;
@@ -452,22 +464,6 @@ const char *debListParser::ConvertRelation(const char *I,unsigned int &Op)
}
return I;
}
-
-/*
- * CompleteArch:
- *
- * The complete architecture, consisting of <kernel>-<cpu>.
- */
-static string CompleteArch(std::string const &arch) {
- if (arch == "armel") return "linux-arm";
- if (arch == "armhf") return "linux-arm";
- if (arch == "lpia") return "linux-i386";
- if (arch == "powerpcspe") return "linux-powerpc";
- if (arch == "uclibc-linux-armel") return "linux-arm";
- if (arch == "uclinux-armel") return "uclinux-arm";
-
- return (arch.find("-") != string::npos) ? arch : "linux-" + arch;
-}
/*}}}*/
// ListParser::ParseDepends - Parse a dependency element /*{{{*/
// ---------------------------------------------------------------------
@@ -544,58 +540,59 @@ const char *debListParser::ParseDepends(const char *Start,const char *Stop,
if (ParseArchFlags == true)
{
- string completeArch = CompleteArch(arch);
+ APT::CacheFilter::PackageArchitectureMatchesSpecification matchesArch(arch, false);
// Parse an architecture
if (I != Stop && *I == '[')
{
+ ++I;
// malformed
- I++;
- if (I == Stop)
- return 0;
-
- const char *End = I;
- bool Found = false;
- bool NegArch = false;
- while (I != Stop)
+ if (unlikely(I == Stop))
+ return 0;
+
+ const char *End = I;
+ bool Found = false;
+ bool NegArch = false;
+ while (I != Stop)
{
- // look for whitespace or ending ']'
- while (End != Stop && !isspace(*End) && *End != ']')
- End++;
-
- if (End == Stop)
+ // look for whitespace or ending ']'
+ for (;End != Stop && !isspace(*End) && *End != ']'; ++End);
+
+ if (unlikely(End == Stop))
return 0;
if (*I == '!')
- {
+ {
NegArch = true;
- I++;
- }
+ ++I;
+ }
- if (stringcmp(arch,I,End) == 0) {
+ std::string arch(I, End);
+ if (arch.empty() == false && matchesArch(arch.c_str()) == true)
+ {
Found = true;
- } else {
- std::string wildcard = SubstVar(string(I, End), "any", "*");
- if (fnmatch(wildcard.c_str(), completeArch.c_str(), 0) == 0)
- Found = true;
+ if (I[-1] != '!')
+ NegArch = false;
+ // we found a match, so fast-forward to the end of the wildcards
+ for (; End != Stop && *End != ']'; ++End);
}
-
+
if (*End++ == ']') {
I = End;
break;
}
-
+
I = End;
for (;I != Stop && isspace(*I) != 0; I++);
- }
+ }
- if (NegArch)
+ if (NegArch == true)
Found = !Found;
-
- if (Found == false)
+
+ if (Found == false)
Package = ""; /* not for this arch */
}
-
+
// Skip whitespace
for (;I != Stop && isspace(*I) != 0; I++);
}
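The rewritten block above hands "[arch1 !arch2]" restrictions to APT::CacheFilter::PackageArchitectureMatchesSpecification instead of the removed CompleteArch()/fnmatch pair, so wildcard specifications such as "linux-any" go through one shared matcher. For illustration only, the old fnmatch idea boils down to the following sketch; CompleteArch here is a simplified stand-in for the removed table-driven helper:

#include <fnmatch.h>
#include <string>

// Simplified stand-in for the removed CompleteArch(): expand "amd64" to
// "linux-amd64" so kernel wildcards such as "linux-any" can match.
static std::string CompleteArch(std::string const &arch)
{
   return (arch.find('-') != std::string::npos) ? arch : "linux-" + arch;
}

// Does a restriction entry (possibly containing "any") cover this architecture?
static bool ArchMatches(std::string spec, std::string const &arch)
{
   std::string::size_type pos;
   while ((pos = spec.find("any")) != std::string::npos)
      spec.replace(pos, 3, "*");            // "any" acts as a shell-style wildcard
   return fnmatch(spec.c_str(), CompleteArch(arch).c_str(), 0) == 0;
}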
@@ -625,16 +622,18 @@ bool debListParser::ParseDepends(pkgCache::VerIterator &Ver,
if (Section.Find(Tag,Start,Stop) == false)
return true;
- string Package;
string const pkgArch = Ver.Arch();
- string Version;
- unsigned int Op;
while (1)
{
+ string Package;
+ string Version;
+ unsigned int Op;
+
Start = ParseDepends(Start,Stop,Package,Version,Op,false,!MultiArchEnabled);
if (Start == 0)
return _error->Error("Problem parsing dependency %s",Tag);
+ size_t const found = Package.rfind(':');
if (MultiArchEnabled == true &&
(Type == pkgCache::Dep::Conflicts ||
@@ -646,6 +645,18 @@ bool debListParser::ParseDepends(pkgCache::VerIterator &Ver,
if (NewDepends(Ver,Package,*a,Version,Op,Type) == false)
return false;
}
+ else if (MultiArchEnabled == true && found != string::npos &&
+ strcmp(Package.c_str() + found, ":any") != 0)
+ {
+ string Arch = Package.substr(found+1, string::npos);
+ Package = Package.substr(0, found);
+ // Such dependencies are not supposed to be accepted …
+ // … but this is probably the best thing to do.
+ if (Arch == "native")
+ Arch = _config->Find("APT::Architecture");
+ if (NewDepends(Ver,Package,Arch,Version,Op,Type) == false)
+ return false;
+ }
else if (NewDepends(Ver,Package,pkgArch,Version,Op,Type) == false)
return false;
if (Start == Stop)
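The new branch above handles explicitly architecture-qualified dependencies such as "foo:amd64" or "foo:native": the name is split at the last ':', ":any" is left to the existing multi-arch handling, and ":native" is mapped to the configured APT::Architecture. A minimal sketch of that splitting, using a hypothetical struct in place of apt's NewDepends call:

#include <string>

struct DepTarget { std::string Package; std::string Arch; };

// Split "pkg:arch" into its parts. ":any" is left unsplit; ":native" is
// mapped to the supplied native architecture (e.g. APT::Architecture).
DepTarget SplitArchQualifier(std::string const &dep, std::string const &nativeArch)
{
   DepTarget out;
   std::string::size_type const found = dep.rfind(':');
   if (found == std::string::npos ||
       dep.compare(found, std::string::npos, ":any") == 0) {
      out.Package = dep;                    // unqualified or ":any": keep as-is
      return out;
   }
   out.Package = dep.substr(0, found);
   out.Arch = dep.substr(found + 1);
   if (out.Arch == "native")
      out.Arch = nativeArch;
   return out;
}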
@@ -771,7 +782,8 @@ bool debListParser::LoadReleaseInfo(pkgCache::PkgFileIterator &FileI,
{
// apt-secure does no longer download individual (per-section) Release
// file. to provide Component pinning we use the section name now
- FileI->Component = WriteUniqString(component);
+ map_ptrloc const storage = WriteUniqString(component);
+ FileI->Component = storage;
// FIXME: Code depends on the fact that Release files aren't compressed
FILE* release = fdopen(dup(File.Fd()), "r");
@@ -858,13 +870,14 @@ bool debListParser::LoadReleaseInfo(pkgCache::PkgFileIterator &FileI,
break;
*s = '\0';
}
+ map_ptrloc const storage = WriteUniqString(data);
switch (writeTo) {
- case Suite: FileI->Archive = WriteUniqString(data); break;
- case Component: FileI->Component = WriteUniqString(data); break;
- case Version: FileI->Version = WriteUniqString(data); break;
- case Origin: FileI->Origin = WriteUniqString(data); break;
- case Codename: FileI->Codename = WriteUniqString(data); break;
- case Label: FileI->Label = WriteUniqString(data); break;
+ case Suite: FileI->Archive = storage; break;
+ case Component: FileI->Component = storage; break;
+ case Version: FileI->Version = storage; break;
+ case Origin: FileI->Origin = storage; break;
+ case Codename: FileI->Codename = storage; break;
+ case Label: FileI->Label = storage; break;
case None: break;
}
}
diff --git a/apt-pkg/deb/dpkgpm.cc b/apt-pkg/deb/dpkgpm.cc
index 496daf1df..296426c80 100644
--- a/apt-pkg/deb/dpkgpm.cc
+++ b/apt-pkg/deb/dpkgpm.cc
@@ -738,6 +738,7 @@ bool pkgDPkgPM::OpenLog()
d->history_out = fopen(history_name.c_str(),"a");
if (d->history_out == NULL)
return _error->WarningE("OpenLog", _("Could not open file '%s'"), history_name.c_str());
+ SetCloseExec(fileno(d->history_out), true);
chmod(history_name.c_str(), 0644);
fprintf(d->history_out, "\nStart-Date: %s\n", timestr);
string remove, purge, install, reinstall, upgrade, downgrade;
@@ -1596,7 +1597,10 @@ void pkgDPkgPM::WriteApportReport(const char *pkgpath, const char *errormsg)
const char *ops_str[] = {"Install", "Configure","Remove","Purge"};
fprintf(report, "AptOrdering:\n");
for (vector<Item>::iterator I = List.begin(); I != List.end(); ++I)
- fprintf(report, " %s: %s\n", (*I).Pkg.Name(), ops_str[(*I).Op]);
+ if ((*I).Pkg != NULL)
+ fprintf(report, " %s: %s\n", (*I).Pkg.Name(), ops_str[(*I).Op]);
+ else
+ fprintf(report, " %s: %s\n", "NULL", ops_str[(*I).Op]);
// attach dmesg log (to learn about segfaults)
if (FileExists("/bin/dmesg"))
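OpenLog() now marks the history log descriptor close-on-exec so dpkg and the maintainer scripts it spawns do not inherit it. apt's SetCloseExec() helper is essentially the classic fcntl pattern; a minimal sketch of that idea (not apt's exact implementation):

#include <fcntl.h>

// Set or clear FD_CLOEXEC so children started via exec() do not inherit fd.
static bool SetCloseOnExec(int fd, bool close)
{
   int flags = fcntl(fd, F_GETFD, 0);
   if (flags == -1)
      return false;
   flags = close ? (flags | FD_CLOEXEC) : (flags & ~FD_CLOEXEC);
   return fcntl(fd, F_SETFD, flags) != -1;
}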
diff --git a/apt-pkg/depcache.cc b/apt-pkg/depcache.cc
index 1eea55560..2656e9b42 100644
--- a/apt-pkg/depcache.cc
+++ b/apt-pkg/depcache.cc
@@ -1152,9 +1152,8 @@ bool pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst,
}
/* This bit is for processing the possibilty of an install/upgrade
- fixing the problem */
- if (Start->Type != Dep::DpkgBreaks &&
- (DepState[Start->ID] & DepCVer) == DepCVer)
+ fixing the problem for "positive" dependencies */
+ if (Start.IsNegative() == false && (DepState[Start->ID] & DepCVer) == DepCVer)
{
APT::VersionList verlist;
pkgCache::VerIterator Cand = PkgState[Start.TargetPkg()->ID].CandidateVerIter(*this);
@@ -1165,7 +1164,7 @@ bool pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst,
pkgCache::VerIterator V = Prv.OwnerVer();
pkgCache::VerIterator Cand = PkgState[Prv.OwnerPkg()->ID].CandidateVerIter(*this);
if (Cand.end() == true || V != Cand ||
- VS().CheckDep(Cand.VerStr(), Start->CompareOp, Start.TargetVer()) == false)
+ VS().CheckDep(Prv.ProvideVersion(), Start->CompareOp, Start.TargetVer()) == false)
continue;
verlist.insert(Cand);
}
@@ -1198,13 +1197,13 @@ bool pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst,
}
continue;
}
-
- /* For conflicts we just de-install the package and mark as auto,
- Conflicts may not have or groups. For dpkg's Breaks we try to
- upgrade the package. */
- if (Start.IsNegative() == true)
+ /* Negative dependencies have no or-group
+ If the dependency isn't versioned, we try if an upgrade might solve the problem.
+ Otherwise we remove the offender if needed */
+ else if (Start.IsNegative() == true && Start->Type != pkgCache::Dep::Obsoletes)
{
SPtrArray<Version *> List = Start.AllTargets();
+ pkgCache::PkgIterator TrgPkg = Start.TargetPkg();
for (Version **I = List; *I != 0; I++)
{
VerIterator Ver(*this,*I);
@@ -1215,15 +1214,17 @@ bool pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst,
if (PkgState[Pkg->ID].InstallVer == 0)
continue;
- if (PkgState[Pkg->ID].CandidateVer != *I &&
- Start->Type == Dep::DpkgBreaks &&
+ if ((Start->Version != 0 || TrgPkg != Pkg) &&
+ PkgState[Pkg->ID].CandidateVer != PkgState[Pkg->ID].InstallVer &&
+ PkgState[Pkg->ID].CandidateVer != *I &&
MarkInstall(Pkg,true,Depth + 1, false, ForceImportantDeps) == true)
continue;
- else if (MarkDelete(Pkg,false,Depth + 1, false) == false)
+ else if ((Start->Type == pkgCache::Dep::Conflicts || Start->Type == pkgCache::Dep::DpkgBreaks) &&
+ MarkDelete(Pkg,false,Depth + 1, false) == false)
break;
}
continue;
- }
+ }
}
return Dep.end() == true;
diff --git a/apt-pkg/edsp.cc b/apt-pkg/edsp.cc
index 791aac72f..adb8788b3 100644
--- a/apt-pkg/edsp.cc
+++ b/apt-pkg/edsp.cc
@@ -118,8 +118,7 @@ void EDSP::WriteScenarioDependency(pkgDepCache &Cache, FILE* output, pkgCache::P
bool orGroup = false;
for (pkgCache::DepIterator Dep = Ver.DependsList(); Dep.end() == false; ++Dep)
{
- // Ignore implicit dependencies for multiarch here
- if (strcmp(Pkg.Arch(), Dep.TargetPkg().Arch()) != 0)
+ if (Dep.IsMultiArchImplicit() == true)
continue;
if (orGroup == false)
dependencies[Dep->Type].append(", ");
@@ -140,8 +139,7 @@ void EDSP::WriteScenarioDependency(pkgDepCache &Cache, FILE* output, pkgCache::P
string provides;
for (pkgCache::PrvIterator Prv = Ver.ProvidesList(); Prv.end() == false; ++Prv)
{
- // Ignore implicit provides for multiarch here
- if (strcmp(Pkg.Arch(), Prv.ParentPkg().Arch()) != 0 || strcmp(Pkg.Name(),Prv.Name()) == 0)
+ if (Prv.IsMultiArchImplicit() == true)
continue;
provides.append(", ").append(Prv.Name());
}
@@ -159,8 +157,7 @@ void EDSP::WriteScenarioLimitedDependency(pkgDepCache &Cache, FILE* output,
bool orGroup = false;
for (pkgCache::DepIterator Dep = Ver.DependsList(); Dep.end() == false; ++Dep)
{
- // Ignore implicit dependencies for multiarch here
- if (strcmp(Pkg.Arch(), Dep.TargetPkg().Arch()) != 0)
+ if (Dep.IsMultiArchImplicit() == true)
continue;
if (orGroup == false)
{
@@ -193,8 +190,7 @@ void EDSP::WriteScenarioLimitedDependency(pkgDepCache &Cache, FILE* output,
string provides;
for (pkgCache::PrvIterator Prv = Ver.ProvidesList(); Prv.end() == false; ++Prv)
{
- // Ignore implicit provides for multiarch here
- if (strcmp(Pkg.Arch(), Prv.ParentPkg().Arch()) != 0 || strcmp(Pkg.Name(),Prv.Name()) == 0)
+ if (Prv.IsMultiArchImplicit() == true)
continue;
if (pkgset.find(Prv.ParentPkg()) == pkgset.end())
continue;
diff --git a/apt-pkg/edsp/edspindexfile.cc b/apt-pkg/edsp/edspindexfile.cc
index 482581979..98ce4497a 100644
--- a/apt-pkg/edsp/edspindexfile.cc
+++ b/apt-pkg/edsp/edspindexfile.cc
@@ -51,7 +51,8 @@ bool edspIndex::Merge(pkgCacheGenerator &Gen,OpProgress *Prog) const
pkgCache::PkgFileIterator CFile = Gen.GetCurFile();
CFile->Size = Pkg.FileSize();
CFile->mtime = Pkg.ModificationTime();
- CFile->Archive = Gen.WriteUniqString("edsp::scenario");
+ map_ptrloc const storage = Gen.WriteUniqString("edsp::scenario");
+ CFile->Archive = storage;
if (Gen.MergeList(Parser) == false)
return _error->Error("Problem with MergeList %s",File.c_str());
diff --git a/apt-pkg/edsp/edspsystem.cc b/apt-pkg/edsp/edspsystem.cc
index 6b9207451..aae969d9d 100644
--- a/apt-pkg/edsp/edspsystem.cc
+++ b/apt-pkg/edsp/edspsystem.cc
@@ -91,7 +91,7 @@ signed edspSystem::Score(Configuration const &Cnf)
{
if (Cnf.Find("edsp::scenario", "") == "stdin")
return 1000;
- if (FileExists(Cnf.FindFile("edsp::scenario","")) == true)
+ if (RealFileExists(Cnf.FindFile("edsp::scenario","")) == true)
return 1000;
return -1000;
}
diff --git a/apt-pkg/indexcopy.cc b/apt-pkg/indexcopy.cc
index e29e2819c..ddf1909b7 100644
--- a/apt-pkg/indexcopy.cc
+++ b/apt-pkg/indexcopy.cc
@@ -648,16 +648,12 @@ bool SigVerify::RunGPGV(std::string const &File, std::string const &FileGPG,
{
if (File == FileGPG)
{
- #define SIGMSG "-----BEGIN PGP SIGNED MESSAGE-----\n"
- char buffer[sizeof(SIGMSG)];
FILE* gpg = fopen(File.c_str(), "r");
if (gpg == NULL)
return _error->Errno("RunGPGV", _("Could not open file %s"), File.c_str());
- char const * const test = fgets(buffer, sizeof(buffer), gpg);
fclose(gpg);
- if (test == NULL || strcmp(buffer, SIGMSG) != 0)
+ if (!StartsWithGPGClearTextSignature(File))
return _error->Error(_("File %s doesn't start with a clearsigned message"), File.c_str());
- #undef SIGMSG
}
diff --git a/apt-pkg/packagemanager.cc b/apt-pkg/packagemanager.cc
index c62c4d187..46fc499c6 100644
--- a/apt-pkg/packagemanager.cc
+++ b/apt-pkg/packagemanager.cc
@@ -25,9 +25,10 @@
#include <apt-pkg/configuration.h>
#include <apt-pkg/sptr.h>
-#include <apti18n.h>
#include <iostream>
#include <fcntl.h>
+
+#include <apti18n.h>
/*}}}*/
using namespace std;
@@ -602,7 +603,7 @@ bool pkgPackageManager::SmartUnPack(PkgIterator Pkg, bool const Immediate, int c
or by the ConfigureAll call at the end of the for loop in OrderInstall. */
bool Changed = false;
const unsigned int max_loops = _config->FindI("APT::pkgPackageManager::MaxLoopCount", 500);
- unsigned int i;
+ unsigned int i = 0;
do
{
Changed = false;
@@ -621,7 +622,7 @@ bool pkgPackageManager::SmartUnPack(PkgIterator Pkg, bool const Immediate, int c
// Look for easy targets: packages that are already okay
for (DepIterator Cur = Start; Bad == true; ++Cur)
{
- SPtrArray<Version *> VList = Start.AllTargets();
+ SPtrArray<Version *> VList = Cur.AllTargets();
for (Version **I = VList; *I != 0; ++I)
{
VerIterator Ver(Cache,*I);
@@ -644,7 +645,7 @@ bool pkgPackageManager::SmartUnPack(PkgIterator Pkg, bool const Immediate, int c
// Look for something that could be configured.
for (DepIterator Cur = Start; Bad == true; ++Cur)
{
- SPtrArray<Version *> VList = Start.AllTargets();
+ SPtrArray<Version *> VList = Cur.AllTargets();
for (Version **I = VList; *I != 0; ++I)
{
VerIterator Ver(Cache,*I);
@@ -784,7 +785,7 @@ bool pkgPackageManager::SmartUnPack(PkgIterator Pkg, bool const Immediate, int c
VerIterator V(Cache,*I);
PkgIterator P = V.ParentPkg();
// we are checking for installation as an easy 'protection' against or-groups and (unchosen) providers
- if (P->CurrentVer == 0 || P != Pkg || (P.CurrentVer() != V && Cache[P].InstallVer != V))
+ if (P != Pkg || (P.CurrentVer() != V && Cache[P].InstallVer != V))
continue;
circle = true;
break;
diff --git a/apt-pkg/pkgcache.cc b/apt-pkg/pkgcache.cc
index 997c70768..9acb7da72 100644
--- a/apt-pkg/pkgcache.cc
+++ b/apt-pkg/pkgcache.cc
@@ -708,6 +708,21 @@ bool pkgCache::DepIterator::IsIgnorable(PrvIterator const &Prv) const
return false;
}
/*}}}*/
+// DepIterator::IsMultiArchImplicit - added by the cache generation /*{{{*/
+// ---------------------------------------------------------------------
+/* MultiArch can be translated to SingleArch for a resolver and we did so,
+   by adding dependencies to help the resolver understand the problem, but
+   sometimes it is needed to identify these to ignore them… */
+bool pkgCache::DepIterator::IsMultiArchImplicit() const
+{
+ if (ParentPkg()->Arch != TargetPkg()->Arch &&
+ (S->Type == pkgCache::Dep::Replaces ||
+ S->Type == pkgCache::Dep::DpkgBreaks ||
+ S->Type == pkgCache::Dep::Conflicts))
+ return true;
+ return false;
+}
+ /*}}}*/
// ostream operator to handle string representation of a dependecy /*{{{*/
// ---------------------------------------------------------------------
/* */
@@ -946,3 +961,17 @@ pkgCache::DescIterator pkgCache::VerIterator::TranslatedDescription() const
};
/*}}}*/
+// PrvIterator::IsMultiArchImplicit - added by the cache generation /*{{{*/
+// ---------------------------------------------------------------------
+/* MultiArch can be translated to SingleArch for a resolver and we did so,
+   by adding provides to help the resolver understand the problem, but
+   sometimes it is needed to identify these to ignore them… */
+bool pkgCache::PrvIterator::IsMultiArchImplicit() const
+{
+ pkgCache::PkgIterator const Owner = OwnerPkg();
+ pkgCache::PkgIterator const Parent = ParentPkg();
+ if (strcmp(Owner.Arch(), Parent.Arch()) != 0 || Owner->Name == Parent->Name)
+ return true;
+ return false;
+}
+ /*}}}*/
diff --git a/apt-pkg/pkgcachegen.cc b/apt-pkg/pkgcachegen.cc
index ec072fddd..f70cbd02a 100644
--- a/apt-pkg/pkgcachegen.cc
+++ b/apt-pkg/pkgcachegen.cc
@@ -38,7 +38,7 @@
typedef std::vector<pkgIndexFile *>::iterator FileIterator;
template <typename Iter> std::vector<Iter*> pkgCacheGenerator::Dynamic<Iter>::toReMap;
-bool IsDuplicateDescription(pkgCache::DescIterator Desc,
+static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
MD5SumValue const &CurMd5, std::string const &CurLang);
using std::string;
@@ -286,7 +286,7 @@ bool pkgCacheGenerator::MergeListPackage(ListParser &List, pkgCache::PkgIterator
pkgCache::DescIterator Desc = Ver.DescriptionList();
// a version can only have one md5 describing it
- if (MD5SumValue(Desc.md5()) != CurMd5)
+ if (Desc.end() == true || MD5SumValue(Desc.md5()) != CurMd5)
continue;
// don't add a new description if we have one for the given
@@ -304,6 +304,9 @@ bool pkgCacheGenerator::MergeListPackage(ListParser &List, pkgCache::PkgIterator
void const * const oldMap = Map.Data();
map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, *LastDesc);
+ if (unlikely(descindex == 0 && _error->PendingError()))
+ return _error->Error(_("Error occurred while processing %s (%s%d)"),
+ Pkg.Name(), "NewDescription", 1);
if (oldMap != Map.Data())
LastDesc += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
*LastDesc = descindex;
@@ -456,6 +459,9 @@ bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator
oldMap = Map.Data();
map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, *LastDesc);
+ if (unlikely(descindex == 0 && _error->PendingError()))
+ return _error->Error(_("Error occurred while processing %s (%s%d)"),
+ Pkg.Name(), "NewDescription", 2);
if (oldMap != Map.Data())
LastDesc += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
*LastDesc = descindex;
@@ -1310,10 +1316,11 @@ bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress
}
_error->RevertToStack();
}
- else if (Debug == true)
+ else
{
_error->MergeWithStack();
- std::clog << "Open filebased MMap" << std::endl;
+ if (Debug == true)
+ std::clog << "Open filebased MMap" << std::endl;
}
}
if (Writeable == false || CacheFile.empty() == true)
@@ -1449,11 +1456,11 @@ bool pkgCacheGenerator::MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **O
}
/*}}}*/
// IsDuplicateDescription /*{{{*/
-bool IsDuplicateDescription(pkgCache::DescIterator Desc,
+static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
MD5SumValue const &CurMd5, std::string const &CurLang)
{
// Descriptions in the same link-list have all the same md5
- if (MD5SumValue(Desc.md5()) != CurMd5)
+ if (Desc.end() == true || MD5SumValue(Desc.md5()) != CurMd5)
return false;
for (; Desc.end() == false; ++Desc)
if (Desc.LanguageCode() == CurLang)
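Besides the new descindex error checks, the surrounding context shows apt's remap idiom: remember Map.Data() before a call that may enlarge the cache, and if the base address changed, shift any saved pointer into it by the same distance. The sketch below expresses the same rebasing with an index against a plain growable buffer standing in for the MMap; the names are illustrative:

#include <cstddef>
#include <vector>

typedef std::size_t map_ptrloc;

std::vector<map_ptrloc> Map;          // stand-in for the dynamic cache mmap

// May grow (and therefore move) the map, like NewDescription() can.
map_ptrloc AllocateSlot() { Map.push_back(0); return Map.size() - 1; }

void StoreDescription(map_ptrloc *&LastDesc)
{
   // Remember where LastDesc points as an offset, since growing the map can
   // move the whole block and invalidate raw pointers into it.
   std::ptrdiff_t const off = LastDesc - Map.data();
   map_ptrloc const descindex = AllocateSlot();
   LastDesc = Map.data() + off;       // rebase against the (possibly new) block
   *LastDesc = descindex;
}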
diff --git a/apt-pkg/srcrecords.h b/apt-pkg/srcrecords.h
index 06f0dce6c..ed69d0d72 100644
--- a/apt-pkg/srcrecords.h
+++ b/apt-pkg/srcrecords.h
@@ -71,6 +71,7 @@ class pkgSrcRecords
virtual std::string Section() const = 0;
virtual const char **Binaries() = 0; // Ownership does not transfer
+ //FIXME: Add a parameter to specify which architecture to use for [wildcard] matching
virtual bool BuildDepends(std::vector<BuildDepRec> &BuildDeps, bool const &ArchOnly, bool const &StripMultiArch = true) = 0;
static const char *BuildDepType(unsigned char const &Type);