Diffstat (limited to 'apt-pkg')
-rw-r--r--  apt-pkg/acquire-method.cc           41
-rw-r--r--  apt-pkg/acquire-method.h             3
-rw-r--r--  apt-pkg/acquire-worker.cc           62
-rw-r--r--  apt-pkg/acquire.cc                  43
-rw-r--r--  apt-pkg/algorithms.cc              111
-rw-r--r--  apt-pkg/algorithms.h                 5
-rw-r--r--  apt-pkg/aptconfiguration.cc        111
-rw-r--r--  apt-pkg/aptconfiguration.h           8
-rw-r--r--  apt-pkg/cachefilter.cc              49
-rw-r--r--  apt-pkg/cachefilter.h               30
-rw-r--r--  apt-pkg/cacheiterators.h             4
-rw-r--r--  apt-pkg/cacheset.cc                 63
-rw-r--r--  apt-pkg/cacheset.h                   5
-rw-r--r--  apt-pkg/cdrom.cc                    61
-rw-r--r--  apt-pkg/cdrom.h                      1
-rw-r--r--  apt-pkg/clean.cc                    16
-rw-r--r--  apt-pkg/contrib/cmndline.cc          8
-rw-r--r--  apt-pkg/contrib/configuration.cc   149
-rw-r--r--  apt-pkg/contrib/configuration.h      2
-rw-r--r--  apt-pkg/contrib/fileutl.cc         374
-rw-r--r--  apt-pkg/contrib/fileutl.h            1
-rw-r--r--  apt-pkg/contrib/mmap.cc             52
-rw-r--r--  apt-pkg/contrib/netrc.cc            66
-rw-r--r--  apt-pkg/contrib/netrc.h              8
-rw-r--r--  apt-pkg/contrib/sha2_internal.cc     8
-rw-r--r--  apt-pkg/contrib/strutl.cc           57
-rw-r--r--  apt-pkg/deb/debindexfile.cc          9
-rw-r--r--  apt-pkg/deb/deblistparser.cc       176
-rw-r--r--  apt-pkg/deb/dpkgpm.cc               30
-rw-r--r--  apt-pkg/depcache.cc                 56
-rw-r--r--  apt-pkg/edsp.cc                     18
-rw-r--r--  apt-pkg/edsp/edspindexfile.cc        3
-rw-r--r--  apt-pkg/edsp/edspsystem.cc           2
-rw-r--r--  apt-pkg/indexcopy.cc                33
-rw-r--r--  apt-pkg/init.cc                      2
-rw-r--r--  apt-pkg/makefile                     8
-rw-r--r--  apt-pkg/packagemanager.cc           30
-rw-r--r--  apt-pkg/pkgcache.cc                 60
-rw-r--r--  apt-pkg/pkgcachegen.cc              89
-rw-r--r--  apt-pkg/policy.cc                   49
-rw-r--r--  apt-pkg/srcrecords.h                 1
41 files changed, 1342 insertions, 562 deletions
diff --git a/apt-pkg/acquire-method.cc b/apt-pkg/acquire-method.cc
index 2041fd9e9..5bc1c159a 100644
--- a/apt-pkg/acquire-method.cc
+++ b/apt-pkg/acquire-method.cc
@@ -95,12 +95,7 @@ void pkgAcqMethod::Fail(string Err,bool Transient)
{
std::cout << "400 URI Failure\nURI: " << Queue->Uri << "\n"
<< "Message: " << Err << " " << IP << "\n";
- // Dequeue
- FetchItem *Tmp = Queue;
- Queue = Queue->Next;
- delete Tmp;
- if (Tmp == QueueBack)
- QueueBack = Queue;
+ Dequeue();
}
else
std::cout << "400 URI Failure\nURI: <UNKNOWN>\nMessage: " << Err << "\n";
@@ -211,13 +206,7 @@ void pkgAcqMethod::URIDone(FetchResult &Res, FetchResult *Alt)
}
std::cout << "\n" << std::flush;
-
- // Dequeue
- FetchItem *Tmp = Queue;
- Queue = Queue->Next;
- delete Tmp;
- if (Tmp == QueueBack)
- QueueBack = Queue;
+ Dequeue();
}
/*}}}*/
// AcqMethod::MediaFail - Syncronous request for new media /*{{{*/
@@ -423,26 +412,14 @@ void pkgAcqMethod::Status(const char *Format,...)
/*}}}*/
// AcqMethod::Redirect - Send a redirect message /*{{{*/
// ---------------------------------------------------------------------
-/* This method sends the redirect message and also manipulates the queue
- to keep the pipeline synchronized. */
+/* This method sends the redirect message and dequeues the item as
+ * the worker will enqueue again later on to the right queue */
void pkgAcqMethod::Redirect(const string &NewURI)
{
std::cout << "103 Redirect\nURI: " << Queue->Uri << "\n"
<< "New-URI: " << NewURI << "\n"
<< "\n" << std::flush;
-
- // Change the URI for the request.
- Queue->Uri = NewURI;
-
- /* To keep the pipeline synchronized, move the current request to
- the end of the queue, past the end of the current pipeline. */
- FetchItem *I;
- for (I = Queue; I->Next != 0; I = I->Next) ;
- I->Next = Queue;
- Queue = Queue->Next;
- I->Next->Next = 0;
- if (QueueBack == 0)
- QueueBack = I->Next;
+ Dequeue();
}
/*}}}*/
// AcqMethod::FetchResult::FetchResult - Constructor /*{{{*/
@@ -465,3 +442,11 @@ void pkgAcqMethod::FetchResult::TakeHashes(Hashes &Hash)
SHA512Sum = Hash.SHA512.Result();
}
/*}}}*/
+void pkgAcqMethod::Dequeue() { /*{{{*/
+ FetchItem const * const Tmp = Queue;
+ Queue = Queue->Next;
+ if (Tmp == QueueBack)
+ QueueBack = Queue;
+ delete Tmp;
+}
+ /*}}}*/
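Note: the three former copies of the dequeue logic in Fail(), URIDone() and Redirect() now share the private helper added at the end of the file. A minimal self-contained sketch of that pop-front pattern (FetchItem is reduced to its Next pointer here, everything else as in pkgAcqMethod above):

   struct FetchItem { FetchItem *Next; /* Uri, DestFile, ... omitted */ };

   // Pop the head of the singly linked queue; if the head was also the tail,
   // let QueueBack follow the (possibly NULL) new head before deleting it.
   static void DequeueFront(FetchItem *&Queue, FetchItem *&QueueBack)
   {
      FetchItem const * const Tmp = Queue;
      Queue = Queue->Next;
      if (Tmp == QueueBack)
         QueueBack = Queue;
      delete Tmp;
   }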
diff --git a/apt-pkg/acquire-method.h b/apt-pkg/acquire-method.h
index 2dd9ad685..00f99e0a0 100644
--- a/apt-pkg/acquire-method.h
+++ b/apt-pkg/acquire-method.h
@@ -104,6 +104,9 @@ class pkgAcqMethod
pkgAcqMethod(const char *Ver,unsigned long Flags = 0);
virtual ~pkgAcqMethod() {};
+
+ private:
+ void Dequeue();
};
/** @} */
diff --git a/apt-pkg/acquire-worker.cc b/apt-pkg/acquire-worker.cc
index 3bb977e14..9d90b08bc 100644
--- a/apt-pkg/acquire-worker.cc
+++ b/apt-pkg/acquire-worker.cc
@@ -244,6 +244,21 @@ bool pkgAcquire::Worker::RunMessages()
string NewURI = LookupTag(Message,"New-URI",URI.c_str());
Itm->URI = NewURI;
+
+ ItemDone();
+
+ pkgAcquire::Item *Owner = Itm->Owner;
+ pkgAcquire::ItemDesc Desc = *Itm;
+
+ // Change the status so that it can be dequeued
+ Owner->Status = pkgAcquire::Item::StatIdle;
+ // Mark the item as done (taking care of all queues)
+ // and then put it in the main queue again
+ OwnerQ->ItemDone(Itm);
+ OwnerQ->Owner->Enqueue(Desc);
+
+ if (Log != 0)
+ Log->Done(Desc);
break;
}
@@ -431,7 +446,9 @@ bool pkgAcquire::Worker::MediaChange(string Message)
<< Drive << ":" // drive
<< msg.str() // l10n message
<< endl;
- write(status_fd, status.str().c_str(), status.str().size());
+
+ std::string const dlstatus = status.str();
+ FileFd::Write(status_fd, dlstatus.c_str(), dlstatus.size());
}
if (Log == 0 || Log->MediaChange(LookupTag(Message,"Media"),
@@ -465,40 +482,19 @@ bool pkgAcquire::Worker::SendConfiguration()
if (OutFd == -1)
return false;
-
- string Message = "601 Configuration\n";
- Message.reserve(2000);
- /* Write out all of the configuration directives by walking the
+ /* Write out all of the configuration directives by walking the
configuration tree */
- const Configuration::Item *Top = _config->Tree(0);
- for (; Top != 0;)
- {
- if (Top->Value.empty() == false)
- {
- string Line = "Config-Item: " + QuoteString(Top->FullTag(),"=\"\n") + "=";
- Line += QuoteString(Top->Value,"\n") + '\n';
- Message += Line;
- }
-
- if (Top->Child != 0)
- {
- Top = Top->Child;
- continue;
- }
-
- while (Top != 0 && Top->Next == 0)
- Top = Top->Parent;
- if (Top != 0)
- Top = Top->Next;
- }
- Message += '\n';
+ std::ostringstream Message;
+ Message << "601 Configuration\n";
+ _config->Dump(Message, NULL, "Config-Item: %F=%V\n", false);
+ Message << '\n';
if (Debug == true)
- clog << " -> " << Access << ':' << QuoteString(Message,"\n") << endl;
- OutQueue += Message;
- OutReady = true;
-
+ clog << " -> " << Access << ':' << QuoteString(Message.str(),"\n") << endl;
+ OutQueue += Message.str();
+ OutReady = true;
+
return true;
}
/*}}}*/
@@ -536,10 +532,10 @@ bool pkgAcquire::Worker::OutFdReady()
Res = write(OutFd,OutQueue.c_str(),OutQueue.length());
}
while (Res < 0 && errno == EINTR);
-
+
if (Res <= 0)
return MethodFailure();
-
+
OutQueue.erase(0,Res);
if (OutQueue.empty() == true)
OutReady = false;
diff --git a/apt-pkg/acquire.cc b/apt-pkg/acquire.cc
index 573a85c2f..a8a5abd34 100644
--- a/apt-pkg/acquire.cc
+++ b/apt-pkg/acquire.cc
@@ -244,11 +244,19 @@ void pkgAcquire::Dequeue(Item *Itm)
{
Queue *I = Queues;
bool Res = false;
- for (; I != 0; I = I->Next)
- Res |= I->Dequeue(Itm);
-
if (Debug == true)
clog << "Dequeuing " << Itm->DestFile << endl;
+
+ for (; I != 0; I = I->Next)
+ {
+ if (I->Dequeue(Itm))
+ {
+ Res = true;
+ if (Debug == true)
+ clog << "Dequeued from " << I->Name << endl;
+ }
+ }
+
if (Res == true)
ToFetch--;
}
@@ -269,9 +277,30 @@ string pkgAcquire::QueueName(string Uri,MethodConfig const *&Config)
/* Single-Instance methods get exactly one queue per URI. This is
also used for the Access queue method */
if (Config->SingleInstance == true || QueueMode == QueueAccess)
- return U.Access;
+ return U.Access;
- return U.Access + ':' + U.Host;
+ string AccessSchema = U.Access + ':',
+ FullQueueName = AccessSchema + U.Host;
+ unsigned int Instances = 0, SchemaLength = AccessSchema.length();
+
+ Queue *I = Queues;
+ for (; I != 0; I = I->Next) {
+ // if the queue already exists, re-use it
+ if (I->Name == FullQueueName)
+ return FullQueueName;
+
+ if (I->Name.compare(0, SchemaLength, AccessSchema) == 0)
+ Instances++;
+ }
+
+ if (Debug) {
+ clog << "Found " << Instances << " instances of " << U.Access << endl;
+ }
+
+ if (Instances >= (unsigned int)_config->FindI("Acquire::QueueHost::Limit",10))
+ return U.Access;
+
+ return FullQueueName;
}
/*}}}*/
// Acquire::GetConfig - Fetch the configuration information /*{{{*/
@@ -872,7 +901,9 @@ bool pkgAcquireStatus::Pulse(pkgAcquire *Owner)
<< ":" << (CurrentBytes/float(TotalBytes)*100.0)
<< ":" << msg
<< endl;
- write(fd, status.str().c_str(), status.str().size());
+
+ std::string const dlstatus = status.str();
+ FileFd::Write(fd, dlstatus.c_str(), dlstatus.size());
}
return true;
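Note on the QueueName() change above: instead of unconditionally using one queue per <access>:<host> pair, the number of host queues per access method is now capped by Acquire::QueueHost::Limit (default 10); past that limit, items fall back into the single per-access queue. A self-contained sketch of just the naming rule (function name and parameters are illustrative, not apt API):

   #include <string>

   std::string queueNameFor(std::string const &access, std::string const &host,
                            unsigned int existingHostQueues, unsigned int limit = 10)
   {
      if (existingHostQueues >= limit)
         return access;                  // e.g. "http"
      return access + ':' + host;        // e.g. "http:archive.example.org"
   }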
diff --git a/apt-pkg/algorithms.cc b/apt-pkg/algorithms.cc
index 8beb2d51c..8cd9d4c6e 100644
--- a/apt-pkg/algorithms.cc
+++ b/apt-pkg/algorithms.cc
@@ -58,6 +58,12 @@ pkgSimulate::pkgSimulate(pkgDepCache *Cache) : pkgPackageManager(Cache),
FileNames[I] = Jnk;
}
/*}}}*/
+// Simulate::~Simulate - Destructor /*{{{*/
+pkgSimulate::~pkgSimulate()
+{
+ delete[] Flags;
+}
+ /*}}}*/
// Simulate::Describe - Describe a package /*{{{*/
// ---------------------------------------------------------------------
/* Parameter Current == true displays the current package version,
@@ -188,6 +194,11 @@ bool pkgSimulate::Remove(PkgIterator iPkg,bool Purge)
{
// Adapt the iterator
PkgIterator Pkg = Sim.FindPkg(iPkg.Name(), iPkg.Arch());
+ if (Pkg.end() == true)
+ {
+ std::cerr << (Purge ? "Purg" : "Remv") << " invalid package " << iPkg.FullName() << std::endl;
+ return false;
+ }
Flags[Pkg->ID] = 3;
Sim.MarkDelete(Pkg);
@@ -276,13 +287,13 @@ bool pkgApplyStatus(pkgDepCache &Cache)
Cache[I].CandidateVerIter(Cache).Downloadable() == true)
Cache.MarkInstall(I, true, 0, false);
else
- Cache.MarkDelete(I);
+ Cache.MarkDelete(I, false, 0, false);
}
break;
// This means removal failed
case pkgCache::State::HalfInstalled:
- Cache.MarkDelete(I);
+ Cache.MarkDelete(I, false, 0, false);
break;
default:
@@ -356,11 +367,36 @@ bool pkgDistUpgrade(pkgDepCache &Cache)
if (I->CurrentVer != 0)
Cache.MarkInstall(I, true, 0, false);
- /* Now, auto upgrade all essential packages - this ensures that
- the essential packages are present and working */
- for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I)
- if ((I->Flags & pkgCache::Flag::Essential) == pkgCache::Flag::Essential)
- Cache.MarkInstall(I, true, 0, false);
+ /* Now, install each essential package which is not installed
+ (and not provided by another package in the same name group) */
+ std::string essential = _config->Find("pkgCacheGen::Essential", "all");
+ if (essential == "all")
+ {
+ for (pkgCache::GrpIterator G = Cache.GrpBegin(); G.end() == false; ++G)
+ {
+ bool isEssential = false;
+ bool instEssential = false;
+ for (pkgCache::PkgIterator P = G.PackageList(); P.end() == false; P = G.NextPkg(P))
+ {
+ if ((P->Flags & pkgCache::Flag::Essential) != pkgCache::Flag::Essential)
+ continue;
+ isEssential = true;
+ if (Cache[P].Install() == true)
+ {
+ instEssential = true;
+ break;
+ }
+ }
+ if (isEssential == false || instEssential == true)
+ continue;
+ pkgCache::PkgIterator P = G.FindPreferredPkg();
+ Cache.MarkInstall(P, true, 0, false);
+ }
+ }
+ else if (essential != "none")
+ for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I)
+ if ((I->Flags & pkgCache::Flag::Essential) == pkgCache::Flag::Essential)
+ Cache.MarkInstall(I, true, 0, false);
/* We do it again over all previously installed packages to force
conflict resolution on them all. */
@@ -738,7 +774,7 @@ bool pkgProblemResolver::DoUpgrade(pkgCache::PkgIterator Pkg)
if (WasKept == true)
Cache.MarkKeep(Pkg, false, false);
else
- Cache.MarkDelete(Pkg);
+ Cache.MarkDelete(Pkg, false, 0, false);
return false;
}
@@ -867,7 +903,7 @@ bool pkgProblemResolver::ResolveInternal(bool const BrokenFix)
OldBreaks < Cache.BrokenCount())
{
if (OldVer == 0)
- Cache.MarkDelete(I);
+ Cache.MarkDelete(I, false, 0, false);
else
Cache.MarkKeep(I, false, false);
}
@@ -906,7 +942,7 @@ bool pkgProblemResolver::ResolveInternal(bool const BrokenFix)
{
if (Debug == true)
clog << " Or group remove for " << I.FullName(false) << endl;
- Cache.MarkDelete(I);
+ Cache.MarkDelete(I, false, 0, false);
Change = true;
}
}
@@ -1041,7 +1077,7 @@ bool pkgProblemResolver::ResolveInternal(bool const BrokenFix)
{
if (Debug == true)
clog << " Removing " << I.FullName(false) << " rather than change " << Start.TargetPkg().FullName(false) << endl;
- Cache.MarkDelete(I);
+ Cache.MarkDelete(I, false, 0, false);
if (Counter > 1 && Scores[Pkg->ID] > Scores[I->ID])
Scores[I->ID] = Scores[Pkg->ID];
}
@@ -1130,7 +1166,7 @@ bool pkgProblemResolver::ResolveInternal(bool const BrokenFix)
if (Debug == true)
clog << " Removing " << I.FullName(false) << " because I can't find " << Start.TargetPkg().FullName(false) << endl;
if (InOr == false)
- Cache.MarkDelete(I);
+ Cache.MarkDelete(I, false, 0, false);
}
Change = true;
@@ -1157,7 +1193,7 @@ bool pkgProblemResolver::ResolveInternal(bool const BrokenFix)
{
if (Debug == true)
clog << " Fixing " << I.FullName(false) << " via remove of " << J->Pkg.FullName(false) << endl;
- Cache.MarkDelete(J->Pkg);
+ Cache.MarkDelete(J->Pkg, false, 0, false);
}
}
else
@@ -1383,12 +1419,18 @@ bool pkgProblemResolver::ResolveByKeepInternal()
continue;
// Restart again.
- if (K == LastStop)
- return _error->Error("Internal Error, pkgProblemResolver::ResolveByKeep is looping on package %s.",I.FullName(false).c_str());
+ if (K == LastStop) {
+ // I is an iterator based off our temporary package list,
+ // so copy the name we need before deleting the temporary list
+ std::string const LoopingPackage = I.FullName(false);
+ delete[] PList;
+ return _error->Error("Internal Error, pkgProblemResolver::ResolveByKeep is looping on package %s.", LoopingPackage.c_str());
+ }
LastStop = K;
K = PList - 1;
- }
+ }
+ delete[] PList;
return true;
}
/*}}}*/
@@ -1453,7 +1495,7 @@ void pkgPrioSortList(pkgCache &Cache,pkgCache::Version **List)
qsort(List,Count,sizeof(*List),PrioComp);
}
/*}}}*/
-// ListUpdate - update the cache files /*{{{*/
+// ListUpdate - construct Fetcher and update the cache files /*{{{*/
// ---------------------------------------------------------------------
/* This is a simple wrapper to update the cache. it will fetch stuff
* from the network (or any other sources defined in sources.list)
@@ -1462,7 +1504,6 @@ bool ListUpdate(pkgAcquireStatus &Stat,
pkgSourceList &List,
int PulseInterval)
{
- pkgAcquire::RunResult res;
pkgAcquire Fetcher;
if (Fetcher.Setup(&Stat, _config->FindDir("Dir::State::Lists")) == false)
return false;
@@ -1471,11 +1512,24 @@ bool ListUpdate(pkgAcquireStatus &Stat,
if (List.GetIndexes(&Fetcher) == false)
return false;
+ return AcquireUpdate(Fetcher, PulseInterval, true);
+}
+ /*}}}*/
+// AcquireUpdate - take Fetcher and update the cache files /*{{{*/
+// ---------------------------------------------------------------------
+/* This is a simple wrapper to update the cache with a provided acquire
+ * If you only need control over Status and the used SourcesList use
+ * ListUpdate method instead.
+ */
+bool AcquireUpdate(pkgAcquire &Fetcher, int const PulseInterval,
+ bool const RunUpdateScripts, bool const ListCleanup)
+{
// Run scripts
- RunScripts("APT::Update::Pre-Invoke");
-
- // check arguments
- if(PulseInterval>0)
+ if (RunUpdateScripts == true)
+ RunScripts("APT::Update::Pre-Invoke");
+
+ pkgAcquire::RunResult res;
+ if(PulseInterval > 0)
res = Fetcher.Run(PulseInterval);
else
res = Fetcher.Run();
@@ -1512,7 +1566,7 @@ bool ListUpdate(pkgAcquireStatus &Stat,
// Clean out any old list files
// Keep "APT::Get::List-Cleanup" name for compatibility, but
// this is really a global option for the APT library now
- if (!TransientNetworkFailure && !Failed &&
+ if (!TransientNetworkFailure && !Failed && ListCleanup == true &&
(_config->FindB("APT::Get::List-Cleanup",true) == true &&
_config->FindB("APT::List-Cleanup",true) == true))
{
@@ -1529,11 +1583,14 @@ bool ListUpdate(pkgAcquireStatus &Stat,
// Run the success scripts if all was fine
- if(!TransientNetworkFailure && !Failed)
- RunScripts("APT::Update::Post-Invoke-Success");
+ if (RunUpdateScripts == true)
+ {
+ if(!TransientNetworkFailure && !Failed)
+ RunScripts("APT::Update::Post-Invoke-Success");
- // Run the other scripts
- RunScripts("APT::Update::Post-Invoke");
+ // Run the other scripts
+ RunScripts("APT::Update::Post-Invoke");
+ }
return true;
}
/*}}}*/
diff --git a/apt-pkg/algorithms.h b/apt-pkg/algorithms.h
index 37eacf1f8..aff8a68f2 100644
--- a/apt-pkg/algorithms.h
+++ b/apt-pkg/algorithms.h
@@ -78,6 +78,7 @@ private:
public:
pkgSimulate(pkgDepCache *Cache);
+ ~pkgSimulate();
};
/*}}}*/
class pkgProblemResolver /*{{{*/
@@ -147,5 +148,7 @@ bool pkgMinimizeUpgrade(pkgDepCache &Cache);
void pkgPrioSortList(pkgCache &Cache,pkgCache::Version **List);
bool ListUpdate(pkgAcquireStatus &progress, pkgSourceList &List, int PulseInterval=0);
-
+bool AcquireUpdate(pkgAcquire &Fetcher, int const PulseInterval = 0,
+ bool const RunUpdateScripts = true, bool const ListCleanup = true);
+
#endif
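Usage sketch for the new AcquireUpdate() entry point (hedged; Stat and List stand for a caller-provided pkgAcquireStatus and pkgSourceList, exactly as in ListUpdate() above). It lets a caller that builds its own pkgAcquire decide whether the Pre-/Post-Invoke scripts run and whether old list files are cleaned up:

   pkgAcquire Fetcher;
   if (Fetcher.Setup(&Stat, _config->FindDir("Dir::State::Lists")) == false)
      return false;
   if (List.GetIndexes(&Fetcher) == false)
      return false;
   // run the fetch, but skip the hook scripts and keep old list files
   return AcquireUpdate(Fetcher, /*PulseInterval*/ 0,
                        /*RunUpdateScripts*/ false, /*ListCleanup*/ false);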
diff --git a/apt-pkg/aptconfiguration.cc b/apt-pkg/aptconfiguration.cc
index 4324f0e63..653775688 100644
--- a/apt-pkg/aptconfiguration.cc
+++ b/apt-pkg/aptconfiguration.cc
@@ -47,6 +47,7 @@ const Configuration::getCompressionTypes(bool const &Cached) {
_config->CndSet("Acquire::CompressionTypes::gz","gzip");
setDefaultConfigurationForCompressors();
+ std::vector<APT::Configuration::Compressor> const compressors = getCompressors();
// accept non-list order as override setting for config settings on commandline
std::string const overrideOrder = _config->Find("Acquire::CompressionTypes::Order","");
@@ -60,15 +61,17 @@ const Configuration::getCompressionTypes(bool const &Cached) {
if ((*o).empty() == true)
continue;
// ignore types we have no method ready to use
- if (_config->Exists(std::string("Acquire::CompressionTypes::").append(*o)) == false)
+ std::string const method = std::string("Acquire::CompressionTypes::").append(*o);
+ if (_config->Exists(method) == false)
continue;
// ignore types we have no app ready to use
- std::string const appsetting = std::string("Dir::Bin::").append(*o);
- if (_config->Exists(appsetting) == true) {
- std::string const app = _config->FindFile(appsetting.c_str(), "");
- if (app.empty() == false && FileExists(app) == false)
- continue;
- }
+ std::string const app = _config->Find(method);
+ std::vector<APT::Configuration::Compressor>::const_iterator c = compressors.begin();
+ for (; c != compressors.end(); ++c)
+ if (c->Name == app)
+ break;
+ if (c == compressors.end())
+ continue;
types.push_back(*o);
}
@@ -84,12 +87,12 @@ const Configuration::getCompressionTypes(bool const &Cached) {
if (std::find(types.begin(),types.end(),Types->Tag) != types.end())
continue;
// ignore types we have no app ready to use
- std::string const appsetting = std::string("Dir::Bin::").append(Types->Value);
- if (appsetting.empty() == false && _config->Exists(appsetting) == true) {
- std::string const app = _config->FindFile(appsetting.c_str(), "");
- if (app.empty() == false && FileExists(app) == false)
- continue;
- }
+ std::vector<APT::Configuration::Compressor>::const_iterator c = compressors.begin();
+ for (; c != compressors.end(); ++c)
+ if (c->Name == Types->Value)
+ break;
+ if (c == compressors.end())
+ continue;
types.push_back(Types->Tag);
}
@@ -141,7 +144,7 @@ std::vector<std::string> const Configuration::getLanguages(bool const &All,
if (D != 0) {
builtin.push_back("none");
for (struct dirent *Ent = readdir(D); Ent != 0; Ent = readdir(D)) {
- string const name = Ent->d_name;
+ string const name = SubstVar(Ent->d_name, "%5f", "_");
size_t const foundDash = name.rfind("-");
size_t const foundUnderscore = name.rfind("_", foundDash);
if (foundDash == string::npos || foundUnderscore == string::npos ||
@@ -231,17 +234,21 @@ std::vector<std::string> const Configuration::getLanguages(bool const &All,
// override the configuration settings vector of languages.
string const forceLang = _config->Find("Acquire::Languages","");
if (forceLang.empty() == false) {
- if (forceLang == "environment") {
- codes = environment;
- } else if (forceLang != "none")
- codes.push_back(forceLang);
- else //if (forceLang == "none")
- builtin.clear();
- allCodes = codes;
- for (std::vector<string>::const_iterator b = builtin.begin();
- b != builtin.end(); ++b)
- if (std::find(allCodes.begin(), allCodes.end(), *b) == allCodes.end())
- allCodes.push_back(*b);
+ if (forceLang == "none") {
+ codes.clear();
+ allCodes.clear();
+ allCodes.push_back("none");
+ } else {
+ if (forceLang == "environment")
+ codes = environment;
+ else
+ codes.push_back(forceLang);
+ allCodes = codes;
+ for (std::vector<string>::const_iterator b = builtin.begin();
+ b != builtin.end(); ++b)
+ if (std::find(allCodes.begin(), allCodes.end(), *b) == allCodes.end())
+ allCodes.push_back(*b);
+ }
if (All == true)
return allCodes;
else
@@ -312,6 +319,17 @@ std::vector<std::string> const Configuration::getLanguages(bool const &All,
return codes;
}
/*}}}*/
+// checkLanguage - are we interested in the given Language? /*{{{*/
+bool const Configuration::checkLanguage(std::string Lang, bool const All) {
+ // the empty Language is always interesting as it is the original
+ if (Lang.empty() == true)
+ return true;
+ // filenames are encoded, so undo this
+ Lang = SubstVar(Lang, "%5f", "_");
+ std::vector<std::string> const langs = getLanguages(All, true);
+ return (std::find(langs.begin(), langs.end(), Lang) != langs.end());
+}
+ /*}}}*/
// getArchitectures - Return Vector of prefered Architectures /*{{{*/
std::vector<std::string> const Configuration::getArchitectures(bool const &Cached) {
using std::string;
@@ -432,9 +450,30 @@ bool const Configuration::checkArchitecture(std::string const &Arch) {
// setDefaultConfigurationForCompressors /*{{{*/
void Configuration::setDefaultConfigurationForCompressors() {
// Set default application paths to check for optional compression types
- _config->CndSet("Dir::Bin::lzma", "/usr/bin/lzma");
- _config->CndSet("Dir::Bin::xz", "/usr/bin/xz");
_config->CndSet("Dir::Bin::bzip2", "/bin/bzip2");
+ _config->CndSet("Dir::Bin::xz", "/usr/bin/xz");
+ if (FileExists(_config->FindFile("Dir::Bin::xz")) == true) {
+ _config->Clear("Dir::Bin::lzma");
+ _config->Set("APT::Compressor::lzma::Binary", "xz");
+ if (_config->Exists("APT::Compressor::lzma::CompressArg") == false) {
+ _config->Set("APT::Compressor::lzma::CompressArg::", "--format=lzma");
+ _config->Set("APT::Compressor::lzma::CompressArg::", "-9");
+ }
+ if (_config->Exists("APT::Compressor::lzma::UncompressArg") == false) {
+ _config->Set("APT::Compressor::lzma::UncompressArg::", "--format=lzma");
+ _config->Set("APT::Compressor::lzma::UncompressArg::", "-d");
+ }
+ } else {
+ _config->CndSet("Dir::Bin::lzma", "/usr/bin/lzma");
+ if (_config->Exists("APT::Compressor::lzma::CompressArg") == false) {
+ _config->Set("APT::Compressor::lzma::CompressArg::", "--suffix=");
+ _config->Set("APT::Compressor::lzma::CompressArg::", "-9");
+ }
+ if (_config->Exists("APT::Compressor::lzma::UncompressArg") == false) {
+ _config->Set("APT::Compressor::lzma::UncompressArg::", "--suffix=");
+ _config->Set("APT::Compressor::lzma::UncompressArg::", "-d");
+ }
+ }
}
/*}}}*/
// getCompressors - Return Vector of usbale compressors /*{{{*/
@@ -453,15 +492,23 @@ const Configuration::getCompressors(bool const Cached) {
setDefaultConfigurationForCompressors();
- compressors.push_back(Compressor(".", "", "", "", "", 1));
+ compressors.push_back(Compressor(".", "", "", NULL, NULL, 1));
if (_config->Exists("Dir::Bin::gzip") == false || FileExists(_config->FindFile("Dir::Bin::gzip")) == true)
compressors.push_back(Compressor("gzip",".gz","gzip","-9n","-d",2));
+#ifdef HAVE_ZLIB
+ else
+ compressors.push_back(Compressor("gzip",".gz","false", NULL, NULL, 2));
+#endif
if (_config->Exists("Dir::Bin::bzip2") == false || FileExists(_config->FindFile("Dir::Bin::bzip2")) == true)
compressors.push_back(Compressor("bzip2",".bz2","bzip2","-9","-d",3));
- if (_config->Exists("Dir::Bin::lzma") == false || FileExists(_config->FindFile("Dir::Bin::lzma")) == true)
- compressors.push_back(Compressor("lzma",".lzma","lzma","-9","-d",4));
+#ifdef HAVE_BZ2
+ else
+ compressors.push_back(Compressor("bzip2",".bz2","false", NULL, NULL, 3));
+#endif
if (_config->Exists("Dir::Bin::xz") == false || FileExists(_config->FindFile("Dir::Bin::xz")) == true)
- compressors.push_back(Compressor("xz",".xz","xz","-6","-d",5));
+ compressors.push_back(Compressor("xz",".xz","xz","-6","-d",4));
+ if (_config->Exists("Dir::Bin::lzma") == false || FileExists(_config->FindFile("Dir::Bin::lzma")) == true)
+ compressors.push_back(Compressor("lzma",".lzma","lzma","-9","-d",5));
std::vector<std::string> const comp = _config->FindVector("APT::Compressor");
for (std::vector<std::string>::const_iterator c = comp.begin();
@@ -494,7 +541,7 @@ Configuration::Compressor::Compressor(char const *name, char const *extension,
char const *binary,
char const *compressArg, char const *uncompressArg,
unsigned short const cost) {
- std::string const config = std::string("APT:Compressor::").append(name).append("::");
+ std::string const config = std::string("APT::Compressor::").append(name).append("::");
Name = _config->Find(std::string(config).append("Name"), name);
Extension = _config->Find(std::string(config).append("Extension"), extension);
Binary = _config->Find(std::string(config).append("Binary"), binary);
diff --git a/apt-pkg/aptconfiguration.h b/apt-pkg/aptconfiguration.h
index e098d0fd6..d22b675c0 100644
--- a/apt-pkg/aptconfiguration.h
+++ b/apt-pkg/aptconfiguration.h
@@ -67,6 +67,14 @@ public: /*{{{*/
std::vector<std::string> static const getLanguages(bool const &All = false,
bool const &Cached = true, char const ** const Locale = 0);
+ /** \brief Are we interested in the given Language?
+ *
+ * \param Lang is the language we want to check
+ * \param All defines if we check against all codes or only against used codes
+ * \return true if we are interested, false otherwise
+ */
+ bool static const checkLanguage(std::string Lang, bool const All = false);
+
/** \brief Returns a vector of Architectures we support
*
* \param Cached saves the result so we need to calculated it only once
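A short usage sketch of the new checkLanguage() helper (values illustrative): index file names on disk encode '_' as %5f, which the helper undoes before comparing against getLanguages():

   // the untranslated original (empty language code) is always wanted
   bool keepOriginal = APT::Configuration::checkLanguage("");          // true
   // a Translation-pt_BR index file shows up as "pt%5fBR" on disk
   bool keepPtBR = APT::Configuration::checkLanguage("pt%5fBR", true);
   // true iff pt_BR is in the list returned by getLanguages(true)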
diff --git a/apt-pkg/cachefilter.cc b/apt-pkg/cachefilter.cc
index 9ec3fa699..58cc812bf 100644
--- a/apt-pkg/cachefilter.cc
+++ b/apt-pkg/cachefilter.cc
@@ -9,12 +9,14 @@
#include <apt-pkg/cachefilter.h>
#include <apt-pkg/error.h>
#include <apt-pkg/pkgcache.h>
-
-#include <apti18n.h>
+#include <apt-pkg/strutl.h>
#include <string>
#include <regex.h>
+#include <fnmatch.h>
+
+#include <apti18n.h>
/*}}}*/
namespace APT {
namespace CacheFilter {
@@ -52,5 +54,48 @@ PackageNameMatchesRegEx::~PackageNameMatchesRegEx() { /*{{{*/
delete pattern;
}
/*}}}*/
+
+// CompleteArch to <kernel>-<cpu> tuple /*{{{*/
+//----------------------------------------------------------------------
+/* The complete architecture, consisting of <kernel>-<cpu>. */
+static std::string CompleteArch(std::string const &arch) {
+ if (arch.find('-') != std::string::npos) {
+ // ensure that only -any- is replaced and not something like company-
+ std::string complete = std::string("-").append(arch).append("-");
+ complete = SubstVar(complete, "-any-", "-*-");
+ complete = complete.substr(1, complete.size()-2);
+ return complete;
+ }
+ else if (arch == "any") return "*-*";
+ else return "linux-" + arch;
+}
+ /*}}}*/
+PackageArchitectureMatchesSpecification::PackageArchitectureMatchesSpecification(std::string const &pattern, bool const isPattern) :/*{{{*/
+ literal(pattern), isPattern(isPattern), d(NULL) {
+ complete = CompleteArch(pattern);
+}
+ /*}}}*/
+bool PackageArchitectureMatchesSpecification::operator() (char const * const &arch) {/*{{{*/
+ if (strcmp(literal.c_str(), arch) == 0 ||
+ strcmp(complete.c_str(), arch) == 0)
+ return true;
+ std::string const pkgarch = CompleteArch(arch);
+ if (isPattern == true)
+ return fnmatch(complete.c_str(), pkgarch.c_str(), 0) == 0;
+ return fnmatch(pkgarch.c_str(), complete.c_str(), 0) == 0;
+}
+ /*}}}*/
+bool PackageArchitectureMatchesSpecification::operator() (pkgCache::PkgIterator const &Pkg) {/*{{{*/
+ return (*this)(Pkg.Arch());
+}
+ /*}}}*/
+bool PackageArchitectureMatchesSpecification::operator() (pkgCache::VerIterator const &Ver) {/*{{{*/
+ return (*this)(Ver.ParentPkg());
+}
+ /*}}}*/
+PackageArchitectureMatchesSpecification::~PackageArchitectureMatchesSpecification() { /*{{{*/
+}
+ /*}}}*/
+
}
}
diff --git a/apt-pkg/cachefilter.h b/apt-pkg/cachefilter.h
index 5d426008b..25cd43f47 100644
--- a/apt-pkg/cachefilter.h
+++ b/apt-pkg/cachefilter.h
@@ -26,6 +26,36 @@ public:
~PackageNameMatchesRegEx();
};
/*}}}*/
+// PackageArchitectureMatchesSpecification /*{{{*/
+/** \class PackageArchitectureMatchesSpecification
+ \brief matching against architecture specification strings
+
+ The strings are of the format <kernel>-<cpu> where either component,
+ or the whole string, can be the wildcard "any" as defined in
+ debian-policy §11.1 "Architecture specification strings".
+
+ Examples: i386, mipsel, linux-any, any-amd64, any */
+class PackageArchitectureMatchesSpecification {
+ std::string literal;
+ std::string complete;
+ bool isPattern;
+ /** \brief dpointer placeholder (for later in case we need it) */
+ void *d;
+public:
+ /** \brief matching against architecture specification strings
+ *
+ * @param pattern is the architecture specification string
+ * @param isPattern defines if the given \b pattern is a
+ * architecture specification pattern to match others against
+ * or if it is the fixed string and matched against patterns
+ */
+ PackageArchitectureMatchesSpecification(std::string const &pattern, bool const isPattern = true);
+ bool operator() (char const * const &arch);
+ bool operator() (pkgCache::PkgIterator const &Pkg);
+ bool operator() (pkgCache::VerIterator const &Ver);
+ ~PackageArchitectureMatchesSpecification();
+};
+ /*}}}*/
}
}
#endif
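A usage sketch for the new matcher (the results follow from the CompleteArch()/fnmatch logic in cachefilter.cc above; the concrete architectures are only examples):

   APT::CacheFilter::PackageArchitectureMatchesSpecification pams("linux-any");
   bool hit  = pams("amd64");           // true: "amd64" completes to "linux-amd64"
   bool miss = pams("kfreebsd-amd64");  // false: the kernel part differs
   // with isPattern == false the roles are swapped, i.e. the fixed string is
   // matched against architecture patterns coming from elsewhere:
   APT::CacheFilter::PackageArchitectureMatchesSpecification fixed("amd64", false);
   bool hit2 = fixed("linux-any");      // true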
diff --git a/apt-pkg/cacheiterators.h b/apt-pkg/cacheiterators.h
index d5e018be9..dcd353119 100644
--- a/apt-pkg/cacheiterators.h
+++ b/apt-pkg/cacheiterators.h
@@ -285,6 +285,7 @@ class pkgCache::DepIterator : public Iterator<Dependency, DepIterator> {
bool IsNegative() const;
bool IsIgnorable(PrvIterator const &Prv) const;
bool IsIgnorable(PkgIterator const &Pkg) const;
+ bool IsMultiArchImplicit() const;
void GlobOr(DepIterator &Start,DepIterator &End);
Version **AllTargets() const;
bool SmartTargetPkg(PkgIterator &Result) const;
@@ -329,8 +330,9 @@ class pkgCache::PrvIterator : public Iterator<Provides, PrvIterator> {
inline VerIterator OwnerVer() const {return VerIterator(*Owner,Owner->VerP + S->Version);};
inline PkgIterator OwnerPkg() const {return PkgIterator(*Owner,Owner->PkgP + Owner->VerP[S->Version].ParentPkg);};
- inline PrvIterator() : Iterator<Provides, PrvIterator>(), Type(PrvVer) {};
+ bool IsMultiArchImplicit() const;
+ inline PrvIterator() : Iterator<Provides, PrvIterator>(), Type(PrvVer) {};
inline PrvIterator(pkgCache &Owner, Provides *Trg, Version*) :
Iterator<Provides, PrvIterator>(Owner, Trg), Type(PrvVer) {
if (S == 0)
diff --git a/apt-pkg/cacheset.cc b/apt-pkg/cacheset.cc
index b892ab4bf..1fea4f94a 100644
--- a/apt-pkg/cacheset.cc
+++ b/apt-pkg/cacheset.cc
@@ -182,15 +182,63 @@ pkgCache::PkgIterator PackageContainerInterface::FromName(pkgCacheFile &Cache,
return Pkg;
}
/*}}}*/
+// FromGroup - Returns the package defined by this string /*{{{*/
+bool PackageContainerInterface::FromGroup(PackageContainerInterface * const pci, pkgCacheFile &Cache,
+ std::string pkg, CacheSetHelper &helper) {
+ if (unlikely(Cache.GetPkgCache() == 0))
+ return false;
+
+ size_t const archfound = pkg.find_last_of(':');
+ std::string arch;
+ if (archfound != std::string::npos) {
+ arch = pkg.substr(archfound+1);
+ pkg.erase(archfound);
+ if (arch == "all" || arch == "native")
+ arch = _config->Find("APT::Architecture");
+ }
+
+ pkgCache::GrpIterator Grp = Cache.GetPkgCache()->FindGrp(pkg);
+ if (Grp.end() == false) {
+ if (arch.empty() == true) {
+ pkgCache::PkgIterator Pkg = Grp.FindPreferredPkg();
+ if (Pkg.end() == false)
+ {
+ pci->insert(Pkg);
+ return true;
+ }
+ } else {
+ bool found = false;
+ // for 'linux-any' return the first package matching, for 'linux-*' return all matches
+ bool const isGlobal = arch.find('*') != std::string::npos;
+ APT::CacheFilter::PackageArchitectureMatchesSpecification pams(arch);
+ for (pkgCache::PkgIterator Pkg = Grp.PackageList(); Pkg.end() == false; Pkg = Grp.NextPkg(Pkg)) {
+ if (pams(Pkg) == false)
+ continue;
+ pci->insert(Pkg);
+ found = true;
+ if (isGlobal == false)
+ break;
+ }
+ if (found == true)
+ return true;
+ }
+ }
+
+ pkgCache::PkgIterator Pkg = helper.canNotFindPkgName(Cache, pkg);
+ if (Pkg.end() == true)
+ return false;
+
+ pci->insert(Pkg);
+ return true;
+}
+ /*}}}*/
// FromString - Return all packages matching a specific string /*{{{*/
bool PackageContainerInterface::FromString(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string const &str, CacheSetHelper &helper) {
bool found = true;
_error->PushToStack();
- pkgCache::PkgIterator Pkg = FromName(Cache, str, helper);
- if (Pkg.end() == false)
- pci->insert(Pkg);
- else if (FromTask(pci, Cache, str, helper) == false &&
+ if (FromGroup(pci, Cache, str, helper) == false &&
+ FromTask(pci, Cache, str, helper) == false &&
FromRegEx(pci, Cache, str, helper) == false)
{
helper.canNotFindPackage(pci, Cache, str);
@@ -217,6 +265,7 @@ bool PackageContainerInterface::FromModifierCommandLine(unsigned short &modID, P
pkgCacheFile &Cache, const char * cmdline,
std::list<Modifier> const &mods, CacheSetHelper &helper) {
std::string str = cmdline;
+ unsigned short fallback = modID;
bool modifierPresent = false;
for (std::list<Modifier>::const_iterator mod = mods.begin();
mod != mods.end(); ++mod) {
@@ -243,6 +292,7 @@ bool PackageContainerInterface::FromModifierCommandLine(unsigned short &modID, P
helper.showErrors(errors);
if (Pkg.end() == false) {
pci->insert(Pkg);
+ modID = fallback;
return true;
}
}
@@ -281,13 +331,14 @@ bool VersionContainerInterface::FromModifierCommandLine(unsigned short &modID,
modifierPresent = true;
break;
}
-
if (modifierPresent == true) {
bool const errors = helper.showErrors(false);
bool const found = VersionContainerInterface::FromString(vci, Cache, cmdline, select, helper, true);
helper.showErrors(errors);
- if (found == true)
+ if (found == true) {
+ modID = fallback;
return true;
+ }
}
return FromString(vci, Cache, str, select, helper);
}
diff --git a/apt-pkg/cacheset.h b/apt-pkg/cacheset.h
index 6f0a0e358..2a45910ba 100644
--- a/apt-pkg/cacheset.h
+++ b/apt-pkg/cacheset.h
@@ -139,6 +139,7 @@ public:
static bool FromTask(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern, CacheSetHelper &helper);
static bool FromRegEx(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern, CacheSetHelper &helper);
static pkgCache::PkgIterator FromName(pkgCacheFile &Cache, std::string const &pattern, CacheSetHelper &helper);
+ static bool FromGroup(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern, CacheSetHelper &helper);
static bool FromString(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string const &pattern, CacheSetHelper &helper);
static bool FromCommandLine(PackageContainerInterface * const pci, pkgCacheFile &Cache, const char **cmdline, CacheSetHelper &helper);
@@ -186,7 +187,7 @@ public: /*{{{*/
pkgCache::PkgIterator getPkg(void) const { return *_iter; }
inline pkgCache::PkgIterator operator*(void) const { return *_iter; };
operator typename Container::iterator(void) const { return _iter; }
- operator typename PackageContainer<Container>::const_iterator() { return PackageContainer<Container>::const_iterator(_iter); }
+ operator typename PackageContainer<Container>::const_iterator() { return typename PackageContainer<Container>::const_iterator(_iter); }
inline iterator& operator++() { ++_iter; return *this; }
inline iterator operator++(int) { iterator tmp(*this); operator++(); return tmp; }
inline bool operator!=(iterator const &i) const { return _iter != i._iter; };
@@ -506,7 +507,7 @@ public: /*{{{*/
pkgCache::VerIterator getVer(void) const { return *_iter; }
inline pkgCache::VerIterator operator*(void) const { return *_iter; };
operator typename Container::iterator(void) const { return _iter; }
- operator typename VersionContainer<Container>::const_iterator() { return VersionContainer<Container>::const_iterator(_iter); }
+ operator typename VersionContainer<Container>::const_iterator() { return typename VersionContainer<Container>::const_iterator(_iter); }
inline iterator& operator++() { ++_iter; return *this; }
inline iterator operator++(int) { iterator tmp(*this); operator++(); return tmp; }
inline bool operator!=(iterator const &i) const { return _iter != i._iter; };
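The FromGroup() addition above first splits a possible ':<arch>' suffix off the package name and maps "all"/"native" to the build architecture before looking up the group. A self-contained sketch of just that parsing step (the helper name is illustrative):

   #include <string>

   void splitNameArch(std::string pkg, std::string &name, std::string &arch,
                      std::string const &nativeArch)
   {
      size_t const archfound = pkg.find_last_of(':');
      if (archfound != std::string::npos)
      {
         arch = pkg.substr(archfound + 1);
         pkg.erase(archfound);
         if (arch == "all" || arch == "native")
            arch = nativeArch;           // e.g. "amd64"
      }
      name = pkg;                        // e.g. "libfoo" from "libfoo:i386"
   }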
diff --git a/apt-pkg/cdrom.cc b/apt-pkg/cdrom.cc
index 4462d4e24..9a9a854bf 100644
--- a/apt-pkg/cdrom.cc
+++ b/apt-pkg/cdrom.cc
@@ -272,6 +272,29 @@ bool pkgCdrom::DropBinaryArch(vector<string> &List)
return true;
}
/*}}}*/
+// DropTranslation - Dump unwanted Translation-<lang> files /*{{{*/
+// ---------------------------------------------------------------------
+/* Here we drop everything that is not configured in Acquire::Languages */
+bool pkgCdrom::DropTranslation(vector<string> &List)
+{
+ for (unsigned int I = 0; I < List.size(); I++)
+ {
+ const char *Start;
+ if ((Start = strstr(List[I].c_str(), "/Translation-")) == NULL)
+ continue;
+ Start += strlen("/Translation-");
+
+ if (APT::Configuration::checkLanguage(Start, true) == true)
+ continue;
+
+ // not accepted -> Erase it
+ List.erase(List.begin() + I);
+ --I; // the next entry is at the same index after the erase
+ }
+
+ return true;
+}
+ /*}}}*/
// DropRepeats - Drop repeated files resulting from symlinks /*{{{*/
// ---------------------------------------------------------------------
/* Here we go and stat every file that we found and strip dup inodes. */
@@ -363,6 +386,7 @@ void pkgCdrom::ReduceSourcelist(string CD,vector<string> &List)
string Word1 = string(*I,Space,SSpace-Space);
string Prefix = string(*I,0,Space);
+ string Component = string(*I,SSpace);
for (vector<string>::iterator J = List.begin(); J != I; ++J)
{
// Find a space..
@@ -377,9 +401,11 @@ void pkgCdrom::ReduceSourcelist(string CD,vector<string> &List)
continue;
if (string(*J,Space2,SSpace2-Space2) != Word1)
continue;
-
- *J += string(*I,SSpace);
- *I = string();
+
+ string Component2 = string(*J, SSpace2) + " ";
+ if (Component2.find(Component + " ") == std::string::npos)
+ *J += Component;
+ I->clear();
}
}
@@ -409,28 +435,12 @@ bool pkgCdrom::WriteDatabase(Configuration &Cnf)
/* Write out all of the configuration directives by walking the
configuration tree */
- const Configuration::Item *Top = Cnf.Tree(0);
- for (; Top != 0;)
- {
- // Print the config entry
- if (Top->Value.empty() == false)
- Out << Top->FullTag() + " \"" << Top->Value << "\";" << endl;
-
- if (Top->Child != 0)
- {
- Top = Top->Child;
- continue;
- }
-
- while (Top != 0 && Top->Next == 0)
- Top = Top->Parent;
- if (Top != 0)
- Top = Top->Next;
- }
+ Cnf.Dump(Out, NULL, "%f \"%v\";\n", false);
Out.close();
-
- link(DFile.c_str(),string(DFile + '~').c_str());
+
+ if (FileExists(DFile) == true)
+ rename(DFile.c_str(), string(DFile + '~').c_str());
if (rename(NewFile.c_str(),DFile.c_str()) != 0)
return _error->Errno("rename","Failed to rename %s.new to %s",
DFile.c_str(),DFile.c_str());
@@ -697,7 +707,8 @@ bool pkgCdrom::Add(pkgCdromStatus *log) /*{{{*/
return false;
}
- chdir(StartDir.c_str());
+ if (chdir(StartDir.c_str()) != 0)
+ return _error->Errno("chdir","Unable to change to %s", StartDir.c_str());
if (_config->FindB("Debug::aptcdrom",false) == true)
{
@@ -726,6 +737,8 @@ bool pkgCdrom::Add(pkgCdromStatus *log) /*{{{*/
DropRepeats(SigList,"InRelease");
_error->RevertToStack();
DropRepeats(TransList,"");
+ if (_config->FindB("APT::CDROM::DropTranslation", true) == true)
+ DropTranslation(TransList);
if(log != NULL) {
msg.str("");
ioprintf(msg, _("Found %zu package indexes, %zu source indexes, "
diff --git a/apt-pkg/cdrom.h b/apt-pkg/cdrom.h
index cedfccff7..4fc3d3928 100644
--- a/apt-pkg/cdrom.h
+++ b/apt-pkg/cdrom.h
@@ -60,6 +60,7 @@ class pkgCdrom /*{{{*/
unsigned int Depth = 0);
bool DropBinaryArch(std::vector<std::string> &List);
bool DropRepeats(std::vector<std::string> &List,const char *Name);
+ bool DropTranslation(std::vector<std::string> &List);
void ReduceSourcelist(std::string CD,std::vector<std::string> &List);
bool WriteDatabase(Configuration &Cnf);
bool WriteSourceList(std::string Name,std::vector<std::string> &List,bool Source);
diff --git a/apt-pkg/clean.cc b/apt-pkg/clean.cc
index ed8fa1aa9..eae419e34 100644
--- a/apt-pkg/clean.cc
+++ b/apt-pkg/clean.cc
@@ -54,9 +54,11 @@ bool pkgArchiveCleaner::Go(std::string Dir,pkgCache &Cache)
struct stat St;
if (stat(Dir->d_name,&St) != 0)
{
- chdir(StartDir.c_str());
+ _error->Errno("stat",_("Unable to stat %s."),Dir->d_name);
closedir(D);
- return _error->Errno("stat",_("Unable to stat %s."),Dir->d_name);
+ if (chdir(StartDir.c_str()) != 0)
+ return _error->Errno("chdir", _("Unable to change to %s"), StartDir.c_str());
+ return false;
}
// Grab the package name
@@ -79,12 +81,13 @@ bool pkgArchiveCleaner::Go(std::string Dir,pkgCache &Cache)
if (*I != '.')
continue;
std::string const Arch = DeQuoteString(std::string(Start,I-Start));
-
+
+ // ignore packages of unconfigured architectures
if (APT::Configuration::checkArchitecture(Arch) == false)
continue;
// Lookup the package
- pkgCache::PkgIterator P = Cache.FindPkg(Pkg);
+ pkgCache::PkgIterator P = Cache.FindPkg(Pkg, Arch);
if (P.end() != true)
{
pkgCache::VerIterator V = P.VersionList();
@@ -115,8 +118,9 @@ bool pkgArchiveCleaner::Go(std::string Dir,pkgCache &Cache)
Erase(Dir->d_name,Pkg,Ver,St);
};
- chdir(StartDir.c_str());
closedir(D);
- return true;
+ if (chdir(StartDir.c_str()) != 0)
+ return _error->Errno("chdir", _("Unable to change to %s"), StartDir.c_str());
+ return true;
}
/*}}}*/
diff --git a/apt-pkg/contrib/cmndline.cc b/apt-pkg/contrib/cmndline.cc
index 159f330a1..75d02cad4 100644
--- a/apt-pkg/contrib/cmndline.cc
+++ b/apt-pkg/contrib/cmndline.cc
@@ -92,8 +92,9 @@ bool CommandLine::Parse(int argc,const char **argv)
// Match up to a = against the list
Args *A;
const char *OptEnd = strchrnul(Opt, '=');
- for (A = ArgList; A->end() == false &&
- stringcasecmp(Opt,OptEnd,A->LongOpt) != 0; A++);
+ for (A = ArgList; A->end() == false &&
+ (A->LongOpt == 0 || stringcasecmp(Opt,OptEnd,A->LongOpt) != 0);
+ ++A);
// Failed, look for a word after the first - (no-foo)
bool PreceedMatch = false;
@@ -105,7 +106,8 @@ bool CommandLine::Parse(int argc,const char **argv)
Opt++;
for (A = ArgList; A->end() == false &&
- stringcasecmp(Opt,OptEnd,A->LongOpt) != 0; A++);
+ (A->LongOpt == 0 || stringcasecmp(Opt,OptEnd,A->LongOpt) != 0);
+ ++A);
// Failed again..
if (A->end() == true && OptEnd - Opt != 1)
diff --git a/apt-pkg/contrib/configuration.cc b/apt-pkg/contrib/configuration.cc
index 36866a35a..4de17e3e1 100644
--- a/apt-pkg/contrib/configuration.cc
+++ b/apt-pkg/contrib/configuration.cc
@@ -171,48 +171,60 @@ string Configuration::Find(const char *Name,const char *Default) const
string Configuration::FindFile(const char *Name,const char *Default) const
{
const Item *RootItem = Lookup("RootDir");
- std::string rootDir = (RootItem == 0) ? "" : RootItem->Value;
- if(rootDir.size() > 0 && rootDir[rootDir.size() - 1] != '/')
- rootDir.push_back('/');
+ std::string result = (RootItem == 0) ? "" : RootItem->Value;
+ if(result.empty() == false && result[result.size() - 1] != '/')
+ result.push_back('/');
const Item *Itm = Lookup(Name);
if (Itm == 0 || Itm->Value.empty() == true)
{
- if (Default == 0)
- return rootDir;
- else
- return rootDir + Default;
+ if (Default != 0)
+ result.append(Default);
}
-
- string val = Itm->Value;
- while (Itm->Parent != 0)
+ else
{
- if (Itm->Parent->Value.empty() == true)
+ string val = Itm->Value;
+ while (Itm->Parent != 0)
{
- Itm = Itm->Parent;
- continue;
- }
+ if (Itm->Parent->Value.empty() == true)
+ {
+ Itm = Itm->Parent;
+ continue;
+ }
+
+ // Absolute
+ if (val.length() >= 1 && val[0] == '/')
+ {
+ if (val.compare(0, 9, "/dev/null") == 0)
+ val.erase(9);
+ break;
+ }
- // Absolute
- if (val.length() >= 1 && val[0] == '/')
- break;
+ // ~/foo or ./foo
+ if (val.length() >= 2 && (val[0] == '~' || val[0] == '.') && val[1] == '/')
+ break;
- // ~/foo or ./foo
- if (val.length() >= 2 && (val[0] == '~' || val[0] == '.') && val[1] == '/')
- break;
-
- // ../foo
- if (val.length() >= 3 && val[0] == '.' && val[1] == '.' && val[2] == '/')
- break;
-
- if (Itm->Parent->Value.end()[-1] != '/')
- val.insert(0, "/");
+ // ../foo
+ if (val.length() >= 3 && val[0] == '.' && val[1] == '.' && val[2] == '/')
+ break;
+
+ if (Itm->Parent->Value.end()[-1] != '/')
+ val.insert(0, "/");
- val.insert(0, Itm->Parent->Value);
- Itm = Itm->Parent;
+ val.insert(0, Itm->Parent->Value);
+ Itm = Itm->Parent;
+ }
+ result.append(val);
}
- return rootDir + val;
+ // do some normalisation by removing // and /./ from the path
+ size_t found = string::npos;
+ while ((found = result.find("/./")) != string::npos)
+ result.replace(found, 3, "/");
+ while ((found = result.find("//")) != string::npos)
+ result.replace(found, 2, "/");
+
+ return result;
}
/*}}}*/
// Configuration::FindDir - Find a directory name /*{{{*/
@@ -222,7 +234,12 @@ string Configuration::FindDir(const char *Name,const char *Default) const
{
string Res = FindFile(Name,Default);
if (Res.end()[-1] != '/')
+ {
+ size_t const found = Res.rfind("/dev/null");
+ if (found != string::npos && found == Res.size() - 9)
+ return Res; // /dev/null returning
return Res + '/';
+ }
return Res;
}
/*}}}*/
@@ -482,24 +499,80 @@ bool Configuration::ExistsAny(const char *Name) const
/* Dump the entire configuration space */
void Configuration::Dump(ostream& str)
{
- /* Write out all of the configuration directives by walking the
+ Dump(str, NULL, "%f \"%v\";\n", true);
+}
+void Configuration::Dump(ostream& str, char const * const root,
+ char const * const formatstr, bool const emptyValue)
+{
+ const Configuration::Item* Top = Tree(root);
+ if (Top == 0)
+ return;
+ const Configuration::Item* const Root = (root == NULL) ? NULL : Top;
+ std::vector<std::string> const format = VectorizeString(formatstr, '%');
+
+ /* Write out all of the configuration directives by walking the
configuration tree */
- const Configuration::Item *Top = Tree(0);
- for (; Top != 0;)
- {
- str << Top->FullTag() << " \"" << Top->Value << "\";" << endl;
-
+ do {
+ if (emptyValue == true || Top->Value.empty() == emptyValue)
+ {
+ std::vector<std::string>::const_iterator f = format.begin();
+ str << *f;
+ for (++f; f != format.end(); ++f)
+ {
+ if (f->empty() == true)
+ {
+ ++f;
+ str << '%' << *f;
+ continue;
+ }
+ char const type = (*f)[0];
+ if (type == 'f')
+ str << Top->FullTag();
+ else if (type == 't')
+ str << Top->Tag;
+ else if (type == 'v')
+ str << Top->Value;
+ else if (type == 'F')
+ str << QuoteString(Top->FullTag(), "=\"\n");
+ else if (type == 'T')
+ str << QuoteString(Top->Tag, "=\"\n");
+ else if (type == 'V')
+ str << QuoteString(Top->Value, "=\"\n");
+ else if (type == 'n')
+ str << "\n";
+ else if (type == 'N')
+ str << "\t";
+ else
+ str << '%' << type;
+ str << f->c_str() + 1;
+ }
+ }
+
if (Top->Child != 0)
{
Top = Top->Child;
continue;
}
-
+
while (Top != 0 && Top->Next == 0)
Top = Top->Parent;
if (Top != 0)
Top = Top->Next;
- }
+
+ if (Root != NULL)
+ {
+ const Configuration::Item* I = Top;
+ while(I != 0)
+ {
+ if (I == Root)
+ break;
+ else
+ I = I->Parent;
+ }
+ if (I == 0)
+ break;
+ }
+ } while (Top != 0);
}
/*}}}*/
diff --git a/apt-pkg/contrib/configuration.h b/apt-pkg/contrib/configuration.h
index 4c2e75041..ea94c2fe6 100644
--- a/apt-pkg/contrib/configuration.h
+++ b/apt-pkg/contrib/configuration.h
@@ -103,6 +103,8 @@ class Configuration
inline void Dump() { Dump(std::clog); };
void Dump(std::ostream& str);
+ void Dump(std::ostream& str, char const * const root,
+ char const * const format, bool const emptyValue);
Configuration(const Item *Root);
Configuration();
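The new Dump() overload drives both WriteDatabase() in cdrom.cc and SendConfiguration() in acquire-worker.cc. The format string is split at '%'; the specifiers implemented above are %f/%t/%v for full tag, tag and value, %F/%T/%V for the same but quoted, and %n/%N for newline and tab. Both call sites from this diff, for illustration:

   // classic "apt-config dump" style output (the old Dump(ostream&) behaviour)
   _config->Dump(std::clog, NULL, "%f \"%v\";\n", true);
   // the 601 Configuration message body sent to acquire methods
   _config->Dump(Message, NULL, "Config-Item: %F=%V\n", false);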
diff --git a/apt-pkg/contrib/fileutl.cc b/apt-pkg/contrib/fileutl.cc
index 1808489d7..90e49cbfa 100644
--- a/apt-pkg/contrib/fileutl.cc
+++ b/apt-pkg/contrib/fileutl.cc
@@ -44,14 +44,11 @@
#include <set>
#include <algorithm>
-// FIXME: Compressor Fds have some speed disadvantages and are a bit buggy currently,
-// so while the current implementation satisfies the testcases it is not a real option
-// to disable it for now
-#define APT_USE_ZLIB 1
-#if APT_USE_ZLIB
-#include <zlib.h>
-#else
-#pragma message "Usage of zlib is DISABLED!"
+#ifdef HAVE_ZLIB
+ #include <zlib.h>
+#endif
+#ifdef HAVE_BZ2
+ #include <bzlib.h>
#endif
#ifdef WORDS_BIGENDIAN
@@ -65,19 +62,50 @@ using namespace std;
class FileFdPrivate {
public:
-#if APT_USE_ZLIB
+#ifdef HAVE_ZLIB
gzFile gz;
#else
void* gz;
#endif
+#ifdef HAVE_BZ2
+ BZFILE* bz2;
+#else
+ void* bz2;
+#endif
int compressed_fd;
pid_t compressor_pid;
bool pipe;
APT::Configuration::Compressor compressor;
unsigned int openmode;
unsigned long long seekpos;
- FileFdPrivate() : gz(NULL), compressed_fd(-1), compressor_pid(-1), pipe(false),
+ FileFdPrivate() : gz(NULL), bz2(NULL),
+ compressed_fd(-1), compressor_pid(-1), pipe(false),
openmode(0), seekpos(0) {};
+ bool CloseDown(std::string const &FileName)
+ {
+ bool Res = true;
+#ifdef HAVE_ZLIB
+ if (gz != NULL) {
+ int const e = gzclose(gz);
+ gz = NULL;
+ // gzdclose() on empty files always fails with "buffer error" here, ignore that
+ if (e != 0 && e != Z_BUF_ERROR)
+ Res &= _error->Errno("close",_("Problem closing the gzip file %s"), FileName.c_str());
+ }
+#endif
+#ifdef HAVE_BZ2
+ if (bz2 != NULL) {
+ BZ2_bzclose(bz2);
+ bz2 = NULL;
+ }
+#endif
+ if (compressor_pid > 0)
+ ExecWait(compressor_pid, "FileFdCompressor", true);
+ compressor_pid = -1;
+
+ return Res;
+ }
+ ~FileFdPrivate() { CloseDown(""); }
};
// RunScripts - Run a set of scripts from a configuration subtree /*{{{*/
@@ -835,7 +863,6 @@ bool FileFd::Open(string FileName,unsigned int const Mode,CompressMode Compress,
if (Compress == Auto && (Mode & WriteOnly) == WriteOnly)
return _error->Error("Autodetection on %s only works in ReadOnly openmode!", FileName.c_str());
- // FIXME: Denote inbuilt compressors somehow - as we don't need to have the binaries for them
std::vector<APT::Configuration::Compressor> const compressors = APT::Configuration::getCompressors();
std::vector<APT::Configuration::Compressor>::const_iterator compressor = compressors.begin();
if (Compress == Auto)
@@ -903,8 +930,6 @@ bool FileFd::Open(string FileName,unsigned int const Mode,CompressMode Compress,
bool FileFd::Open(string FileName,unsigned int const Mode,APT::Configuration::Compressor const &compressor, unsigned long const Perms)
{
Close();
- d = new FileFdPrivate;
- d->openmode = Mode;
Flags = AutoClose;
if ((Mode & WriteOnly) != WriteOnly && (Mode & (Atomic | Create | Empty | Exclusive)) != 0)
@@ -998,10 +1023,21 @@ bool FileFd::OpenDescriptor(int Fd, unsigned int const Mode, CompressMode Compre
bool FileFd::OpenDescriptor(int Fd, unsigned int const Mode, APT::Configuration::Compressor const &compressor, bool AutoClose)
{
Close();
- d = new FileFdPrivate;
- d->openmode = Mode;
Flags = (AutoClose) ? FileFd::AutoClose : 0;
- iFd = Fd;
+ if (AutoClose == false && (
+#ifdef HAVE_ZLIB
+ compressor.Name == "gzip" ||
+#endif
+#ifdef HAVE_BZ2
+ compressor.Name == "bzip2" ||
+#endif
+ false))
+ {
+ // Need to duplicate fd here or gzclose for cleanup will close the fd as well
+ iFd = dup(Fd);
+ }
+ else
+ iFd = Fd;
this->FileName = "";
if (OpenInternDescriptor(Mode, compressor) == false)
{
@@ -1013,32 +1049,71 @@ bool FileFd::OpenDescriptor(int Fd, unsigned int const Mode, APT::Configuration:
}
bool FileFd::OpenInternDescriptor(unsigned int const Mode, APT::Configuration::Compressor const &compressor)
{
- d->compressor = compressor;
if (compressor.Name == "." || compressor.Binary.empty() == true)
return true;
-#if APT_USE_ZLIB
- else if (compressor.Name == "gzip")
+
+ if (d == NULL)
{
+ d = new FileFdPrivate();
+ d->openmode = Mode;
+ d->compressor = compressor;
+ }
+
+#ifdef HAVE_ZLIB
+ if (compressor.Name == "gzip")
+ {
+ if (d->gz != NULL)
+ {
+ gzclose(d->gz);
+ d->gz = NULL;
+ }
if ((Mode & ReadWrite) == ReadWrite)
d->gz = gzdopen(iFd, "r+");
else if ((Mode & WriteOnly) == WriteOnly)
d->gz = gzdopen(iFd, "w");
else
- d->gz = gzdopen (iFd, "r");
+ d->gz = gzdopen(iFd, "r");
if (d->gz == NULL)
return false;
Flags |= Compressed;
return true;
}
#endif
+#ifdef HAVE_BZ2
+ if (compressor.Name == "bzip2")
+ {
+ if (d->bz2 != NULL)
+ {
+ BZ2_bzclose(d->bz2);
+ d->bz2 = NULL;
+ }
+ if ((Mode & ReadWrite) == ReadWrite)
+ d->bz2 = BZ2_bzdopen(iFd, "r+");
+ else if ((Mode & WriteOnly) == WriteOnly)
+ d->bz2 = BZ2_bzdopen(iFd, "w");
+ else
+ d->bz2 = BZ2_bzdopen(iFd, "r");
+ if (d->bz2 == NULL)
+ return false;
+ Flags |= Compressed;
+ return true;
+ }
+#endif
+
+ // collect zombies here in case we reopen
+ if (d->compressor_pid > 0)
+ ExecWait(d->compressor_pid, "FileFdCompressor", true);
if ((Mode & ReadWrite) == ReadWrite)
+ {
+ Flags |= Fail;
return _error->Error("ReadWrite mode is not supported for file %s", FileName.c_str());
+ }
bool const Comp = (Mode & WriteOnly) == WriteOnly;
- // Handle 'decompression' of empty files
if (Comp == false)
{
+ // Handle 'decompression' of empty files
struct stat Buf;
fstat(iFd, &Buf);
if (Buf.st_size == 0 && S_ISFIFO(Buf.st_mode) == false)
@@ -1047,13 +1122,19 @@ bool FileFd::OpenInternDescriptor(unsigned int const Mode, APT::Configuration::C
// We don't need the file open - instead let the compressor open it
// as he properly knows better how to efficiently read from 'his' file
if (FileName.empty() == false)
+ {
close(iFd);
+ iFd = -1;
+ }
}
// Create a data pipe
int Pipe[2] = {-1,-1};
if (pipe(Pipe) != 0)
+ {
+ Flags |= Fail;
return _error->Errno("pipe",_("Failed to create subprocess IPC"));
+ }
for (int J = 0; J != 2; J++)
SetCloseExec(Pipe[J],true);
@@ -1080,6 +1161,12 @@ bool FileFd::OpenInternDescriptor(unsigned int const Mode, APT::Configuration::C
dup2(d->compressed_fd,STDIN_FILENO);
dup2(Pipe[1],STDOUT_FILENO);
}
+ int const nullfd = open("/dev/null", O_WRONLY);
+ if (nullfd != -1)
+ {
+ dup2(nullfd,STDERR_FILENO);
+ close(nullfd);
+ }
SetCloseExec(STDOUT_FILENO,false);
SetCloseExec(STDIN_FILENO,false);
@@ -1109,8 +1196,6 @@ bool FileFd::OpenInternDescriptor(unsigned int const Mode, APT::Configuration::C
close(Pipe[0]);
else
close(Pipe[1]);
- if (Comp == true || FileName.empty() == true)
- close(d->compressed_fd);
return true;
}
@@ -1122,6 +1207,12 @@ bool FileFd::OpenInternDescriptor(unsigned int const Mode, APT::Configuration::C
FileFd::~FileFd()
{
Close();
+ if (d != NULL)
+ {
+ d->CloseDown(FileName);
+ delete d;
+ d = NULL;
+ }
}
/*}}}*/
// FileFd::Read - Read a bit of the file /*{{{*/
@@ -1137,9 +1228,14 @@ bool FileFd::Read(void *To,unsigned long long Size,unsigned long long *Actual)
*((char *)To) = '\0';
do
{
-#if APT_USE_ZLIB
- if (d->gz != NULL)
- Res = gzread(d->gz,To,Size);
+#ifdef HAVE_ZLIB
+ if (d != NULL && d->gz != NULL)
+ Res = gzread(d->gz,To,Size);
+ else
+#endif
+#ifdef HAVE_BZ2
+ if (d != NULL && d->bz2 != NULL)
+ Res = BZ2_bzread(d->bz2,To,Size);
else
#endif
Res = read(iFd,To,Size);
@@ -1149,8 +1245,8 @@ bool FileFd::Read(void *To,unsigned long long Size,unsigned long long *Actual)
if (errno == EINTR)
continue;
Flags |= Fail;
-#if APT_USE_ZLIB
- if (d->gz != NULL)
+#ifdef HAVE_ZLIB
+ if (d != NULL && d->gz != NULL)
{
int err;
char const * const errmsg = gzerror(d->gz, &err);
@@ -1158,12 +1254,22 @@ bool FileFd::Read(void *To,unsigned long long Size,unsigned long long *Actual)
return _error->Error("gzread: %s (%d: %s)", _("Read error"), err, errmsg);
}
#endif
+#ifdef HAVE_BZ2
+ if (d != NULL && d->bz2 != NULL)
+ {
+ int err;
+ char const * const errmsg = BZ2_bzerror(d->bz2, &err);
+ if (err != BZ_IO_ERROR)
+ return _error->Error("BZ2_bzread: %s (%d: %s)", _("Read error"), err, errmsg);
+ }
+#endif
return _error->Errno("read",_("Read error"));
}
To = (char *)To + Res;
Size -= Res;
- d->seekpos += Res;
+ if (d != NULL)
+ d->seekpos += Res;
if (Actual != 0)
*Actual += Res;
}
@@ -1190,8 +1296,8 @@ bool FileFd::Read(void *To,unsigned long long Size,unsigned long long *Actual)
char* FileFd::ReadLine(char *To, unsigned long long const Size)
{
*To = '\0';
-#if APT_USE_ZLIB
- if (d->gz != NULL)
+#ifdef HAVE_ZLIB
+ if (d != NULL && d->gz != NULL)
return gzgets(d->gz, To, Size);
#endif
@@ -1221,23 +1327,47 @@ bool FileFd::Write(const void *From,unsigned long long Size)
errno = 0;
do
{
-#if APT_USE_ZLIB
- if (d->gz != NULL)
+#ifdef HAVE_ZLIB
+ if (d != NULL && d->gz != NULL)
Res = gzwrite(d->gz,From,Size);
else
#endif
+#ifdef HAVE_BZ2
+ if (d != NULL && d->bz2 != NULL)
+ Res = BZ2_bzwrite(d->bz2,(void*)From,Size);
+ else
+#endif
Res = write(iFd,From,Size);
if (Res < 0 && errno == EINTR)
continue;
if (Res < 0)
{
Flags |= Fail;
+#ifdef HAVE_ZLIB
+ if (d != NULL && d->gz != NULL)
+ {
+ int err;
+ char const * const errmsg = gzerror(d->gz, &err);
+ if (err != Z_ERRNO)
+ return _error->Error("gzwrite: %s (%d: %s)", _("Write error"), err, errmsg);
+ }
+#endif
+#ifdef HAVE_BZ2
+ if (d != NULL && d->bz2 != NULL)
+ {
+ int err;
+ char const * const errmsg = BZ2_bzerror(d->bz2, &err);
+ if (err != BZ_IO_ERROR)
+ return _error->Error("BZ2_bzwrite: %s (%d: %s)", _("Write error"), err, errmsg);
+ }
+#endif
return _error->Errno("write",_("Write error"));
}
From = (char *)From + Res;
Size -= Res;
- d->seekpos += Res;
+ if (d != NULL)
+ d->seekpos += Res;
}
while (Res > 0 && Size > 0);
@@ -1247,13 +1377,39 @@ bool FileFd::Write(const void *From,unsigned long long Size)
Flags |= Fail;
return _error->Error(_("write, still have %llu to write but couldn't"), Size);
}
+bool FileFd::Write(int Fd, const void *From, unsigned long long Size)
+{
+ int Res;
+ errno = 0;
+ do
+ {
+ Res = write(Fd,From,Size);
+ if (Res < 0 && errno == EINTR)
+ continue;
+ if (Res < 0)
+ return _error->Errno("write",_("Write error"));
+
+ From = (char *)From + Res;
+ Size -= Res;
+ }
+ while (Res > 0 && Size > 0);
+
+ if (Size == 0)
+ return true;
+
+ return _error->Error(_("write, still have %llu to write but couldn't"), Size);
+}
/*}}}*/
// FileFd::Seek - Seek in the file /*{{{*/
// ---------------------------------------------------------------------
/* */
bool FileFd::Seek(unsigned long long To)
{
- if (d->pipe == true)
+ if (d != NULL && (d->pipe == true
+#ifdef HAVE_BZ2
+ || d->bz2 != NULL
+#endif
+ ))
{
// Our poor man seeking in pipes is costly, so try to avoid it
unsigned long long seekpos = Tell();
@@ -1263,9 +1419,17 @@ bool FileFd::Seek(unsigned long long To)
return Skip(To - seekpos);
if ((d->openmode & ReadOnly) != ReadOnly)
+ {
+ Flags |= Fail;
return _error->Error("Reopen is only implemented for read-only files!");
- close(iFd);
- iFd = 0;
+ }
+#ifdef HAVE_BZ2
+ if (d->bz2 != NULL)
+ BZ2_bzclose(d->bz2);
+#endif
+ if (iFd != -1)
+ close(iFd);
+ iFd = -1;
if (TemporaryFileName.empty() == false)
iFd = open(TemporaryFileName.c_str(), O_RDONLY);
else if (FileName.empty() == false)
@@ -1275,12 +1439,18 @@ bool FileFd::Seek(unsigned long long To)
if (d->compressed_fd > 0)
if (lseek(d->compressed_fd, 0, SEEK_SET) != 0)
iFd = d->compressed_fd;
- if (iFd <= 0)
+ if (iFd < 0)
+ {
+ Flags |= Fail;
return _error->Error("Reopen is not implemented for pipes opened with FileFd::OpenDescriptor()!");
+ }
}
if (OpenInternDescriptor(d->openmode, d->compressor) == false)
+ {
+ Flags |= Fail;
return _error->Error("Seek on file %s because it couldn't be reopened", FileName.c_str());
+ }
if (To != 0)
return Skip(To);
@@ -1289,8 +1459,8 @@ bool FileFd::Seek(unsigned long long To)
return true;
}
int res;
-#if APT_USE_ZLIB
- if (d->gz)
+#ifdef HAVE_ZLIB
+ if (d != NULL && d->gz)
res = gzseek(d->gz,To,SEEK_SET);
else
#endif
@@ -1301,7 +1471,8 @@ bool FileFd::Seek(unsigned long long To)
return _error->Error("Unable to seek to %llu", To);
}
- d->seekpos = To;
+ if (d != NULL)
+ d->seekpos = To;
return true;
}
/*}}}*/
@@ -1310,7 +1481,11 @@ bool FileFd::Seek(unsigned long long To)
/* */
bool FileFd::Skip(unsigned long long Over)
{
- if (d->pipe == true)
+ if (d != NULL && (d->pipe == true
+#ifdef HAVE_BZ2
+ || d->bz2 != NULL
+#endif
+ ))
{
d->seekpos += Over;
char buffer[1024];
@@ -1318,15 +1493,18 @@ bool FileFd::Skip(unsigned long long Over)
{
unsigned long long toread = std::min((unsigned long long) sizeof(buffer), Over);
if (Read(buffer, toread) == false)
+ {
+ Flags |= Fail;
return _error->Error("Unable to seek ahead %llu",Over);
+ }
Over -= toread;
}
return true;
}
int res;
-#if APT_USE_ZLIB
- if (d->gz != NULL)
+#ifdef HAVE_ZLIB
+ if (d != NULL && d->gz != NULL)
res = gzseek(d->gz,Over,SEEK_CUR);
else
#endif
@@ -1336,7 +1514,8 @@ bool FileFd::Skip(unsigned long long Over)
Flags |= Fail;
return _error->Error("Unable to seek ahead %llu",Over);
}
- d->seekpos = res;
+ if (d != NULL)
+ d->seekpos = res;
return true;
}
@@ -1346,11 +1525,13 @@ bool FileFd::Skip(unsigned long long Over)
/* */
bool FileFd::Truncate(unsigned long long To)
{
- if (d->gz != NULL)
+#if defined HAVE_ZLIB || defined HAVE_BZ2
+ if (d != NULL && (d->gz != NULL || d->bz2 != NULL))
{
Flags |= Fail;
- return _error->Error("Truncating gzipped files is not implemented (%s)", FileName.c_str());
+ return _error->Error("Truncating compressed files is not implemented (%s)", FileName.c_str());
}
+#endif
if (ftruncate(iFd,To) != 0)
{
Flags |= Fail;
@@ -1369,19 +1550,27 @@ unsigned long long FileFd::Tell()
// seeking around, but not all users of FileFd always use Seek() and co
// so d->seekpos isn't always true and we can just use it as a hint if
// we have nothing else, but not always as an authority…
- if (d->pipe == true)
+ if (d != NULL && (d->pipe == true
+#ifdef HAVE_BZ2
+ || d->bz2 != NULL
+#endif
+ ))
return d->seekpos;
off_t Res;
-#if APT_USE_ZLIB
- if (d->gz != NULL)
+#ifdef HAVE_ZLIB
+ if (d != NULL && d->gz != NULL)
Res = gztell(d->gz);
else
#endif
Res = lseek(iFd,0,SEEK_CUR);
if (Res == (off_t)-1)
+ {
+ Flags |= Fail;
_error->Errno("lseek","Failed to determine the current file position");
- d->seekpos = Res;
+ }
+ if (d != NULL)
+ d->seekpos = Res;
return Res;
}
/*}}}*/
@@ -1391,17 +1580,24 @@ unsigned long long FileFd::Tell()
unsigned long long FileFd::FileSize()
{
struct stat Buf;
- if (d->pipe == false && fstat(iFd,&Buf) != 0)
+ if ((d == NULL || d->pipe == false) && fstat(iFd,&Buf) != 0)
+ {
+ Flags |= Fail;
return _error->Errno("fstat","Unable to determine the file size");
+ }
// for compressor pipes st_size is undefined and at 'best' zero
- if (d->pipe == true || S_ISFIFO(Buf.st_mode))
+ if ((d != NULL && d->pipe == true) || S_ISFIFO(Buf.st_mode))
{
// we set it here, too, as we get the info here for free
// in theory the Open-methods should take care of it already
- d->pipe = true;
+ if (d != NULL)
+ d->pipe = true;
if (stat(FileName.c_str(), &Buf) != 0)
+ {
+ Flags |= Fail;
return _error->Errno("stat","Unable to determine the file size");
+ }
}
return Buf.st_size;
@@ -1416,7 +1612,11 @@ unsigned long long FileFd::Size()
// for compressor pipes st_size is undefined and at 'best' zero,
// so we 'read' the content and 'seek' back - see there
- if (d->pipe == true)
+ if (d != NULL && (d->pipe == true
+#ifdef HAVE_BZ2
+ || (d->bz2 && size > 0)
+#endif
+ ))
{
unsigned long long const oldSeek = Tell();
char ignore[1000];
@@ -1427,11 +1627,11 @@ unsigned long long FileFd::Size()
size = Tell();
Seek(oldSeek);
}
-#if APT_USE_ZLIB
+#ifdef HAVE_ZLIB
// only check gzsize if we are actually a gzip file, just checking for
// "gz" is not sufficient as uncompressed files could be opened with
// gzopen in "direct" mode as well
- else if (d->gz && !gzdirect(d->gz) && size > 0)
+ else if (d != NULL && d->gz && !gzdirect(d->gz) && size > 0)
{
off_t const oldPos = lseek(iFd,0,SEEK_CUR);
/* unfortunately zlib.h doesn't provide a gzsize(), so we have to do
@@ -1439,10 +1639,16 @@ unsigned long long FileFd::Size()
* bits of the file */
// FIXME: Size for gz-files is limited by 32bit… no largefile support
if (lseek(iFd, -4, SEEK_END) < 0)
- return _error->Errno("lseek","Unable to seek to end of gzipped file");
+ {
+ Flags |= Fail;
+ return _error->Errno("lseek","Unable to seek to end of gzipped file");
+ }
size = 0L;
if (read(iFd, &size, 4) != 4)
- return _error->Errno("read","Unable to read original size of gzipped file");
+ {
+ Flags |= Fail;
+ return _error->Errno("read","Unable to read original size of gzipped file");
+ }
#ifdef WORDS_BIGENDIAN
uint32_t tmp_size = size;
@@ -1452,7 +1658,10 @@ unsigned long long FileFd::Size()
#endif
if (lseek(iFd, oldPos, SEEK_SET) < 0)
- return _error->Errno("lseek","Unable to seek in gzipped file");
+ {
+ Flags |= Fail;
+ return _error->Errno("lseek","Unable to seek in gzipped file");
+ }
return size;
}
@@ -1467,20 +1676,23 @@ unsigned long long FileFd::Size()
time_t FileFd::ModificationTime()
{
struct stat Buf;
- if (d->pipe == false && fstat(iFd,&Buf) != 0)
+ if ((d == NULL || d->pipe == false) && fstat(iFd,&Buf) != 0)
{
+ Flags |= Fail;
_error->Errno("fstat","Unable to determine the modification time of file %s", FileName.c_str());
return 0;
}
// for compressor pipes st_size is undefined and at 'best' zero
- if (d->pipe == true || S_ISFIFO(Buf.st_mode))
+ if ((d != NULL && d->pipe == true) || S_ISFIFO(Buf.st_mode))
{
// we set it here, too, as we get the info here for free
// in theory the Open-methods should take care of it already
- d->pipe = true;
+ if (d != NULL)
+ d->pipe = true;
if (stat(FileName.c_str(), &Buf) != 0)
{
+ Flags |= Fail;
_error->Errno("fstat","Unable to determine the modification time of file %s", FileName.c_str());
return 0;
}
@@ -1500,19 +1712,18 @@ bool FileFd::Close()
bool Res = true;
if ((Flags & AutoClose) == AutoClose)
{
-#if APT_USE_ZLIB
- if (d != NULL && d->gz != NULL) {
- int const e = gzclose(d->gz);
- // gzdclose() on empty files always fails with "buffer error" here, ignore that
- if (e != 0 && e != Z_BUF_ERROR)
- Res &= _error->Errno("close",_("Problem closing the gzip file %s"), FileName.c_str());
- } else
-#endif
- if (iFd > 0 && close(iFd) != 0)
- Res &= _error->Errno("close",_("Problem closing the file %s"), FileName.c_str());
+ if ((Flags & Compressed) != Compressed && iFd > 0 && close(iFd) != 0)
+ Res &= _error->Errno("close",_("Problem closing the file %s"), FileName.c_str());
+
+ if (d != NULL)
+ {
+ Res &= d->CloseDown(FileName);
+ delete d;
+ d = NULL;
+ }
}
- if ((Flags & Replace) == Replace && iFd >= 0) {
+ if ((Flags & Replace) == Replace) {
if (rename(TemporaryFileName.c_str(), FileName.c_str()) != 0)
Res &= _error->Errno("rename",_("Problem renaming the file %s to %s"), TemporaryFileName.c_str(), FileName.c_str());
@@ -1527,14 +1738,8 @@ bool FileFd::Close()
if (unlink(FileName.c_str()) != 0)
Res &= _error->WarningE("unlnk",_("Problem unlinking the file %s"), FileName.c_str());
- if (d != NULL)
- {
- if (d->compressor_pid > 0)
- ExecWait(d->compressor_pid, "FileFdCompressor", true);
- delete d;
- d = NULL;
- }
-
+ if (Res == false)
+ Flags |= Fail;
return Res;
}
/*}}}*/
@@ -1543,10 +1748,11 @@ bool FileFd::Close()
/* */
bool FileFd::Sync()
{
-#ifdef _POSIX_SYNCHRONIZED_IO
if (fsync(iFd) != 0)
+ {
+ Flags |= Fail;
return _error->Errno("sync",_("Problem syncing the file"));
-#endif
+ }
return true;
}
/*}}}*/
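
The recurring pattern in the fileutl.cc hunks above is that every compressor handle lives behind the optional private struct `d`, so each branch is guarded twice: once by the configure-time HAVE_* macro and once by a NULL check on the handle. A minimal sketch of that dispatch (not apt's code; the names and the CompressHandles struct are illustrative, C++11):

#include <unistd.h>
#ifdef HAVE_ZLIB
#include <zlib.h>
#endif
#ifdef HAVE_BZ2
#include <bzlib.h>
#endif

struct CompressHandles
{
#ifdef HAVE_ZLIB
   gzFile gz = nullptr;
#endif
#ifdef HAVE_BZ2
   BZFILE *bz2 = nullptr;
#endif
};

// read via whichever compressor is active, falling back to plain read(2)
static ssize_t ReadDispatch(int fd, CompressHandles *d, void *To, size_t Size)
{
#ifdef HAVE_ZLIB
   if (d != nullptr && d->gz != nullptr)
      return gzread(d->gz, To, Size);
#endif
#ifdef HAVE_BZ2
   if (d != nullptr && d->bz2 != nullptr)
      return BZ2_bzread(d->bz2, To, Size);
#endif
   return read(fd, To, Size);
}
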
diff --git a/apt-pkg/contrib/fileutl.h b/apt-pkg/contrib/fileutl.h
index 1ca41cb7d..426664d3a 100644
--- a/apt-pkg/contrib/fileutl.h
+++ b/apt-pkg/contrib/fileutl.h
@@ -78,6 +78,7 @@ class FileFd
bool Read(void *To,unsigned long long Size,unsigned long long *Actual = 0);
char* ReadLine(char *To, unsigned long long const Size);
bool Write(const void *From,unsigned long long Size);
+ bool static Write(int Fd, const void *From, unsigned long long Size);
bool Seek(unsigned long long To);
bool Skip(unsigned long long To);
bool Truncate(unsigned long long To);
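
The new static Write(int, const void*, unsigned long long) declared here is a convenience for callers that only have a raw descriptor (dpkg status fds, terminal fds) and previously used bare write(2) without handling EINTR or short writes. A stand-alone sketch of the same idea, assuming only POSIX write(2); the name WriteAll is illustrative:

#include <unistd.h>
#include <cerrno>

// keep writing until everything went out, retrying on EINTR and short writes
static bool WriteAll(int fd, const void *from, unsigned long long size)
{
   while (size > 0)
   {
      ssize_t const res = write(fd, from, size);
      if (res < 0)
      {
         if (errno == EINTR)
            continue;          // interrupted by a signal, try again
         return false;         // real write error
      }
      if (res == 0)
         return false;         // give up instead of spinning forever
      from = static_cast<const char *>(from) + res;
      size -= res;
   }
   return true;
}

// e.g. WriteAll(OutStatusFd, status.str().c_str(), status.str().size());
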
diff --git a/apt-pkg/contrib/mmap.cc b/apt-pkg/contrib/mmap.cc
index 160718ea5..a176da636 100644
--- a/apt-pkg/contrib/mmap.cc
+++ b/apt-pkg/contrib/mmap.cc
@@ -83,24 +83,26 @@ bool MMap::Map(FileFd &Fd)
{
if ((Flags & ReadOnly) != ReadOnly)
return _error->Error("Compressed file %s can only be mapped readonly", Fd.Name().c_str());
- Base = new unsigned char[iSize];
+ Base = malloc(iSize);
+ SyncToFd = new FileFd();
if (Fd.Seek(0L) == false || Fd.Read(Base, iSize) == false)
return _error->Error("Compressed file %s can't be read into mmap", Fd.Name().c_str());
return true;
}
// Map it.
- Base = mmap(0,iSize,Prot,Map,Fd.Fd(),0);
+ Base = (Flags & Fallback) ? MAP_FAILED : mmap(0,iSize,Prot,Map,Fd.Fd(),0);
if (Base == (void *)-1)
{
- if (errno == ENODEV || errno == EINVAL)
+ if (errno == ENODEV || errno == EINVAL || (Flags & Fallback))
{
// The filesystem doesn't support this particular kind of mmap.
// So we allocate a buffer and read the whole file into it.
if ((Flags & ReadOnly) == ReadOnly)
{
// for readonly, we don't need sync, so make it simple
- Base = new unsigned char[iSize];
+ Base = malloc(iSize);
+ SyncToFd = new FileFd();
return Fd.Read(Base, iSize);
}
// FIXME: Writing to compressed fd's ?
@@ -108,7 +110,7 @@ bool MMap::Map(FileFd &Fd)
if (dupped_fd == -1)
return _error->Errno("mmap", _("Couldn't duplicate file descriptor %i"), Fd.Fd());
- Base = new unsigned char[iSize];
+ Base = calloc(iSize, 1);
SyncToFd = new FileFd (dupped_fd);
if (!SyncToFd->Seek(0L) || !SyncToFd->Read(Base, iSize))
return false;
@@ -134,7 +136,7 @@ bool MMap::Close(bool DoSync)
if (SyncToFd != NULL)
{
- delete[] (char *)Base;
+ free(Base);
delete SyncToFd;
SyncToFd = NULL;
}
@@ -154,11 +156,10 @@ bool MMap::Close(bool DoSync)
/* This is done in synchronous mode - the docs indicate that this will
not return till all IO is complete */
bool MMap::Sync()
-{
+{
if ((Flags & UnMapped) == UnMapped)
return true;
-
-#ifdef _POSIX_SYNCHRONIZED_IO
+
if ((Flags & ReadOnly) != ReadOnly)
{
if (SyncToFd != NULL)
@@ -168,11 +169,12 @@ bool MMap::Sync()
}
else
{
+#ifdef _POSIX_SYNCHRONIZED_IO
if (msync((char *)Base, iSize, MS_SYNC) < 0)
return _error->Errno("msync", _("Unable to synchronize mmap"));
+#endif
}
}
-#endif
return true;
}
/*}}}*/
@@ -183,9 +185,7 @@ bool MMap::Sync(unsigned long Start,unsigned long Stop)
{
if ((Flags & UnMapped) == UnMapped)
return true;
-
-#ifdef _POSIX_SYNCHRONIZED_IO
- unsigned long long PSize = sysconf(_SC_PAGESIZE);
+
if ((Flags & ReadOnly) != ReadOnly)
{
if (SyncToFd != 0)
@@ -196,11 +196,13 @@ bool MMap::Sync(unsigned long Start,unsigned long Stop)
}
else
{
+#ifdef _POSIX_SYNCHRONIZED_IO
+ unsigned long long const PSize = sysconf(_SC_PAGESIZE);
if (msync((char *)Base+(unsigned long long)(Start/PSize)*PSize,Stop - Start,MS_SYNC) < 0)
return _error->Errno("msync", _("Unable to synchronize mmap"));
+#endif
}
}
-#endif
return true;
}
/*}}}*/
@@ -215,7 +217,17 @@ DynamicMMap::DynamicMMap(FileFd &F,unsigned long Flags,unsigned long const &Work
{
if (_error->PendingError() == true)
return;
-
+
+ // disable Moveable if we don't grow
+ if (Grow == 0)
+ this->Flags &= ~Moveable;
+
+#ifndef __linux__
+ // kfreebsd doesn't have mremap, so we use the fallback
+ if ((this->Flags & Moveable) == Moveable)
+ this->Flags |= Fallback;
+#endif
+
unsigned long long EndOfFile = Fd->Size();
if (EndOfFile > WorkSpace)
WorkSpace = EndOfFile;
@@ -282,8 +294,7 @@ DynamicMMap::DynamicMMap(unsigned long Flags,unsigned long const &WorkSpace,
}
#endif
// fallback to a static allocated space
- Base = new unsigned char[WorkSpace];
- memset(Base,0,WorkSpace);
+ Base = calloc(WorkSpace, 1);
iSize = 0;
}
/*}}}*/
@@ -299,7 +310,7 @@ DynamicMMap::~DynamicMMap()
#ifdef _POSIX_MAPPED_FILES
munmap(Base, WorkSpace);
#else
- delete [] (unsigned char *)Base;
+ free(Base);
#endif
return;
}
@@ -328,7 +339,7 @@ unsigned long DynamicMMap::RawAllocate(unsigned long long Size,unsigned long Aln
if(!Grow())
{
_error->Fatal(_("Dynamic MMap ran out of room. Please increase the size "
- "of APT::Cache-Limit. Current value: %lu. (man 5 apt.conf)"), WorkSpace);
+ "of APT::Cache-Start. Current value: %lu. (man 5 apt.conf)"), WorkSpace);
return 0;
}
}
@@ -463,6 +474,9 @@ bool DynamicMMap::Grow() {
Base = realloc(Base, newSize);
if (Base == NULL)
return false;
+ else
+ /* Set new memory to 0 */
+ memset((char*)Base + WorkSpace, 0, newSize - WorkSpace);
}
Pools =(Pool*) Base + poolOffset;
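
The mmap.cc changes above consistently switch the read-into-memory fallback from new[]/delete[] to malloc()/calloc()/free(), so the buffer can later be handed to realloc() by DynamicMMap::Grow(). A rough sketch of the fallback path, assuming POSIX mmap/pread; names are illustrative, and a real implementation (like MMap) also records which path was taken so Close() knows whether to munmap() or free():

#include <sys/mman.h>
#include <unistd.h>
#include <cstdlib>
#include <cerrno>

static void *MapOrRead(int fd, size_t size, bool forceFallback)
{
   void *base = forceFallback ? MAP_FAILED
                              : mmap(nullptr, size, PROT_READ, MAP_SHARED, fd, 0);
   if (base != MAP_FAILED)
      return base;                              // the normal, mapped case
   if (forceFallback == false && errno != ENODEV && errno != EINVAL)
      return nullptr;                           // a real error, not "can't mmap here"
   base = malloc(size);                         // filesystem can't mmap: read it in
   if (base == nullptr)
      return nullptr;
   if (pread(fd, base, size, 0) != (ssize_t)size)
   {
      free(base);
      return nullptr;
   }
   return base;
}
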
diff --git a/apt-pkg/contrib/netrc.cc b/apt-pkg/contrib/netrc.cc
index 06059dfc1..0a902f126 100644
--- a/apt-pkg/contrib/netrc.cc
+++ b/apt-pkg/contrib/netrc.cc
@@ -45,11 +45,11 @@ enum {
#define NETRC DOT_CHAR "netrc"
/* returns -1 on failure, 0 if the host is found, 1 if the host isn't found */
-int parsenetrc (char *host, char *login, char *password, char *netrcfile = NULL)
+static int parsenetrc_string (char *host, std::string &login, std::string &password, char *netrcfile = NULL)
{
FILE *file;
int retcode = 1;
- int specific_login = (login[0] != 0);
+ int specific_login = (login.empty() == false);
char *home = NULL;
bool netrc_alloc = false;
@@ -69,8 +69,7 @@ int parsenetrc (char *host, char *login, char *password, char *netrcfile = NULL)
if (!home)
return -1;
- asprintf (&netrcfile, "%s%s%s", home, DIR_CHAR, NETRC);
- if(!netrcfile)
+ if (asprintf (&netrcfile, "%s%s%s", home, DIR_CHAR, NETRC) == -1 || netrcfile == NULL)
return -1;
else
netrc_alloc = true;
@@ -81,16 +80,17 @@ int parsenetrc (char *host, char *login, char *password, char *netrcfile = NULL)
char *tok;
char *tok_buf;
bool done = false;
- char netrcbuffer[256];
+ char *netrcbuffer = NULL;
+ size_t netrcbuffer_size = 0;
int state = NOTHING;
char state_login = 0; /* Found a login keyword */
char state_password = 0; /* Found a password keyword */
- while (!done && fgets(netrcbuffer, sizeof (netrcbuffer), file)) {
+ while (!done && getline(&netrcbuffer, &netrcbuffer_size, file) != -1) {
tok = strtok_r (netrcbuffer, " \t\n", &tok_buf);
while (!done && tok) {
- if(login[0] && password[0]) {
+ if(login.empty() == false && password.empty() == false) {
done = true;
break;
}
@@ -122,23 +122,13 @@ int parsenetrc (char *host, char *login, char *password, char *netrcfile = NULL)
/* we are now parsing sub-keywords concerning "our" host */
if (state_login) {
if (specific_login)
- state_our_login = !strcasecmp (login, tok);
+ state_our_login = !strcasecmp (login.c_str(), tok);
else
- {
- if (strlen(tok) > LOGINSIZE)
- _error->Error("login token too long %i (max: %i)",
- strlen(tok), LOGINSIZE);
- strncpy (login, tok, LOGINSIZE - 1);
- }
+ login = tok;
state_login = 0;
} else if (state_password) {
- if (state_our_login || !specific_login)
- {
- if (strlen(tok) > PASSWORDSIZE)
- _error->Error("password token too long %i (max %i)",
- strlen(tok), PASSWORDSIZE);
- strncpy (password, tok, PASSWORDSIZE - 1);
- }
+ if (state_our_login || !specific_login)
+ password = tok;
state_password = 0;
} else if (!strcasecmp ("login", tok))
state_login = 1;
@@ -154,8 +144,9 @@ int parsenetrc (char *host, char *login, char *password, char *netrcfile = NULL)
tok = strtok_r (NULL, " \t\n", &tok_buf);
} /* while(tok) */
- } /* while fgets() */
+ } /* while getline() */
+ free(netrcbuffer);
fclose(file);
}
@@ -164,6 +155,18 @@ int parsenetrc (char *host, char *login, char *password, char *netrcfile = NULL)
return retcode;
}
+// for some unknown reason this method is exported so keep a compatible interface for now …
+int parsenetrc (char *host, char *login, char *password, char *netrcfile = NULL)
+{
+ std::string login_string, password_string;
+ int const ret = parsenetrc_string(host, login_string, password_string, netrcfile);
+ if (ret < 0)
+ return ret;
+ strncpy(login, login_string.c_str(), LOGINSIZE - 1);
+ strncpy(password, password_string.c_str(), PASSWORDSIZE - 1);
+ return ret;
+}
+
void maybe_add_auth (URI &Uri, string NetRCFile)
{
@@ -174,21 +177,20 @@ void maybe_add_auth (URI &Uri, string NetRCFile)
{
if (NetRCFile.empty () == false)
{
- char login[LOGINSIZE] = "";
- char password[PASSWORDSIZE] = "";
+ std::string login, password;
char *netrcfile = strdup(NetRCFile.c_str());
// first check for a generic host based netrc entry
char *host = strdup(Uri.Host.c_str());
- if (host && parsenetrc (host, login, password, netrcfile) == 0)
+ if (host && parsenetrc_string(host, login, password, netrcfile) == 0)
{
if (_config->FindB("Debug::Acquire::netrc", false) == true)
std::clog << "host: " << host
<< " user: " << login
- << " pass-size: " << strlen(password)
+ << " pass-size: " << password.size()
<< std::endl;
- Uri.User = string (login);
- Uri.Password = string (password);
+ Uri.User = login;
+ Uri.Password = password;
free(netrcfile);
free(host);
return;
@@ -199,15 +201,15 @@ void maybe_add_auth (URI &Uri, string NetRCFile)
// a lookup uri.startswith(host) in the netrc file parser (because
// of the "/"
char *hostpath = strdup(string(Uri.Host+Uri.Path).c_str());
- if (hostpath && parsenetrc (hostpath, login, password, netrcfile) == 0)
+ if (hostpath && parsenetrc_string(hostpath, login, password, netrcfile) == 0)
{
if (_config->FindB("Debug::Acquire::netrc", false) == true)
std::clog << "hostpath: " << hostpath
<< " user: " << login
- << " pass-size: " << strlen(password)
+ << " pass-size: " << password.size()
<< std::endl;
- Uri.User = string (login);
- Uri.Password = string (password);
+ Uri.User = login;
+ Uri.Password = password;
}
free(netrcfile);
free(hostpath);
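
The parser rewrite above replaces a fixed 256-byte fgets() buffer with getline(), which grows its buffer as needed, so long netrc lines are no longer silently truncated. A minimal sketch of that loop shape, assuming POSIX getline(); the token handling is left as a comment:

#include <cstdio>
#include <cstdlib>
#include <cstring>

static void TokenizeFile(FILE *file)
{
   char *line = nullptr;
   size_t capacity = 0;
   while (getline(&line, &capacity, file) != -1)
   {
      char *state = nullptr;
      for (char *tok = strtok_r(line, " \t\n", &state); tok != nullptr;
           tok = strtok_r(nullptr, " \t\n", &state))
      {
         // handle one whitespace-separated token: "machine", "login", "password", …
      }
   }
   free(line);   // getline() allocated (and reallocated) this for us
}
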
diff --git a/apt-pkg/contrib/netrc.h b/apt-pkg/contrib/netrc.h
index 5931d4a42..6feb5b726 100644
--- a/apt-pkg/contrib/netrc.h
+++ b/apt-pkg/contrib/netrc.h
@@ -25,11 +25,9 @@
class URI;
-// Assume: password[0]=0, host[0] != 0.
-// If login[0] = 0, search for login and password within a machine section
-// in the netrc.
-// If login[0] != 0, search for password within machine and login.
-int parsenetrc (char *host, char *login, char *password, char *filename);
+// kill this export on the next ABI break - strongly doubt it's in use anyway
+// outside of apt itself, it's really an internal interface
+__deprecated int parsenetrc (char *host, char *login, char *password, char *filename);
void maybe_add_auth (URI &Uri, std::string NetRCFile);
#endif
diff --git a/apt-pkg/contrib/sha2_internal.cc b/apt-pkg/contrib/sha2_internal.cc
index 6d27e8f2b..83b5a98d3 100644
--- a/apt-pkg/contrib/sha2_internal.cc
+++ b/apt-pkg/contrib/sha2_internal.cc
@@ -552,7 +552,9 @@ void SHA256_Update(SHA256_CTX* context, const sha2_byte *data, size_t len) {
}
while (len >= SHA256_BLOCK_LENGTH) {
/* Process as many complete blocks as we can */
- SHA256_Transform(context, (sha2_word32*)data);
+ sha2_byte buffer[SHA256_BLOCK_LENGTH];
+ MEMCPY_BCOPY(buffer, data, SHA256_BLOCK_LENGTH);
+ SHA256_Transform(context, (sha2_word32*)buffer);
context->bitcount += SHA256_BLOCK_LENGTH << 3;
len -= SHA256_BLOCK_LENGTH;
data += SHA256_BLOCK_LENGTH;
@@ -879,7 +881,9 @@ void SHA512_Update(SHA512_CTX* context, const sha2_byte *data, size_t len) {
}
while (len >= SHA512_BLOCK_LENGTH) {
/* Process as many complete blocks as we can */
- SHA512_Transform(context, (sha2_word64*)data);
+ sha2_byte buffer[SHA512_BLOCK_LENGTH];
+ MEMCPY_BCOPY(buffer, data, SHA512_BLOCK_LENGTH);
+ SHA512_Transform(context, (sha2_word64*)buffer);
ADDINC128(context->bitcount, SHA512_BLOCK_LENGTH << 3);
len -= SHA512_BLOCK_LENGTH;
data += SHA512_BLOCK_LENGTH;
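
Both the SHA-256 and SHA-512 update loops now copy each input block into a local buffer before handing it to the transform, instead of casting the caller's byte pointer straight to a word pointer — a cast that misbehaves on strict-alignment targets when the input isn't word-aligned. Illustrative shape of the fix (hypothetical names, SHA-256 block size):

#include <cstdint>
#include <cstring>

static void ProcessBlock(const uint8_t *data, void (*transform)(const uint32_t *))
{
   uint32_t buffer[64 / sizeof(uint32_t)];   // one 64-byte SHA-256 block, word-aligned
   memcpy(buffer, data, sizeof(buffer));     // safe copy from the possibly unaligned input
   transform(buffer);                        // the transform now only sees aligned words
}
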
diff --git a/apt-pkg/contrib/strutl.cc b/apt-pkg/contrib/strutl.cc
index 99efa8d98..ca096d736 100644
--- a/apt-pkg/contrib/strutl.cc
+++ b/apt-pkg/contrib/strutl.cc
@@ -23,6 +23,7 @@
#include <ctype.h>
#include <string.h>
+#include <sstream>
#include <stdio.h>
#include <algorithm>
#include <unistd.h>
@@ -1168,34 +1169,50 @@ unsigned long RegexChoice(RxChoiceList *Rxs,const char **ListBegin,
return Hits;
}
/*}}}*/
-// ioprintf - C format string outputter to C++ iostreams /*{{{*/
+// {str,io}printf - C format string outputter to C++ strings/iostreams /*{{{*/
// ---------------------------------------------------------------------
/* This is used to make the internationalization strings easier to translate
and to allow reordering of parameters */
-void ioprintf(ostream &out,const char *format,...)
+static bool iovprintf(ostream &out, const char *format,
+ va_list &args, ssize_t &size) {
+ char *S = (char*)malloc(size);
+ ssize_t const n = vsnprintf(S, size, format, args);
+ if (n > -1 && n < size) {
+ out << S;
+ free(S);
+ return true;
+ } else {
+ if (n > -1)
+ size = n + 1;
+ else
+ size *= 2;
+ }
+ free(S);
+ return false;
+}
+void ioprintf(ostream &out,const char *format,...)
{
va_list args;
- va_start(args,format);
-
- // sprintf the description
- char S[4096];
- vsnprintf(S,sizeof(S),format,args);
- out << S;
+ ssize_t size = 400;
+ while (true) {
+ va_start(args,format);
+ if (iovprintf(out, format, args, size) == true)
+ return;
+ va_end(args);
+ }
}
- /*}}}*/
-// strprintf - C format string outputter to C++ strings /*{{{*/
-// ---------------------------------------------------------------------
-/* This is used to make the internationalization strings easier to translate
- and to allow reordering of parameters */
-void strprintf(string &out,const char *format,...)
+void strprintf(string &out,const char *format,...)
{
va_list args;
- va_start(args,format);
-
- // sprintf the description
- char S[4096];
- vsnprintf(S,sizeof(S),format,args);
- out = string(S);
+ ssize_t size = 400;
+ std::ostringstream outstr;
+ while (true) {
+ va_start(args,format);
+ if (iovprintf(outstr, format, args, size) == true)
+ break;
+ va_end(args);
+ }
+ out = outstr.str();
}
/*}}}*/
// safe_snprintf - Safer snprintf /*{{{*/
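
The shared iovprintf() helper above implements the usual "grow until it fits" formatting loop: try vsnprintf() into a buffer, and if the output was truncated, retry with the size vsnprintf() reported. A self-contained sketch of the same technique (names are illustrative; note the va_list is restarted for every attempt):

#include <cstdarg>
#include <cstdio>
#include <string>
#include <vector>

static std::string Format(const char *format, ...)
{
   std::vector<char> buf(400);
   while (true)
   {
      va_list args;
      va_start(args, format);
      int const n = vsnprintf(buf.data(), buf.size(), format, args);
      va_end(args);
      if (n < 0)
         return std::string();                 // encoding error
      if (static_cast<size_t>(n) < buf.size())
         return std::string(buf.data(), n);    // it fit
      buf.resize(n + 1);                       // exact size needed, retry once more
   }
}
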
diff --git a/apt-pkg/deb/debindexfile.cc b/apt-pkg/deb/debindexfile.cc
index 5dc2a2ac2..de645bb6e 100644
--- a/apt-pkg/deb/debindexfile.cc
+++ b/apt-pkg/deb/debindexfile.cc
@@ -161,7 +161,7 @@ unsigned long debSourcesIndex::Size() const
/* we need to ignore errors here; if the lists are absent, just return 0 */
_error->PushToStack();
- FileFd f = FileFd (IndexFile("Sources"), FileFd::ReadOnly, FileFd::Extension);
+ FileFd f(IndexFile("Sources"), FileFd::ReadOnly, FileFd::Extension);
if (!f.Failed())
size = f.Size();
@@ -290,7 +290,7 @@ unsigned long debPackagesIndex::Size() const
/* we need to ignore errors here; if the lists are absent, just return 0 */
_error->PushToStack();
- FileFd f = FileFd (IndexFile("Packages"), FileFd::ReadOnly, FileFd::Extension);
+ FileFd f(IndexFile("Packages"), FileFd::ReadOnly, FileFd::Extension);
if (!f.Failed())
size = f.Size();
@@ -488,7 +488,7 @@ unsigned long debTranslationsIndex::Size() const
/* we need to ignore errors here; if the lists are absent, just return 0 */
_error->PushToStack();
- FileFd f = FileFd (IndexFile(Language), FileFd::ReadOnly, FileFd::Extension);
+ FileFd f(IndexFile(Language), FileFd::ReadOnly, FileFd::Extension);
if (!f.Failed())
size = f.Size();
@@ -602,7 +602,8 @@ bool debStatusIndex::Merge(pkgCacheGenerator &Gen,OpProgress *Prog) const
pkgCache::PkgFileIterator CFile = Gen.GetCurFile();
CFile->Size = Pkg.FileSize();
CFile->mtime = Pkg.ModificationTime();
- CFile->Archive = Gen.WriteUniqString("now");
+ map_ptrloc const storage = Gen.WriteUniqString("now");
+ CFile->Archive = storage;
if (Gen.MergeList(Parser) == false)
return _error->Error("Problem with MergeList %s",File.c_str());
diff --git a/apt-pkg/deb/deblistparser.cc b/apt-pkg/deb/deblistparser.cc
index 84e6c38c5..b84bd6fdd 100644
--- a/apt-pkg/deb/deblistparser.cc
+++ b/apt-pkg/deb/deblistparser.cc
@@ -15,6 +15,7 @@
#include <apt-pkg/deblistparser.h>
#include <apt-pkg/error.h>
#include <apt-pkg/configuration.h>
+#include <apt-pkg/cachefilter.h>
#include <apt-pkg/aptconfiguration.h>
#include <apt-pkg/strutl.h>
#include <apt-pkg/fileutl.h>
@@ -22,7 +23,6 @@
#include <apt-pkg/md5.h>
#include <apt-pkg/macros.h>
-#include <fnmatch.h>
#include <ctype.h>
/*}}}*/
@@ -215,15 +215,22 @@ string debListParser::DescriptionLanguage()
*/
MD5SumValue debListParser::Description_md5()
{
- string value = Section.FindS("Description-md5");
-
- if (value.empty())
+ string const value = Section.FindS("Description-md5");
+ if (value.empty() == true)
{
MD5Summation md5;
md5.Add((Description() + "\n").c_str());
return md5.Result();
- } else
- return MD5SumValue(value);
+ }
+ else if (likely(value.size() == 32))
+ {
+ if (likely(value.find_first_not_of("0123456789abcdefABCDEF") == string::npos))
+ return MD5SumValue(value);
+ _error->Error("Malformed Description-md5 line; includes invalid character '%s'", value.c_str());
+ return MD5SumValue();
+ }
+ _error->Error("Malformed Description-md5 line; doesn't have the required length (32 != %d) '%s'", (int)value.size(), value.c_str());
+ return MD5SumValue();
}
/*}}}*/
// ListParser::UsePackage - Update a package structure /*{{{*/
@@ -236,21 +243,26 @@ bool debListParser::UsePackage(pkgCache::PkgIterator &Pkg,
if (Pkg->Section == 0)
Pkg->Section = UniqFindTagWrite("Section");
- // Packages which are not from the "native" arch doesn't get the essential flag
- // in the default "native" mode - it is also possible to mark "all" or "none".
- // The "installed" mode is handled by ParseStatus(), See #544481 and friends.
string const static myArch = _config->Find("APT::Architecture");
- string const static essential = _config->Find("pkgCacheGen::Essential", "native");
- if ((essential == "native" && Pkg->Arch != 0 && myArch == Pkg.Arch()) ||
- essential == "all")
+ // Possible values are: "all", "native", "installed" and "none"
+ // The "installed" mode is handled by ParseStatus(), See #544481 and friends.
+ string const static essential = _config->Find("pkgCacheGen::Essential", "all");
+ if (essential == "all" ||
+ (essential == "native" && Pkg->Arch != 0 && myArch == Pkg.Arch()))
if (Section.FindFlag("Essential",Pkg->Flags,pkgCache::Flag::Essential) == false)
return false;
if (Section.FindFlag("Important",Pkg->Flags,pkgCache::Flag::Important) == false)
return false;
if (strcmp(Pkg.Name(),"apt") == 0)
- Pkg->Flags |= pkgCache::Flag::Essential | pkgCache::Flag::Important;
-
+ {
+ if ((essential == "native" && Pkg->Arch != 0 && myArch == Pkg.Arch()) ||
+ essential == "all")
+ Pkg->Flags |= pkgCache::Flag::Essential | pkgCache::Flag::Important;
+ else
+ Pkg->Flags |= pkgCache::Flag::Important;
+ }
+
if (ParseStatus(Pkg,Ver) == false)
return false;
return true;
@@ -452,22 +464,6 @@ const char *debListParser::ConvertRelation(const char *I,unsigned int &Op)
}
return I;
}
-
-/*
- * CompleteArch:
- *
- * The complete architecture, consisting of <kernel>-<cpu>.
- */
-static string CompleteArch(std::string const &arch) {
- if (arch == "armel") return "linux-arm";
- if (arch == "armhf") return "linux-arm";
- if (arch == "lpia") return "linux-i386";
- if (arch == "powerpcspe") return "linux-powerpc";
- if (arch == "uclibc-linux-armel") return "linux-arm";
- if (arch == "uclinux-armel") return "uclinux-arm";
-
- return (arch.find("-") != string::npos) ? arch : "linux-" + arch;
-}
/*}}}*/
// ListParser::ParseDepends - Parse a dependency element /*{{{*/
// ---------------------------------------------------------------------
@@ -544,58 +540,59 @@ const char *debListParser::ParseDepends(const char *Start,const char *Stop,
if (ParseArchFlags == true)
{
- string completeArch = CompleteArch(arch);
+ APT::CacheFilter::PackageArchitectureMatchesSpecification matchesArch(arch, false);
// Parse an architecture
if (I != Stop && *I == '[')
{
+ ++I;
// malformed
- I++;
- if (I == Stop)
- return 0;
-
- const char *End = I;
- bool Found = false;
- bool NegArch = false;
- while (I != Stop)
+ if (unlikely(I == Stop))
+ return 0;
+
+ const char *End = I;
+ bool Found = false;
+ bool NegArch = false;
+ while (I != Stop)
{
- // look for whitespace or ending ']'
- while (End != Stop && !isspace(*End) && *End != ']')
- End++;
-
- if (End == Stop)
+ // look for whitespace or ending ']'
+ for (;End != Stop && !isspace(*End) && *End != ']'; ++End);
+
+ if (unlikely(End == Stop))
return 0;
if (*I == '!')
- {
+ {
NegArch = true;
- I++;
- }
+ ++I;
+ }
- if (stringcmp(arch,I,End) == 0) {
+ std::string arch(I, End);
+ if (arch.empty() == false && matchesArch(arch.c_str()) == true)
+ {
Found = true;
- } else {
- std::string wildcard = SubstVar(string(I, End), "any", "*");
- if (fnmatch(wildcard.c_str(), completeArch.c_str(), 0) == 0)
- Found = true;
+ if (I[-1] != '!')
+ NegArch = false;
+ // we found a match, so fast-forward to the end of the wildcards
+ for (; End != Stop && *End != ']'; ++End);
}
-
+
if (*End++ == ']') {
I = End;
break;
}
-
+
I = End;
for (;I != Stop && isspace(*I) != 0; I++);
- }
+ }
- if (NegArch)
+ if (NegArch == true)
Found = !Found;
-
- if (Found == false)
+
+ if (Found == false)
Package = ""; /* not for this arch */
}
-
+
// Skip whitespace
for (;I != Stop && isspace(*I) != 0; I++);
}
@@ -625,18 +622,21 @@ bool debListParser::ParseDepends(pkgCache::VerIterator &Ver,
if (Section.Find(Tag,Start,Stop) == false)
return true;
- string Package;
string const pkgArch = Ver.Arch();
- string Version;
- unsigned int Op;
while (1)
{
+ string Package;
+ string Version;
+ unsigned int Op;
+
Start = ParseDepends(Start,Stop,Package,Version,Op,false,!MultiArchEnabled);
if (Start == 0)
return _error->Error("Problem parsing dependency %s",Tag);
+ size_t const found = Package.rfind(':');
- if (MultiArchEnabled == true &&
+ // If negative is unspecific it needs to apply on all architectures
+ if (MultiArchEnabled == true && found == string::npos &&
(Type == pkgCache::Dep::Conflicts ||
Type == pkgCache::Dep::DpkgBreaks ||
Type == pkgCache::Dep::Replaces))
@@ -645,9 +645,33 @@ bool debListParser::ParseDepends(pkgCache::VerIterator &Ver,
a != Architectures.end(); ++a)
if (NewDepends(Ver,Package,*a,Version,Op,Type) == false)
return false;
+ if (NewDepends(Ver,Package,"none",Version,Op,Type) == false)
+ return false;
+ }
+ else if (MultiArchEnabled == true && found != string::npos &&
+ strcmp(Package.c_str() + found, ":any") != 0)
+ {
+ string Arch = Package.substr(found+1, string::npos);
+ Package = Package.substr(0, found);
+ // Such dependencies are not supposed to be accepted …
+ // … but this is probably the best thing to do.
+ if (Arch == "native")
+ Arch = _config->Find("APT::Architecture");
+ if (NewDepends(Ver,Package,Arch,Version,Op,Type) == false)
+ return false;
+ }
+ else
+ {
+ if (NewDepends(Ver,Package,pkgArch,Version,Op,Type) == false)
+ return false;
+ if ((Type == pkgCache::Dep::Conflicts ||
+ Type == pkgCache::Dep::DpkgBreaks ||
+ Type == pkgCache::Dep::Replaces) &&
+ NewDepends(Ver, Package,
+ (pkgArch != "none") ? "none" : _config->Find("APT::Architecture"),
+ Version,Op,Type) == false)
+ return false;
}
- else if (NewDepends(Ver,Package,pkgArch,Version,Op,Type) == false)
- return false;
if (Start == Stop)
break;
}
@@ -741,13 +765,15 @@ bool debListParser::Step()
drop the whole section. A missing arch tag only happens (in theory)
inside the Status file, so that is a positive return */
string const Architecture = Section.FindS("Architecture");
- if (Architecture.empty() == true)
- return true;
if (Arch.empty() == true || Arch == "any" || MultiArchEnabled == false)
{
if (APT::Configuration::checkArchitecture(Architecture) == true)
return true;
+ /* parse version stanzas without an architecture only in the status file
+ (and as misfortune bycatch flat-archives) */
+ if ((Arch.empty() == true || Arch == "any") && Architecture.empty() == true)
+ return true;
}
else
{
@@ -771,7 +797,8 @@ bool debListParser::LoadReleaseInfo(pkgCache::PkgFileIterator &FileI,
{
// apt-secure no longer downloads individual (per-section) Release
// files. To provide Component pinning we use the section name now
- FileI->Component = WriteUniqString(component);
+ map_ptrloc const storage = WriteUniqString(component);
+ FileI->Component = storage;
// FIXME: Code depends on the fact that Release files aren't compressed
FILE* release = fdopen(dup(File.Fd()), "r");
@@ -858,13 +885,14 @@ bool debListParser::LoadReleaseInfo(pkgCache::PkgFileIterator &FileI,
break;
*s = '\0';
}
+ map_ptrloc const storage = WriteUniqString(data);
switch (writeTo) {
- case Suite: FileI->Archive = WriteUniqString(data); break;
- case Component: FileI->Component = WriteUniqString(data); break;
- case Version: FileI->Version = WriteUniqString(data); break;
- case Origin: FileI->Origin = WriteUniqString(data); break;
- case Codename: FileI->Codename = WriteUniqString(data); break;
- case Label: FileI->Label = WriteUniqString(data); break;
+ case Suite: FileI->Archive = storage; break;
+ case Component: FileI->Component = storage; break;
+ case Version: FileI->Version = storage; break;
+ case Origin: FileI->Origin = storage; break;
+ case Codename: FileI->Codename = storage; break;
+ case Label: FileI->Label = storage; break;
case None: break;
}
}
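
The [arch] restriction handling above drops the local CompleteArch()+fnmatch() combination in favour of APT::CacheFilter::PackageArchitectureMatchesSpecification from cachefilter.h. A hedged usage sketch, inferred only from the call sites in this hunk (constructed with the concrete architecture and `false`, then asked whether a wildcard from the restriction list applies); the exact semantics live in apt-pkg/cachefilter.h:

#include <apt-pkg/cachefilter.h>
#include <iostream>

int main()
{
   // second argument false: the stored string is a literal architecture,
   // the strings passed to operator() are the (possibly wildcarded) specs
   APT::CacheFilter::PackageArchitectureMatchesSpecification matchesArch("armel", false);
   std::cout << matchesArch("linux-any") << ' '   // expected: matches (armel is a linux arch)
             << matchesArch("kfreebsd-any")       // expected: no match
             << std::endl;
}
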
diff --git a/apt-pkg/deb/dpkgpm.cc b/apt-pkg/deb/dpkgpm.cc
index be11870fd..6cb8bc6b6 100644
--- a/apt-pkg/deb/dpkgpm.cc
+++ b/apt-pkg/deb/dpkgpm.cc
@@ -187,7 +187,7 @@ pkgDPkgPM::~pkgDPkgPM()
bool pkgDPkgPM::Install(PkgIterator Pkg,string File)
{
if (File.empty() == true || Pkg.end() == true)
- return _error->Error("Internal Error, No file name for %s",Pkg.Name());
+ return _error->Error("Internal Error, No file name for %s",Pkg.FullName().c_str());
// If the filename string begins with DPkg::Chroot-Directory, return the
// substr that is within the chroot so dpkg can access it.
@@ -425,7 +425,7 @@ void pkgDPkgPM::DoStdin(int master)
unsigned char input_buf[256] = {0,};
ssize_t len = read(0, input_buf, sizeof(input_buf));
if (len)
- write(master, input_buf, len);
+ FileFd::Write(master, input_buf, len);
else
d->stdin_is_dev_null = true;
}
@@ -451,7 +451,7 @@ void pkgDPkgPM::DoTerminalPty(int master)
}
if(len <= 0)
return;
- write(1, term_buf, len);
+ FileFd::Write(1, term_buf, len);
if(d->term_out)
fwrite(term_buf, len, sizeof(char), d->term_out);
}
@@ -526,7 +526,7 @@ void pkgDPkgPM::ProcessDpkgStatusLine(int OutStatusFd, char *line)
<< ":" << s
<< endl;
if(OutStatusFd > 0)
- write(OutStatusFd, status.str().c_str(), status.str().size());
+ FileFd::Write(OutStatusFd, status.str().c_str(), status.str().size());
if (Debug == true)
std::clog << "send: '" << status.str() << "'" << endl;
@@ -550,7 +550,7 @@ void pkgDPkgPM::ProcessDpkgStatusLine(int OutStatusFd, char *line)
<< ":" << list[3]
<< endl;
if(OutStatusFd > 0)
- write(OutStatusFd, status.str().c_str(), status.str().size());
+ FileFd::Write(OutStatusFd, status.str().c_str(), status.str().size());
if (Debug == true)
std::clog << "send: '" << status.str() << "'" << endl;
pkgFailures++;
@@ -564,7 +564,7 @@ void pkgDPkgPM::ProcessDpkgStatusLine(int OutStatusFd, char *line)
<< ":" << list[3]
<< endl;
if(OutStatusFd > 0)
- write(OutStatusFd, status.str().c_str(), status.str().size());
+ FileFd::Write(OutStatusFd, status.str().c_str(), status.str().size());
if (Debug == true)
std::clog << "send: '" << status.str() << "'" << endl;
return;
@@ -592,7 +592,7 @@ void pkgDPkgPM::ProcessDpkgStatusLine(int OutStatusFd, char *line)
<< ":" << s
<< endl;
if(OutStatusFd > 0)
- write(OutStatusFd, status.str().c_str(), status.str().size());
+ FileFd::Write(OutStatusFd, status.str().c_str(), status.str().size());
if (Debug == true)
std::clog << "send: '" << status.str() << "'" << endl;
}
@@ -726,7 +726,7 @@ bool pkgDPkgPM::OpenLog()
gr = getgrnam("adm");
if (pw != NULL && gr != NULL)
chown(logfile_name.c_str(), pw->pw_uid, gr->gr_gid);
- chmod(logfile_name.c_str(), 0644);
+ chmod(logfile_name.c_str(), 0640);
fprintf(d->term_out, "\nLog started: %s\n", timestr);
}
@@ -738,6 +738,7 @@ bool pkgDPkgPM::OpenLog()
d->history_out = fopen(history_name.c_str(),"a");
if (d->history_out == NULL)
return _error->WarningE("OpenLog", _("Could not open file '%s'"), history_name.c_str());
+ SetCloseExec(fileno(d->history_out), true);
chmod(history_name.c_str(), 0644);
fprintf(d->history_out, "\nStart-Date: %s\n", timestr);
string remove, purge, install, reinstall, upgrade, downgrade;
@@ -1055,7 +1056,8 @@ bool pkgDPkgPM::Go(int OutStatusFd)
}
int fd[2];
- pipe(fd);
+ if (pipe(fd) != 0)
+ return _error->Errno("pipe","Failed to create IPC pipe to dpkg");
#define ADDARG(X) Args.push_back(X); Size += strlen(X)
#define ADDARGC(X) Args.push_back(X); Size += sizeof(X) - 1
@@ -1129,7 +1131,9 @@ bool pkgDPkgPM::Go(int OutStatusFd)
if (I->Op == Item::Configure && disappearedPkgs.find(I->Pkg.Name()) != disappearedPkgs.end())
continue;
// We keep this here to allow "smooth" transitions from e.g. multiarch dpkg/ubuntu to dpkg/debian
- if (dpkgMultiArch == false && (I->Pkg.Arch() == nativeArch || !strcmp(I->Pkg.Arch(), "all")))
+ if (dpkgMultiArch == false && (I->Pkg.Arch() == nativeArch ||
+ strcmp(I->Pkg.Arch(), "all") == 0 ||
+ strcmp(I->Pkg.Arch(), "none") == 0))
{
char const * const name = I->Pkg.Name();
ADDARG(name);
@@ -1146,7 +1150,9 @@ bool pkgDPkgPM::Go(int OutStatusFd)
}
else
PkgVer = Cache[I->Pkg].InstVerIter(Cache);
- if (PkgVer.end() == false)
+ if (strcmp(I->Pkg.Arch(), "none") == 0)
+ ; // never arch-qualify a package without an arch
+ else if (PkgVer.end() == false)
name.append(":").append(PkgVer.Arch());
else
_error->Warning("Can not find PkgVer for '%s'", name.c_str());
@@ -1236,7 +1242,7 @@ bool pkgDPkgPM::Go(int OutStatusFd)
<< (PackagesDone/float(PackagesTotal)*100.0)
<< ":" << _("Running dpkg")
<< endl;
- write(OutStatusFd, status.str().c_str(), status.str().size());
+ FileFd::Write(OutStatusFd, status.str().c_str(), status.str().size());
}
Child = ExecFork();
diff --git a/apt-pkg/depcache.cc b/apt-pkg/depcache.cc
index 1eea55560..2ec346f0b 100644
--- a/apt-pkg/depcache.cc
+++ b/apt-pkg/depcache.cc
@@ -346,7 +346,7 @@ bool pkgDepCache::CheckDep(DepIterator Dep,int Type,PkgIterator &Res)
/* Check simple depends. A depends -should- never self match but
we allow it anyhow because dpkg does. Technically it is a packaging
bug. Conflicts may never self match */
- if (Dep.TargetPkg() != Dep.ParentPkg() || Dep.IsNegative() == false)
+ if (Dep.IsIgnorable(Res) == false)
{
PkgIterator Pkg = Dep.TargetPkg();
// Check the base package
@@ -913,11 +913,15 @@ bool pkgDepCache::IsModeChangeOk(ModeList const mode, PkgIterator const &Pkg,
return true;
StateCache &P = PkgState[Pkg->ID];
+ // not changing the mode is obviously also fine as we might want to call
+ // e.g. MarkInstall multiple times with different arguments for the same package
+ if (P.Mode == mode)
+ return true;
// if previous state was set by user only user can reset it
if ((P.iFlags & Protected) == Protected)
{
- if (unlikely(DebugMarker == true) && P.Mode != mode)
+ if (unlikely(DebugMarker == true))
std::clog << OutputInDepth(Depth) << "Ignore Mark" << PrintMode(mode)
<< " of " << Pkg << " as its mode (" << PrintMode(P.Mode)
<< ") is protected" << std::endl;
@@ -927,7 +931,7 @@ bool pkgDepCache::IsModeChangeOk(ModeList const mode, PkgIterator const &Pkg,
else if (mode != ModeKeep && Pkg->SelectedState == pkgCache::State::Hold &&
_config->FindB("APT::Ignore-Hold",false) == false)
{
- if (unlikely(DebugMarker == true) && P.Mode != mode)
+ if (unlikely(DebugMarker == true))
std::clog << OutputInDepth(Depth) << "Hold prevents Mark" << PrintMode(mode)
<< " of " << Pkg << std::endl;
return false;
@@ -1152,9 +1156,8 @@ bool pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst,
}
/* This bit is for processing the possibilty of an install/upgrade
- fixing the problem */
- if (Start->Type != Dep::DpkgBreaks &&
- (DepState[Start->ID] & DepCVer) == DepCVer)
+ fixing the problem for "positive" dependencies */
+ if (Start.IsNegative() == false && (DepState[Start->ID] & DepCVer) == DepCVer)
{
APT::VersionList verlist;
pkgCache::VerIterator Cand = PkgState[Start.TargetPkg()->ID].CandidateVerIter(*this);
@@ -1165,7 +1168,7 @@ bool pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst,
pkgCache::VerIterator V = Prv.OwnerVer();
pkgCache::VerIterator Cand = PkgState[Prv.OwnerPkg()->ID].CandidateVerIter(*this);
if (Cand.end() == true || V != Cand ||
- VS().CheckDep(Cand.VerStr(), Start->CompareOp, Start.TargetVer()) == false)
+ VS().CheckDep(Prv.ProvideVersion(), Start->CompareOp, Start.TargetVer()) == false)
continue;
verlist.insert(Cand);
}
@@ -1179,32 +1182,25 @@ bool pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst,
std::clog << OutputInDepth(Depth) << "Installing " << InstPkg.Name()
<< " as " << Start.DepType() << " of " << Pkg.Name()
<< std::endl;
- // now check if we should consider it a automatic dependency or not
- if(Pkg.Section() && ConfigValueInSubTree("APT::Never-MarkAuto-Sections", Pkg.Section()))
- {
+ MarkInstall(InstPkg, true, Depth + 1, false, ForceImportantDeps);
+ // now check if we should consider it a automatic dependency or not
+ if(InstPkg->CurrentVer == 0 && Pkg->Section != 0 && ConfigValueInSubTree("APT::Never-MarkAuto-Sections", Pkg.Section()))
+ {
if(DebugAutoInstall == true)
std::clog << OutputInDepth(Depth) << "Setting NOT as auto-installed (direct "
<< Start.DepType() << " of pkg in APT::Never-MarkAuto-Sections)" << std::endl;
- MarkInstall(InstPkg,true,Depth + 1, true);
- }
- else
- {
- // mark automatic dependency
- MarkInstall(InstPkg,true,Depth + 1, false, ForceImportantDeps);
- // Set the autoflag, after MarkInstall because MarkInstall unsets it
- if (InstPkg->CurrentVer == 0)
- PkgState[InstPkg->ID].Flags |= Flag::Auto;
- }
+ MarkAuto(InstPkg, false);
+ }
}
continue;
}
-
- /* For conflicts we just de-install the package and mark as auto,
- Conflicts may not have or groups. For dpkg's Breaks we try to
- upgrade the package. */
- if (Start.IsNegative() == true)
+ /* Negative dependencies have no or-group
+ If the dependency isn't versioned, we try if an upgrade might solve the problem.
+ Otherwise we remove the offender if needed */
+ else if (Start.IsNegative() == true && Start->Type != pkgCache::Dep::Obsoletes)
{
SPtrArray<Version *> List = Start.AllTargets();
+ pkgCache::PkgIterator TrgPkg = Start.TargetPkg();
for (Version **I = List; *I != 0; I++)
{
VerIterator Ver(*this,*I);
@@ -1215,15 +1211,17 @@ bool pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst,
if (PkgState[Pkg->ID].InstallVer == 0)
continue;
- if (PkgState[Pkg->ID].CandidateVer != *I &&
- Start->Type == Dep::DpkgBreaks &&
+ if ((Start->Version != 0 || TrgPkg != Pkg) &&
+ PkgState[Pkg->ID].CandidateVer != PkgState[Pkg->ID].InstallVer &&
+ PkgState[Pkg->ID].CandidateVer != *I &&
MarkInstall(Pkg,true,Depth + 1, false, ForceImportantDeps) == true)
continue;
- else if (MarkDelete(Pkg,false,Depth + 1, false) == false)
+ else if ((Start->Type == pkgCache::Dep::Conflicts || Start->Type == pkgCache::Dep::DpkgBreaks) &&
+ MarkDelete(Pkg,false,Depth + 1, false) == false)
break;
}
continue;
- }
+ }
}
return Dep.end() == true;
diff --git a/apt-pkg/edsp.cc b/apt-pkg/edsp.cc
index 791aac72f..6ce9da784 100644
--- a/apt-pkg/edsp.cc
+++ b/apt-pkg/edsp.cc
@@ -118,8 +118,7 @@ void EDSP::WriteScenarioDependency(pkgDepCache &Cache, FILE* output, pkgCache::P
bool orGroup = false;
for (pkgCache::DepIterator Dep = Ver.DependsList(); Dep.end() == false; ++Dep)
{
- // Ignore implicit dependencies for multiarch here
- if (strcmp(Pkg.Arch(), Dep.TargetPkg().Arch()) != 0)
+ if (Dep.IsMultiArchImplicit() == true)
continue;
if (orGroup == false)
dependencies[Dep->Type].append(", ");
@@ -140,8 +139,7 @@ void EDSP::WriteScenarioDependency(pkgDepCache &Cache, FILE* output, pkgCache::P
string provides;
for (pkgCache::PrvIterator Prv = Ver.ProvidesList(); Prv.end() == false; ++Prv)
{
- // Ignore implicit provides for multiarch here
- if (strcmp(Pkg.Arch(), Prv.ParentPkg().Arch()) != 0 || strcmp(Pkg.Name(),Prv.Name()) == 0)
+ if (Prv.IsMultiArchImplicit() == true)
continue;
provides.append(", ").append(Prv.Name());
}
@@ -159,8 +157,7 @@ void EDSP::WriteScenarioLimitedDependency(pkgDepCache &Cache, FILE* output,
bool orGroup = false;
for (pkgCache::DepIterator Dep = Ver.DependsList(); Dep.end() == false; ++Dep)
{
- // Ignore implicit dependencies for multiarch here
- if (strcmp(Pkg.Arch(), Dep.TargetPkg().Arch()) != 0)
+ if (Dep.IsMultiArchImplicit() == true)
continue;
if (orGroup == false)
{
@@ -193,8 +190,7 @@ void EDSP::WriteScenarioLimitedDependency(pkgDepCache &Cache, FILE* output,
string provides;
for (pkgCache::PrvIterator Prv = Ver.ProvidesList(); Prv.end() == false; ++Prv)
{
- // Ignore implicit provides for multiarch here
- if (strcmp(Pkg.Arch(), Prv.ParentPkg().Arch()) != 0 || strcmp(Pkg.Name(),Prv.Name()) == 0)
+ if (Prv.IsMultiArchImplicit() == true)
continue;
if (pkgset.find(Prv.ParentPkg()) == pkgset.end())
continue;
@@ -218,9 +214,11 @@ bool EDSP::WriteRequest(pkgDepCache &Cache, FILE* output, bool const Upgrade,
if (Progress != NULL && p % 100 == 0)
Progress->Progress(p);
string* req;
- if (Cache[Pkg].Delete() == true)
+ pkgDepCache::StateCache &P = Cache[Pkg];
+ if (P.Delete() == true)
req = &del;
- else if (Cache[Pkg].NewInstall() == true || Cache[Pkg].Upgrade() == true)
+ else if (P.NewInstall() == true || P.Upgrade() == true || P.ReInstall() == true ||
+ (P.Mode == pkgDepCache::ModeKeep && (P.iFlags & pkgDepCache::Protected) == pkgDepCache::Protected))
req = &inst;
else
continue;
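
The EDSP writer no longer compares parent and target architectures by hand; it asks the iterator whether an entry exists only because of the cache's internal multi-arch expansion. A short sketch of that pattern for provides, mirroring the calls used in this hunk (assumes a valid VerIterator from an open cache):

#include <apt-pkg/pkgcache.h>
#include <string>

static std::string CollectExplicitProvides(pkgCache::VerIterator const &Ver)
{
   std::string provides;
   for (pkgCache::PrvIterator Prv = Ver.ProvidesList(); Prv.end() == false; ++Prv)
   {
      if (Prv.IsMultiArchImplicit() == true)
         continue;                              // skip cache-internal M-A entries
      provides.append(", ").append(Prv.Name());
   }
   return provides;
}
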
diff --git a/apt-pkg/edsp/edspindexfile.cc b/apt-pkg/edsp/edspindexfile.cc
index 482581979..98ce4497a 100644
--- a/apt-pkg/edsp/edspindexfile.cc
+++ b/apt-pkg/edsp/edspindexfile.cc
@@ -51,7 +51,8 @@ bool edspIndex::Merge(pkgCacheGenerator &Gen,OpProgress *Prog) const
pkgCache::PkgFileIterator CFile = Gen.GetCurFile();
CFile->Size = Pkg.FileSize();
CFile->mtime = Pkg.ModificationTime();
- CFile->Archive = Gen.WriteUniqString("edsp::scenario");
+ map_ptrloc const storage = Gen.WriteUniqString("edsp::scenario");
+ CFile->Archive = storage;
if (Gen.MergeList(Parser) == false)
return _error->Error("Problem with MergeList %s",File.c_str());
diff --git a/apt-pkg/edsp/edspsystem.cc b/apt-pkg/edsp/edspsystem.cc
index 6b9207451..aae969d9d 100644
--- a/apt-pkg/edsp/edspsystem.cc
+++ b/apt-pkg/edsp/edspsystem.cc
@@ -91,7 +91,7 @@ signed edspSystem::Score(Configuration const &Cnf)
{
if (Cnf.Find("edsp::scenario", "") == "stdin")
return 1000;
- if (FileExists(Cnf.FindFile("edsp::scenario","")) == true)
+ if (RealFileExists(Cnf.FindFile("edsp::scenario","")) == true)
return 1000;
return -1000;
}
diff --git a/apt-pkg/indexcopy.cc b/apt-pkg/indexcopy.cc
index e29e2819c..aa1f01a4a 100644
--- a/apt-pkg/indexcopy.cc
+++ b/apt-pkg/indexcopy.cc
@@ -350,9 +350,6 @@ bool IndexCopy::ReconstructChop(unsigned long &Chop,string Dir,string File)
*/
void IndexCopy::ConvertToSourceList(string CD,string &Path)
{
- char S[300];
- snprintf(S,sizeof(S),"binary-%s",_config->Find("Apt::Architecture").c_str());
-
// Strip the cdrom base path
Path = string(Path,CD.length());
if (Path.empty() == true)
@@ -388,7 +385,13 @@ void IndexCopy::ConvertToSourceList(string CD,string &Path)
return;
string Binary = string(Path,Slash+1,BinSlash - Slash-1);
- if (Binary != S && Binary != "source")
+ if (strncmp(Binary.c_str(), "binary-", strlen("binary-")) == 0)
+ {
+ Binary.erase(0, strlen("binary-"));
+ if (APT::Configuration::checkArchitecture(Binary) == false)
+ continue;
+ }
+ else if (Binary != "source")
continue;
Path = Dist + ' ' + Comp;
@@ -494,17 +497,20 @@ bool SourceCopy::RewriteEntry(FILE *Target,string File)
bool SigVerify::Verify(string prefix, string file, indexRecords *MetaIndex)
{
const indexRecords::checkSum *Record = MetaIndex->Lookup(file);
+ bool const Debug = _config->FindB("Debug::aptcdrom",false);
- // we skip non-existing files in the verifcation to support a cdrom
- // with no Packages file (just a Package.gz), see LP: #255545
- // (non-existing files are not considered a error)
+ // we skip non-existing files in the verification of the Release file
+ // as non-existing files do not harm, but a warning scares people and
+ // makes it hard to strip unneeded files from an ISO like uncompressed
+ // indexes as it is done on the mirrors (see also LP: #255545 )
if(!RealFileExists(prefix+file))
{
- _error->Warning(_("Skipping nonexistent file %s"), string(prefix+file).c_str());
+ if (Debug == true)
+ cout << "Skipping nonexistent in " << prefix << " file " << file << std::endl;
return true;
}
- if (!Record)
+ if (!Record)
{
_error->Warning(_("Can't find authentication record for: %s"), file.c_str());
return false;
@@ -516,7 +522,7 @@ bool SigVerify::Verify(string prefix, string file, indexRecords *MetaIndex)
return false;
}
- if(_config->FindB("Debug::aptcdrom",false))
+ if(Debug == true)
{
cout << "File: " << prefix+file << endl;
cout << "Expected Hash " << Record->Hash.toStr() << endl;
@@ -810,9 +816,14 @@ bool TranslationsCopy::CopyTranslations(string CDROM,string Name, /*{{{*/
(*I).c_str() + CDROM.length());
string TargetF = _config->FindDir("Dir::State::lists") + "partial/";
TargetF += URItoFileName(S);
+ FileFd Target;
if (_config->FindB("APT::CDROM::NoAct",false) == true)
+ {
TargetF = "/dev/null";
- FileFd Target(TargetF,FileFd::WriteAtomic);
+ Target.Open(TargetF,FileFd::WriteExists);
+ } else {
+ Target.Open(TargetF,FileFd::WriteAtomic);
+ }
FILE *TargetFl = fdopen(dup(Target.Fd()),"w");
if (_error->PendingError() == true)
return false;
diff --git a/apt-pkg/init.cc b/apt-pkg/init.cc
index a1c47c030..76278921f 100644
--- a/apt-pkg/init.cc
+++ b/apt-pkg/init.cc
@@ -24,7 +24,7 @@
#define Stringfy_(x) # x
#define Stringfy(x) Stringfy_(x)
-const char *pkgVersion = VERSION;
+const char *pkgVersion = PACKAGE_VERSION;
const char *pkgLibVersion = Stringfy(APT_PKG_MAJOR) "."
Stringfy(APT_PKG_MINOR) "."
Stringfy(APT_PKG_RELEASE);
diff --git a/apt-pkg/makefile b/apt-pkg/makefile
index e1f69dd65..27d7ead24 100644
--- a/apt-pkg/makefile
+++ b/apt-pkg/makefile
@@ -14,7 +14,13 @@ include ../buildlib/libversion.mak
LIBRARY=apt-pkg
MAJOR=$(LIBAPTPKG_MAJOR)
MINOR=$(LIBAPTPKG_RELEASE)
-SLIBS=$(PTHREADLIB) $(INTLLIBS) -lutil -ldl -lz
+SLIBS=$(PTHREADLIB) $(INTLLIBS) -lutil -ldl
+ifeq ($(HAVE_ZLIB),yes)
+SLIBS+= -lz
+endif
+ifeq ($(HAVE_BZ2),yes)
+SLIBS+= -lbz2
+endif
APT_DOMAIN:=libapt-pkg$(LIBAPTPKG_MAJOR)
# Source code for the contributed non-core things
diff --git a/apt-pkg/packagemanager.cc b/apt-pkg/packagemanager.cc
index dd8f306f2..e2d7dbf2a 100644
--- a/apt-pkg/packagemanager.cc
+++ b/apt-pkg/packagemanager.cc
@@ -25,9 +25,10 @@
#include <apt-pkg/configuration.h>
#include <apt-pkg/sptr.h>
-#include <apti18n.h>
#include <iostream>
#include <fcntl.h>
+
+#include <apti18n.h>
/*}}}*/
using namespace std;
@@ -337,7 +338,7 @@ bool pkgPackageManager::SmartConfigure(PkgIterator Pkg, int const Depth)
however if there is a loop (A depends on B, B depends on A) this will not
be the case, so check for dependencies before configuring. */
bool Bad = false, Changed = false;
- const unsigned int max_loops = _config->FindI("APT::pkgPackageManager::MaxLoopCount", 100);
+ const unsigned int max_loops = _config->FindI("APT::pkgPackageManager::MaxLoopCount", 500);
unsigned int i=0;
do
{
@@ -491,6 +492,7 @@ bool pkgPackageManager::SmartConfigure(PkgIterator Pkg, int const Depth)
P.end() == false; P = Pkg.Group().NextPkg(P))
{
if (Pkg == P || List->IsFlag(P,pkgOrderList::Configured) == true ||
+ List->IsFlag(P,pkgOrderList::UnPacked) == false ||
Cache[P].InstallVer == 0 || (P.CurrentVer() == Cache[P].InstallVer &&
(Cache[Pkg].iFlags & pkgDepCache::ReInstall) != pkgDepCache::ReInstall))
continue;
@@ -601,8 +603,8 @@ bool pkgPackageManager::SmartUnPack(PkgIterator Pkg, bool const Immediate, int c
This will be either dealt with if the package is configured as a dependency of Pkg (if and when Pkg is configured),
or by the ConfigureAll call at the end of the for loop in OrderInstall. */
bool Changed = false;
- const unsigned int max_loops = _config->FindI("APT::pkgPackageManager::MaxLoopCount", 100);
- unsigned int i;
+ const unsigned int max_loops = _config->FindI("APT::pkgPackageManager::MaxLoopCount", 500);
+ unsigned int i = 0;
do
{
Changed = false;
@@ -621,7 +623,7 @@ bool pkgPackageManager::SmartUnPack(PkgIterator Pkg, bool const Immediate, int c
// Look for easy targets: packages that are already okay
for (DepIterator Cur = Start; Bad == true; ++Cur)
{
- SPtrArray<Version *> VList = Start.AllTargets();
+ SPtrArray<Version *> VList = Cur.AllTargets();
for (Version **I = VList; *I != 0; ++I)
{
VerIterator Ver(Cache,*I);
@@ -642,9 +644,9 @@ bool pkgPackageManager::SmartUnPack(PkgIterator Pkg, bool const Immediate, int c
}
// Look for something that could be configured.
- for (DepIterator Cur = Start; Bad == true; ++Cur)
+ for (DepIterator Cur = Start; Bad == true && Cur.end() == false; ++Cur)
{
- SPtrArray<Version *> VList = Start.AllTargets();
+ SPtrArray<Version *> VList = Cur.AllTargets();
for (Version **I = VList; *I != 0; ++I)
{
VerIterator Ver(Cache,*I);
@@ -784,7 +786,7 @@ bool pkgPackageManager::SmartUnPack(PkgIterator Pkg, bool const Immediate, int c
VerIterator V(Cache,*I);
PkgIterator P = V.ParentPkg();
// we are checking for installation as an easy 'protection' against or-groups and (unchosen) providers
- if (P->CurrentVer == 0 || P != Pkg || (P.CurrentVer() != V && Cache[P].InstallVer != V))
+ if (P != Pkg || (P.CurrentVer() != V && Cache[P].InstallVer != V))
continue;
circle = true;
break;
@@ -830,7 +832,7 @@ bool pkgPackageManager::SmartUnPack(PkgIterator Pkg, bool const Immediate, int c
}
}
if (i++ > max_loops)
- return _error->Error("Internal error: MaxLoopCount reached in SmartConfigure for %s, aborting", Pkg.FullName().c_str());
+ return _error->Error("Internal error: APT::pkgPackageManager::MaxLoopCount reached in SmartConfigure for %s, aborting", Pkg.FullName().c_str());
} while (Changed == true);
// Check for reverse conflicts.
@@ -855,7 +857,10 @@ bool pkgPackageManager::SmartUnPack(PkgIterator Pkg, bool const Immediate, int c
This way we avoid that M-A: enabled packages are installed before
their older non-M-A enabled packages are replaced by newer versions */
bool const installed = Pkg->CurrentVer != 0;
- if (installed == true && Install(Pkg,FileNames[Pkg->ID]) == false)
+ if (installed == true &&
+ (instVer != Pkg.CurrentVer() ||
+ ((Cache[Pkg].iFlags & pkgDepCache::ReInstall) == pkgDepCache::ReInstall)) &&
+ Install(Pkg,FileNames[Pkg->ID]) == false)
return false;
for (PkgIterator P = Pkg.Group().PackageList();
P.end() == false; P = Pkg.Group().NextPkg(P))
@@ -873,6 +878,7 @@ bool pkgPackageManager::SmartUnPack(PkgIterator Pkg, bool const Immediate, int c
P.end() == false; P = Pkg.Group().NextPkg(P))
{
if (P->CurrentVer != 0 || P == Pkg || List->IsFlag(P,pkgOrderList::UnPacked) == true ||
+ List->IsFlag(P,pkgOrderList::Configured) == true ||
Cache[P].InstallVer == 0 || (P.CurrentVer() == Cache[P].InstallVer &&
(Cache[Pkg].iFlags & pkgDepCache::ReInstall) != pkgDepCache::ReInstall))
continue;
@@ -881,7 +887,9 @@ bool pkgPackageManager::SmartUnPack(PkgIterator Pkg, bool const Immediate, int c
}
}
// packages which are already unpacked don't need to be unpacked again
- else if (Pkg.State() != pkgCache::PkgIterator::NeedsConfigure && Install(Pkg,FileNames[Pkg->ID]) == false)
+ else if ((instVer != Pkg.CurrentVer() ||
+ ((Cache[Pkg].iFlags & pkgDepCache::ReInstall) == pkgDepCache::ReInstall)) &&
+ Install(Pkg,FileNames[Pkg->ID]) == false)
return false;
if (Immediate == true) {
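
Both branches now share the same guard: unpack only when something actually changes. A condensed sketch (not part of the patch), with instVer, Pkg, Cache, FileNames and Install taken from the surrounding function:

   // unpack only if the selected version differs from what is installed,
   // or an explicit reinstall of the same version was requested
   bool const NeedsUnpack =
      instVer != Pkg.CurrentVer() ||
      (Cache[Pkg].iFlags & pkgDepCache::ReInstall) == pkgDepCache::ReInstall;
   if (NeedsUnpack == true && Install(Pkg, FileNames[Pkg->ID]) == false)
      return false;
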
diff --git a/apt-pkg/pkgcache.cc b/apt-pkg/pkgcache.cc
index 997c70768..1de33ff9b 100644
--- a/apt-pkg/pkgcache.cc
+++ b/apt-pkg/pkgcache.cc
@@ -238,7 +238,7 @@ pkgCache::PkgIterator pkgCache::FindPkg(const string &Name) {
// ---------------------------------------------------------------------
/* Returns 0 on error, pointer to the package otherwise */
pkgCache::PkgIterator pkgCache::FindPkg(const string &Name, string const &Arch) {
- if (MultiArchCache() == false) {
+ if (MultiArchCache() == false && Arch != "none") {
if (Arch == "native" || Arch == "all" || Arch == "any" ||
Arch == NativeArch())
return SingleArchFindPkg(Name);
@@ -376,6 +376,10 @@ pkgCache::PkgIterator pkgCache::GrpIterator::FindPreferredPkg(bool const &Prefer
if (Pkg.end() == false && (PreferNonVirtual == false || Pkg->VersionList != 0))
return Pkg;
}
+ // packages without an architecture
+ Pkg = FindPkg("none");
+ if (Pkg.end() == false && (PreferNonVirtual == false || Pkg->VersionList != 0))
+ return Pkg;
if (PreferNonVirtual == true)
return FindPreferredPkg(false);
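
With this addition the lookup also falls back to the pseudo architecture "none" before accepting a purely virtual package. A small usage sketch (not part of the patch); "foo" is a placeholder name and Cache an already mapped pkgCache:

   #include <apt-pkg/pkgcache.h>
   #include <iostream>

   void ShowPreferred(pkgCache &Cache)
   {
      pkgCache::GrpIterator const Grp = Cache.FindGrp("foo");
      if (Grp.end() == true)
         return;
      // native architecture first, then the other known architectures,
      // then arch-less ("none") packages, virtual-only packages last
      pkgCache::PkgIterator const Pkg = Grp.FindPreferredPkg();
      if (Pkg.end() == false)
         std::cout << Pkg.FullName(true) << std::endl;
   }
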
@@ -686,8 +690,29 @@ void pkgCache::DepIterator::GlobOr(DepIterator &Start,DepIterator &End)
on virtual packages. */
bool pkgCache::DepIterator::IsIgnorable(PkgIterator const &Pkg) const
{
- if (ParentPkg() == TargetPkg())
- return IsNegative();
+ if (IsNegative() == false)
+ return false;
+
+ pkgCache::PkgIterator PP = ParentPkg();
+ pkgCache::PkgIterator PT = TargetPkg();
+ if (PP->Group != PT->Group)
+ return false;
+ // self-conflict
+ if (PP == PT)
+ return true;
+ pkgCache::VerIterator PV = ParentVer();
+ // ignore group-conflicts on an M-A:same package - but not our implicit dependencies
+ // so that we can have M-A:same packages conflicting with their own real name
+ if ((PV->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same)
+ {
+ // Replaces: ${self}:other ( << ${binary:Version})
+ if (S->Type == pkgCache::Dep::Replaces && S->CompareOp == pkgCache::Dep::Less && strcmp(PV.VerStr(), TargetVer()) == 0)
+ return false;
+ // Breaks: ${self}:other (!= ${binary:Version})
+ if (S->Type == pkgCache::Dep::DpkgBreaks && S->CompareOp == pkgCache::Dep::NotEquals && strcmp(PV.VerStr(), TargetVer()) == 0)
+ return false;
+ return true;
+ }
return false;
}
@@ -708,6 +733,21 @@ bool pkgCache::DepIterator::IsIgnorable(PrvIterator const &Prv) const
return false;
}
/*}}}*/
+// DepIterator::IsMultiArchImplicit - added by the cache generation /*{{{*/
+// ---------------------------------------------------------------------
+/* MultiArch can be translated to SingleArch for a resolver, and we did so
+ by adding dependencies to help the resolver understand the problem; but
+ sometimes these need to be identified so that they can be ignored… */
+bool pkgCache::DepIterator::IsMultiArchImplicit() const
+{
+ if (ParentPkg()->Arch != TargetPkg()->Arch &&
+ (S->Type == pkgCache::Dep::Replaces ||
+ S->Type == pkgCache::Dep::DpkgBreaks ||
+ S->Type == pkgCache::Dep::Conflicts))
+ return true;
+ return false;
+}
+ /*}}}*/
// ostream operator to handle string representation of a dependency /*{{{*/
// ---------------------------------------------------------------------
/* */
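
A hedged usage sketch for the new helper (not part of the patch): code walking a version's dependencies can skip the Conflicts/Breaks/Replaces entries the cache generator synthesised for MultiArch bookkeeping; Ver is assumed to be a valid VerIterator:

   for (pkgCache::DepIterator D = Ver.DependsList(); D.end() == false; ++D)
   {
      if (D.IsMultiArchImplicit() == true)
         continue;   // generated cross-architecture Conflicts/Breaks/Replaces
      // ... handle the dependency the package really declared ...
   }
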
@@ -946,3 +986,17 @@ pkgCache::DescIterator pkgCache::VerIterator::TranslatedDescription() const
};
/*}}}*/
+// PrvIterator::IsMultiArchImplicit - added by the cache generation /*{{{*/
+// ---------------------------------------------------------------------
+/* MultiArch can be translated to SingleArch for a resolver, and we did so
+ by adding provides to help the resolver understand the problem; but
+ sometimes these need to be identified so that they can be ignored… */
+bool pkgCache::PrvIterator::IsMultiArchImplicit() const
+{
+ pkgCache::PkgIterator const Owner = OwnerPkg();
+ pkgCache::PkgIterator const Parent = ParentPkg();
+ if (strcmp(Owner.Arch(), Parent.Arch()) != 0 || Owner->Name == Parent->Name)
+ return true;
+ return false;
+}
+ /*}}}*/
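
The provides counterpart can be filtered the same way (sketch only, not part of the patch); Pkg is assumed to be a valid PkgIterator:

   for (pkgCache::PrvIterator Prv = Pkg.ProvidesList(); Prv.end() == false; ++Prv)
   {
      if (Prv.IsMultiArchImplicit() == true)
         continue;   // provides added only to model MultiArch for the resolver
      // ... Prv was really declared in the Provides field ...
   }
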
diff --git a/apt-pkg/pkgcachegen.cc b/apt-pkg/pkgcachegen.cc
index ec072fddd..373f6625c 100644
--- a/apt-pkg/pkgcachegen.cc
+++ b/apt-pkg/pkgcachegen.cc
@@ -38,7 +38,7 @@
typedef std::vector<pkgIndexFile *>::iterator FileIterator;
template <typename Iter> std::vector<Iter*> pkgCacheGenerator::Dynamic<Iter>::toReMap;
-bool IsDuplicateDescription(pkgCache::DescIterator Desc,
+static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
MD5SumValue const &CurMd5, std::string const &CurLang);
using std::string;
@@ -69,7 +69,9 @@ pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) :
*Cache.HeaderP = pkgCache::Header();
map_ptrloc const idxVerSysName = WriteStringInMap(_system->VS->Label);
Cache.HeaderP->VerSysName = idxVerSysName;
- map_ptrloc const idxArchitecture = WriteStringInMap(_config->Find("APT::Architecture"));
+ // this pointer is set in ReMap, but we need it now for WriteUniqString
+ Cache.StringItemP = (pkgCache::StringItem *)Map.Data();
+ map_ptrloc const idxArchitecture = WriteUniqString(_config->Find("APT::Architecture"));
Cache.HeaderP->Architecture = idxArchitecture;
if (unlikely(idxVerSysName == 0 || idxArchitecture == 0))
return;
@@ -195,12 +197,27 @@ bool pkgCacheGenerator::MergeList(ListParser &List,
string const Version = List.Version();
if (Version.empty() == true && Arch.empty() == true)
{
+ // package descriptions
if (MergeListGroup(List, PackageName) == false)
return false;
+ continue;
}
if (Arch.empty() == true)
- Arch = _config->Find("APT::Architecture");
+ {
+ // use the pseudo arch 'none' for arch-less packages
+ Arch = "none";
+ /* We might be building a SingleArchCache here, which we don't want to blow up
+ into a proper MultiArchCache just for these :none packages, so just ensure
+ that we always have a native package structure first for SingleArch */
+ pkgCache::PkgIterator NP;
+ Dynamic<pkgCache::PkgIterator> DynPkg(NP);
+ if (NewPackage(NP, PackageName, _config->Find("APT::Architecture")) == false)
+ // TRANSLATOR: The first placeholder is a package name,
+ // the other two should be copied verbatim as they include debug info
+ return _error->Error(_("Error occurred while processing %s (%s%d)"),
+ PackageName.c_str(), "NewPackage", 0);
+ }
// Get a pointer to the package structure
pkgCache::PkgIterator Pkg;
@@ -286,7 +303,7 @@ bool pkgCacheGenerator::MergeListPackage(ListParser &List, pkgCache::PkgIterator
pkgCache::DescIterator Desc = Ver.DescriptionList();
// a version can only have one md5 describing it
- if (MD5SumValue(Desc.md5()) != CurMd5)
+ if (Desc.end() == true || MD5SumValue(Desc.md5()) != CurMd5)
continue;
// don't add a new description if we have one for the given
@@ -304,6 +321,9 @@ bool pkgCacheGenerator::MergeListPackage(ListParser &List, pkgCache::PkgIterator
void const * const oldMap = Map.Data();
map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, *LastDesc);
+ if (unlikely(descindex == 0 && _error->PendingError()))
+ return _error->Error(_("Error occurred while processing %s (%s%d)"),
+ Pkg.Name(), "NewDescription", 1);
if (oldMap != Map.Data())
LastDesc += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
*LastDesc = descindex;
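
The new error check sits inside the usual remap-safe pattern: any raw pointer into the mmap must be rebased if an allocation grew and moved the map. A condensed restatement of the hunk above (not part of the patch):

   void const * const oldMap = Map.Data();
   map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, *LastDesc);
   if (unlikely(descindex == 0 && _error->PendingError()))
      return false;                       // allocation failed, error already queued
   if (oldMap != Map.Data())              // the mmap moved: rebase the raw pointer
      LastDesc += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
   *LastDesc = descindex;
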
@@ -415,6 +435,43 @@ bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator
return _error->Error(_("Error occurred while processing %s (%s%d)"),
Pkg.Name(), "AddImplicitDepends", 1);
}
+ /* :none packages are packages without an architecture. They are forbidden by
+ debian-policy, so usually they will only show up in (old) dpkg status files -
+ and dpkg will complain about them - and they are pretty rare. We therefore
+ usually do not create conflicts while the parent is created, but only if a
+ :none package (= the target) appears. This creates incorrect dependencies on
+ :none for architecture-specific dependencies on the package we copy from, but
+ we will ignore this bug as architecture-specific dependencies are only allowed
+ from jessie onwards, and by then the :none packages should be extinct (hopefully).
+ In other words: this should work long enough to allow graceful removal of
+ these packages; it is not supposed to let users keep using them … */
+ if (strcmp(Pkg.Arch(), "none") == 0)
+ {
+ pkgCache::PkgIterator M = Grp.FindPreferredPkg();
+ if (M.end() == false && Pkg != M)
+ {
+ pkgCache::DepIterator D = M.RevDependsList();
+ Dynamic<pkgCache::DepIterator> DynD(D);
+ for (; D.end() == false; ++D)
+ {
+ if ((D->Type != pkgCache::Dep::Conflicts &&
+ D->Type != pkgCache::Dep::DpkgBreaks &&
+ D->Type != pkgCache::Dep::Replaces) ||
+ D.ParentPkg().Group() == Grp)
+ continue;
+
+ map_ptrloc *OldDepLast = NULL;
+ pkgCache::VerIterator ConVersion = D.ParentVer();
+ Dynamic<pkgCache::VerIterator> DynV(ConVersion);
+ // duplicate the Conflicts/Breaks/Replaces for :none arch
+ if (D->Version == 0)
+ NewDepends(Pkg, ConVersion, "", 0, D->Type, OldDepLast);
+ else
+ NewDepends(Pkg, ConVersion, D.TargetVer(),
+ D->CompareOp, D->Type, OldDepLast);
+ }
+ }
+ }
}
if (unlikely(AddImplicitDepends(Grp, Pkg, Ver) == false))
return _error->Error(_("Error occurred while processing %s (%s%d)"),
@@ -456,6 +513,9 @@ bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator
oldMap = Map.Data();
map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, *LastDesc);
+ if (unlikely(descindex == 0 && _error->PendingError()))
+ return _error->Error(_("Error occurred while processing %s (%s%d)"),
+ Pkg.Name(), "NewDescription", 2);
if (oldMap != Map.Data())
LastDesc += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
*LastDesc = descindex;
@@ -716,6 +776,7 @@ unsigned long pkgCacheGenerator::NewVersion(pkgCache::VerIterator &Ver,
// Fill it in
Ver = pkgCache::VerIterator(Cache,Cache.VerP + Version);
+ //Dynamic<pkgCache::VerIterator> DynV(Ver); // caller MergeListVersion already takes care of it
Ver->NextVer = Next;
Ver->ID = Cache.HeaderP->VersionCount++;
map_ptrloc const idxVerStr = WriteStringInMap(VerStr);
@@ -865,6 +926,9 @@ bool pkgCacheGenerator::ListParser::NewDepends(pkgCache::VerIterator &Ver,
// Locate the target package
pkgCache::PkgIterator Pkg = Grp.FindPkg(Arch);
+ // we don't create 'none' packages and their dependencies if we can avoid it …
+ if (Pkg.end() == true && Arch == "none" && strcmp(Ver.ParentPkg().Arch(), "none") != 0)
+ return true;
Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
if (Pkg.end() == true) {
if (unlikely(Owner->NewPackage(Pkg, PackageName, Arch) == false))
@@ -911,8 +975,12 @@ bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator &Ver,
Prv->Version = Ver.Index();
Prv->NextPkgProv = Ver->ProvidesList;
Ver->ProvidesList = Prv.Index();
- if (Version.empty() == false && unlikely((Prv->ProvideVersion = WriteString(Version)) == 0))
- return false;
+ if (Version.empty() == false) {
+ map_ptrloc const idxProvideVersion = WriteString(Version);
+ Prv->ProvideVersion = idxProvideVersion;
+ if (unlikely(idxProvideVersion == 0))
+ return false;
+ }
// Locate the target package
pkgCache::PkgIterator Pkg;
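
The temporary around WriteString() is not cosmetic: the call can grow and therefore remap the cache mmap, and an address like &Prv->ProvideVersion computed before the call would then point into the old mapping. A sketch of the safe ordering, mirroring the hunk above (not part of the patch):

   if (Version.empty() == false) {
      // 1. allocate first - this may remap the mmap and move everything
      map_ptrloc const idxProvideVersion = WriteString(Version);
      // 2. only now dereference the (re-resolved) iterator and store the offset
      Prv->ProvideVersion = idxProvideVersion;
      if (unlikely(idxProvideVersion == 0))
         return false;
   }
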
@@ -1310,10 +1378,11 @@ bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress
}
_error->RevertToStack();
}
- else if (Debug == true)
+ else
{
_error->MergeWithStack();
- std::clog << "Open filebased MMap" << std::endl;
+ if (Debug == true)
+ std::clog << "Open filebased MMap" << std::endl;
}
}
if (Writeable == false || CacheFile.empty() == true)
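
The logic error fixed here: _error->MergeWithStack() has to run whenever the file-based MMap was usable, not only when Debug is set, otherwise messages pushed onto the error stack earlier would be lost. A minimal sketch of the push/revert/merge pattern (not part of the patch); TryFileBasedMap() is a hypothetical stand-in and Debug is assumed as in the surrounding function:

   _error->PushToStack();                 // collect errors of the attempt separately
   bool const usable = TryFileBasedMap(); // hypothetical helper for this sketch
   if (usable == false)
      _error->RevertToStack();            // discard them and fall back silently
   else
   {
      _error->MergeWithStack();           // always keep warnings for the caller
      if (Debug == true)
         std::clog << "Open filebased MMap" << std::endl;
   }
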
@@ -1449,11 +1518,11 @@ bool pkgCacheGenerator::MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **O
}
/*}}}*/
// IsDuplicateDescription /*{{{*/
-bool IsDuplicateDescription(pkgCache::DescIterator Desc,
+static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
MD5SumValue const &CurMd5, std::string const &CurLang)
{
// Descriptions in the same link-list have all the same md5
- if (MD5SumValue(Desc.md5()) != CurMd5)
+ if (Desc.end() == true || MD5SumValue(Desc.md5()) != CurMd5)
return false;
for (; Desc.end() == false; ++Desc)
if (Desc.LanguageCode() == CurLang)
diff --git a/apt-pkg/policy.cc b/apt-pkg/policy.cc
index b47dab90c..4ae3b5f87 100644
--- a/apt-pkg/policy.cc
+++ b/apt-pkg/policy.cc
@@ -27,6 +27,7 @@
#include <apt-pkg/policy.h>
#include <apt-pkg/configuration.h>
+#include <apt-pkg/cachefilter.h>
#include <apt-pkg/tagfile.h>
#include <apt-pkg/strutl.h>
#include <apt-pkg/fileutl.h>
@@ -259,17 +260,33 @@ void pkgPolicy::CreatePin(pkgVersionMatch::MatchType Type,string Name,
}
// find the package (group) this pin applies to
- pkgCache::GrpIterator Grp;
- pkgCache::PkgIterator Pkg;
- if (Arch.empty() == false)
- Pkg = Cache->FindPkg(Name, Arch);
- else {
- Grp = Cache->FindGrp(Name);
- if (Grp.end() == false)
- Pkg = Grp.PackageList();
+ pkgCache::GrpIterator Grp = Cache->FindGrp(Name);
+ bool matched = false;
+ if (Grp.end() == false)
+ {
+ std::string MatchingArch;
+ if (Arch.empty() == true)
+ MatchingArch = Cache->NativeArch();
+ else
+ MatchingArch = Arch;
+ APT::CacheFilter::PackageArchitectureMatchesSpecification pams(MatchingArch);
+ for (pkgCache::PkgIterator Pkg = Grp.PackageList(); Pkg.end() != true; Pkg = Grp.NextPkg(Pkg))
+ {
+ if (pams(Pkg.Arch()) == false)
+ continue;
+ Pin *P = Pins + Pkg->ID;
+ // the first specific stanza for a package takes precedence,
+ // all others need to be ignored
+ if (P->Type != pkgVersionMatch::None)
+ P = &*Unmatched.insert(Unmatched.end(),PkgPin(Pkg.FullName()));
+ P->Type = Type;
+ P->Priority = Priority;
+ P->Data = Data;
+ matched = true;
+ }
}
- if (Pkg.end() == true)
+ if (matched == false)
{
PkgPin *P = &*Unmatched.insert(Unmatched.end(),PkgPin(Name));
if (Arch.empty() == false)
@@ -279,20 +296,6 @@ void pkgPolicy::CreatePin(pkgVersionMatch::MatchType Type,string Name,
P->Data = Data;
return;
}
-
- for (; Pkg.end() != true; Pkg = Grp.NextPkg(Pkg))
- {
- Pin *P = Pins + Pkg->ID;
- // the first specific stanza for a package is the ruler,
- // all others need to be ignored
- if (P->Type != pkgVersionMatch::None)
- P = &*Unmatched.insert(Unmatched.end(),PkgPin(Pkg.FullName()));
- P->Type = Type;
- P->Priority = Priority;
- P->Data = Data;
- if (Grp.end() == true)
- break;
- }
}
/*}}}*/
// Policy::GetMatch - Get the matching version for a package pin /*{{{*/
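
CreatePin now walks the whole group and checks each member's architecture against the pin, which lets concrete architectures as well as dpkg-style wildcards match. A small sketch of the filter in isolation (not part of the patch); the pattern and architecture strings are examples only:

   #include <apt-pkg/cachefilter.h>

   bool ArchHit()
   {
      // understands literal architectures and dpkg wildcards alike
      APT::CacheFilter::PackageArchitectureMatchesSpecification pams("linux-any");
      return pams("amd64");   // amd64 is a linux architecture, so this should match
   }
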
diff --git a/apt-pkg/srcrecords.h b/apt-pkg/srcrecords.h
index 06f0dce6c..ed69d0d72 100644
--- a/apt-pkg/srcrecords.h
+++ b/apt-pkg/srcrecords.h
@@ -71,6 +71,7 @@ class pkgSrcRecords
virtual std::string Section() const = 0;
virtual const char **Binaries() = 0; // Ownership does not transfer
+ //FIXME: Add a parameter to specify which architecture to use for [wildcard] matching
virtual bool BuildDepends(std::vector<BuildDepRec> &BuildDeps, bool const &ArchOnly, bool const &StripMultiArch = true) = 0;
static const char *BuildDepType(unsigned char const &Type);