Diffstat (limited to 'apt-pkg')
-rw-r--r--  apt-pkg/acquire-item.cc | 486
-rw-r--r--  apt-pkg/acquire-item.h | 101
-rw-r--r--  apt-pkg/acquire-method.cc | 247
-rw-r--r--  apt-pkg/acquire-method.h | 4
-rw-r--r--  apt-pkg/acquire-worker.cc | 22
-rw-r--r--  apt-pkg/algorithms.cc | 137
-rw-r--r--  apt-pkg/algorithms.h | 5
-rw-r--r--  apt-pkg/aptconfiguration.cc | 179
-rw-r--r--  apt-pkg/aptconfiguration.h | 29
-rw-r--r--  apt-pkg/cacheiterators.h | 27
-rw-r--r--  apt-pkg/cacheset.cc | 2
-rw-r--r--  apt-pkg/cacheset.h | 28
-rw-r--r--  apt-pkg/cdrom.cc | 132
-rw-r--r--  apt-pkg/cdrom.h | 10
-rw-r--r--  apt-pkg/contrib/cdromutl.cc | 41
-rw-r--r--  apt-pkg/contrib/cdromutl.h | 1
-rw-r--r--  apt-pkg/contrib/configuration.h | 2
-rw-r--r--  apt-pkg/contrib/error.cc | 166
-rw-r--r--  apt-pkg/contrib/error.h | 5
-rw-r--r--  apt-pkg/contrib/fileutl.cc | 71
-rw-r--r--  apt-pkg/contrib/fileutl.h | 2
-rw-r--r--  apt-pkg/contrib/hashes.cc | 9
-rw-r--r--  apt-pkg/contrib/hashes.h | 5
-rw-r--r--  apt-pkg/contrib/hashsum_template.h | 87
-rw-r--r--  apt-pkg/contrib/md5.cc | 57
-rw-r--r--  apt-pkg/contrib/md5.h | 23
-rw-r--r--  apt-pkg/contrib/mmap.cc | 12
-rw-r--r--  apt-pkg/contrib/mmap.h | 2
-rw-r--r--  apt-pkg/contrib/netrc.cc | 12
-rw-r--r--  apt-pkg/contrib/progress.cc | 29
-rw-r--r--  apt-pkg/contrib/progress.h | 3
-rw-r--r--  apt-pkg/contrib/sha1.cc | 65
-rw-r--r--  apt-pkg/contrib/sha1.h | 23
-rw-r--r--  apt-pkg/contrib/sha2.cc | 43
-rw-r--r--  apt-pkg/contrib/sha2.h | 114
-rw-r--r--  apt-pkg/contrib/sha256.cc | 424
-rw-r--r--  apt-pkg/contrib/sha256.h | 68
-rw-r--r--  apt-pkg/contrib/sha2_internal.cc | 1065
-rw-r--r--  apt-pkg/contrib/sha2_internal.h | 197
-rw-r--r--  apt-pkg/contrib/strutl.cc | 34
-rw-r--r--  apt-pkg/contrib/strutl.h | 2
-rw-r--r--  apt-pkg/contrib/weakptr.h | 2
-rw-r--r--  apt-pkg/deb/debindexfile.cc | 13
-rw-r--r--  apt-pkg/deb/debindexfile.h | 5
-rw-r--r--  apt-pkg/deb/deblistparser.cc | 238
-rw-r--r--  apt-pkg/deb/deblistparser.h | 10
-rw-r--r--  apt-pkg/deb/debmetaindex.cc | 112
-rw-r--r--  apt-pkg/deb/debmetaindex.h | 2
-rw-r--r--  apt-pkg/deb/debrecords.cc | 10
-rw-r--r--  apt-pkg/deb/debrecords.h | 1
-rw-r--r--  apt-pkg/deb/debsrcrecords.cc | 8
-rw-r--r--  apt-pkg/deb/dpkgpm.cc | 53
-rw-r--r--  apt-pkg/depcache.cc | 873
-rw-r--r--  apt-pkg/depcache.h | 48
-rw-r--r--  apt-pkg/edsp.cc | 564
-rw-r--r--  apt-pkg/edsp.h | 222
-rw-r--r--  apt-pkg/edsp/edspindexfile.cc | 78
-rw-r--r--  apt-pkg/edsp/edspindexfile.h | 25
-rw-r--r--  apt-pkg/edsp/edsplistparser.cc | 90
-rw-r--r--  apt-pkg/edsp/edsplistparser.h | 38
-rw-r--r--  apt-pkg/edsp/edspsystem.cc | 124
-rw-r--r--  apt-pkg/edsp/edspsystem.h | 38
-rw-r--r--  apt-pkg/indexcopy.cc | 13
-rw-r--r--  apt-pkg/indexrecords.cc | 17
-rw-r--r--  apt-pkg/init.cc | 13
-rw-r--r--  apt-pkg/makefile | 18
-rw-r--r--  apt-pkg/orderlist.cc | 22
-rw-r--r--  apt-pkg/packagemanager.cc | 77
-rw-r--r--  apt-pkg/packagemanager.h | 2
-rw-r--r--  apt-pkg/pkgcache.cc | 75
-rw-r--r--  apt-pkg/pkgcache.h | 23
-rw-r--r--  apt-pkg/pkgcachegen.cc | 42
-rw-r--r--  apt-pkg/pkgrecords.h | 1
-rw-r--r--  apt-pkg/policy.cc | 29
-rw-r--r--  apt-pkg/policy.h | 7
-rw-r--r--  apt-pkg/sourcelist.cc | 6
-rw-r--r--  apt-pkg/tagfile.cc | 11
-rw-r--r--  apt-pkg/tagfile.h | 2
-rw-r--r--  apt-pkg/vendor.h | 3
-rw-r--r--  apt-pkg/vendorlist.cc | 15
-rw-r--r--  apt-pkg/vendorlist.h | 3
81 files changed, 4779 insertions, 2125 deletions
diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc
index 2c4ce91a0..36abe0ac3 100644
--- a/apt-pkg/acquire-item.cc
+++ b/apt-pkg/acquire-item.cc
@@ -17,7 +17,6 @@
#include <apt-pkg/configuration.h>
#include <apt-pkg/aptconfiguration.h>
#include <apt-pkg/sourcelist.h>
-#include <apt-pkg/vendorlist.h>
#include <apt-pkg/error.h>
#include <apt-pkg/strutl.h>
#include <apt-pkg/fileutl.h>
@@ -184,6 +183,151 @@ void pkgAcquire::Item::ReportMirrorFailure(string FailCode)
}
}
/*}}}*/
+// AcqSubIndex::AcqSubIndex - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* Get the Index file first and see if there are languages available
+ * If so, create a pkgAcqIndexTrans for the found language(s).
+ */
+pkgAcqSubIndex::pkgAcqSubIndex(pkgAcquire *Owner, string const &URI,
+ string const &URIDesc, string const &ShortDesc,
+ HashString const &ExpectedHash)
+ : Item(Owner), ExpectedHash(ExpectedHash)
+{
+ Debug = _config->FindB("Debug::pkgAcquire::SubIndex",false);
+
+ DestFile = _config->FindDir("Dir::State::lists") + "partial/";
+ DestFile += URItoFileName(URI);
+
+ Desc.URI = URI;
+ Desc.Description = URIDesc;
+ Desc.Owner = this;
+ Desc.ShortDesc = ShortDesc;
+
+ QueueURI(Desc);
+
+ if(Debug)
+ std::clog << "pkgAcqSubIndex: " << Desc.URI << std::endl;
+}
+ /*}}}*/
+// AcqSubIndex::Custom600Headers - Insert custom request headers /*{{{*/
+// ---------------------------------------------------------------------
+/* The only header we use is the last-modified header. */
+string pkgAcqSubIndex::Custom600Headers()
+{
+ string Final = _config->FindDir("Dir::State::lists");
+ Final += URItoFileName(Desc.URI);
+
+ struct stat Buf;
+ if (stat(Final.c_str(),&Buf) != 0)
+ return "\nIndex-File: true\nFail-Ignore: true\n";
+ return "\nIndex-File: true\nFail-Ignore: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+}
+ /*}}}*/
+void pkgAcqSubIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/
+{
+ if(Debug)
+ std::clog << "pkgAcqSubIndex failed: " << Desc.URI << std::endl;
+
+ Complete = false;
+ Status = StatDone;
+ Dequeue();
+
+ // No good Index is provided, so try guessing
+ std::vector<std::string> langs = APT::Configuration::getLanguages(true);
+ for (std::vector<std::string>::const_iterator l = langs.begin();
+ l != langs.end(); ++l)
+ {
+ if (*l == "none") continue;
+ string const file = "Translation-" + *l;
+ new pkgAcqIndexTrans(Owner, Desc.URI.substr(0, Desc.URI.rfind('/')+1).append(file),
+ Desc.Description.erase(Desc.Description.rfind(' ')+1).append(file),
+ file);
+ }
+}
+ /*}}}*/
+void pkgAcqSubIndex::Done(string Message,unsigned long Size,string Md5Hash, /*{{{*/
+ pkgAcquire::MethodConfig *Cnf)
+{
+ if(Debug)
+ std::clog << "pkgAcqSubIndex::Done(): " << Desc.URI << std::endl;
+
+ string FileName = LookupTag(Message,"Filename");
+ if (FileName.empty() == true)
+ {
+ Status = StatError;
+ ErrorText = "Method gave a blank filename";
+ return;
+ }
+
+ if (FileName != DestFile)
+ {
+ Local = true;
+ Desc.URI = "copy:" + FileName;
+ QueueURI(Desc);
+ return;
+ }
+
+ Item::Done(Message,Size,Md5Hash,Cnf);
+
+ string FinalFile = _config->FindDir("Dir::State::lists")+URItoFileName(Desc.URI);
+
+ // success in downloading the index
+ // rename the index
+ if(Debug)
+ std::clog << "Renaming: " << DestFile << " -> " << FinalFile << std::endl;
+ Rename(DestFile,FinalFile);
+ chmod(FinalFile.c_str(),0644);
+ DestFile = FinalFile;
+
+ if(ParseIndex(DestFile) == false)
+ return Failed("", NULL);
+
+ Complete = true;
+ Status = StatDone;
+ Dequeue();
+ return;
+}
+ /*}}}*/
+bool pkgAcqSubIndex::ParseIndex(string const &IndexFile) /*{{{*/
+{
+ indexRecords SubIndexParser;
+ if (FileExists(IndexFile) == false || SubIndexParser.Load(IndexFile) == false)
+ return false;
+
+ std::vector<std::string> lang = APT::Configuration::getLanguages(true);
+ for (std::vector<std::string>::const_iterator l = lang.begin();
+ l != lang.end(); ++l)
+ {
+ if (*l == "none")
+ continue;
+
+ string file = "Translation-" + *l;
+ indexRecords::checkSum const *Record = SubIndexParser.Lookup(file);
+ HashString expected;
+ if (Record == NULL)
+ {
+ // FIXME: the Index file provided by debian currently only includes bz2 records
+ Record = SubIndexParser.Lookup(file + ".bz2");
+ if (Record == NULL)
+ continue;
+ }
+ else
+ {
+ expected = Record->Hash;
+ if (expected.empty() == true)
+ continue;
+ }
+
+ IndexTarget target;
+ target.Description = Desc.Description.erase(Desc.Description.rfind(' ')+1).append(file);
+ target.MetaKey = file;
+ target.ShortDesc = file;
+ target.URI = Desc.URI.substr(0, Desc.URI.rfind('/')+1).append(file);
+ new pkgAcqIndexTrans(Owner, &target, expected, &SubIndexParser);
+ }
+ return true;
+}
+ /*}}}*/
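For illustration, a minimal self-contained sketch of the URI derivation used above: both pkgAcqSubIndex::Failed() (when guessing) and ParseIndex() (when records are found) build the per-language Translation URIs from the URI of the fetched Index file. The repository URI and the language list below are invented; in the real code they come from Desc.URI and APT::Configuration::getLanguages(true).

    // sketch only - mirrors the substr/rfind logic of pkgAcqSubIndex
    #include <iostream>
    #include <string>
    #include <vector>

    int main() {
       std::string const indexURI = "http://ftp.example.org/debian/dists/sid/main/i18n/Index"; // hypothetical
       std::vector<std::string> langs;                    // stands in for getLanguages(true)
       langs.push_back("de");
       langs.push_back("en");
       std::string const base = indexURI.substr(0, indexURI.rfind('/') + 1);
       for (std::vector<std::string>::const_iterator l = langs.begin(); l != langs.end(); ++l)
          std::cout << base + "Translation-" + *l << std::endl;   // e.g. .../i18n/Translation-de
       return 0;
    }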
// AcqDiffIndex::AcqDiffIndex - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* Get the DiffIndex file first and see if there are patches available
@@ -570,24 +714,7 @@ void pkgAcqIndexDiffs::Done(string Message,unsigned long Size,string Md5Hash, /*
FinalFile = _config->FindDir("Dir::State::lists")+URItoFileName(RealURI);
// success in downloading a diff, enter ApplyDiff state
- if(State == StateFetchDiff)
- {
-
- if(Debug)
- std::clog << "Sending to gzip method: " << FinalFile << std::endl;
-
- string FileName = LookupTag(Message,"Filename");
- State = StateUnzipDiff;
- Local = true;
- Desc.URI = "gzip:" + FileName;
- DestFile += ".decomp";
- QueueURI(Desc);
- Mode = "gzip";
- return;
- }
-
- // sucess in downloading a diff, enter ApplyDiff state
- if(State == StateUnzipDiff)
+ if(State == StateFetchDiff)
{
// rred expects the patch as $FinalFile.ed
@@ -639,29 +766,61 @@ pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner,
HashString ExpectedHash, string comprExt)
: Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash)
{
+ if(comprExt.empty() == true)
+ {
+ // autoselect the compression method
+ std::vector<std::string> types = APT::Configuration::getCompressionTypes();
+ for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t)
+ comprExt.append(*t).append(" ");
+ if (comprExt.empty() == false)
+ comprExt.erase(comprExt.end()-1);
+ }
+ CompressionExtension = comprExt;
+
+ Init(URI, URIDesc, ShortDesc);
+}
+pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner, IndexTarget const *Target,
+ HashString const &ExpectedHash, indexRecords const *MetaIndexParser)
+ : Item(Owner), RealURI(Target->URI), ExpectedHash(ExpectedHash)
+{
+ // autoselect the compression method
+ std::vector<std::string> types = APT::Configuration::getCompressionTypes();
+ CompressionExtension = "";
+ if (ExpectedHash.empty() == false)
+ {
+ for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t)
+ if (*t == "uncompressed" || MetaIndexParser->Exists(string(Target->MetaKey).append(".").append(*t)) == true)
+ CompressionExtension.append(*t).append(" ");
+ }
+ else
+ {
+ for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t)
+ CompressionExtension.append(*t).append(" ");
+ }
+ if (CompressionExtension.empty() == false)
+ CompressionExtension.erase(CompressionExtension.end()-1);
+
+ Init(Target->URI, Target->Description, Target->ShortDesc);
+}
+ /*}}}*/
+// AcqIndex::Init - deferred Constructor /*{{{*/
+void pkgAcqIndex::Init(string const &URI, string const &URIDesc, string const &ShortDesc) {
Decompression = false;
Erase = false;
DestFile = _config->FindDir("Dir::State::lists") + "partial/";
DestFile += URItoFileName(URI);
- if(comprExt.empty())
- {
- // autoselect the compression method
- std::vector<std::string> types = APT::Configuration::getCompressionTypes();
- if (types.empty() == true)
- comprExt = "plain";
- else
- comprExt = "." + types[0];
- }
- CompressionExtension = ((comprExt == "plain" || comprExt == ".") ? "" : comprExt);
-
- Desc.URI = URI + CompressionExtension;
+ std::string const comprExt = CompressionExtension.substr(0, CompressionExtension.find(' '));
+ if (comprExt == "uncompressed")
+ Desc.URI = URI;
+ else
+ Desc.URI = URI + '.' + comprExt;
Desc.Description = URIDesc;
Desc.Owner = this;
Desc.ShortDesc = ShortDesc;
-
+
QueueURI(Desc);
}
/*}}}*/
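The string stored in CompressionExtension is a space-separated preference list; Init() always uses its first entry to build Desc.URI, and pkgAcqIndex::Failed() (further down) drops that entry and calls Init() again until "uncompressed" is left. A small standalone sketch of that walk, with an assumed URI and an assumed order of "bz2 gz uncompressed":

    // sketch only - walks the extension list the way Failed() retries downloads
    #include <iostream>
    #include <string>

    int main() {
       std::string const RealURI = "http://ftp.example.org/debian/dists/sid/main/binary-amd64/Packages"; // hypothetical
       std::string exts = "bz2 gz uncompressed";          // assumed preference order
       while (true) {
          std::string const first = exts.substr(0, exts.find(' '));
          std::cout << (first == "uncompressed" ? RealURI : RealURI + '.' + first) << std::endl;
          size_t const next = exts.find(' ');
          if (next == std::string::npos)
             break;                                       // nothing left to fall back to
          exts = exts.substr(next + 1);                    // what Failed() keeps for the retry
       }
       return 0;
    }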
@@ -675,45 +834,32 @@ string pkgAcqIndex::Custom600Headers()
if (_config->FindB("Acquire::GzipIndexes",false))
Final += ".gz";
+ string msg = "\nIndex-File: true";
+ // FIXME: this really should use "IndexTarget::IsOptional()" but that
+ // seems to be difficult without breaking ABI
+ if (ShortDesc().find("Translation") != 0)
+ msg += "\nFail-Ignore: true";
struct stat Buf;
- if (stat(Final.c_str(),&Buf) != 0)
- return "\nIndex-File: true";
- return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+ if (stat(Final.c_str(),&Buf) == 0)
+ msg += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+
+ return msg;
}
/*}}}*/
void pkgAcqIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/
{
- std::vector<std::string> types = APT::Configuration::getCompressionTypes();
-
- for (std::vector<std::string>::const_iterator t = types.begin();
- t != types.end(); t++)
+ size_t const nextExt = CompressionExtension.find(' ');
+ if (nextExt != std::string::npos)
{
- // jump over all already tried compression types
- const unsigned int nameLen = Desc.URI.size() - (*t).size();
- if(Desc.URI.substr(nameLen) != *t)
- continue;
-
- // we want to try it with the next extension (and make sure to
- // not skip over the end)
- t++;
- if (t == types.end())
- break;
-
- // queue new download
- Desc.URI = Desc.URI.substr(0, nameLen) + *t;
- new pkgAcqIndex(Owner, RealURI, Desc.Description, Desc.ShortDesc,
- ExpectedHash, string(".").append(*t));
-
- Status = StatDone;
- Complete = false;
- Dequeue();
+ CompressionExtension = CompressionExtension.substr(nextExt+1);
+ Init(RealURI, Desc.Description, Desc.ShortDesc);
return;
}
// on decompression failure, remove bad versions in partial/
- if(Decompression && Erase) {
+ if (Decompression && Erase) {
string s = _config->FindDir("Dir::State::lists") + "partial/";
- s += URItoFileName(RealURI);
+ s.append(URItoFileName(RealURI));
unlink(s.c_str());
}
@@ -790,8 +936,8 @@ void pkgAcqIndex::Done(string Message,unsigned long Size,string Hash,
Status = StatError;
ErrorText = "Method gave a blank filename";
}
-
- string compExt = flExtension(flNotDir(URI(Desc.URI).Path));
+
+ std::string const compExt = CompressionExtension.substr(0, CompressionExtension.find(' '));
// The files timestamp matches
if (StringToBool(LookupTag(Message,"IMS-Hit"),false) == true) {
@@ -824,12 +970,7 @@ void pkgAcqIndex::Done(string Message,unsigned long Size,string Hash,
// get the binary name for your used compression type
decompProg = _config->Find(string("Acquire::CompressionTypes::").append(compExt),"");
if(decompProg.empty() == false);
- // flExtensions returns the full name if no extension is found
- // this is why we have this complicated compare operation here
- // FIMXE: add a new flJustExtension() that return "" if no
- // extension is found and use that above so that it can
- // be tested against ""
- else if(compExt == flNotDir(URI(Desc.URI).Path))
+ else if(compExt == "uncompressed")
decompProg = "copy";
else {
_error->Error("Unsupported extension: %s", compExt.c_str());
@@ -851,6 +992,11 @@ pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire *Owner,
: pkgAcqIndex(Owner, URI, URIDesc, ShortDesc, HashString(), "")
{
}
+pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire *Owner, IndexTarget const *Target,
+ HashString const &ExpectedHash, indexRecords const *MetaIndexParser)
+ : pkgAcqIndex(Owner, Target, ExpectedHash, MetaIndexParser)
+{
+}
/*}}}*/
// AcqIndexTrans::Custom600Headers - Insert custom request headers /*{{{*/
// ---------------------------------------------------------------------
@@ -861,8 +1007,8 @@ string pkgAcqIndexTrans::Custom600Headers()
struct stat Buf;
if (stat(Final.c_str(),&Buf) != 0)
- return "\nFail-Ignore: true";
- return "\nFail-Ignore: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+ return "\nFail-Ignore: true\nIndex-File: true";
+ return "\nFail-Ignore: true\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
}
/*}}}*/
// AcqIndexTrans::Failed - Silence failure messages for missing files /*{{{*/
@@ -870,6 +1016,15 @@ string pkgAcqIndexTrans::Custom600Headers()
/* */
void pkgAcqIndexTrans::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
{
+ size_t const nextExt = CompressionExtension.find(' ');
+ if (nextExt != std::string::npos)
+ {
+ CompressionExtension = CompressionExtension.substr(nextExt+1);
+ Init(RealURI, Desc.Description, Desc.ShortDesc);
+ Status = StatIdle;
+ return;
+ }
+
if (Cnf->LocalOnly == true ||
StringToBool(LookupTag(Message,"Transient-Failure"),false) == false)
{
@@ -879,7 +1034,7 @@ void pkgAcqIndexTrans::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
Dequeue();
return;
}
-
+
Item::Failed(Message,Cnf);
}
/*}}}*/
@@ -1094,6 +1249,8 @@ void pkgAcqMetaIndex::Done(string Message,unsigned long Size,string Hash, /*{{{*
{
string FinalFile = _config->FindDir("Dir::State::lists");
FinalFile += URItoFileName(RealURI);
+ if (SigFile == DestFile)
+ SigFile = FinalFile;
Rename(DestFile,FinalFile);
chmod(FinalFile.c_str(),0644);
DestFile = FinalFile;
@@ -1127,6 +1284,8 @@ void pkgAcqMetaIndex::RetrievalDone(string Message) /*{{{*/
{
string FinalFile = _config->FindDir("Dir::State::lists");
FinalFile += URItoFileName(RealURI);
+ if (SigFile == DestFile)
+ SigFile = FinalFile;
DestFile = FinalFile;
}
Complete = true;
@@ -1158,6 +1317,10 @@ void pkgAcqMetaIndex::AuthDone(string Message) /*{{{*/
// Download further indexes with verification
QueueIndexes(true);
+ // is it a clearsigned MetaIndex file?
+ if (DestFile == SigFile)
+ return;
+
// Done, move signature file into position
string VerifiedSigFile = _config->FindDir("Dir::State::lists") +
URItoFileName(RealURI) + ".gpg";
@@ -1174,27 +1337,41 @@ void pkgAcqMetaIndex::QueueIndexes(bool verify) /*{{{*/
HashString ExpectedIndexHash;
if (verify)
{
- const indexRecords::checkSum *Record = MetaIndexParser->Lookup((*Target)->MetaKey);
- if (!Record)
- {
- Status = StatAuthError;
- ErrorText = "Unable to find expected entry "
- + (*Target)->MetaKey + " in Meta-index file (malformed Release file?)";
- return;
- }
- ExpectedIndexHash = Record->Hash;
- if (_config->FindB("Debug::pkgAcquire::Auth", false))
- {
- std::cerr << "Queueing: " << (*Target)->URI << std::endl;
- std::cerr << "Expected Hash: " << ExpectedIndexHash.toStr() << std::endl;
- }
- if (ExpectedIndexHash.empty())
- {
- Status = StatAuthError;
- ErrorText = "Unable to find hash sum for "
- + (*Target)->MetaKey + " in Meta-index file";
- return;
- }
+ const indexRecords::checkSum *Record = MetaIndexParser->Lookup((*Target)->MetaKey);
+ if (Record == NULL)
+ {
+ if ((*Target)->IsOptional() == false)
+ {
+ Status = StatAuthError;
+ strprintf(ErrorText, _("Unable to find expected entry '%s' in Release file (Wrong sources.list entry or malformed file)"), (*Target)->MetaKey.c_str());
+ return;
+ }
+ }
+ else
+ {
+ ExpectedIndexHash = Record->Hash;
+ if (_config->FindB("Debug::pkgAcquire::Auth", false))
+ {
+ std::cerr << "Queueing: " << (*Target)->URI << std::endl;
+ std::cerr << "Expected Hash: " << ExpectedIndexHash.toStr() << std::endl;
+ }
+ if (ExpectedIndexHash.empty() == true && (*Target)->IsOptional() == false)
+ {
+ Status = StatAuthError;
+ strprintf(ErrorText, _("Unable to find hash sum for '%s' in Release file"), (*Target)->MetaKey.c_str());
+ return;
+ }
+ }
+ }
+
+ if ((*Target)->IsOptional() == true)
+ {
+ if ((*Target)->IsSubIndex() == true)
+ new pkgAcqSubIndex(Owner, (*Target)->URI, (*Target)->Description,
+ (*Target)->ShortDesc, ExpectedIndexHash);
+ else
+ new pkgAcqIndexTrans(Owner, *Target, ExpectedIndexHash, MetaIndexParser);
+ continue;
}
/* Queue Packages file (either diff or full packages files, depending
@@ -1206,36 +1383,12 @@ void pkgAcqMetaIndex::QueueIndexes(bool verify) /*{{{*/
new pkgAcqDiffIndex(Owner, (*Target)->URI, (*Target)->Description,
(*Target)->ShortDesc, ExpectedIndexHash);
else
- new pkgAcqIndex(Owner, (*Target)->URI, (*Target)->Description,
- (*Target)->ShortDesc, ExpectedIndexHash);
+ new pkgAcqIndex(Owner, *Target, ExpectedIndexHash, MetaIndexParser);
}
}
/*}}}*/
bool pkgAcqMetaIndex::VerifyVendor(string Message) /*{{{*/
{
-// // Maybe this should be made available from above so we don't have
-// // to read and parse it every time?
-// pkgVendorList List;
-// List.ReadMainList();
-
-// const Vendor* Vndr = NULL;
-// for (std::vector<string>::const_iterator I = GPGVOutput.begin(); I != GPGVOutput.end(); I++)
-// {
-// string::size_type pos = (*I).find("VALIDSIG ");
-// if (_config->FindB("Debug::Vendor", false))
-// std::cerr << "Looking for VALIDSIG in \"" << (*I) << "\": pos " << pos
-// << std::endl;
-// if (pos != std::string::npos)
-// {
-// string Fingerprint = (*I).substr(pos+sizeof("VALIDSIG"));
-// if (_config->FindB("Debug::Vendor", false))
-// std::cerr << "Looking for \"" << Fingerprint << "\" in vendor..." <<
-// std::endl;
-// Vndr = List.FindVendor(Fingerprint) != "";
-// if (Vndr != NULL);
-// break;
-// }
-// }
string::size_type pos;
// check for missing sigs (that where not fatal because otherwise we had
@@ -1317,13 +1470,20 @@ void pkgAcqMetaIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
if (AuthPass == true)
{
// gpgv method failed, if we have a good signature
- string LastGoodSigFile = _config->FindDir("Dir::State::lists") +
- "partial/" + URItoFileName(RealURI) + ".gpg.reverify";
+ string LastGoodSigFile = _config->FindDir("Dir::State::lists");
+ if (DestFile == SigFile)
+ LastGoodSigFile.append(URItoFileName(RealURI));
+ else
+ LastGoodSigFile.append("partial/").append(URItoFileName(RealURI)).append(".gpg.reverify");
+
if(FileExists(LastGoodSigFile))
{
- string VerifiedSigFile = _config->FindDir("Dir::State::lists") +
- URItoFileName(RealURI) + ".gpg";
- Rename(LastGoodSigFile,VerifiedSigFile);
+ if (DestFile != SigFile)
+ {
+ string VerifiedSigFile = _config->FindDir("Dir::State::lists") +
+ URItoFileName(RealURI) + ".gpg";
+ Rename(LastGoodSigFile,VerifiedSigFile);
+ }
Status = StatTransientNetworkError;
_error->Warning(_("An error occurred during the signature "
"verification. The repository is not updated "
@@ -1342,11 +1502,75 @@ void pkgAcqMetaIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
ReportMirrorFailure("GPGFailure");
}
+ /* Always move the meta index, even if gpgv failed. This ensures
+ * that PackageFile objects are correctly filled in */
+ if (FileExists(DestFile)) {
+ string FinalFile = _config->FindDir("Dir::State::lists");
+ FinalFile += URItoFileName(RealURI);
+ /* InRelease files become Release files, otherwise
+ * they would be considered as trusted later on */
+ if (SigFile == DestFile) {
+ RealURI = RealURI.replace(RealURI.rfind("InRelease"), 9,
+ "Release");
+ FinalFile = FinalFile.replace(FinalFile.rfind("InRelease"), 9,
+ "Release");
+ SigFile = FinalFile;
+ }
+ Rename(DestFile,FinalFile);
+ chmod(FinalFile.c_str(),0644);
+
+ DestFile = FinalFile;
+ }
+
// No Release file was present, or verification failed, so fall
// back to queueing Packages files without verification
QueueIndexes(false);
}
/*}}}*/
+pkgAcqMetaClearSig::pkgAcqMetaClearSig(pkgAcquire *Owner, /*{{{*/
+ string const &URI, string const &URIDesc, string const &ShortDesc,
+ string const &MetaIndexURI, string const &MetaIndexURIDesc, string const &MetaIndexShortDesc,
+ string const &MetaSigURI, string const &MetaSigURIDesc, string const &MetaSigShortDesc,
+ const vector<struct IndexTarget*>* IndexTargets,
+ indexRecords* MetaIndexParser) :
+ pkgAcqMetaIndex(Owner, URI, URIDesc, ShortDesc, "", IndexTargets, MetaIndexParser),
+ MetaIndexURI(MetaIndexURI), MetaIndexURIDesc(MetaIndexURIDesc), MetaIndexShortDesc(MetaIndexShortDesc),
+ MetaSigURI(MetaSigURI), MetaSigURIDesc(MetaSigURIDesc), MetaSigShortDesc(MetaSigShortDesc)
+{
+ SigFile = DestFile;
+}
+ /*}}}*/
+// pkgAcqMetaClearSig::Custom600Headers - Insert custom request headers /*{{{*/
+// ---------------------------------------------------------------------
+// FIXME: this can go away once the InRelease file is used widely
+string pkgAcqMetaClearSig::Custom600Headers()
+{
+ string Final = _config->FindDir("Dir::State::lists");
+ Final += URItoFileName(RealURI);
+
+ struct stat Buf;
+ if (stat(Final.c_str(),&Buf) != 0)
+ return "\nIndex-File: true\nFail-Ignore: true\n";
+
+ return "\nIndex-File: true\nFail-Ignore: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+}
+ /*}}}*/
+void pkgAcqMetaClearSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/
+{
+ if (AuthPass == false)
+ {
+ new pkgAcqMetaSig(Owner,
+ MetaSigURI, MetaSigURIDesc, MetaSigShortDesc,
+ MetaIndexURI, MetaIndexURIDesc, MetaIndexShortDesc,
+ IndexTargets, MetaIndexParser);
+ if (Cnf->LocalOnly == true ||
+ StringToBool(LookupTag(Message, "Transient-Failure"), false) == false)
+ Dequeue();
+ }
+ else
+ pkgAcqMetaIndex::Failed(Message, Cnf);
+}
+ /*}}}*/
// AcqArchive::AcqArchive - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* This just sets up the initial fetch environment and queues the first
@@ -1421,7 +1645,7 @@ pkgAcqArchive::pkgAcqArchive(pkgAcquire *Owner,pkgSourceList *Sources,
// Select a source
if (QueueNext() == false && _error->PendingError() == false)
- _error->Error(_("I wasn't able to locate file for the %s package. "
+ _error->Error(_("I wasn't able to locate a file for the %s package. "
"This might mean you need to manually fix this package."),
Version.ParentPkg().Name());
}
@@ -1458,6 +1682,8 @@ bool pkgAcqArchive::QueueNext()
string PkgFile = Parse.FileName();
if (ForceHash.empty() == false)
{
+ if(stringcasecmp(ForceHash, "sha512") == 0)
+ ExpectedHash = HashString("SHA512", Parse.SHA512Hash());
if(stringcasecmp(ForceHash, "sha256") == 0)
ExpectedHash = HashString("SHA256", Parse.SHA256Hash());
else if (stringcasecmp(ForceHash, "sha1") == 0)
@@ -1468,7 +1694,9 @@ bool pkgAcqArchive::QueueNext()
else
{
string Hash;
- if ((Hash = Parse.SHA256Hash()).empty() == false)
+ if ((Hash = Parse.SHA512Hash()).empty() == false)
+ ExpectedHash = HashString("SHA512", Hash);
+ else if ((Hash = Parse.SHA256Hash()).empty() == false)
ExpectedHash = HashString("SHA256", Hash);
else if ((Hash = Parse.SHA1Hash()).empty() == false)
ExpectedHash = HashString("SHA1", Hash);
@@ -1793,3 +2021,13 @@ string pkgAcqFile::Custom600Headers()
return "";
}
/*}}}*/
+bool IndexTarget::IsOptional() const {
+ if (strncmp(ShortDesc.c_str(), "Translation", 11) != 0)
+ return false;
+ return true;
+}
+bool IndexTarget::IsSubIndex() const {
+ if (ShortDesc != "TranslationIndex")
+ return false;
+ return true;
+}
diff --git a/apt-pkg/acquire-item.h b/apt-pkg/acquire-item.h
index 943c61876..f763577ee 100644
--- a/apt-pkg/acquire-item.h
+++ b/apt-pkg/acquire-item.h
@@ -287,6 +287,50 @@ struct DiffInfo {
unsigned long size;
};
/*}}}*/
+/** \brief An item that is responsible for fetching a SubIndex {{{
+ *
+ * The MetaIndex file includes only records for the important indexes
+ * and records for these SubIndex files, so that the SubIndexes can
+ * carry records for additional files like PDiffs and Translations.
+ */
+class pkgAcqSubIndex : public pkgAcquire::Item
+{
+ protected:
+ /** \brief If \b true, debugging information will be written to std::clog. */
+ bool Debug;
+
+ /** \brief The item that is currently being downloaded. */
+ pkgAcquire::ItemDesc Desc;
+
+ /** \brief The Hash that this file should have after download
+ */
+ HashString ExpectedHash;
+
+ public:
+ // Specialized action members
+ virtual void Failed(string Message,pkgAcquire::MethodConfig *Cnf);
+ virtual void Done(string Message,unsigned long Size,string Md5Hash,
+ pkgAcquire::MethodConfig *Cnf);
+ virtual string DescURI() {return Desc.URI;};
+ virtual string Custom600Headers();
+ virtual bool ParseIndex(string const &IndexFile);
+
+ /** \brief Create a new pkgAcqSubIndex.
+ *
+ * \param Owner The Acquire object that owns this item.
+ *
+ * \param URI The URI of the list file to download.
+ *
+ * \param URIDesc A long description of the list file to download.
+ *
+ * \param ShortDesc A short description of the list file to download.
+ *
+ * \param ExpectedHash The list file's expected hashsum.
+ */
+ pkgAcqSubIndex(pkgAcquire *Owner, string const &URI,string const &URIDesc,
+ string const &ShortDesc, HashString const &ExpectedHash);
+};
+ /*}}}*/
/** \brief An item that is responsible for fetching an index file of {{{
* package list diffs and starting the package list's download.
*
@@ -449,7 +493,7 @@ class pkgAcqIndexDiffs : public pkgAcquire::Item
StateFetchDiff,
/** \brief The diff is currently being uncompressed. */
- StateUnzipDiff,
+ StateUnzipDiff, // FIXME: No longer used
/** \brief The diff is currently being applied. */
StateApplyDiff
@@ -528,8 +572,8 @@ class pkgAcqIndex : public pkgAcquire::Item
/** \brief The expected hashsum of the decompressed index file. */
HashString ExpectedHash;
- /** \brief The compression-related file extension that is being
- * added to the downloaded file (e.g., ".gz" or ".bz2").
+ /** \brief The compression-related file extensions that are being
+ * added to the downloaded file one by one if the first fails (e.g., "gz bz2").
*/
string CompressionExtension;
@@ -540,7 +584,7 @@ class pkgAcqIndex : public pkgAcquire::Item
virtual void Done(string Message,unsigned long Size,string Md5Hash,
pkgAcquire::MethodConfig *Cnf);
virtual string Custom600Headers();
- virtual string DescURI() {return RealURI + CompressionExtension;};
+ virtual string DescURI() {return Desc.URI;};
virtual string HashSum() {return ExpectedHash.toStr(); };
/** \brief Create a pkgAcqIndex.
@@ -565,6 +609,9 @@ class pkgAcqIndex : public pkgAcquire::Item
pkgAcqIndex(pkgAcquire *Owner,string URI,string URIDesc,
string ShortDesc, HashString ExpectedHash,
string compressExt="");
+ pkgAcqIndex(pkgAcquire *Owner, struct IndexTarget const * const Target,
+ HashString const &ExpectedHash, indexRecords const *MetaIndexParser);
+ void Init(string const &URI, string const &URIDesc, string const &ShortDesc);
};
/*}}}*/
/** \brief An acquire item that is responsible for fetching a {{{
@@ -594,6 +641,8 @@ class pkgAcqIndexTrans : public pkgAcqIndex
*/
pkgAcqIndexTrans(pkgAcquire *Owner,string URI,string URIDesc,
string ShortDesc);
+ pkgAcqIndexTrans(pkgAcquire *Owner, struct IndexTarget const * const Target,
+ HashString const &ExpectedHash, indexRecords const *MetaIndexParser);
};
/*}}}*/
/** \brief Information about an index file. */ /*{{{*/
@@ -612,8 +661,18 @@ struct IndexTarget
* looked up within the meta signature file.
*/
string MetaKey;
+
+ //FIXME: We should use virtual methods here instead…
+ bool IsOptional() const;
+ bool IsSubIndex() const;
};
/*}}}*/
+/** \brief Information about an optional index file. */ /*{{{*/
+struct OptionalIndexTarget : public IndexTarget
+{
+};
+ /*}}}*/
+
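The FIXME above notes that virtual methods would be the cleaner design; as implemented, IsOptional() and IsSubIndex() inspect ShortDesc (see their definitions at the end of acquire-item.cc). Purely as a sketch of the alternative the FIXME hints at, not part of this patch (names invented):

    // hypothetical shape of the virtual-method variant alluded to by the FIXME
    struct IndexTargetSketch
    {
       virtual bool IsOptional() const { return false; }
       virtual bool IsSubIndex() const { return false; }
       virtual ~IndexTargetSketch() {}
    };
    struct OptionalIndexTargetSketch : public IndexTargetSketch
    {
       virtual bool IsOptional() const { return true; }
    };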
/** \brief An acquire item that downloads the detached signature {{{
* of a meta-index (Release) file, then queues up the release
* file itself.
@@ -772,6 +831,40 @@ class pkgAcqMetaIndex : public pkgAcquire::Item
indexRecords* MetaIndexParser);
};
/*}}}*/
+/** \brief An item responsible for downloading clearsigned metaindexes {{{*/
+class pkgAcqMetaClearSig : public pkgAcqMetaIndex
+{
+ /** \brief The URI of the meta-index file for the detached signature */
+ string MetaIndexURI;
+
+ /** \brief A "URI-style" description of the meta-index file */
+ string MetaIndexURIDesc;
+
+ /** \brief A brief description of the meta-index file */
+ string MetaIndexShortDesc;
+
+ /** \brief The URI of the detached meta-signature file if the clearsigned one failed. */
+ string MetaSigURI;
+
+ /** \brief A "URI-style" description of the meta-signature file */
+ string MetaSigURIDesc;
+
+ /** \brief A brief description of the meta-signature file */
+ string MetaSigShortDesc;
+
+public:
+ void Failed(string Message,pkgAcquire::MethodConfig *Cnf);
+ virtual string Custom600Headers();
+
+ /** \brief Create a new pkgAcqMetaClearSig. */
+ pkgAcqMetaClearSig(pkgAcquire *Owner,
+ string const &URI, string const &URIDesc, string const &ShortDesc,
+ string const &MetaIndexURI, string const &MetaIndexURIDesc, string const &MetaIndexShortDesc,
+ string const &MetaSigURI, string const &MetaSigURIDesc, string const &MetaSigShortDesc,
+ const vector<struct IndexTarget*>* IndexTargets,
+ indexRecords* MetaIndexParser);
+};
+ /*}}}*/
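For orientation, a hedged sketch of how a caller could queue the clearsigned meta-index with the detached Release/Release.gpg pair as fallback. The URIs are invented and the real call site (in the metaindex code) is not part of this diff; the fragment only mirrors the constructor signature declared above.

    // illustrative fragment only
    new pkgAcqMetaClearSig(Owner,
          "http://ftp.example.org/debian/dists/sid/InRelease",   "Example InRelease",   "InRelease",
          "http://ftp.example.org/debian/dists/sid/Release",     "Example Release",     "Release",
          "http://ftp.example.org/debian/dists/sid/Release.gpg", "Example Release.gpg", "Release.gpg",
          IndexTargets, MetaIndexParser);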
/** \brief An item that is responsible for fetching a package file. {{{
*
* If the package file already exists in the cache, nothing will be
diff --git a/apt-pkg/acquire-method.cc b/apt-pkg/acquire-method.cc
index 17d52cf51..8c353beb2 100644
--- a/apt-pkg/acquire-method.cc
+++ b/apt-pkg/acquire-method.cc
@@ -23,9 +23,7 @@
#include <apt-pkg/hashes.h>
#include <iostream>
-#include <stdarg.h>
#include <stdio.h>
-#include <unistd.h>
#include <sys/signal.h>
/*}}}*/
@@ -36,32 +34,28 @@ using namespace std;
/* This constructs the initialization text */
pkgAcqMethod::pkgAcqMethod(const char *Ver,unsigned long Flags)
{
- char S[300] = "";
- char *End = S;
- strcat(End,"100 Capabilities\n");
- sprintf(End+strlen(End),"Version: %s\n",Ver);
+ std::cout << "100 Capabilities\n"
+ << "Version: " << Ver << "\n";
if ((Flags & SingleInstance) == SingleInstance)
- strcat(End,"Single-Instance: true\n");
-
+ std::cout << "Single-Instance: true\n";
+
if ((Flags & Pipeline) == Pipeline)
- strcat(End,"Pipeline: true\n");
-
+ std::cout << "Pipeline: true\n";
+
if ((Flags & SendConfig) == SendConfig)
- strcat(End,"Send-Config: true\n");
+ std::cout << "Send-Config: true\n";
if ((Flags & LocalOnly) == LocalOnly)
- strcat(End,"Local-Only: true\n");
+ std::cout <<"Local-Only: true\n";
if ((Flags & NeedsCleanup) == NeedsCleanup)
- strcat(End,"Needs-Cleanup: true\n");
+ std::cout << "Needs-Cleanup: true\n";
if ((Flags & Removable) == Removable)
- strcat(End,"Removable: true\n");
- strcat(End,"\n");
+ std::cout << "Removable: true\n";
- if (write(STDOUT_FILENO,S,strlen(S)) != (signed)strlen(S))
- exit(100);
+ std::cout << "\n" << std::flush;
SetNonBlock(STDIN_FILENO,true);
@@ -94,13 +88,11 @@ void pkgAcqMethod::Fail(string Err,bool Transient)
if (*I == '\n')
*I = ' ';
}
-
- char S[1024];
- char *End = S;
+
if (Queue != 0)
{
- End += snprintf(S,sizeof(S)-50,"400 URI Failure\nURI: %s\n"
- "Message: %s %s\n",Queue->Uri.c_str(), Err.c_str(), IP.c_str());
+ std::cout << "400 URI Failure\nURI: " << Queue->Uri << "\n"
+ << "Message: " << Err << " " << IP << "\n";
// Dequeue
FetchItem *Tmp = Queue;
Queue = Queue->Next;
@@ -109,22 +101,17 @@ void pkgAcqMethod::Fail(string Err,bool Transient)
QueueBack = Queue;
}
else
- {
- End += snprintf(S,sizeof(S)-50,"400 URI Failure\nURI: <UNKNOWN>\n"
- "Message: %s\n",Err.c_str());
- }
+ std::cout << "400 URI Failure\nURI: <UNKNOWN>\nMessage: " << Err << "\n";
+
if(FailReason.empty() == false)
- End += snprintf(End,sizeof(S)-50 - (End - S),"FailReason: %s\n",FailReason.c_str());
+ std::cout << "FailReason: " << FailReason << "\n";
if (UsedMirror.empty() == false)
- End += snprintf(End,sizeof(S)-50 - (End - S),"UsedMirror: %s\n",UsedMirror.c_str());
- // Set the transient flag
+ std::cout << "UsedMirror: " << UsedMirror << "\n";
+ // Set the transient flag
if (Transient == true)
- strcat(S,"Transient-Failure: true\n\n");
- else
- strcat(S,"\n");
-
- if (write(STDOUT_FILENO,S,strlen(S)) != (signed)strlen(S))
- exit(100);
+ std::cout << "Transient-Failure: true\n";
+
+ std::cout << "\n" << std::flush;
}
/*}}}*/
// AcqMethod::URIStart - Indicate a download is starting /*{{{*/
@@ -134,25 +121,22 @@ void pkgAcqMethod::URIStart(FetchResult &Res)
{
if (Queue == 0)
abort();
-
- char S[1024] = "";
- char *End = S;
-
- End += snprintf(S,sizeof(S),"200 URI Start\nURI: %s\n",Queue->Uri.c_str());
+
+ std::cout << "200 URI Start\n"
+ << "URI: " << Queue->Uri << "\n";
if (Res.Size != 0)
- End += snprintf(End,sizeof(S)-4 - (End - S),"Size: %lu\n",Res.Size);
-
+ std::cout << "Size: " << Res.Size << "\n";
+
if (Res.LastModified != 0)
- End += snprintf(End,sizeof(S)-4 - (End - S),"Last-Modified: %s\n",
- TimeRFC1123(Res.LastModified).c_str());
-
+ std::cout << "Last-Modified: " << TimeRFC1123(Res.LastModified) << "\n";
+
if (Res.ResumePoint != 0)
- End += snprintf(End,sizeof(S)-4 - (End - S),"Resume-Point: %lu\n",
- Res.ResumePoint);
-
- strcat(End,"\n");
- if (write(STDOUT_FILENO,S,strlen(S)) != (signed)strlen(S))
- exit(100);
+ std::cout << "Resume-Point: " << Res.ResumePoint << "\n";
+
+ if (UsedMirror.empty() == false)
+ std::cout << "UsedMirror: " << UsedMirror << "\n";
+
+ std::cout << "\n" << std::flush;
}
/*}}}*/
// AcqMethod::URIDone - A URI is finished /*{{{*/
@@ -162,76 +146,69 @@ void pkgAcqMethod::URIDone(FetchResult &Res, FetchResult *Alt)
{
if (Queue == 0)
abort();
-
- char S[1024] = "";
- char *End = S;
-
- End += snprintf(S,sizeof(S),"201 URI Done\nURI: %s\n",Queue->Uri.c_str());
+
+ std::cout << "201 URI Done\n"
+ << "URI: " << Queue->Uri << "\n";
if (Res.Filename.empty() == false)
- End += snprintf(End,sizeof(S)-50 - (End - S),"Filename: %s\n",Res.Filename.c_str());
-
+ std::cout << "Filename: " << Res.Filename << "\n";
+
if (Res.Size != 0)
- End += snprintf(End,sizeof(S)-50 - (End - S),"Size: %lu\n",Res.Size);
-
+ std::cout << "Size: " << Res.Size << "\n";
+
if (Res.LastModified != 0)
- End += snprintf(End,sizeof(S)-50 - (End - S),"Last-Modified: %s\n",
- TimeRFC1123(Res.LastModified).c_str());
+ std::cout << "Last-Modified: " << TimeRFC1123(Res.LastModified) << "\n";
if (Res.MD5Sum.empty() == false)
- {
- End += snprintf(End,sizeof(S)-50 - (End - S),"MD5-Hash: %s\n",Res.MD5Sum.c_str());
- End += snprintf(End,sizeof(S)-50 - (End - S),"MD5Sum-Hash: %s\n",Res.MD5Sum.c_str());
- }
+ std::cout << "MD5-Hash: " << Res.MD5Sum << "\n"
+ << "MD5Sum-Hash: " << Res.MD5Sum << "\n";
if (Res.SHA1Sum.empty() == false)
- End += snprintf(End,sizeof(S)-50 - (End - S),"SHA1-Hash: %s\n",Res.SHA1Sum.c_str());
+ std::cout << "SHA1-Hash: " << Res.SHA1Sum << "\n";
if (Res.SHA256Sum.empty() == false)
- End += snprintf(End,sizeof(S)-50 - (End - S),"SHA256-Hash: %s\n",Res.SHA256Sum.c_str());
+ std::cout << "SHA256-Hash: " << Res.SHA256Sum << "\n";
+ if (Res.SHA512Sum.empty() == false)
+ std::cout << "SHA512-Hash: " << Res.SHA512Sum << "\n";
if (UsedMirror.empty() == false)
- End += snprintf(End,sizeof(S)-50 - (End - S),"UsedMirror: %s\n",UsedMirror.c_str());
- if (Res.GPGVOutput.size() > 0)
- End += snprintf(End,sizeof(S)-50 - (End - S),"GPGVOutput:\n");
- for (vector<string>::iterator I = Res.GPGVOutput.begin();
- I != Res.GPGVOutput.end(); I++)
- End += snprintf(End,sizeof(S)-50 - (End - S), " %s\n", (*I).c_str());
+ std::cout << "UsedMirror: " << UsedMirror << "\n";
+ if (Res.GPGVOutput.empty() == false)
+ {
+ std::cout << "GPGVOutput:\n";
+ for (vector<string>::const_iterator I = Res.GPGVOutput.begin();
+ I != Res.GPGVOutput.end(); ++I)
+ std::cout << " " << *I << "\n";
+ }
if (Res.ResumePoint != 0)
- End += snprintf(End,sizeof(S)-50 - (End - S),"Resume-Point: %lu\n",
- Res.ResumePoint);
+ std::cout << "Resume-Point: " << Res.ResumePoint << "\n";
if (Res.IMSHit == true)
- strcat(End,"IMS-Hit: true\n");
- End = S + strlen(S);
-
+ std::cout << "IMS-Hit: true\n";
+
if (Alt != 0)
{
if (Alt->Filename.empty() == false)
- End += snprintf(End,sizeof(S)-50 - (End - S),"Alt-Filename: %s\n",Alt->Filename.c_str());
-
+ std::cout << "Alt-Filename: " << Alt->Filename << "\n";
+
if (Alt->Size != 0)
- End += snprintf(End,sizeof(S)-50 - (End - S),"Alt-Size: %lu\n",Alt->Size);
-
+ std::cout << "Alt-Size: " << Alt->Size << "\n";
+
if (Alt->LastModified != 0)
- End += snprintf(End,sizeof(S)-50 - (End - S),"Alt-Last-Modified: %s\n",
- TimeRFC1123(Alt->LastModified).c_str());
-
+ std::cout << "Alt-Last-Modified: " << TimeRFC1123(Alt->LastModified) << "\n";
+
if (Alt->MD5Sum.empty() == false)
- End += snprintf(End,sizeof(S)-50 - (End - S),"Alt-MD5-Hash: %s\n",
- Alt->MD5Sum.c_str());
+ std::cout << "Alt-MD5-Hash: " << Alt->MD5Sum << "\n";
if (Alt->SHA1Sum.empty() == false)
- End += snprintf(End,sizeof(S)-50 - (End - S),"Alt-SHA1-Hash: %s\n",
- Alt->SHA1Sum.c_str());
+ std::cout << "Alt-SHA1-Hash: " << Alt->SHA1Sum << "\n";
if (Alt->SHA256Sum.empty() == false)
- End += snprintf(End,sizeof(S)-50 - (End - S),"Alt-SHA256-Hash: %s\n",
- Alt->SHA256Sum.c_str());
-
+ std::cout << "Alt-SHA256-Hash: " << Alt->SHA256Sum << "\n";
+ if (Alt->SHA512Sum.empty() == false)
+ std::cout << "Alt-SHA512-Hash: " << Alt->SHA512Sum << "\n";
+
if (Alt->IMSHit == true)
- strcat(End,"Alt-IMS-Hit: true\n");
+ std::cout << "Alt-IMS-Hit: true\n";
}
-
- strcat(End,"\n");
- if (write(STDOUT_FILENO,S,strlen(S)) != (signed)strlen(S))
- exit(100);
+
+ std::cout << "\n" << std::flush;
// Dequeue
FetchItem *Tmp = Queue;
@@ -247,13 +224,10 @@ void pkgAcqMethod::URIDone(FetchResult &Res, FetchResult *Alt)
to be ackd */
bool pkgAcqMethod::MediaFail(string Required,string Drive)
{
- char S[1024];
- snprintf(S,sizeof(S),"403 Media Failure\nMedia: %s\nDrive: %s\n\n",
+ fprintf(stdout, "403 Media Failure\nMedia: %s\nDrive: %s\n",
Required.c_str(),Drive.c_str());
+ std::cout << "\n" << std::flush;
- if (write(STDOUT_FILENO,S,strlen(S)) != (signed)strlen(S))
- exit(100);
-
vector<string> MyMessages;
/* Here we read messages until we find a 603, each non 603 message is
@@ -404,28 +378,34 @@ int pkgAcqMethod::Run(bool Single)
return 0;
}
/*}}}*/
-// AcqMethod::Log - Send a log message /*{{{*/
+// AcqMethod::PrintStatus - private helper that actually sends a log/status message /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgAcqMethod::Log(const char *Format,...)
+void pkgAcqMethod::PrintStatus(char const * const header, const char* Format,
+ va_list &args) const
{
string CurrentURI = "<UNKNOWN>";
if (Queue != 0)
CurrentURI = Queue->Uri;
-
+ if (UsedMirror.empty() == true)
+ fprintf(stdout, "%s\nURI: %s\nMessage: ",
+ header, CurrentURI.c_str());
+ else
+ fprintf(stdout, "%s\nURI: %s\nUsedMirror: %s\nMessage: ",
+ header, CurrentURI.c_str(), UsedMirror.c_str());
+ vfprintf(stdout,Format,args);
+ std::cout << "\n\n" << std::flush;
+}
+ /*}}}*/
+// AcqMethod::Log - Send a log message /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+void pkgAcqMethod::Log(const char *Format,...)
+{
va_list args;
va_start(args,Format);
-
- // sprintf the description
- char S[1024];
- unsigned int Len = snprintf(S,sizeof(S)-4,"101 Log\nURI: %s\n"
- "Message: ",CurrentURI.c_str());
-
- vsnprintf(S+Len,sizeof(S)-4-Len,Format,args);
- strcat(S,"\n\n");
-
- if (write(STDOUT_FILENO,S,strlen(S)) != (signed)strlen(S))
- exit(100);
+ PrintStatus("101 Log", Format, args);
+ va_end(args);
}
/*}}}*/
// AcqMethod::Status - Send a status message /*{{{*/
@@ -433,23 +413,10 @@ void pkgAcqMethod::Log(const char *Format,...)
/* */
void pkgAcqMethod::Status(const char *Format,...)
{
- string CurrentURI = "<UNKNOWN>";
- if (Queue != 0)
- CurrentURI = Queue->Uri;
-
va_list args;
va_start(args,Format);
-
- // sprintf the description
- char S[1024];
- unsigned int Len = snprintf(S,sizeof(S)-4,"102 Status\nURI: %s\n"
- "Message: ",CurrentURI.c_str());
-
- vsnprintf(S+Len,sizeof(S)-4-Len,Format,args);
- strcat(S,"\n\n");
-
- if (write(STDOUT_FILENO,S,strlen(S)) != (signed)strlen(S))
- exit(100);
+ PrintStatus("102 Status", Format, args);
+ va_end(args);
}
/*}}}*/
// AcqMethod::Redirect - Send a redirect message /*{{{*/
@@ -458,16 +425,13 @@ void pkgAcqMethod::Status(const char *Format,...)
to keep the pipeline synchronized. */
void pkgAcqMethod::Redirect(const string &NewURI)
{
- string CurrentURI = "<UNKNOWN>";
+ std::cout << "103 Redirect\nURI: ";
if (Queue != 0)
- CurrentURI = Queue->Uri;
-
- char S[1024];
- snprintf(S, sizeof(S)-50, "103 Redirect\nURI: %s\nNew-URI: %s\n\n",
- CurrentURI.c_str(), NewURI.c_str());
-
- if (write(STDOUT_FILENO,S,strlen(S)) != (ssize_t)strlen(S))
- exit(100);
+ std::cout << Queue->Uri << "\n";
+ else
+ std::cout << "<UNKNOWN>\n";
+ std::cout << "New-URI: " << NewURI << "\n"
+ << "\n" << std::flush;
// Change the URI for the request.
Queue->Uri = NewURI;
@@ -500,5 +464,6 @@ void pkgAcqMethod::FetchResult::TakeHashes(Hashes &Hash)
MD5Sum = Hash.MD5.Result();
SHA1Sum = Hash.SHA1.Result();
SHA256Sum = Hash.SHA256.Result();
+ SHA512Sum = Hash.SHA512.Result();
}
/*}}}*/
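The rewrite above only changes how the messages are emitted (std::cout instead of fixed-size buffers and write()); the wire format of the acquire method protocol stays the same. For illustration, a 201 URI Done block as produced by URIDone() could look like the following (all values invented; fields are only printed when set, and SHA512-Hash is the newly added one). The block is terminated by the empty line that the final std::cout << "\n" << std::flush writes.

    201 URI Done
    URI: http://ftp.example.org/debian/dists/sid/Release
    Filename: /var/lib/apt/lists/partial/ftp.example.org_debian_dists_sid_Release
    Size: 192
    Last-Modified: Thu, 06 Jan 2011 12:00:00 GMT
    SHA256-Hash: <hex digest>
    SHA512-Hash: <hex digest>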
diff --git a/apt-pkg/acquire-method.h b/apt-pkg/acquire-method.h
index 03851e823..2d0fa4590 100644
--- a/apt-pkg/acquire-method.h
+++ b/apt-pkg/acquire-method.h
@@ -23,6 +23,7 @@
#include <apt-pkg/configuration.h>
#include <apt-pkg/strutl.h>
+#include <stdarg.h>
class Hashes;
class pkgAcqMethod
@@ -45,6 +46,7 @@ class pkgAcqMethod
string MD5Sum;
string SHA1Sum;
string SHA256Sum;
+ string SHA512Sum;
vector<string> GPGVOutput;
time_t LastModified;
bool IMSHit;
@@ -78,6 +80,8 @@ class pkgAcqMethod
bool MediaFail(string Required,string Drive);
virtual void Exit() {};
+ void PrintStatus(char const * const header, const char* Format, va_list &args) const;
+
public:
enum CnfFlags {SingleInstance = (1<<0),
Pipeline = (1<<1), SendConfig = (1<<2),
diff --git a/apt-pkg/acquire-worker.cc b/apt-pkg/acquire-worker.cc
index 4f0b52af9..75e03232a 100644
--- a/apt-pkg/acquire-worker.cc
+++ b/apt-pkg/acquire-worker.cc
@@ -199,6 +199,17 @@ bool pkgAcquire::Worker::RunMessages()
pkgAcquire::Queue::QItem *Itm = 0;
if (URI.empty() == false)
Itm = OwnerQ->FindItem(URI,this);
+
+ // update used mirror
+ string UsedMirror = LookupTag(Message,"UsedMirror", "");
+ if (!UsedMirror.empty() &&
+ Itm &&
+ Itm->Description.find(" ") != string::npos)
+ {
+ Itm->Description.replace(0, Itm->Description.find(" "), UsedMirror);
+ // FIXME: will we need this as well?
+ //Itm->ShortDesc = UsedMirror;
+ }
// Determine the message number and dispatch
switch (Number)
@@ -276,10 +287,10 @@ bool pkgAcquire::Worker::RunMessages()
Log->Pulse(Owner->GetOwner());
OwnerQ->ItemDone(Itm);
- if (TotalSize != 0 &&
- (unsigned)atoi(LookupTag(Message,"Size","0").c_str()) != TotalSize)
- _error->Warning("Bizarre Error - File size is not what the server reported %s %lu",
- LookupTag(Message,"Size","0").c_str(),TotalSize);
+ unsigned long const ServerSize = atol(LookupTag(Message,"Size","0").c_str());
+ if (TotalSize != 0 && ServerSize != TotalSize)
+ _error->Warning("Size of file %s is not what the server reported %s %lu",
+ Owner->DestFile.c_str(), LookupTag(Message,"Size","0").c_str(),TotalSize);
// see if there is a hash to verify
string RecivedHash;
@@ -298,8 +309,7 @@ bool pkgAcquire::Worker::RunMessages()
<< endl << endl;
}
}
- Owner->Done(Message,atoi(LookupTag(Message,"Size","0").c_str()),
- RecivedHash.c_str(), Config);
+ Owner->Done(Message, ServerSize, RecivedHash.c_str(), Config);
ItemDone();
// Log that we are done
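The UsedMirror handling added above rewrites the visible item description: everything before the first space is replaced by the mirror that actually served the request. For example (strings invented), a description of "http://ftp.example.org/debian sid/main amd64 Packages" reported with UsedMirror "mirror.example.net" is shown as "mirror.example.net sid/main amd64 Packages" in the progress output.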
diff --git a/apt-pkg/algorithms.cc b/apt-pkg/algorithms.cc
index 0fbce3c2a..47bdd4aba 100644
--- a/apt-pkg/algorithms.cc
+++ b/apt-pkg/algorithms.cc
@@ -20,12 +20,15 @@
#include <apt-pkg/version.h>
#include <apt-pkg/sptr.h>
#include <apt-pkg/acquire-item.h>
-
+#include <apt-pkg/edsp.h>
+
#include <apti18n.h>
#include <sys/types.h>
#include <cstdlib>
#include <algorithm>
#include <iostream>
+
+#include <stdio.h>
/*}}}*/
using namespace std;
@@ -90,21 +93,6 @@ bool pkgSimulate::Install(PkgIterator iPkg,string /*File*/)
Describe(Pkg,cout,true,true);
Sim.MarkInstall(Pkg,false);
- if (strcmp(Pkg.Arch(),"all") == 0)
- {
- pkgCache::GrpIterator G = Pkg.Group();
- pkgCache::GrpIterator iG = iPkg.Group();
- for (pkgCache::PkgIterator P = G.FindPkg("any"); P.end() != true; P = G.NextPkg(P))
- {
- if (strcmp(P.Arch(), "all") == 0)
- continue;
- if (iG.FindPkg(P.Arch())->CurrentVer == 0)
- continue;
- Flags[P->ID] = 1;
- Sim.MarkInstall(P, false);
- }
- }
-
// Look for broken conflicts+predepends.
for (PkgIterator I = Sim.PkgBegin(); I.end() == false; I++)
{
@@ -116,9 +104,7 @@ bool pkgSimulate::Install(PkgIterator iPkg,string /*File*/)
DepIterator Start;
DepIterator End;
D.GlobOr(Start,End);
- if (Start->Type == pkgCache::Dep::Conflicts ||
- Start->Type == pkgCache::Dep::DpkgBreaks ||
- Start->Type == pkgCache::Dep::Obsoletes ||
+ if (Start.IsNegative() == true ||
End->Type == pkgCache::Dep::PreDepends)
{
if ((Sim[End] & pkgDepCache::DepGInstall) == 0)
@@ -150,19 +136,6 @@ bool pkgSimulate::Configure(PkgIterator iPkg)
Flags[Pkg->ID] = 2;
- if (strcmp(Pkg.Arch(),"all") == 0)
- {
- pkgCache::GrpIterator G = Pkg.Group();
- for (pkgCache::PkgIterator P = G.FindPkg("any"); P.end() != true; P = G.NextPkg(P))
- {
- if (strcmp(P.Arch(), "all") == 0)
- continue;
- if (Flags[P->ID] == 1)
- Flags[P->ID] = 2;
- }
- }
-
-// Sim.MarkInstall(Pkg,false);
if (Sim[Pkg].InstBroken() == true)
{
cout << "Conf " << Pkg.FullName(false) << " broken" << endl;
@@ -214,21 +187,6 @@ bool pkgSimulate::Remove(PkgIterator iPkg,bool Purge)
Flags[Pkg->ID] = 3;
Sim.MarkDelete(Pkg);
- if (strcmp(Pkg.Arch(),"all") == 0)
- {
- pkgCache::GrpIterator G = Pkg.Group();
- pkgCache::GrpIterator iG = iPkg.Group();
- for (pkgCache::PkgIterator P = G.FindPkg("any"); P.end() != true; P = G.NextPkg(P))
- {
- if (strcmp(P.Arch(), "all") == 0)
- continue;
- if (iG.FindPkg(P.Arch())->CurrentVer == 0)
- continue;
- Flags[P->ID] = 3;
- Sim.MarkDelete(P);
- }
- }
-
if (Purge == true)
cout << "Purg ";
else
@@ -372,6 +330,12 @@ bool pkgFixBroken(pkgDepCache &Cache)
*/
bool pkgDistUpgrade(pkgDepCache &Cache)
{
+ std::string const solver = _config->Find("APT::Solver", "internal");
+ if (solver != "internal") {
+ OpTextProgress Prog(*_config);
+ return EDSP::ResolveExternal(solver.c_str(), Cache, false, true, false, &Prog);
+ }
+
pkgDepCache::ActionGroup group(Cache);
/* Upgrade all installed packages first without autoinst to help the resolver
@@ -424,6 +388,12 @@ bool pkgDistUpgrade(pkgDepCache &Cache)
to install packages not marked for install */
bool pkgAllUpgrade(pkgDepCache &Cache)
{
+ std::string const solver = _config->Find("APT::Solver", "internal");
+ if (solver != "internal") {
+ OpTextProgress Prog(*_config);
+ return EDSP::ResolveExternal(solver.c_str(), Cache, true, false, false, &Prog);
+ }
+
pkgDepCache::ActionGroup group(Cache);
pkgProblemResolver Fix(&Cache);
@@ -690,12 +660,10 @@ bool pkgProblemResolver::DoUpgrade(pkgCache::PkgIterator Pkg)
// Compute a single dependency element (glob or)
pkgCache::DepIterator Start = D;
pkgCache::DepIterator End = D;
- unsigned char State = 0;
for (bool LastOR = true; D.end() == false && LastOR == true;)
{
- State |= Cache[D];
LastOR = (D->CompareOp & pkgCache::Dep::Or) == pkgCache::Dep::Or;
- D++;
+ ++D;
if (LastOR == true)
End = D;
}
@@ -740,9 +708,7 @@ bool pkgProblemResolver::DoUpgrade(pkgCache::PkgIterator Pkg)
{
/* We let the algorithm deal with conflicts on its next iteration,
it is much smarter than us */
- if (Start->Type == pkgCache::Dep::Conflicts ||
- Start->Type == pkgCache::Dep::DpkgBreaks ||
- Start->Type == pkgCache::Dep::Obsoletes)
+ if (Start.IsNegative() == true)
break;
if (Debug == true)
@@ -774,7 +740,20 @@ bool pkgProblemResolver::DoUpgrade(pkgCache::PkgIterator Pkg)
return true;
}
/*}}}*/
-// ProblemResolver::Resolve - Run the resolution pass /*{{{*/
+// ProblemResolver::Resolve - calls a resolver to fix the situation /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool pkgProblemResolver::Resolve(bool BrokenFix)
+{
+ std::string const solver = _config->Find("APT::Solver", "internal");
+ if (solver != "internal") {
+ OpTextProgress Prog(*_config);
+ return EDSP::ResolveExternal(solver.c_str(), Cache, false, false, false, &Prog);
+ }
+ return ResolveInternal(BrokenFix);
+}
+ /*}}}*/
+// ProblemResolver::ResolveInternal - Run the resolution pass /*{{{*/
// ---------------------------------------------------------------------
/* This routines works by calculating a score for each package. The score
is derived by considering the package's priority and all reverse
@@ -788,12 +767,10 @@ bool pkgProblemResolver::DoUpgrade(pkgCache::PkgIterator Pkg)
The BrokenFix flag enables a mode where the algorithm tries to
upgrade packages to avoid problems. */
-bool pkgProblemResolver::Resolve(bool BrokenFix)
+bool pkgProblemResolver::ResolveInternal(bool const BrokenFix)
{
pkgDepCache::ActionGroup group(Cache);
- unsigned long Size = Cache.Head().PackageCount;
-
// Record which packages are marked for install
bool Again = false;
do
@@ -823,7 +800,9 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
clog << "Starting" << endl;
MakeScores();
-
+
+ unsigned long const Size = Cache.Head().PackageCount;
+
/* We have to order the packages so that the broken fixing pass
operates from highest score to lowest. This prevents problems when
high score packages cause the removal of lower score packages that
@@ -971,9 +950,7 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
if a package has a dep on another package that can't be found */
SPtrArray<pkgCache::Version *> VList = Start.AllTargets();
if (*VList == 0 && (Flags[I->ID] & Protected) != Protected &&
- Start->Type != pkgCache::Dep::Conflicts &&
- Start->Type != pkgCache::Dep::DpkgBreaks &&
- Start->Type != pkgCache::Dep::Obsoletes &&
+ Start.IsNegative() == false &&
Cache[I].NowBroken() == false)
{
if (InOr == true)
@@ -998,10 +975,7 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
at is not the currently selected version of the
package, which means it is not necessary to
remove/keep */
- if (Cache[Pkg].InstallVer != Ver &&
- (Start->Type == pkgCache::Dep::Conflicts ||
- Start->Type == pkgCache::Dep::DpkgBreaks ||
- Start->Type == pkgCache::Dep::Obsoletes))
+ if (Cache[Pkg].InstallVer != Ver && Start.IsNegative() == true)
{
if (Debug)
clog << " Conflicts//Breaks against version "
@@ -1019,9 +993,7 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
fiddle with the VList package */
if (Scores[I->ID] <= Scores[Pkg->ID] ||
((Cache[Start] & pkgDepCache::DepNow) == 0 &&
- End->Type != pkgCache::Dep::Conflicts &&
- End->Type != pkgCache::Dep::DpkgBreaks &&
- End->Type != pkgCache::Dep::Obsoletes))
+ End.IsNegative() == false))
{
// Try a little harder to fix protected packages..
if ((Flags[I->ID] & Protected) == Protected)
@@ -1128,10 +1100,8 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
}
// Hm, nothing can possibly satisfy this dep. Nuke it.
- if (VList[0] == 0 &&
- Start->Type != pkgCache::Dep::Conflicts &&
- Start->Type != pkgCache::Dep::DpkgBreaks &&
- Start->Type != pkgCache::Dep::Obsoletes &&
+ if (VList[0] == 0 &&
+ Start.IsNegative() == false &&
(Flags[I->ID] & Protected) != Protected)
{
bool Installed = Cache[I].Install();
@@ -1177,9 +1147,7 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
Change = true;
if ((Cache[J->Dep] & pkgDepCache::DepGNow) == 0)
{
- if (J->Dep->Type == pkgCache::Dep::Conflicts ||
- J->Dep->Type == pkgCache::Dep::DpkgBreaks ||
- J->Dep->Type == pkgCache::Dep::Obsoletes)
+ if (J->Dep.IsNegative() == true)
{
if (Debug == true)
clog << " Fixing " << I.FullName(false) << " via remove of " << J->Pkg.FullName(false) << endl;
@@ -1243,6 +1211,21 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
system was non-broken previously. */
bool pkgProblemResolver::ResolveByKeep()
{
+ std::string const solver = _config->Find("APT::Solver", "internal");
+ if (solver != "internal") {
+ OpTextProgress Prog(*_config);
+ return EDSP::ResolveExternal(solver.c_str(), Cache, true, false, false, &Prog);
+ }
+ return ResolveByKeepInternal();
+}
+ /*}}}*/
+// ProblemResolver::ResolveByKeepInternal - Resolve problems using keep /*{{{*/
+// ---------------------------------------------------------------------
+/* This is the workhorse of the soft upgrade routine. It is very gentle
+ in that it does not install or remove any packages. It is assumed that the
+ system was non-broken previously. */
+bool pkgProblemResolver::ResolveByKeepInternal()
+{
pkgDepCache::ActionGroup group(Cache);
unsigned long Size = Cache.Head().PackageCount;
@@ -1493,9 +1476,9 @@ bool ListUpdate(pkgAcquireStatus &Stat,
}
if (TransientNetworkFailure == true)
- _error->Warning(_("Some index files failed to download, they have been ignored, or old ones used instead."));
+ _error->Warning(_("Some index files failed to download. They have been ignored, or old ones used instead."));
else if (Failed == true)
- return _error->Error(_("Some index files failed to download, they have been ignored, or old ones used instead."));
+ return _error->Error(_("Some index files failed to download. They have been ignored, or old ones used instead."));
// Run the success scripts if all was fine
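With the hooks added above, setting APT::Solver to anything other than "internal" makes pkgDistUpgrade, pkgAllUpgrade, Resolve and ResolveByKeep hand the dependency problem to EDSP::ResolveExternal instead of the built-in resolver. A minimal configuration sketch; the solver name is only a placeholder for whichever external EDSP solver is installed:

    // apt.conf fragment, illustrative value
    APT::Solver "my-edsp-solver";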
diff --git a/apt-pkg/algorithms.h b/apt-pkg/algorithms.h
index 42945a6f2..050934bad 100644
--- a/apt-pkg/algorithms.h
+++ b/apt-pkg/algorithms.h
@@ -108,10 +108,13 @@ class pkgProblemResolver /*{{{*/
void MakeScores();
bool DoUpgrade(pkgCache::PkgIterator Pkg);
+
+ bool ResolveInternal(bool const BrokenFix = false);
+ bool ResolveByKeepInternal();
public:
- inline void Protect(pkgCache::PkgIterator Pkg) {Flags[Pkg->ID] |= Protected;};
+ inline void Protect(pkgCache::PkgIterator Pkg) {Flags[Pkg->ID] |= Protected; Cache.MarkProtected(Pkg);};
inline void Remove(pkgCache::PkgIterator Pkg) {Flags[Pkg->ID] |= ToRemove;};
inline void Clear(pkgCache::PkgIterator Pkg) {Flags[Pkg->ID] &= ~(Protected | ToRemove);};
diff --git a/apt-pkg/aptconfiguration.cc b/apt-pkg/aptconfiguration.cc
index 52f54073c..ca602d4bf 100644
--- a/apt-pkg/aptconfiguration.cc
+++ b/apt-pkg/aptconfiguration.cc
@@ -17,6 +17,7 @@
#include <sys/types.h>
#include <dirent.h>
+#include <stdio.h>
#include <algorithm>
#include <string>
@@ -38,12 +39,11 @@ const Configuration::getCompressionTypes(bool const &Cached) {
// setup the defaults for the compressiontypes => method mapping
_config->CndSet("Acquire::CompressionTypes::bz2","bzip2");
+ _config->CndSet("Acquire::CompressionTypes::xz","xz");
_config->CndSet("Acquire::CompressionTypes::lzma","lzma");
_config->CndSet("Acquire::CompressionTypes::gz","gzip");
- // Set default application paths to check for optional compression types
- _config->CndSet("Dir::Bin::lzma", "/usr/bin/lzma");
- _config->CndSet("Dir::Bin::bzip2", "/bin/bzip2");
+ setDefaultConfigurationForCompressors();
// accept non-list order as override setting for config settings on commandline
std::string const overrideOrder = _config->Find("Acquire::CompressionTypes::Order","");
@@ -90,6 +90,14 @@ const Configuration::getCompressionTypes(bool const &Cached) {
types.push_back(Types->Tag);
}
+ // add the special "uncompressed" type
+ if (std::find(types.begin(), types.end(), "uncompressed") == types.end())
+ {
+ string const uncompr = _config->FindFile("Dir::Bin::uncompressed", "");
+ if (uncompr.empty() == true || FileExists(uncompr) == true)
+ types.push_back("uncompressed");
+ }
+
return types;
}
/*}}}*/
@@ -155,33 +163,6 @@ std::vector<std::string> const Configuration::getLanguages(bool const &All,
}
closedir(D);
- // get the environment language codes: LC_MESSAGES (and later LANGUAGE)
- // we extract both, a long and a short code and then we will
- // check if we actually need both (rare) or if the short is enough
- string const envMsg = string(Locale == 0 ? std::setlocale(LC_MESSAGES, NULL) : *Locale);
- size_t const lenShort = (envMsg.find('_') != string::npos) ? envMsg.find('_') : 2;
- size_t const lenLong = (envMsg.find_first_of(".@") != string::npos) ? envMsg.find_first_of(".@") : (lenShort + 3);
-
- string envLong = envMsg.substr(0,lenLong);
- string const envShort = envLong.substr(0,lenShort);
- bool envLongIncluded = true;
-
- // to save the servers from unneeded queries, we only try also long codes
- // for languages it is realistic to have a long code translation file…
- // TODO: Improve translation acquire system to drop them dynamic
- char const *needLong[] = { "cs", "en", "pt", "sv", "zh", NULL };
- if (envLong != envShort) {
- for (char const **l = needLong; *l != NULL; l++)
- if (envShort.compare(*l) == 0) {
- envLongIncluded = false;
- break;
- }
- }
-
- // we don't add the long code, but we allow the user to do so
- if (envLongIncluded == true)
- envLong.clear();
-
// FIXME: Remove support for the old APT::Acquire::Translation
   // it was undocumented and so it should not be very widely used
string const oldAcquire = _config->Find("APT::Acquire::Translation","");
@@ -203,12 +184,22 @@ std::vector<std::string> const Configuration::getLanguages(bool const &All,
return codes;
}
- // It is very likely we will need to environment codes later,
+ // get the environment language codes: LC_MESSAGES (and later LANGUAGE)
+ // we extract both, a long and a short code and then we will
+ // check if we actually need both (rare) or if the short is enough
+ string const envMsg = string(Locale == 0 ? std::setlocale(LC_MESSAGES, NULL) : *Locale);
+ size_t const lenShort = (envMsg.find('_') != string::npos) ? envMsg.find('_') : 2;
+ size_t const lenLong = (envMsg.find_first_of(".@") != string::npos) ? envMsg.find_first_of(".@") : (lenShort + 3);
+
+ string const envLong = envMsg.substr(0,lenLong);
+ string const envShort = envLong.substr(0,lenShort);
+
+ // It is very likely we will need the environment codes later,
// so let us generate them now from LC_MESSAGES and LANGUAGE
std::vector<string> environment;
if (envShort != "C") {
// take care of LC_MESSAGES
- if (envLongIncluded == false)
+ if (envLong != envShort)
environment.push_back(envLong);
environment.push_back(envShort);
// take care of LANGUAGE
@@ -225,16 +216,6 @@ std::vector<std::string> const Configuration::getLanguages(bool const &All,
continue;
if (std::find(environment.begin(), environment.end(), *e) != environment.end())
continue;
- if (e->find('_') != string::npos) {
- // Drop LongCodes here - ShortCodes are also included
- string const shorty = e->substr(0, e->find('_'));
- char const **n = needLong;
- for (; *n != NULL; ++n)
- if (shorty == *n)
- break;
- if (*n == NULL)
- continue;
- }
++addedLangs;
environment.push_back(*e);
}
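
The reordered block above moves the LC_MESSAGES parsing behind the early return for explicit configuration, but the splitting rules stay the same: the long code ends before '.' or '@', the short code before '_'. A standalone sketch of exactly that substring logic (the function name is illustrative):

#include <string>
#include <utility>

// "de_AT.UTF-8" -> ("de_AT", "de"); "fr" -> ("fr", "fr")
static std::pair<std::string, std::string> SplitLocale(std::string const &envMsg)
{
   size_t const lenShort = (envMsg.find('_') != std::string::npos) ? envMsg.find('_') : 2;
   size_t const lenLong = (envMsg.find_first_of(".@") != std::string::npos)
                              ? envMsg.find_first_of(".@") : (lenShort + 3);
   std::string const envLong = envMsg.substr(0, lenLong);
   std::string const envShort = envLong.substr(0, lenShort);
   return std::make_pair(envLong, envShort);
}
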
@@ -334,11 +315,41 @@ std::vector<std::string> const Configuration::getArchitectures(bool const &Cache
if (likely(Cached == true) && archs.empty() == false)
return archs;
- archs = _config->FindVector("APT::Architectures");
string const arch = _config->Find("APT::Architecture");
+ archs = _config->FindVector("APT::Architectures");
+
if (unlikely(arch.empty() == true))
return archs;
+ // FIXME: It is a bit unclean to have debian specific code here…
+ if (archs.empty() == true) {
+ archs.push_back(arch);
+ string dpkgcall = _config->Find("Dir::Bin::dpkg", "dpkg");
+ std::vector<string> const dpkgoptions = _config->FindVector("DPkg::options");
+ for (std::vector<string>::const_iterator o = dpkgoptions.begin();
+ o != dpkgoptions.end(); ++o)
+ dpkgcall.append(" ").append(*o);
+ dpkgcall.append(" --print-foreign-architectures 2> /dev/null");
+ FILE *dpkg = popen(dpkgcall.c_str(), "r");
+ char buf[1024];
+ if(dpkg != NULL) {
+ if (fgets(buf, sizeof(buf), dpkg) != NULL) {
+ char* arch = strtok(buf, " ");
+ while (arch != NULL) {
+ for (; isspace(*arch) != 0; ++arch);
+ if (arch[0] != '\0') {
+ char const* archend = arch;
+ for (; isspace(*archend) == 0 && *archend != '\0'; ++archend);
+ archs.push_back(string(arch, (archend - arch)));
+ }
+ arch = strtok(NULL, " ");
+ }
+ }
+ pclose(dpkg);
+ }
+ return archs;
+ }
+
if (archs.empty() == true ||
std::find(archs.begin(), archs.end(), arch) == archs.end())
archs.push_back(arch);
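
With the new block above, an unset APT::Architectures is seeded with the native architecture plus whatever `dpkg --print-foreign-architectures` reports. A hedged usage sketch of the public entry point (it assumes the usual libapt-pkg headers, linking and initialisation; error handling is omitted):

#include <apt-pkg/init.h>
#include <apt-pkg/configuration.h>
#include <apt-pkg/aptconfiguration.h>
#include <iostream>
#include <string>
#include <vector>

int main()
{
   pkgInitConfig(*_config);   // loads APT::Architecture and friends
   std::vector<std::string> const archs = APT::Configuration::getArchitectures();
   for (std::vector<std::string>::const_iterator a = archs.begin(); a != archs.end(); ++a)
      std::cout << *a << std::endl;
   return 0;
}
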
@@ -363,4 +374,86 @@ bool const Configuration::checkArchitecture(std::string const &Arch) {
return (std::find(archs.begin(), archs.end(), Arch) != archs.end());
}
/*}}}*/
+// setDefaultConfigurationForCompressors /*{{{*/
+void Configuration::setDefaultConfigurationForCompressors() {
+ // Set default application paths to check for optional compression types
+ _config->CndSet("Dir::Bin::lzma", "/usr/bin/lzma");
+ _config->CndSet("Dir::Bin::xz", "/usr/bin/xz");
+ _config->CndSet("Dir::Bin::bzip2", "/bin/bzip2");
+}
+ /*}}}*/
+// getCompressors - Return Vector of usable compressors /*{{{*/
+// ---------------------------------------------------------------------
+/* return a vector of compressors used by apt-ftparchive in the
+ multicompress functionality or to detect data.tar files */
+std::vector<APT::Configuration::Compressor>
+const Configuration::getCompressors(bool const Cached) {
+ static std::vector<APT::Configuration::Compressor> compressors;
+ if (compressors.empty() == false) {
+ if (Cached == true)
+ return compressors;
+ else
+ compressors.clear();
+ }
+
+ setDefaultConfigurationForCompressors();
+
+ compressors.push_back(Compressor(".", "", "", "", "", 1));
+ if (_config->Exists("Dir::Bin::gzip") == false || FileExists(_config->FindFile("Dir::Bin::gzip")) == true)
+ compressors.push_back(Compressor("gzip",".gz","gzip","-9n","-d",2));
+ if (_config->Exists("Dir::Bin::bzip2") == false || FileExists(_config->FindFile("Dir::Bin::bzip2")) == true)
+ compressors.push_back(Compressor("bzip2",".bz2","bzip2","-9","-d",3));
+ if (_config->Exists("Dir::Bin::lzma") == false || FileExists(_config->FindFile("Dir::Bin::lzma")) == true)
+ compressors.push_back(Compressor("lzma",".lzma","lzma","-9","-d",4));
+ if (_config->Exists("Dir::Bin::xz") == false || FileExists(_config->FindFile("Dir::Bin::xz")) == true)
+ compressors.push_back(Compressor("xz",".xz","xz","-6","-d",5));
+
+ std::vector<std::string> const comp = _config->FindVector("APT::Compressor");
+ for (std::vector<std::string>::const_iterator c = comp.begin();
+ c != comp.end(); ++c) {
+ if (*c == "." || *c == "gzip" || *c == "bzip2" || *c == "lzma" || *c == "xz")
+ continue;
+ compressors.push_back(Compressor(c->c_str(), std::string(".").append(*c).c_str(), c->c_str(), "-9", "-d", 100));
+ }
+
+ return compressors;
+}
+ /*}}}*/
+// getCompressorExtensions - supported data.tar extensions /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+std::vector<std::string> const Configuration::getCompressorExtensions() {
+ std::vector<APT::Configuration::Compressor> const compressors = getCompressors();
+ std::vector<std::string> ext;
+ for (std::vector<APT::Configuration::Compressor>::const_iterator c = compressors.begin();
+ c != compressors.end(); ++c)
+ if (c->Extension.empty() == false && c->Extension != ".")
+ ext.push_back(c->Extension);
+ return ext;
+}
+ /*}}}*/
+// Compressor constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+Configuration::Compressor::Compressor(char const *name, char const *extension,
+ char const *binary,
+ char const *compressArg, char const *uncompressArg,
+ unsigned short const cost) {
+	std::string const config = string("APT::Compressor::").append(name).append("::");
+ Name = _config->Find(std::string(config).append("Name"), name);
+ Extension = _config->Find(std::string(config).append("Extension"), extension);
+ Binary = _config->Find(std::string(config).append("Binary"), binary);
+ Cost = _config->FindI(std::string(config).append("Cost"), cost);
+ std::string const compConf = std::string(config).append("CompressArg");
+ if (_config->Exists(compConf) == true)
+ CompressArgs = _config->FindVector(compConf);
+ else if (compressArg != NULL)
+ CompressArgs.push_back(compressArg);
+ std::string const uncompConf = std::string(config).append("UncompressArg");
+ if (_config->Exists(uncompConf) == true)
+ UncompressArgs = _config->FindVector(uncompConf);
+ else if (uncompressArg != NULL)
+ UncompressArgs.push_back(uncompressArg);
+}
+ /*}}}*/
}
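
The compressor list built above carries a cost per entry and can be extended at runtime via the APT::Compressor list. A hedged usage sketch that picks a handler for a file by its extension, relying only on the getCompressors() interface and Compressor fields introduced in this diff:

#include <apt-pkg/init.h>
#include <apt-pkg/configuration.h>
#include <apt-pkg/aptconfiguration.h>
#include <iostream>
#include <string>
#include <vector>

int main()
{
   pkgInitConfig(*_config);
   typedef std::vector<APT::Configuration::Compressor> CompList;
   CompList const comps = APT::Configuration::getCompressors();
   std::string const file = "data.tar.xz";
   for (CompList::const_iterator c = comps.begin(); c != comps.end(); ++c)
   {
      if (c->Extension.empty() == true || c->Extension == ".")
         continue;
      if (file.size() > c->Extension.size() &&
          file.compare(file.size() - c->Extension.size(), c->Extension.size(), c->Extension) == 0)
         std::cout << file << " -> " << c->Binary << std::endl;   // prints "xz" here
   }
   return 0;
}
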
diff --git a/apt-pkg/aptconfiguration.h b/apt-pkg/aptconfiguration.h
index dd339d841..815db6cae 100644
--- a/apt-pkg/aptconfiguration.h
+++ b/apt-pkg/aptconfiguration.h
@@ -82,6 +82,35 @@ public: /*{{{*/
*/
bool static const checkArchitecture(std::string const &Arch);
+ /** \brief Representation of supported compressors */
+ struct Compressor {
+ std::string Name;
+ std::string Extension;
+ std::string Binary;
+ std::vector<std::string> CompressArgs;
+ std::vector<std::string> UncompressArgs;
+ unsigned short Cost;
+
+ Compressor(char const *name, char const *extension, char const *binary,
+ char const *compressArg, char const *uncompressArg,
+ unsigned short const cost);
+ Compressor() {};
+ };
+
+ /** \brief Return a vector of Compressors supported for data.tar's
+ *
+ * \param Cached saves the result so we need to calculate it only once;
+ * this parameter should only be used for testing purposes.
+ *
+ * \return a vector of Compressors
+ */
+ std::vector<Compressor> static const getCompressors(bool const Cached = true);
+
+ /** \brief Return a vector of extensions supported for data.tar's */
+ std::vector<std::string> static const getCompressorExtensions();
+ /*}}}*/
+ private: /*{{{*/
+ void static setDefaultConfigurationForCompressors();
/*}}}*/
};
/*}}}*/
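
A note on the Cached parameter documented above: the getters keep their result in a function-local static and only recompute when a caller (normally test code) passes false. A generic sketch of that pattern, outside of APT:

#include <string>
#include <vector>

static std::vector<std::string> getThings(bool const Cached = true)
{
   static std::vector<std::string> things;
   if (things.empty() == false)
   {
      if (Cached == true)
         return things;
      things.clear();      // tests force a rebuild with Cached == false
   }
   things.push_back("expensive result");
   return things;
}
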
diff --git a/apt-pkg/cacheiterators.h b/apt-pkg/cacheiterators.h
index 26070636e..b97a1a589 100644
--- a/apt-pkg/cacheiterators.h
+++ b/apt-pkg/cacheiterators.h
@@ -96,7 +96,7 @@ class pkgCache::GrpIterator: public Iterator<Group, GrpIterator> {
protected:
inline Group* OwnerPointer() const {
- return Owner->GrpP;
+ return (Owner != 0) ? Owner->GrpP : 0;
};
public:
@@ -137,7 +137,7 @@ class pkgCache::PkgIterator: public Iterator<Package, PkgIterator> {
protected:
inline Package* OwnerPointer() const {
- return Owner->PkgP;
+ return (Owner != 0) ? Owner->PkgP : 0;
};
public:
@@ -184,7 +184,7 @@ class pkgCache::PkgIterator: public Iterator<Package, PkgIterator> {
class pkgCache::VerIterator : public Iterator<Version, VerIterator> {
protected:
inline Version* OwnerPointer() const {
- return Owner->VerP;
+ return (Owner != 0) ? Owner->VerP : 0;
};
public:
@@ -206,15 +206,10 @@ class pkgCache::VerIterator : public Iterator<Version, VerIterator> {
inline const char *VerStr() const {return S->VerStr == 0?0:Owner->StrP + S->VerStr;};
inline const char *Section() const {return S->Section == 0?0:Owner->StrP + S->Section;};
inline const char *Arch() const {
- if(S->MultiArch == pkgCache::Version::All)
+ if (S->MultiArch == pkgCache::Version::All)
return "all";
return S->ParentPkg == 0?0:Owner->StrP + ParentPkg()->Arch;
};
- inline const char *Arch(bool const pseudo) const {
- if(pseudo == false)
- return Arch();
- return S->ParentPkg == 0?0:Owner->StrP + ParentPkg()->Arch;
- };
inline PkgIterator ParentPkg() const {return PkgIterator(*Owner,Owner->PkgP + S->ParentPkg);};
inline DescIterator DescriptionList() const;
@@ -227,7 +222,6 @@ class pkgCache::VerIterator : public Iterator<Version, VerIterator> {
string RelStr() const;
bool Automatic() const;
- bool Pseudo() const;
VerFileIterator NewestFile() const;
inline VerIterator(pkgCache &Owner,Version *Trg = 0) : Iterator<Version, VerIterator>(Owner, Trg) {
@@ -241,7 +235,7 @@ class pkgCache::VerIterator : public Iterator<Version, VerIterator> {
class pkgCache::DescIterator : public Iterator<Description, DescIterator> {
protected:
inline Description* OwnerPointer() const {
- return Owner->DescP;
+ return (Owner != 0) ? Owner->DescP : 0;
};
public:
@@ -270,7 +264,7 @@ class pkgCache::DepIterator : public Iterator<Dependency, DepIterator> {
protected:
inline Dependency* OwnerPointer() const {
- return Owner->DepP;
+ return (Owner != 0) ? Owner->DepP : 0;
};
public:
@@ -287,6 +281,7 @@ class pkgCache::DepIterator : public Iterator<Dependency, DepIterator> {
inline PkgIterator ParentPkg() const {return PkgIterator(*Owner,Owner->PkgP + Owner->VerP[S->ParentVer].ParentPkg);};
inline bool Reverse() const {return Type == DepRev;};
bool IsCritical() const;
+ bool IsNegative() const;
void GlobOr(DepIterator &Start,DepIterator &End);
Version **AllTargets() const;
bool SmartTargetPkg(PkgIterator &Result) const;
@@ -315,7 +310,7 @@ class pkgCache::PrvIterator : public Iterator<Provides, PrvIterator> {
protected:
inline Provides* OwnerPointer() const {
- return Owner->ProvideP;
+ return (Owner != 0) ? Owner->ProvideP : 0;
};
public:
@@ -349,7 +344,7 @@ class pkgCache::PrvIterator : public Iterator<Provides, PrvIterator> {
class pkgCache::PkgFileIterator : public Iterator<PackageFile, PkgFileIterator> {
protected:
inline PackageFile* OwnerPointer() const {
- return Owner->PkgFileP;
+ return (Owner != 0) ? Owner->PkgFileP : 0;
};
public:
@@ -382,7 +377,7 @@ class pkgCache::PkgFileIterator : public Iterator<PackageFile, PkgFileIterator>
class pkgCache::VerFileIterator : public pkgCache::Iterator<VerFile, VerFileIterator> {
protected:
inline VerFile* OwnerPointer() const {
- return Owner->VerFileP;
+ return (Owner != 0) ? Owner->VerFileP : 0;
};
public:
@@ -401,7 +396,7 @@ class pkgCache::VerFileIterator : public pkgCache::Iterator<VerFile, VerFileIter
class pkgCache::DescFileIterator : public Iterator<DescFile, DescFileIterator> {
protected:
inline DescFile* OwnerPointer() const {
- return Owner->DescFileP;
+ return (Owner != 0) ? Owner->DescFileP : 0;
};
public:
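
Alongside the NULL-guarded OwnerPointer() methods, this header gains DepIterator::IsNegative(), which the resolver hunks earlier in this diff use instead of spelling out the three dependency types. Presumably it boils down to a check like the following sketch (not the actual implementation):

#include <apt-pkg/pkgcache.h>

// A dependency is "negative" when satisfying it means keeping the target
// out of the system instead of installing it.
static bool IsNegativeDep(unsigned char const Type)
{
   return Type == pkgCache::Dep::Conflicts ||
          Type == pkgCache::Dep::DpkgBreaks ||
          Type == pkgCache::Dep::Obsoletes;
}
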
diff --git a/apt-pkg/cacheset.cc b/apt-pkg/cacheset.cc
index bbfdfd4f2..a1de613e2 100644
--- a/apt-pkg/cacheset.cc
+++ b/apt-pkg/cacheset.cc
@@ -464,7 +464,7 @@ PackageSet CacheSetHelper::canNotFindPackage(pkgCacheFile &Cache, std::string co
VersionSet CacheSetHelper::canNotFindAllVer(pkgCacheFile &Cache,
pkgCache::PkgIterator const &Pkg) {
if (ShowError == true)
- _error->Insert(ErrorType, _("Can't select versions from package '%s' as it purely virtual"), Pkg.FullName(true).c_str());
+ _error->Insert(ErrorType, _("Can't select versions from package '%s' as it is purely virtual"), Pkg.FullName(true).c_str());
return VersionSet();
}
/*}}}*/
diff --git a/apt-pkg/cacheset.h b/apt-pkg/cacheset.h
index 3f4f0066b..061d0a2f4 100644
--- a/apt-pkg/cacheset.h
+++ b/apt-pkg/cacheset.h
@@ -135,7 +135,7 @@ public: /*{{{*/
static APT::PackageSet FromTask(pkgCacheFile &Cache, std::string pattern, CacheSetHelper &helper);
static APT::PackageSet FromTask(pkgCacheFile &Cache, std::string const &pattern) {
CacheSetHelper helper;
- return APT::PackageSet::FromTask(Cache, pattern, helper);
+ return FromTask(Cache, pattern, helper);
}
    /** \brief returns all packages in the cache whose name matches a given pattern
@@ -149,7 +149,7 @@ public: /*{{{*/
static APT::PackageSet FromRegEx(pkgCacheFile &Cache, std::string pattern, CacheSetHelper &helper);
static APT::PackageSet FromRegEx(pkgCacheFile &Cache, std::string const &pattern) {
CacheSetHelper helper;
- return APT::PackageSet::FromRegEx(Cache, pattern, helper);
+ return FromRegEx(Cache, pattern, helper);
}
/** \brief returns all packages specified by a string
@@ -160,7 +160,7 @@ public: /*{{{*/
static APT::PackageSet FromString(pkgCacheFile &Cache, std::string const &string, CacheSetHelper &helper);
static APT::PackageSet FromString(pkgCacheFile &Cache, std::string const &string) {
CacheSetHelper helper;
- return APT::PackageSet::FromString(Cache, string, helper);
+ return FromString(Cache, string, helper);
}
/** \brief returns a package specified by a string
@@ -171,7 +171,7 @@ public: /*{{{*/
static pkgCache::PkgIterator FromName(pkgCacheFile &Cache, std::string const &string, CacheSetHelper &helper);
static pkgCache::PkgIterator FromName(pkgCacheFile &Cache, std::string const &string) {
CacheSetHelper helper;
- return APT::PackageSet::FromName(Cache, string, helper);
+ return FromName(Cache, string, helper);
}
/** \brief returns all packages specified on the commandline
@@ -184,7 +184,7 @@ public: /*{{{*/
static APT::PackageSet FromCommandLine(pkgCacheFile &Cache, const char **cmdline, CacheSetHelper &helper);
static APT::PackageSet FromCommandLine(pkgCacheFile &Cache, const char **cmdline) {
CacheSetHelper helper;
- return APT::PackageSet::FromCommandLine(Cache, cmdline, helper);
+ return FromCommandLine(Cache, cmdline, helper);
}
struct Modifier {
@@ -215,7 +215,7 @@ public: /*{{{*/
std::list<PackageSet::Modifier> const &mods,
unsigned short const &fallback) {
CacheSetHelper helper;
- return APT::PackageSet::GroupedFromCommandLine(Cache, cmdline,
+ return GroupedFromCommandLine(Cache, cmdline,
mods, fallback, helper);
}
@@ -257,7 +257,6 @@ public: /*{{{*/
inline const char *VerStr() const { return (**this).VerStr(); };
inline const char *Section() const { return (**this).Section(); };
inline const char *Arch() const { return (**this).Arch(); };
- inline const char *Arch(bool const pseudo) const { return (**this).Arch(pseudo); };
inline pkgCache::PkgIterator ParentPkg() const { return (**this).ParentPkg(); };
inline pkgCache::DescIterator DescriptionList() const { return (**this).DescriptionList(); };
inline pkgCache::DescIterator TranslatedDescription() const { return (**this).TranslatedDescription(); };
@@ -268,7 +267,6 @@ public: /*{{{*/
inline const char *PriorityType() const { return (**this).PriorityType(); };
inline string RelStr() const { return (**this).RelStr(); };
inline bool Automatic() const { return (**this).Automatic(); };
- inline bool Pseudo() const { return (**this).Pseudo(); };
inline pkgCache::VerFileIterator NewestFile() const { return (**this).NewestFile(); };
};
/*}}}*/
@@ -309,10 +307,10 @@ public: /*{{{*/
static APT::VersionSet FromCommandLine(pkgCacheFile &Cache, const char **cmdline,
APT::VersionSet::Version const &fallback) {
CacheSetHelper helper;
- return APT::VersionSet::FromCommandLine(Cache, cmdline, fallback, helper);
+ return FromCommandLine(Cache, cmdline, fallback, helper);
}
static APT::VersionSet FromCommandLine(pkgCacheFile &Cache, const char **cmdline) {
- return APT::VersionSet::FromCommandLine(Cache, cmdline, CANDINST);
+ return FromCommandLine(Cache, cmdline, CANDINST);
}
static APT::VersionSet FromString(pkgCacheFile &Cache, std::string pkg,
@@ -321,10 +319,10 @@ public: /*{{{*/
static APT::VersionSet FromString(pkgCacheFile &Cache, std::string pkg,
APT::VersionSet::Version const &fallback) {
CacheSetHelper helper;
- return APT::VersionSet::FromString(Cache, pkg, fallback, helper);
+ return FromString(Cache, pkg, fallback, helper);
}
static APT::VersionSet FromString(pkgCacheFile &Cache, std::string pkg) {
- return APT::VersionSet::FromString(Cache, pkg, CANDINST);
+ return FromString(Cache, pkg, CANDINST);
}
/** \brief returns all versions specified for the package
@@ -338,10 +336,10 @@ public: /*{{{*/
static APT::VersionSet FromPackage(pkgCacheFile &Cache, pkgCache::PkgIterator const &P,
APT::VersionSet::Version const &fallback) {
CacheSetHelper helper;
- return APT::VersionSet::FromPackage(Cache, P, fallback, helper);
+ return FromPackage(Cache, P, fallback, helper);
}
static APT::VersionSet FromPackage(pkgCacheFile &Cache, pkgCache::PkgIterator const &P) {
- return APT::VersionSet::FromPackage(Cache, P, CANDINST);
+ return FromPackage(Cache, P, CANDINST);
}
struct Modifier {
@@ -364,7 +362,7 @@ public: /*{{{*/
std::list<VersionSet::Modifier> const &mods,
unsigned short const &fallback) {
CacheSetHelper helper;
- return APT::VersionSet::GroupedFromCommandLine(Cache, cmdline,
+ return GroupedFromCommandLine(Cache, cmdline,
mods, fallback, helper);
}
/*}}}*/
diff --git a/apt-pkg/cdrom.cc b/apt-pkg/cdrom.cc
index 0e36f44a2..2a914c665 100644
--- a/apt-pkg/cdrom.cc
+++ b/apt-pkg/cdrom.cc
@@ -155,7 +155,11 @@ bool pkgCdrom::FindPackages(string CD,
break;
if (chdir(CD.c_str()) != 0)
- return _error->Errno("chdir","Unable to change to %s",CD.c_str());
+ {
+ _error->Errno("chdir","Unable to change to %s", CD.c_str());
+ closedir(D);
+ return false;
+ }
};
closedir(D);
@@ -198,7 +202,7 @@ int pkgCdrom::Score(string Path)
// a symlink gets a big penalty
struct stat Buf;
string statPath = flNotFile(Path);
- string cdromPath = _config->FindDir("Acquire::cdrom::mount","/cdrom/");
+ string cdromPath = _config->FindDir("Acquire::cdrom::mount");
while(statPath != cdromPath && statPath != "./") {
statPath.resize(statPath.size()-1); // remove the trailing '/'
if (lstat(statPath.c_str(),&Buf) == 0) {
@@ -257,8 +261,10 @@ bool pkgCdrom::DropRepeats(vector<string> &List,const char *Name)
Inodes[I] = Buf.st_ino;
}
- if (_error->PendingError() == true)
+ if (_error->PendingError() == true) {
+ delete[] Inodes;
return false;
+ }
// Look for dups
for (unsigned int I = 0; I != List.size(); I++)
@@ -509,11 +515,12 @@ bool pkgCdrom::Ident(string &ident, pkgCdromStatus *log) /*{{{*/
stringstream msg;
// Startup
- string CDROM = _config->FindDir("Acquire::cdrom::mount","/cdrom/");
+ string CDROM = _config->FindDir("Acquire::cdrom::mount");
if (CDROM[0] == '.')
CDROM= SafeGetCWD() + '/' + CDROM;
- if(log) {
+ if (log != NULL)
+ {
msg.str("");
ioprintf(msg, _("Using CD-ROM mount point %s\nMounting CD-ROM\n"),
CDROM.c_str());
@@ -523,7 +530,7 @@ bool pkgCdrom::Ident(string &ident, pkgCdromStatus *log) /*{{{*/
return _error->Error("Failed to mount the cdrom.");
// Hash the CD to get an ID
- if(log)
+ if (log != NULL)
log->Update(_("Identifying.. "));
@@ -533,10 +540,12 @@ bool pkgCdrom::Ident(string &ident, pkgCdromStatus *log) /*{{{*/
return false;
}
- msg.str("");
- ioprintf(msg, "[%s]\n",ident.c_str());
- log->Update(msg.str());
-
+ if (log != NULL)
+ {
+ msg.str("");
+ ioprintf(msg, "[%s]\n",ident.c_str());
+ log->Update(msg.str());
+ }
// Read the database
Configuration Database;
@@ -547,7 +556,8 @@ bool pkgCdrom::Ident(string &ident, pkgCdromStatus *log) /*{{{*/
return _error->Error("Unable to read the cdrom database %s",
DFile.c_str());
}
- if(log) {
+ if (log != NULL)
+ {
msg.str("");
ioprintf(msg, _("Stored label: %s\n"),
Database.Find("CD::"+ident).c_str());
@@ -555,8 +565,10 @@ bool pkgCdrom::Ident(string &ident, pkgCdromStatus *log) /*{{{*/
}
// Unmount and finish
- if (_config->FindB("APT::CDROM::NoMount",false) == false) {
- log->Update(_("Unmounting CD-ROM...\n"), STEP_LAST);
+ if (_config->FindB("APT::CDROM::NoMount",false) == false)
+ {
+ if (log != NULL)
+ log->Update(_("Unmounting CD-ROM...\n"), STEP_LAST);
UnmountCdrom(CDROM);
}
@@ -568,11 +580,12 @@ bool pkgCdrom::Add(pkgCdromStatus *log) /*{{{*/
stringstream msg;
// Startup
- string CDROM = _config->FindDir("Acquire::cdrom::mount","/cdrom/");
+ string CDROM = _config->FindDir("Acquire::cdrom::mount");
if (CDROM[0] == '.')
CDROM= SafeGetCWD() + '/' + CDROM;
- if(log) {
+ if(log != NULL)
+ {
log->SetTotal(STEP_LAST);
msg.str("");
ioprintf(msg, _("Using CD-ROM mount point %s\n"), CDROM.c_str());
@@ -592,11 +605,12 @@ bool pkgCdrom::Add(pkgCdromStatus *log) /*{{{*/
// Unmount the CD and get the user to put in the one they want
if (_config->FindB("APT::CDROM::NoMount",false) == false)
{
- if(log)
+ if(log != NULL)
log->Update(_("Unmounting CD-ROM\n"), STEP_UNMOUNT);
UnmountCdrom(CDROM);
- if(log) {
+ if(log != NULL)
+ {
log->Update(_("Waiting for disc...\n"), STEP_WAIT);
if(!log->ChangeCdrom()) {
// user aborted
@@ -605,26 +619,29 @@ bool pkgCdrom::Add(pkgCdromStatus *log) /*{{{*/
}
// Mount the new CDROM
- log->Update(_("Mounting CD-ROM...\n"), STEP_MOUNT);
+ if(log != NULL)
+ log->Update(_("Mounting CD-ROM...\n"), STEP_MOUNT);
+
if (MountCdrom(CDROM) == false)
return _error->Error("Failed to mount the cdrom.");
}
// Hash the CD to get an ID
- if(log)
+ if(log != NULL)
log->Update(_("Identifying.. "), STEP_IDENT);
string ID;
if (IdentCdrom(CDROM,ID) == false)
{
- log->Update("\n");
+ if (log != NULL)
+ log->Update("\n");
return false;
}
- if(log)
+ if(log != NULL)
+ {
log->Update("["+ID+"]\n");
-
- if(log)
log->Update(_("Scanning disc for index files..\n"),STEP_SCAN);
-
+ }
+
// Get the CD structure
vector<string> List;
vector<string> SourceList;
@@ -634,7 +651,8 @@ bool pkgCdrom::Add(pkgCdromStatus *log) /*{{{*/
string InfoDir;
if (FindPackages(CDROM,List,SourceList, SigList,TransList,InfoDir,log) == false)
{
- log->Update("\n");
+ if (log != NULL)
+ log->Update("\n");
return false;
}
@@ -661,7 +679,7 @@ bool pkgCdrom::Add(pkgCdromStatus *log) /*{{{*/
DropRepeats(SourceList,"Sources");
DropRepeats(SigList,"Release.gpg");
DropRepeats(TransList,"");
- if(log) {
+ if(log != NULL) {
msg.str("");
ioprintf(msg, _("Found %zu package indexes, %zu source indexes, "
"%zu translation indexes and %zu signatures\n"),
@@ -698,7 +716,8 @@ bool pkgCdrom::Add(pkgCdromStatus *log) /*{{{*/
if (*J == '"' || *J == ']' || *J == '[')
*J = '_';
- if(log) {
+ if(log != NULL)
+ {
msg.str("");
ioprintf(msg, _("Found label '%s'\n"), Name.c_str());
log->Update(msg.str());
@@ -710,7 +729,7 @@ bool pkgCdrom::Add(pkgCdromStatus *log) /*{{{*/
if (_config->FindB("APT::CDROM::Rename",false) == true ||
Name.empty() == true)
{
- if(!log)
+ if(log == NULL)
{
if (_config->FindB("APT::CDROM::NoMount",false) == false)
UnmountCdrom(CDROM);
@@ -743,13 +762,13 @@ bool pkgCdrom::Add(pkgCdromStatus *log) /*{{{*/
*J = '_';
Database.Set("CD::" + ID,Name);
- if(log) {
+ if(log != NULL)
+ {
msg.str("");
ioprintf(msg, _("This disc is called: \n'%s'\n"), Name.c_str());
log->Update(msg.str());
+ log->Update(_("Copying package lists..."), STEP_COPY);
}
-
- log->Update(_("Copying package lists..."), STEP_COPY);
// take care of the signatures and copy them if they are ok
// (we do this before PackageCopy as it modifies "List" and "SourceList")
SigVerify SignVerify;
@@ -774,16 +793,15 @@ bool pkgCdrom::Add(pkgCdromStatus *log) /*{{{*/
if (WriteDatabase(Database) == false)
return false;
- if(log) {
+ if(log != NULL)
log->Update(_("Writing new source list\n"), STEP_WRITE);
- }
if (WriteSourceList(Name,List,false) == false ||
WriteSourceList(Name,SourceList,true) == false)
return false;
}
// Print the sourcelist entries
- if(log)
+ if(log != NULL)
log->Update(_("Source list entries for this disc are:\n"));
for (vector<string>::iterator I = List.begin(); I != List.end(); I++)
@@ -796,7 +814,8 @@ bool pkgCdrom::Add(pkgCdromStatus *log) /*{{{*/
return _error->Error("Internal error");
}
- if(log) {
+ if(log != NULL)
+ {
msg.str("");
msg << "deb cdrom:[" << Name << "]/" << string(*I,0,Space) <<
" " << string(*I,Space+1) << endl;
@@ -814,7 +833,7 @@ bool pkgCdrom::Add(pkgCdromStatus *log) /*{{{*/
return _error->Error("Internal error");
}
- if(log) {
+ if(log != NULL) {
msg.str("");
msg << "deb-src cdrom:[" << Name << "]/" << string(*I,0,Space) <<
" " << string(*I,Space+1) << endl;
@@ -824,7 +843,8 @@ bool pkgCdrom::Add(pkgCdromStatus *log) /*{{{*/
// Unmount and finish
if (_config->FindB("APT::CDROM::NoMount",false) == false) {
- log->Update(_("Unmounting CD-ROM...\n"), STEP_LAST);
+ if (log != NULL)
+ log->Update(_("Unmounting CD-ROM...\n"), STEP_LAST);
UnmountCdrom(CDROM);
}
@@ -854,6 +874,9 @@ pkgUdevCdromDevices::Dlopen() /*{{{*/
libudev_handle = h;
udev_new = (udev* (*)(void)) dlsym(h, "udev_new");
udev_enumerate_add_match_property = (int (*)(udev_enumerate*, const char*, const char*))dlsym(h, "udev_enumerate_add_match_property");
+#if 0 // FIXME: uncomment on next ABI break
+ udev_enumerate_add_match_sysattr = (int (*)(udev_enumerate*, const char*, const char*))dlsym(h, "udev_enumerate_add_match_sysattr");
+#endif
udev_enumerate_scan_devices = (int (*)(udev_enumerate*))dlsym(h, "udev_enumerate_scan_devices");
udev_enumerate_get_list_entry = (udev_list_entry* (*)(udev_enumerate*))dlsym(h, "udev_enumerate_get_list_entry");
udev_device_new_from_syspath = (udev_device* (*)(udev*, const char*))dlsym(h, "udev_device_new_from_syspath");
@@ -867,8 +890,20 @@ pkgUdevCdromDevices::Dlopen() /*{{{*/
return true;
}
/*}}}*/
+
+ /*{{{*/
+// compatibility only with the old API/ABI, can be removed on the next
+// ABI break
+vector<CdromDevice>
+pkgUdevCdromDevices::Scan()
+{
+ bool CdromOnly = _config->FindB("APT::cdrom::CdromOnly", true);
+ return ScanForRemovable(CdromOnly);
+};
+ /*}}}*/
+ /*{{{*/
vector<CdromDevice>
-pkgUdevCdromDevices::Scan() /*{{{*/
+pkgUdevCdromDevices::ScanForRemovable(bool CdromOnly)
{
vector<CdromDevice> cdrom_devices;
struct udev_enumerate *enumerate;
@@ -880,7 +915,15 @@ pkgUdevCdromDevices::Scan() /*{{{*/
udev_ctx = udev_new();
enumerate = udev_enumerate_new (udev_ctx);
- udev_enumerate_add_match_property(enumerate, "ID_CDROM", "1");
+ if (CdromOnly)
+ udev_enumerate_add_match_property(enumerate, "ID_CDROM", "1");
+ else {
+#if 1 // FIXME: remove the next two lines on the next ABI break
+ int (*udev_enumerate_add_match_sysattr)(struct udev_enumerate *udev_enumerate, const char *property, const char *value);
+ udev_enumerate_add_match_sysattr = (int (*)(udev_enumerate*, const char*, const char*))dlsym(libudev_handle, "udev_enumerate_add_match_sysattr");
+#endif
+ udev_enumerate_add_match_sysattr(enumerate, "removable", "1");
+ }
udev_enumerate_scan_devices (enumerate);
devices = udev_enumerate_get_list_entry (enumerate);
@@ -892,11 +935,18 @@ pkgUdevCdromDevices::Scan() /*{{{*/
if (udevice == NULL)
continue;
const char* devnode = udev_device_get_devnode(udevice);
- const char* mountpath = udev_device_get_property_value(udevice, "FSTAB_DIR");
+
+ // try fstab_dir first
+ string mountpath;
+ const char* mp = udev_device_get_property_value(udevice, "FSTAB_DIR");
+ if (mp)
+ mountpath = string(mp);
+ else
+ mountpath = FindMountPointForDevice(devnode);
// fill in the struct
cdrom.DeviceName = string(devnode);
- if (mountpath) {
+ if (mountpath != "") {
cdrom.MountPath = mountpath;
string s = string(mountpath);
cdrom.Mounted = IsMounted(s);
diff --git a/apt-pkg/cdrom.h b/apt-pkg/cdrom.h
index 14ca1d810..e83c38582 100644
--- a/apt-pkg/cdrom.h
+++ b/apt-pkg/cdrom.h
@@ -68,7 +68,7 @@ class pkgCdrom /*{{{*/
/*}}}*/
-// class that uses libudev to find cdrom devices dynamically
+// class that uses libudev to find cdrom/removable devices dynamically
struct CdromDevice /*{{{*/
{
string DeviceName;
@@ -92,6 +92,9 @@ class pkgUdevCdromDevices /*{{{*/
struct udev_enumerate *(*udev_enumerate_new) (struct udev *udev);
struct udev_list_entry *(*udev_list_entry_get_next)(struct udev_list_entry *list_entry);
const char* (*udev_device_get_property_value)(struct udev_device *udev_device, const char *key);
+#if 0 // FIXME: uncomment on next ABI break
+ int (*udev_enumerate_add_match_sysattr)(struct udev_enumerate *udev_enumerate, const char *property, const char *value);
+#endif
// end libudev dlopen
public:
@@ -100,7 +103,12 @@ class pkgUdevCdromDevices /*{{{*/
// try to open
bool Dlopen();
+
+ // this is the new interface
+ vector<CdromDevice> ScanForRemovable(bool CdromOnly);
+ // FIXME: compat with the old interface/API/ABI only
vector<CdromDevice> Scan();
+
};
/*}}}*/
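
ScanForRemovable() is the new entry point; Scan() stays only as an ABI-compatible wrapper that reads APT::cdrom::CdromOnly. A hedged usage sketch, assuming the class and CdromDevice fields as declared above:

#include <apt-pkg/cdrom.h>
#include <iostream>
#include <string>
#include <vector>

int main()
{
   pkgUdevCdromDevices udev;
   if (udev.Dlopen() == false)          // libudev is loaded lazily via dlopen
      return 1;
   // false: list all removable devices, true: restrict to CD-ROM drives
   std::vector<CdromDevice> const devs = udev.ScanForRemovable(false);
   for (std::vector<CdromDevice>::const_iterator d = devs.begin(); d != devs.end(); ++d)
      std::cout << d->DeviceName << " mounted: " << d->Mounted
                << " path: " << d->MountPath << std::endl;
   return 0;
}
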
diff --git a/apt-pkg/contrib/cdromutl.cc b/apt-pkg/contrib/cdromutl.cc
index 68b980407..821e6d688 100644
--- a/apt-pkg/contrib/cdromutl.cc
+++ b/apt-pkg/contrib/cdromutl.cc
@@ -15,11 +15,11 @@
#include <apt-pkg/md5.h>
#include <apt-pkg/fileutl.h>
#include <apt-pkg/configuration.h>
+#include <apt-pkg/strutl.h>
#include <apti18n.h>
#include <sys/wait.h>
-#include <sys/errno.h>
#include <sys/statvfs.h>
#include <dirent.h>
#include <fcntl.h>
@@ -205,8 +205,11 @@ bool IdentCdrom(string CD,string &Res,unsigned int Version)
Hash.Add(Dir->d_name);
};
- if (chdir(StartDir.c_str()) != 0)
- return _error->Errno("chdir",_("Unable to change to %s"),StartDir.c_str());
+ if (chdir(StartDir.c_str()) != 0) {
+ _error->Errno("chdir",_("Unable to change to %s"),StartDir.c_str());
+ closedir(D);
+ return false;
+ }
closedir(D);
// Some stats from the fsys
@@ -234,3 +237,35 @@ bool IdentCdrom(string CD,string &Res,unsigned int Version)
return true;
}
/*}}}*/
+
+// FindMountPointForDevice - Find mountpoint for the given device /*{{{*/
+string FindMountPointForDevice(const char *devnode)
+{
+ char buf[255];
+ char *out[10];
+ int i=0;
+
+ // this is the order that mount uses as well
+ const char *mount[] = { "/etc/mtab",
+ "/proc/mount",
+ NULL };
+
+ for (i=0; mount[i] != NULL; i++) {
+ if (FileExists(mount[i])) {
+ FILE *f=fopen(mount[i], "r");
+ while ( fgets(buf, sizeof(buf), f) != NULL) {
+ if (strncmp(buf, devnode, strlen(devnode)) == 0) {
+ if(TokSplitString(' ', buf, out, 10))
+ {
+ fclose(f);
+ return string(out[1]);
+ }
+ }
+ }
+ fclose(f);
+ }
+ }
+
+ return string();
+}
+ /*}}}*/
diff --git a/apt-pkg/contrib/cdromutl.h b/apt-pkg/contrib/cdromutl.h
index 9d14249c5..38ed2996e 100644
--- a/apt-pkg/contrib/cdromutl.h
+++ b/apt-pkg/contrib/cdromutl.h
@@ -19,5 +19,6 @@ bool MountCdrom(string Path, string DeviceName="");
bool UnmountCdrom(string Path);
bool IdentCdrom(string CD,string &Res,unsigned int Version = 2);
bool IsMounted(string &Path);
+string FindMountPointForDevice(const char *device);
#endif
diff --git a/apt-pkg/contrib/configuration.h b/apt-pkg/contrib/configuration.h
index 175c1bef3..71e5a0e47 100644
--- a/apt-pkg/contrib/configuration.h
+++ b/apt-pkg/contrib/configuration.h
@@ -72,8 +72,8 @@ class Configuration
string Find(string const &Name, string const &Default) const {return Find(Name.c_str(),Default.c_str());};
string FindFile(const char *Name,const char *Default = 0) const;
string FindDir(const char *Name,const char *Default = 0) const;
- std::vector<string> FindVector(string const &Name) const;
std::vector<string> FindVector(const char *Name) const;
+ std::vector<string> FindVector(string const &Name) const { return FindVector(Name.c_str()); };
int FindI(const char *Name,int const &Default = 0) const;
int FindI(string const &Name,int const &Default = 0) const {return FindI(Name.c_str(),Default);};
bool FindB(const char *Name,bool const &Default = false) const;
diff --git a/apt-pkg/contrib/error.cc b/apt-pkg/contrib/error.cc
index e2e8d6e57..18810d2a4 100644
--- a/apt-pkg/contrib/error.cc
+++ b/apt-pkg/contrib/error.cc
@@ -18,6 +18,7 @@
#include <iostream>
#include <errno.h>
#include <stdio.h>
+#include <stdlib.h>
#include <unistd.h>
#include <string>
@@ -57,108 +58,112 @@
// GlobalError::GlobalError - Constructor /*{{{*/
GlobalError::GlobalError() : PendingFlag(false) {}
/*}}}*/
-// GlobalError::FatalE - Get part of the error string from errno /*{{{*/
-bool GlobalError::FatalE(const char *Function,const char *Description,...) {
- va_list args;
- va_start(args,Description);
- return InsertErrno(FATAL, Function, Description, args);
-}
- /*}}}*/
-// GlobalError::Errno - Get part of the error string from errno /*{{{*/
-bool GlobalError::Errno(const char *Function,const char *Description,...) {
- va_list args;
- va_start(args,Description);
- return InsertErrno(ERROR, Function, Description, args);
-}
- /*}}}*/
-// GlobalError::WarningE - Get part of the warning string from errno /*{{{*/
-bool GlobalError::WarningE(const char *Function,const char *Description,...) {
- va_list args;
- va_start(args,Description);
- return InsertErrno(WARNING, Function, Description, args);
-}
- /*}}}*/
-// GlobalError::NoticeE - Get part of the notice string from errno /*{{{*/
-bool GlobalError::NoticeE(const char *Function,const char *Description,...) {
- va_list args;
- va_start(args,Description);
- return InsertErrno(NOTICE, Function, Description, args);
-}
- /*}}}*/
-// GlobalError::DebugE - Get part of the debug string from errno /*{{{*/
-bool GlobalError::DebugE(const char *Function,const char *Description,...) {
- va_list args;
- va_start(args,Description);
- return InsertErrno(DEBUG, Function, Description, args);
+// GlobalError::FatalE, Errno, WarningE, NoticeE and DebugE - Add to the list/*{{{*/
+#define GEMessage(NAME, TYPE) \
+bool GlobalError::NAME (const char *Function, const char *Description,...) { \
+ va_list args; \
+ size_t msgSize = 400; \
+ int const errsv = errno; \
+ while (true) { \
+ va_start(args,Description); \
+ if (InsertErrno(TYPE, Function, Description, args, errsv, msgSize) == false) \
+ break; \
+ va_end(args); \
+ } \
+ return false; \
}
+GEMessage(FatalE, FATAL)
+GEMessage(Errno, ERROR)
+GEMessage(WarningE, WARNING)
+GEMessage(NoticeE, NOTICE)
+GEMessage(DebugE, DEBUG)
+#undef GEMessage
/*}}}*/
// GlobalError::InsertErrno - Get part of the errortype string from errno/*{{{*/
bool GlobalError::InsertErrno(MsgType const &type, const char *Function,
const char *Description,...) {
va_list args;
- va_start(args,Description);
- return InsertErrno(type, Function, Description, args);
+ size_t msgSize = 400;
+ int const errsv = errno;
+ while (true) {
+ va_start(args,Description);
+ if (InsertErrno(type, Function, Description, args, errsv, msgSize) == false)
+ break;
+ va_end(args);
+ }
+ return false;
}
/*}}}*/
// GlobalError::InsertErrno - formats an error message with the errno /*{{{*/
bool GlobalError::InsertErrno(MsgType type, const char* Function,
- const char* Description, va_list &args) {
- char S[400];
- snprintf(S, sizeof(S), "%s - %s (%i: %s)", Description,
- Function, errno, strerror(errno));
- return Insert(type, S, args);
-}
- /*}}}*/
-// GlobalError::Fatal - Add a fatal error to the list /*{{{*/
-bool GlobalError::Fatal(const char *Description,...) {
- va_list args;
- va_start(args,Description);
- return Insert(FATAL, Description, args);
-}
- /*}}}*/
-// GlobalError::Error - Add an error to the list /*{{{*/
-bool GlobalError::Error(const char *Description,...) {
- va_list args;
- va_start(args,Description);
- return Insert(ERROR, Description, args);
-}
- /*}}}*/
-// GlobalError::Warning - Add a warning to the list /*{{{*/
-bool GlobalError::Warning(const char *Description,...) {
- va_list args;
- va_start(args,Description);
- return Insert(WARNING, Description, args);
-}
- /*}}}*/
-// GlobalError::Notice - Add a notice to the list /*{{{*/
-bool GlobalError::Notice(const char *Description,...)
-{
- va_list args;
- va_start(args,Description);
- return Insert(NOTICE, Description, args);
+ const char* Description, va_list &args,
+ int const errsv, size_t &msgSize) {
+ char* S = (char*) malloc(msgSize);
+ int const n = snprintf(S, msgSize, "%s - %s (%i: %s)", Description,
+ Function, errsv, strerror(errsv));
+ if (n > -1 && ((unsigned int) n) < msgSize);
+ else {
+ if (n > -1)
+ msgSize = n + 1;
+ else
+ msgSize *= 2;
+ free(S);
+ return true;
+ }
+
+ bool const geins = Insert(type, S, args, msgSize);
+ free(S);
+ return geins;
}
/*}}}*/
-// GlobalError::Debug - Add a debug to the list /*{{{*/
-bool GlobalError::Debug(const char *Description,...)
-{
- va_list args;
- va_start(args,Description);
- return Insert(DEBUG, Description, args);
+// GlobalError::Fatal, Error, Warning, Notice and Debug - Add to the list/*{{{*/
+#define GEMessage(NAME, TYPE) \
+bool GlobalError::NAME (const char *Description,...) { \
+ va_list args; \
+ size_t msgSize = 400; \
+ while (true) { \
+ va_start(args,Description); \
+ if (Insert(TYPE, Description, args, msgSize) == false) \
+ break; \
+ va_end(args); \
+ } \
+ return false; \
}
+GEMessage(Fatal, FATAL)
+GEMessage(Error, ERROR)
+GEMessage(Warning, WARNING)
+GEMessage(Notice, NOTICE)
+GEMessage(Debug, DEBUG)
+#undef GEMessage
/*}}}*/
// GlobalError::Insert - Add a errotype message to the list /*{{{*/
bool GlobalError::Insert(MsgType const &type, const char *Description,...)
{
va_list args;
- va_start(args,Description);
- return Insert(type, Description, args);
+ size_t msgSize = 400;
+ while (true) {
+ va_start(args,Description);
+ if (Insert(type, Description, args, msgSize) == false)
+ break;
+ va_end(args);
+ }
+ return false;
}
/*}}}*/
// GlobalError::Insert - Insert a new item at the end /*{{{*/
bool GlobalError::Insert(MsgType type, const char* Description,
- va_list &args) {
- char S[400];
- vsnprintf(S,sizeof(S),Description,args);
+ va_list &args, size_t &msgSize) {
+ char* S = (char*) malloc(msgSize);
+ int const n = vsnprintf(S, msgSize, Description, args);
+ if (n > -1 && ((unsigned int) n) < msgSize);
+ else {
+ if (n > -1)
+ msgSize = n + 1;
+ else
+ msgSize *= 2;
+ free(S);
+ return true;
+ }
Item const m(S, type);
Messages.push_back(m);
@@ -169,6 +174,7 @@ bool GlobalError::Insert(MsgType type, const char* Description,
if (type == FATAL || type == DEBUG)
std::clog << m << std::endl;
+ free(S);
return false;
}
/*}}}*/
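
All of the message constructors above now share one idea: format with vsnprintf into a 400-byte guess, and if the output was truncated, enlarge the buffer (to n+1 when the C library reports the needed size, doubling otherwise) and redo the whole formatting pass with a freshly started va_list. A self-contained sketch of that retry loop, using std::vector instead of malloc/free:

#include <stdarg.h>
#include <stdio.h>
#include <string>
#include <vector>

static std::string FormatRetry(const char *Description, ...)
{
   size_t msgSize = 400;
   std::vector<char> S(msgSize);
   while (true)
   {
      va_list args;
      va_start(args, Description);
      int const n = vsnprintf(&S[0], msgSize, Description, args);
      va_end(args);
      if (n > -1 && static_cast<size_t>(n) < msgSize)
         break;                                   // the message fits
      msgSize = (n > -1) ? static_cast<size_t>(n) + 1 : msgSize * 2;
      S.resize(msgSize);
   }
   return std::string(&S[0]);
}
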
diff --git a/apt-pkg/contrib/error.h b/apt-pkg/contrib/error.h
index ae756dbc4..21c51c1be 100644
--- a/apt-pkg/contrib/error.h
+++ b/apt-pkg/contrib/error.h
@@ -307,9 +307,10 @@ private: /*{{{*/
std::list<MsgStack> Stacks;
bool InsertErrno(MsgType type, const char* Function,
- const char* Description, va_list &args);
+ const char* Description, va_list &args,
+ int const errsv, size_t &msgSize);
bool Insert(MsgType type, const char* Description,
- va_list &args);
+ va_list &args, size_t &msgSize);
/*}}}*/
};
/*}}}*/
diff --git a/apt-pkg/contrib/fileutl.cc b/apt-pkg/contrib/fileutl.cc
index f4ab066d7..50019872e 100644
--- a/apt-pkg/contrib/fileutl.cc
+++ b/apt-pkg/contrib/fileutl.cc
@@ -42,6 +42,11 @@
#include <errno.h>
#include <set>
#include <algorithm>
+
+#include <config.h>
+#ifdef WORDS_BIGENDIAN
+#include <inttypes.h>
+#endif
/*}}}*/
using namespace std;
@@ -62,6 +67,15 @@ bool RunScripts(const char *Cnf)
// This is the child
if (Child == 0)
{
+ if (_config->FindDir("DPkg::Chroot-Directory","/") != "/")
+ {
+ std::cerr << "Chrooting into "
+ << _config->FindDir("DPkg::Chroot-Directory")
+ << std::endl;
+ if (chroot(_config->FindDir("DPkg::Chroot-Directory","/").c_str()) != 0)
+ _exit(100);
+ }
+
if (chdir("/tmp/") != 0)
_exit(100);
@@ -191,7 +205,7 @@ int GetLock(string File,bool Errors)
/*}}}*/
// FileExists - Check if a file exists /*{{{*/
// ---------------------------------------------------------------------
-/* */
+/* Beware: Directories are also files! */
bool FileExists(string File)
{
struct stat Buf;
@@ -200,6 +214,17 @@ bool FileExists(string File)
return true;
}
/*}}}*/
+// RealFileExists - Check if a file exists and if it is really a file /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool RealFileExists(string File)
+{
+ struct stat Buf;
+ if (stat(File.c_str(),&Buf) != 0)
+ return false;
+ return ((Buf.st_mode & S_IFREG) != 0);
+}
+ /*}}}*/
// DirectoryExists - Check if a directory exists and is really one /*{{{*/
// ---------------------------------------------------------------------
/* */
@@ -304,6 +329,13 @@ std::vector<string> GetListOfFilesInDir(string const &Dir, std::vector<string> c
}
std::vector<string> List;
+
+ if (DirectoryExists(Dir.c_str()) == false)
+ {
+ _error->Error(_("List of files can't be created as '%s' is not a directory"), Dir.c_str());
+ return List;
+ }
+
Configuration::MatchAgainstConfig SilentIgnore("Dir::Ignore-Files-Silently");
DIR *D = opendir(Dir.c_str());
if (D == 0)
@@ -318,6 +350,20 @@ std::vector<string> GetListOfFilesInDir(string const &Dir, std::vector<string> c
if (Ent->d_name[0] == '.')
continue;
+ // Make sure it is a file and not something else
+ string const File = flCombine(Dir,Ent->d_name);
+#ifdef _DIRENT_HAVE_D_TYPE
+ if (Ent->d_type != DT_REG)
+#endif
+ {
+ if (RealFileExists(File.c_str()) == false)
+ {
+ if (SilentIgnore.Match(Ent->d_name) == false)
+ _error->Notice(_("Ignoring '%s' in directory '%s' as it is not a regular file"), Ent->d_name, Dir.c_str());
+ continue;
+ }
+ }
+
// check for accepted extension:
// no extension given -> periods are bad as hell!
// extensions given -> "" extension allows no extension
@@ -331,7 +377,7 @@ std::vector<string> GetListOfFilesInDir(string const &Dir, std::vector<string> c
if (Debug == true)
std::clog << "Bad file: " << Ent->d_name << " → no extension" << std::endl;
if (SilentIgnore.Match(Ent->d_name) == false)
- _error->Notice("Ignoring file '%s' in directory '%s' as it has no filename extension", Ent->d_name, Dir.c_str());
+ _error->Notice(_("Ignoring file '%s' in directory '%s' as it has no filename extension"), Ent->d_name, Dir.c_str());
continue;
}
}
@@ -340,7 +386,7 @@ std::vector<string> GetListOfFilesInDir(string const &Dir, std::vector<string> c
if (Debug == true)
std::clog << "Bad file: " << Ent->d_name << " → bad extension »" << flExtension(Ent->d_name) << "«" << std::endl;
if (SilentIgnore.Match(Ent->d_name) == false)
- _error->Notice("Ignoring file '%s' in directory '%s' as it has an invalid filename extension", Ent->d_name, Dir.c_str());
+ _error->Notice(_("Ignoring file '%s' in directory '%s' as it has an invalid filename extension"), Ent->d_name, Dir.c_str());
continue;
}
}
@@ -373,16 +419,6 @@ std::vector<string> GetListOfFilesInDir(string const &Dir, std::vector<string> c
continue;
}
- // Make sure it is a file and not something else
- string const File = flCombine(Dir,Ent->d_name);
- struct stat St;
- if (stat(File.c_str(),&St) != 0 || S_ISREG(St.st_mode) == 0)
- {
- if (Debug == true)
- std::clog << "Bad file: " << Ent->d_name << " → stat says not a good file" << std::endl;
- continue;
- }
-
if (Debug == true)
std::clog << "Accept file: " << Ent->d_name << " in " << Dir << std::endl;
List.push_back(File);
@@ -940,9 +976,16 @@ unsigned long FileFd::Size()
off_t orig_pos = lseek(iFd, 0, SEEK_CUR);
if (lseek(iFd, -4, SEEK_END) < 0)
return _error->Errno("lseek","Unable to seek to end of gzipped file");
+ size = 0L;
if (read(iFd, &size, 4) != 4)
return _error->Errno("read","Unable to read original size of gzipped file");
- size &= 0xFFFFFFFF;
+
+#ifdef WORDS_BIGENDIAN
+ uint32_t tmp_size = size;
+ uint8_t const * const p = (uint8_t const * const) &tmp_size;
+ tmp_size = (p[3] << 24) | (p[2] << 16) | (p[1] << 8) | p[0];
+ size = tmp_size;
+#endif
if (lseek(iFd, orig_pos, SEEK_SET) < 0)
return _error->Errno("lseek","Unable to seek in gzipped file");
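
The hunk above fixes FileFd::Size() on big-endian machines: gzip stores the uncompressed length (ISIZE, modulo 2^32) as the last four bytes of the file in little-endian order, so the raw read has to be byte-swapped there. An endian-independent sketch of the same read, assembling the value from individual bytes (error handling elided):

#include <stdint.h>
#include <stdio.h>

static uint32_t ReadGzipISize(FILE *f)
{
   unsigned char b[4];
   if (fseek(f, -4L, SEEK_END) != 0 || fread(b, 1, 4, f) != 4)
      return 0;
   return (uint32_t)b[0] | ((uint32_t)b[1] << 8) |
          ((uint32_t)b[2] << 16) | ((uint32_t)b[3] << 24);
}
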
diff --git a/apt-pkg/contrib/fileutl.h b/apt-pkg/contrib/fileutl.h
index 1380f06b4..cde288ad2 100644
--- a/apt-pkg/contrib/fileutl.h
+++ b/apt-pkg/contrib/fileutl.h
@@ -72,6 +72,7 @@ class FileFd
// Simple manipulators
inline int Fd() {return iFd;};
inline void Fd(int fd) {iFd = fd;};
+ inline gzFile gzFd() {return gz;};
inline bool IsOpen() {return iFd >= 0;};
inline bool Failed() {return (Flags & Fail) == Fail;};
inline void EraseOnFailure() {Flags |= DelOnFail;};
@@ -93,6 +94,7 @@ bool RunScripts(const char *Cnf);
bool CopyFile(FileFd &From,FileFd &To);
int GetLock(string File,bool Errors = true);
bool FileExists(string File);
+bool RealFileExists(string File);
bool DirectoryExists(string const &Path) __attrib_const;
bool CreateDirectory(string const &Parent, string const &Path);
diff --git a/apt-pkg/contrib/hashes.cc b/apt-pkg/contrib/hashes.cc
index 985d89d90..66ae33146 100644
--- a/apt-pkg/contrib/hashes.cc
+++ b/apt-pkg/contrib/hashes.cc
@@ -23,7 +23,7 @@
const char* HashString::_SupportedHashes[] =
{
- "SHA256", "SHA1", "MD5Sum", NULL
+ "SHA512", "SHA256", "SHA1", "MD5Sum", NULL
};
HashString::HashString()
@@ -57,6 +57,7 @@ bool HashString::VerifyFile(string filename) const /*{{{*/
MD5Summation MD5;
SHA1Summation SHA1;
SHA256Summation SHA256;
+   SHA512Summation SHA512;
string fileHash;
FileFd Fd(filename, FileFd::ReadOnly);
@@ -75,6 +76,11 @@ bool HashString::VerifyFile(string filename) const /*{{{*/
SHA256.AddFD(Fd.Fd(), Fd.Size());
fileHash = (string)SHA256.Result();
}
+ else if (Type == "SHA512")
+ {
+ SHA512.AddFD(Fd.Fd(), Fd.Size());
+ fileHash = (string)SHA512.Result();
+ }
Fd.Close();
if(_config->FindB("Debug::Hashes",false) == true)
@@ -119,6 +125,7 @@ bool Hashes::AddFD(int Fd,unsigned long Size)
MD5.Add(Buf,Res);
SHA1.Add(Buf,Res);
SHA256.Add(Buf,Res);
+ SHA512.Add(Buf,Res);
}
return true;
}
diff --git a/apt-pkg/contrib/hashes.h b/apt-pkg/contrib/hashes.h
index 264f7fe90..4b6a08b1f 100644
--- a/apt-pkg/contrib/hashes.h
+++ b/apt-pkg/contrib/hashes.h
@@ -16,7 +16,7 @@
#include <apt-pkg/md5.h>
#include <apt-pkg/sha1.h>
-#include <apt-pkg/sha256.h>
+#include <apt-pkg/sha2.h>
#include <algorithm>
#include <vector>
@@ -60,10 +60,11 @@ class Hashes
MD5Summation MD5;
SHA1Summation SHA1;
SHA256Summation SHA256;
+ SHA512Summation SHA512;
inline bool Add(const unsigned char *Data,unsigned long Size)
{
- return MD5.Add(Data,Size) && SHA1.Add(Data,Size) && SHA256.Add(Data,Size);
+ return MD5.Add(Data,Size) && SHA1.Add(Data,Size) && SHA256.Add(Data,Size) && SHA512.Add(Data,Size);
};
inline bool Add(const char *Data) {return Add((unsigned char *)Data,strlen(Data));};
bool AddFD(int Fd,unsigned long Size);
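
With SHA512 added to the Hashes helper, a single pass over the data now yields all four digests. A hedged usage sketch, assuming the summation members are publicly reachable as listed above:

#include <apt-pkg/hashes.h>
#include <iostream>
#include <string>

int main()
{
   Hashes hashes;
   hashes.Add("The quick brown fox jumps over the lazy dog");
   std::cout << "MD5:    " << std::string(hashes.MD5.Result())    << std::endl;
   std::cout << "SHA1:   " << std::string(hashes.SHA1.Result())   << std::endl;
   std::cout << "SHA256: " << std::string(hashes.SHA256.Result()) << std::endl;
   std::cout << "SHA512: " << std::string(hashes.SHA512.Result()) << std::endl;
   return 0;
}
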
diff --git a/apt-pkg/contrib/hashsum_template.h b/apt-pkg/contrib/hashsum_template.h
new file mode 100644
index 000000000..7667baf92
--- /dev/null
+++ b/apt-pkg/contrib/hashsum_template.h
@@ -0,0 +1,87 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: hashsum_template.h,v 1.3 2001/05/07 05:05:47 jgg Exp $
+/* ######################################################################
+
+ HashSumValueTemplate - Generic Storage for a hash value
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef APTPKG_HASHSUM_TEMPLATE_H
+#define APTPKG_HASHSUM_TEMPLATE_H
+
+#include <string>
+#include <cstring>
+#include <algorithm>
+#include <stdint.h>
+
+using std::string;
+using std::min;
+
+template<int N>
+class HashSumValue
+{
+ unsigned char Sum[N/8];
+
+ public:
+
+ // Accessors
+ bool operator ==(const HashSumValue &rhs) const
+ {
+ return memcmp(Sum,rhs.Sum,sizeof(Sum)) == 0;
+ };
+
+ string Value() const
+ {
+ char Conv[16] =
+ { '0','1','2','3','4','5','6','7','8','9','a','b',
+ 'c','d','e','f'
+ };
+ char Result[((N/8)*2)+1];
+ Result[(N/8)*2] = 0;
+
+ // Convert each char into two letters
+ int J = 0;
+ int I = 0;
+ for (; I != (N/8)*2; J++,I += 2)
+ {
+ Result[I] = Conv[Sum[J] >> 4];
+ Result[I + 1] = Conv[Sum[J] & 0xF];
+ }
+ return string(Result);
+ };
+
+ inline void Value(unsigned char S[N/8])
+ {
+ for (int I = 0; I != sizeof(Sum); I++)
+ S[I] = Sum[I];
+ };
+
+ inline operator string() const
+ {
+ return Value();
+ };
+
+ bool Set(string Str)
+ {
+ return Hex2Num(Str,Sum,sizeof(Sum));
+ };
+
+ inline void Set(unsigned char S[N/8])
+ {
+ for (int I = 0; I != sizeof(Sum); I++)
+ Sum[I] = S[I];
+ };
+
+ HashSumValue(string Str)
+ {
+ memset(Sum,0,sizeof(Sum));
+ Set(Str);
+ }
+ HashSumValue()
+ {
+ memset(Sum,0,sizeof(Sum));
+ }
+};
+
+#endif
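
A short usage sketch for the new template, via the MD5SumValue typedef that md5.h provides further down in this diff. It sticks to Set(unsigned char[16]), Value() and operator== so it needs no headers beyond md5.h; the string-based Set() additionally relies on Hex2Num() from strutl.h:

#include <apt-pkg/md5.h>      // typedef HashSumValue<128> MD5SumValue
#include <iostream>

int main()
{
   unsigned char raw[16] = { 0xd4, 0x1d, 0x8c, 0xd9, 0x8f, 0x00, 0xb2, 0x04,
                             0xe9, 0x80, 0x09, 0x98, 0xec, 0xf8, 0x42, 0x7e };
   MD5SumValue sum;
   sum.Set(raw);                              // raw bytes in
   std::cout << sum.Value() << std::endl;     // hex out: d41d8cd98f00b204e9800998ecf8427e
   MD5SumValue other;
   other.Set(raw);
   std::cout << (sum == other) << std::endl;  // 1
   return 0;
}
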
diff --git a/apt-pkg/contrib/md5.cc b/apt-pkg/contrib/md5.cc
index c0fa8493d..6c60ffd74 100644
--- a/apt-pkg/contrib/md5.cc
+++ b/apt-pkg/contrib/md5.cc
@@ -165,61 +165,6 @@ static void MD5Transform(uint32_t buf[4], uint32_t const in[16])
buf[3] += d;
}
/*}}}*/
-// MD5SumValue::MD5SumValue - Constructs the summation from a string /*{{{*/
-// ---------------------------------------------------------------------
-/* The string form of a MD5 is a 32 character hex number */
-MD5SumValue::MD5SumValue(string Str)
-{
- memset(Sum,0,sizeof(Sum));
- Set(Str);
-}
- /*}}}*/
-// MD5SumValue::MD5SumValue - Default constructor /*{{{*/
-// ---------------------------------------------------------------------
-/* Sets the value to 0 */
-MD5SumValue::MD5SumValue()
-{
- memset(Sum,0,sizeof(Sum));
-}
- /*}}}*/
-// MD5SumValue::Set - Set the sum from a string /*{{{*/
-// ---------------------------------------------------------------------
-/* Converts the hex string into a set of chars */
-bool MD5SumValue::Set(string Str)
-{
- return Hex2Num(Str,Sum,sizeof(Sum));
-}
- /*}}}*/
-// MD5SumValue::Value - Convert the number into a string /*{{{*/
-// ---------------------------------------------------------------------
-/* Converts the set of chars into a hex string in lower case */
-string MD5SumValue::Value() const
-{
- char Conv[16] = {'0','1','2','3','4','5','6','7','8','9','a','b',
- 'c','d','e','f'};
- char Result[33];
- Result[32] = 0;
-
- // Convert each char into two letters
- int J = 0;
- int I = 0;
- for (; I != 32; J++, I += 2)
- {
- Result[I] = Conv[Sum[J] >> 4];
- Result[I + 1] = Conv[Sum[J] & 0xF];
- }
-
- return string(Result);
-}
- /*}}}*/
-// MD5SumValue::operator == - Comparitor /*{{{*/
-// ---------------------------------------------------------------------
-/* Call memcmp on the buffer */
-bool MD5SumValue::operator ==(const MD5SumValue &rhs) const
-{
- return memcmp(Sum,rhs.Sum,sizeof(Sum)) == 0;
-}
- /*}}}*/
// MD5Summation::MD5Summation - Initialize the summer /*{{{*/
// ---------------------------------------------------------------------
/* This assigns the deep magic initial values */
@@ -353,7 +298,7 @@ MD5SumValue MD5Summation::Result()
}
MD5SumValue V;
- memcpy(V.Sum,buf,16);
+ V.Set((char *)buf);
return V;
}
/*}}}*/
diff --git a/apt-pkg/contrib/md5.h b/apt-pkg/contrib/md5.h
index 96c8975b4..9cc88cfbe 100644
--- a/apt-pkg/contrib/md5.h
+++ b/apt-pkg/contrib/md5.h
@@ -32,28 +32,11 @@
using std::string;
using std::min;
-class MD5Summation;
+#include "hashsum_template.h"
-class MD5SumValue
-{
- friend class MD5Summation;
- unsigned char Sum[4*4];
-
- public:
-
- // Accessors
- bool operator ==(const MD5SumValue &rhs) const;
- string Value() const;
- inline void Value(unsigned char S[16])
- {for (int I = 0; I != sizeof(Sum); I++) S[I] = Sum[I];};
- inline operator string() const {return Value();};
- bool Set(string Str);
- inline void Set(unsigned char S[16])
- {for (int I = 0; I != sizeof(Sum); I++) Sum[I] = S[I];};
+class MD5Summation;
- MD5SumValue(string Str);
- MD5SumValue();
-};
+typedef HashSumValue<128> MD5SumValue;
class MD5Summation
{
diff --git a/apt-pkg/contrib/mmap.cc b/apt-pkg/contrib/mmap.cc
index 69fb61fca..19381ae47 100644
--- a/apt-pkg/contrib/mmap.cc
+++ b/apt-pkg/contrib/mmap.cc
@@ -106,7 +106,7 @@ bool MMap::Map(FileFd &Fd)
/* */
bool MMap::Close(bool DoSync)
{
- if ((Flags & UnMapped) == UnMapped || Base == 0 || iSize == 0)
+ if ((Flags & UnMapped) == UnMapped || validData() == false || iSize == 0)
return true;
if (DoSync == true)
@@ -237,11 +237,19 @@ DynamicMMap::DynamicMMap(unsigned long Flags,unsigned long const &WorkSpace,
if ((this->Flags & Fallback) != Fallback) {
// Set the permissions.
int Prot = PROT_READ;
+#ifdef MAP_ANONYMOUS
int Map = MAP_PRIVATE | MAP_ANONYMOUS;
+#else
+ int Map = MAP_PRIVATE | MAP_ANON;
+#endif
if ((this->Flags & ReadOnly) != ReadOnly)
Prot |= PROT_WRITE;
if ((this->Flags & Public) == Public)
+#ifdef MAP_ANONYMOUS
Map = MAP_SHARED | MAP_ANONYMOUS;
+#else
+ Map = MAP_SHARED | MAP_ANON;
+#endif
// use anonymous mmap() to get the memory
Base = (unsigned char*) mmap(0, WorkSpace, Prot, Map, -1, 0);
@@ -266,6 +274,8 @@ DynamicMMap::~DynamicMMap()
{
if (Fd == 0)
{
+ if (validData() == false)
+ return;
#ifdef _POSIX_MAPPED_FILES
munmap(Base, WorkSpace);
#else
diff --git a/apt-pkg/contrib/mmap.h b/apt-pkg/contrib/mmap.h
index 5ca951204..2bf2c1540 100644
--- a/apt-pkg/contrib/mmap.h
+++ b/apt-pkg/contrib/mmap.h
@@ -61,6 +61,8 @@ class MMap
inline operator void *() {return Base;};
inline void *Data() {return Base;};
inline unsigned long Size() {return iSize;};
+ inline void AddSize(unsigned long const size) {iSize += size;};
+ inline bool validData() const { return Base != (void *)-1 && Base != 0; };
// File manipulators
bool Sync();
diff --git a/apt-pkg/contrib/netrc.cc b/apt-pkg/contrib/netrc.cc
index d8027fc24..34f472ee1 100644
--- a/apt-pkg/contrib/netrc.cc
+++ b/apt-pkg/contrib/netrc.cc
@@ -160,10 +160,10 @@ void maybe_add_auth (URI &Uri, string NetRCFile)
{
char login[64] = "";
char password[64] = "";
- char *netrcfile = strdupa (NetRCFile.c_str ());
+ char *netrcfile = strdup(NetRCFile.c_str());
// first check for a generic host based netrc entry
- char *host = strdupa (Uri.Host.c_str ());
+ char *host = strdup(Uri.Host.c_str());
if (host && parsenetrc (host, login, password, netrcfile) == 0)
{
if (_config->FindB("Debug::Acquire::netrc", false) == true)
@@ -173,13 +173,16 @@ void maybe_add_auth (URI &Uri, string NetRCFile)
<< std::endl;
Uri.User = string (login);
Uri.Password = string (password);
+ free(netrcfile);
+ free(host);
return;
}
+ free(host);
// if host did not work, try Host+Path next, this will trigger
// a lookup uri.startswith(host) in the netrc file parser (because
// of the "/"
- char *hostpath = strdupa (string(Uri.Host+Uri.Path).c_str ());
+ char *hostpath = strdup(string(Uri.Host+Uri.Path).c_str());
if (hostpath && parsenetrc (hostpath, login, password, netrcfile) == 0)
{
if (_config->FindB("Debug::Acquire::netrc", false) == true)
@@ -189,8 +192,9 @@ void maybe_add_auth (URI &Uri, string NetRCFile)
<< std::endl;
Uri.User = string (login);
Uri.Password = string (password);
- return;
}
+ free(netrcfile);
+ free(hostpath);
}
}
}
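
strdupa() is a GNU extension that allocates on the stack and is not available on every platform; switching to strdup() means every exit path has to free() the copies, which is what the added calls above do. A small sketch of the portable pattern (the host check is a hypothetical stand-in for parsenetrc()):

    // Portable replacement for strdupa(): heap-allocate, then free on
    // every return path.
    #include <stdlib.h>
    #include <string.h>
    #include <string>

    static bool HostHasDot(std::string const &Host)
    {
       char *host = strdup(Host.c_str());               // heap copy, must be freed
       if (host == NULL)
          return false;
       bool const Result = strchr(host, '.') != NULL;   // stand-in for parsenetrc()
       free(host);                                      // matches the strdup() above
       return Result;
    }
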
diff --git a/apt-pkg/contrib/progress.cc b/apt-pkg/contrib/progress.cc
index cffdddc4f..84ee4c124 100644
--- a/apt-pkg/contrib/progress.cc
+++ b/apt-pkg/contrib/progress.cc
@@ -65,27 +65,18 @@ void OpProgress::OverallProgress(unsigned long Current, unsigned long Total,
// OpProgress::SubProgress - Set the sub progress state /*{{{*/
// ---------------------------------------------------------------------
/* */
-void OpProgress::SubProgress(unsigned long SubTotal,const string &Op)
+void OpProgress::SubProgress(unsigned long SubTotal,const string &Op,
+ float const Percent)
{
this->SubTotal = SubTotal;
- SubOp = Op;
- if (Total == 0)
- Percent = 0;
+ if (Op.empty() == false)
+ SubOp = Op;
+ if (Total == 0 || Percent == 0)
+ this->Percent = 0;
+ else if (Percent != -1)
+ this->Percent = this->Current += (Size*Percent)/SubTotal;
else
- Percent = Current*100.0/Total;
- Update();
-}
- /*}}}*/
-// OpProgress::SubProgress - Set the sub progress state /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-void OpProgress::SubProgress(unsigned long SubTotal)
-{
- this->SubTotal = SubTotal;
- if (Total == 0)
- Percent = 0;
- else
- Percent = Current*100.0/Total;
+ this->Percent = Current*100.0/Total;
Update();
}
/*}}}*/
@@ -135,7 +126,7 @@ bool OpProgress::CheckChange(float Interval)
OpTextProgress::OpTextProgress(Configuration &Config) :
NoUpdate(false), NoDisplay(false), LastLen(0)
{
- if (Config.FindI("quiet",0) >= 1)
+ if (Config.FindI("quiet",0) >= 1 || Config.FindB("quiet::NoUpdate", false) == true)
NoUpdate = true;
if (Config.FindI("quiet",0) >= 2)
NoDisplay = true;
diff --git a/apt-pkg/contrib/progress.h b/apt-pkg/contrib/progress.h
index 7dd004f7e..3a914d17f 100644
--- a/apt-pkg/contrib/progress.h
+++ b/apt-pkg/contrib/progress.h
@@ -55,8 +55,7 @@ class OpProgress
public:
void Progress(unsigned long Current);
- void SubProgress(unsigned long SubTotal);
- void SubProgress(unsigned long SubTotal,const string &Op);
+ void SubProgress(unsigned long SubTotal, const string &Op = "", float const Percent = -1);
void OverallProgress(unsigned long Current,unsigned long Total,
unsigned long Size,const string &Op);
virtual void Done() {};
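
The two SubProgress() overloads are merged into one with an optional operation string and a Percent hint: -1 keeps the old Current/Total based calculation, 0 resets the percentage, and any other value advances Current directly. A sketch of hypothetical call sites using the defaults:

    // Hypothetical call sites for the merged SubProgress() signature.
    #include <apt-pkg/progress.h>

    static void Example(OpProgress &Progress)
    {
       Progress.OverallProgress(0, 100, 100, "Reading package lists");
       Progress.SubProgress(1000);                      // keep the current SubOp text
       Progress.SubProgress(1000, "Parsing Packages");  // set a new SubOp
       Progress.SubProgress(1000, "", 50);              // keep SubOp, use the Percent hint
       Progress.Done();
    }
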
diff --git a/apt-pkg/contrib/sha1.cc b/apt-pkg/contrib/sha1.cc
index eae52d52f..0b1c16dc3 100644
--- a/apt-pkg/contrib/sha1.cc
+++ b/apt-pkg/contrib/sha1.cc
@@ -178,67 +178,6 @@ static void SHA1Transform(uint32_t state[5],uint8_t const buffer[64])
}
/*}}}*/
-// SHA1SumValue::SHA1SumValue - Constructs the summation from a string /*{{{*/
-// ---------------------------------------------------------------------
-/* The string form of a SHA1 is a 40 character hex number */
-SHA1SumValue::SHA1SumValue(string Str)
-{
- memset(Sum,0,sizeof(Sum));
- Set(Str);
-}
-
- /*}}} */
-// SHA1SumValue::SHA1SumValue - Default constructor /*{{{*/
-// ---------------------------------------------------------------------
-/* Sets the value to 0 */
-SHA1SumValue::SHA1SumValue()
-{
- memset(Sum,0,sizeof(Sum));
-}
-
- /*}}} */
-// SHA1SumValue::Set - Set the sum from a string /*{{{*/
-// ---------------------------------------------------------------------
-/* Converts the hex string into a set of chars */
-bool SHA1SumValue::Set(string Str)
-{
- return Hex2Num(Str,Sum,sizeof(Sum));
-}
-
- /*}}} */
-// SHA1SumValue::Value - Convert the number into a string /*{{{*/
-// ---------------------------------------------------------------------
-/* Converts the set of chars into a hex string in lower case */
-string SHA1SumValue::Value() const
-{
- char Conv[16] =
- { '0','1','2','3','4','5','6','7','8','9','a','b',
- 'c','d','e','f'
- };
- char Result[41];
- Result[40] = 0;
-
- // Convert each char into two letters
- int J = 0;
- int I = 0;
- for (; I != 40; J++,I += 2)
- {
- Result[I] = Conv[Sum[J] >> 4];
- Result[I + 1] = Conv[Sum[J] & 0xF];
- }
-
- return string(Result);
-}
-
- /*}}} */
-// SHA1SumValue::operator == - Comparator /*{{{*/
-// ---------------------------------------------------------------------
-/* Call memcmp on the buffer */
-bool SHA1SumValue::operator == (const SHA1SumValue & rhs) const
-{
- return memcmp(Sum,rhs.Sum,sizeof(Sum)) == 0;
-}
- /*}}}*/
// SHA1Summation::SHA1Summation - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
@@ -290,11 +229,13 @@ SHA1SumValue SHA1Summation::Result()
// Transfer over the result
SHA1SumValue Value;
+ char res[20];
for (unsigned i = 0; i < 20; i++)
{
- Value.Sum[i] = (unsigned char)
+ res[i] = (unsigned char)
((state[i >> 2] >> ((3 - (i & 3)) * 8)) & 255);
}
+ Value.Set(res);
return Value;
}
/*}}}*/
diff --git a/apt-pkg/contrib/sha1.h b/apt-pkg/contrib/sha1.h
index 8ddd889f1..e7683fa7b 100644
--- a/apt-pkg/contrib/sha1.h
+++ b/apt-pkg/contrib/sha1.h
@@ -21,28 +21,11 @@
using std::string;
using std::min;
-class SHA1Summation;
+#include "hashsum_template.h"
-class SHA1SumValue
-{
- friend class SHA1Summation;
- unsigned char Sum[20];
-
- public:
-
- // Accessors
- bool operator ==(const SHA1SumValue &rhs) const;
- string Value() const;
- inline void Value(unsigned char S[20])
- {for (int I = 0; I != sizeof(Sum); I++) S[I] = Sum[I];};
- inline operator string() const {return Value();};
- bool Set(string Str);
- inline void Set(unsigned char S[20])
- {for (int I = 0; I != sizeof(Sum); I++) Sum[I] = S[I];};
+class SHA1Summation;
- SHA1SumValue(string Str);
- SHA1SumValue();
-};
+typedef HashSumValue<160> SHA1SumValue;
class SHA1Summation
{
diff --git a/apt-pkg/contrib/sha2.cc b/apt-pkg/contrib/sha2.cc
new file mode 100644
index 000000000..4604d3167
--- /dev/null
+++ b/apt-pkg/contrib/sha2.cc
@@ -0,0 +1,43 @@
+/*
+ * Cryptographic API. {{{
+ *
+ * SHA-512, as specified in
+ * http://csrc.nist.gov/cryptval/shs/sha256-384-512.pdf
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */ /*}}}*/
+
+#ifdef __GNUG__
+#pragma implementation "apt-pkg/sha2.h"
+#endif
+
+#include <apt-pkg/sha2.h>
+#include <apt-pkg/strutl.h>
+
+// SHA2Summation::AddFD - Add content of file into the checksum /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool SHA2SummationBase::AddFD(int Fd,unsigned long Size){
+ unsigned char Buf[64 * 64];
+ int Res = 0;
+ int ToEOF = (Size == 0);
+ while (Size != 0 || ToEOF)
+ {
+ unsigned n = sizeof(Buf);
+ if (!ToEOF) n = min(Size,(unsigned long)n);
+ Res = read(Fd,Buf,n);
+ if (Res < 0 || (!ToEOF && (unsigned) Res != n)) // error, or short read
+ return false;
+ if (ToEOF && Res == 0) // EOF
+ break;
+ Size -= Res;
+ Add(Buf,Res);
+ }
+ return true;
+}
+ /*}}}*/
+
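
SHA2SummationBase::AddFD() streams a file descriptor into the hash in 4 KiB chunks, with Size == 0 meaning "read until EOF". A minimal sketch of hashing a whole file with it; Result().Value() is assumed to return the lower-case hex string, as the removed per-algorithm value classes did:

    // Hash an entire file through the shared AddFD() helper.
    #include <apt-pkg/sha2.h>
    #include <fcntl.h>
    #include <unistd.h>
    #include <iostream>

    int main(int argc, char **argv)
    {
       if (argc < 2)
          return 1;
       int const Fd = open(argv[1], O_RDONLY);
       if (Fd < 0)
          return 1;
       SHA256Summation Sum;
       bool const Ok = Sum.AddFD(Fd, 0);                // 0 == hash to end of file
       close(Fd);
       if (Ok == false)
          return 1;
       std::cout << Sum.Result().Value() << std::endl;
       return 0;
    }
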
diff --git a/apt-pkg/contrib/sha2.h b/apt-pkg/contrib/sha2.h
new file mode 100644
index 000000000..bd5472527
--- /dev/null
+++ b/apt-pkg/contrib/sha2.h
@@ -0,0 +1,114 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: sha512.h,v 1.3 2001/05/07 05:05:47 jgg Exp $
+/* ######################################################################
+
+ SHA{512,256}SumValue - Storage for a SHA-{512,256} hash.
+ SHA{512,256}Summation - SHA-{512,256} Secure Hash Algorithm.
+
+ This is a C++ interface to a set of SHA{512,256}Sum functions, that mirrors
+ the equivalent MD5 & SHA1 classes.
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef APTPKG_SHA2_H
+#define APTPKG_SHA2_H
+
+#include <string>
+#include <cstring>
+#include <algorithm>
+#include <stdint.h>
+
+#include "sha2_internal.h"
+#include "hashsum_template.h"
+
+using std::string;
+using std::min;
+
+class SHA512Summation;
+class SHA256Summation;
+
+typedef HashSumValue<512> SHA512SumValue;
+typedef HashSumValue<256> SHA256SumValue;
+
+class SHA2SummationBase
+{
+ protected:
+ bool Done;
+ public:
+ virtual bool Add(const unsigned char *inbuf,unsigned long inlen) = 0;
+ virtual bool AddFD(int Fd,unsigned long Size);
+
+ inline bool Add(const char *Data)
+ {
+ return Add((unsigned char *)Data,strlen(Data));
+ };
+ inline bool Add(const unsigned char *Beg,const unsigned char *End)
+ {
+ return Add(Beg,End-Beg);
+ };
+ void Result();
+};
+
+class SHA256Summation : public SHA2SummationBase
+{
+ SHA256_CTX ctx;
+ unsigned char Sum[32];
+
+ public:
+ virtual bool Add(const unsigned char *inbuf, unsigned long len)
+ {
+ if (Done)
+ return false;
+ SHA256_Update(&ctx, inbuf, len);
+ return true;
+ };
+ SHA256SumValue Result()
+ {
+ if (!Done) {
+ SHA256_Final(Sum, &ctx);
+ Done = true;
+ }
+ SHA256SumValue res;
+ res.Set(Sum);
+ return res;
+ };
+ SHA256Summation()
+ {
+ SHA256_Init(&ctx);
+ Done = false;
+ };
+};
+
+class SHA512Summation : public SHA2SummationBase
+{
+ SHA512_CTX ctx;
+ unsigned char Sum[64];
+
+ public:
+ virtual bool Add(const unsigned char *inbuf, unsigned long len)
+ {
+ if (Done)
+ return false;
+ SHA512_Update(&ctx, inbuf, len);
+ return true;
+ };
+ SHA512SumValue Result()
+ {
+ if (!Done) {
+ SHA512_Final(Sum, &ctx);
+ Done = true;
+ }
+ SHA512SumValue res;
+ res.Set(Sum);
+ return res;
+ };
+ SHA512Summation()
+ {
+ SHA512_Init(&ctx);
+ Done = false;
+ };
+};
+
+
+#endif
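
The new header wires the C implementation into the same shape as the MD5/SHA1 classes: Add() feeds data, Result() finalises and, via the Done flag, makes any later Add() fail. A short usage sketch (Value() on the sum values is assumed to mirror the removed SHA256SumValue accessor):

    // Usage sketch for both new summation classes.
    #include <apt-pkg/sha2.h>
    #include <iostream>

    int main()
    {
       SHA256Summation S256;
       SHA512Summation S512;
       S256.Add("The quick brown fox");
       S512.Add("The quick brown fox");
       std::cout << S256.Result().Value() << std::endl; // 64 hex characters
       std::cout << S512.Result().Value() << std::endl; // 128 hex characters
       if (S256.Add("more") == false)                   // Done is set after Result()
          std::cout << "summation already finalised" << std::endl;
       return 0;
    }
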
diff --git a/apt-pkg/contrib/sha256.cc b/apt-pkg/contrib/sha256.cc
deleted file mode 100644
index e380c13ae..000000000
--- a/apt-pkg/contrib/sha256.cc
+++ /dev/null
@@ -1,424 +0,0 @@
-/*
- * Cryptographic API. {{{
- *
- * SHA-256, as specified in
- * http://csrc.nist.gov/cryptval/shs/sha256-384-512.pdf
- *
- * SHA-256 code by Jean-Luc Cooke <jlcooke@certainkey.com>.
- *
- * Copyright (c) Jean-Luc Cooke <jlcooke@certainkey.com>
- * Copyright (c) Andrew McDonald <andrew@mcdonald.org.uk>
- * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
- *
- * Ported from the Linux kernel to Apt by Anthony Towns <ajt@debian.org>
- *
- * This program is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License as published by the Free
- * Software Foundation; either version 2 of the License, or (at your option)
- * any later version.
- *
- */ /*}}}*/
-
-#ifdef __GNUG__
-#pragma implementation "apt-pkg/sha256.h"
-#endif
-
-
-#define SHA256_DIGEST_SIZE 32
-#define SHA256_HMAC_BLOCK_SIZE 64
-
-#define ror32(value,bits) (((value) >> (bits)) | ((value) << (32 - (bits))))
-
-#include <apt-pkg/sha256.h>
-#include <apt-pkg/strutl.h>
-#include <string.h>
-#include <unistd.h>
-#include <stdint.h>
-#include <stdlib.h>
-#include <stdio.h>
-#include <arpa/inet.h>
-
-typedef uint32_t u32;
-typedef uint8_t u8;
-
-static inline u32 Ch(u32 x, u32 y, u32 z)
-{
- return z ^ (x & (y ^ z));
-}
-
-static inline u32 Maj(u32 x, u32 y, u32 z)
-{
- return (x & y) | (z & (x | y));
-}
-
-#define e0(x) (ror32(x, 2) ^ ror32(x,13) ^ ror32(x,22))
-#define e1(x) (ror32(x, 6) ^ ror32(x,11) ^ ror32(x,25))
-#define s0(x) (ror32(x, 7) ^ ror32(x,18) ^ (x >> 3))
-#define s1(x) (ror32(x,17) ^ ror32(x,19) ^ (x >> 10))
-
-#define H0 0x6a09e667
-#define H1 0xbb67ae85
-#define H2 0x3c6ef372
-#define H3 0xa54ff53a
-#define H4 0x510e527f
-#define H5 0x9b05688c
-#define H6 0x1f83d9ab
-#define H7 0x5be0cd19
-
-static inline void LOAD_OP(int I, u32 *W, const u8 *input) /*{{{*/
-{
- W[I] = ( ((u32) input[I * 4 + 0] << 24)
- | ((u32) input[I * 4 + 1] << 16)
- | ((u32) input[I * 4 + 2] << 8)
- | ((u32) input[I * 4 + 3]));
-}
- /*}}}*/
-static inline void BLEND_OP(int I, u32 *W)
-{
- W[I] = s1(W[I-2]) + W[I-7] + s0(W[I-15]) + W[I-16];
-}
-
-static void sha256_transform(u32 *state, const u8 *input) /*{{{*/
-{
- u32 a, b, c, d, e, f, g, h, t1, t2;
- u32 W[64];
- int i;
-
- /* load the input */
- for (i = 0; i < 16; i++)
- LOAD_OP(i, W, input);
-
- /* now blend */
- for (i = 16; i < 64; i++)
- BLEND_OP(i, W);
-
- /* load the state into our registers */
- a=state[0]; b=state[1]; c=state[2]; d=state[3];
- e=state[4]; f=state[5]; g=state[6]; h=state[7];
-
- /* now iterate */
- t1 = h + e1(e) + Ch(e,f,g) + 0x428a2f98 + W[ 0];
- t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2;
- t1 = g + e1(d) + Ch(d,e,f) + 0x71374491 + W[ 1];
- t2 = e0(h) + Maj(h,a,b); c+=t1; g=t1+t2;
- t1 = f + e1(c) + Ch(c,d,e) + 0xb5c0fbcf + W[ 2];
- t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2;
- t1 = e + e1(b) + Ch(b,c,d) + 0xe9b5dba5 + W[ 3];
- t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2;
- t1 = d + e1(a) + Ch(a,b,c) + 0x3956c25b + W[ 4];
- t2 = e0(e) + Maj(e,f,g); h+=t1; d=t1+t2;
- t1 = c + e1(h) + Ch(h,a,b) + 0x59f111f1 + W[ 5];
- t2 = e0(d) + Maj(d,e,f); g+=t1; c=t1+t2;
- t1 = b + e1(g) + Ch(g,h,a) + 0x923f82a4 + W[ 6];
- t2 = e0(c) + Maj(c,d,e); f+=t1; b=t1+t2;
- t1 = a + e1(f) + Ch(f,g,h) + 0xab1c5ed5 + W[ 7];
- t2 = e0(b) + Maj(b,c,d); e+=t1; a=t1+t2;
-
- t1 = h + e1(e) + Ch(e,f,g) + 0xd807aa98 + W[ 8];
- t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2;
- t1 = g + e1(d) + Ch(d,e,f) + 0x12835b01 + W[ 9];
- t2 = e0(h) + Maj(h,a,b); c+=t1; g=t1+t2;
- t1 = f + e1(c) + Ch(c,d,e) + 0x243185be + W[10];
- t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2;
- t1 = e + e1(b) + Ch(b,c,d) + 0x550c7dc3 + W[11];
- t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2;
- t1 = d + e1(a) + Ch(a,b,c) + 0x72be5d74 + W[12];
- t2 = e0(e) + Maj(e,f,g); h+=t1; d=t1+t2;
- t1 = c + e1(h) + Ch(h,a,b) + 0x80deb1fe + W[13];
- t2 = e0(d) + Maj(d,e,f); g+=t1; c=t1+t2;
- t1 = b + e1(g) + Ch(g,h,a) + 0x9bdc06a7 + W[14];
- t2 = e0(c) + Maj(c,d,e); f+=t1; b=t1+t2;
- t1 = a + e1(f) + Ch(f,g,h) + 0xc19bf174 + W[15];
- t2 = e0(b) + Maj(b,c,d); e+=t1; a=t1+t2;
-
- t1 = h + e1(e) + Ch(e,f,g) + 0xe49b69c1 + W[16];
- t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2;
- t1 = g + e1(d) + Ch(d,e,f) + 0xefbe4786 + W[17];
- t2 = e0(h) + Maj(h,a,b); c+=t1; g=t1+t2;
- t1 = f + e1(c) + Ch(c,d,e) + 0x0fc19dc6 + W[18];
- t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2;
- t1 = e + e1(b) + Ch(b,c,d) + 0x240ca1cc + W[19];
- t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2;
- t1 = d + e1(a) + Ch(a,b,c) + 0x2de92c6f + W[20];
- t2 = e0(e) + Maj(e,f,g); h+=t1; d=t1+t2;
- t1 = c + e1(h) + Ch(h,a,b) + 0x4a7484aa + W[21];
- t2 = e0(d) + Maj(d,e,f); g+=t1; c=t1+t2;
- t1 = b + e1(g) + Ch(g,h,a) + 0x5cb0a9dc + W[22];
- t2 = e0(c) + Maj(c,d,e); f+=t1; b=t1+t2;
- t1 = a + e1(f) + Ch(f,g,h) + 0x76f988da + W[23];
- t2 = e0(b) + Maj(b,c,d); e+=t1; a=t1+t2;
-
- t1 = h + e1(e) + Ch(e,f,g) + 0x983e5152 + W[24];
- t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2;
- t1 = g + e1(d) + Ch(d,e,f) + 0xa831c66d + W[25];
- t2 = e0(h) + Maj(h,a,b); c+=t1; g=t1+t2;
- t1 = f + e1(c) + Ch(c,d,e) + 0xb00327c8 + W[26];
- t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2;
- t1 = e + e1(b) + Ch(b,c,d) + 0xbf597fc7 + W[27];
- t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2;
- t1 = d + e1(a) + Ch(a,b,c) + 0xc6e00bf3 + W[28];
- t2 = e0(e) + Maj(e,f,g); h+=t1; d=t1+t2;
- t1 = c + e1(h) + Ch(h,a,b) + 0xd5a79147 + W[29];
- t2 = e0(d) + Maj(d,e,f); g+=t1; c=t1+t2;
- t1 = b + e1(g) + Ch(g,h,a) + 0x06ca6351 + W[30];
- t2 = e0(c) + Maj(c,d,e); f+=t1; b=t1+t2;
- t1 = a + e1(f) + Ch(f,g,h) + 0x14292967 + W[31];
- t2 = e0(b) + Maj(b,c,d); e+=t1; a=t1+t2;
-
- t1 = h + e1(e) + Ch(e,f,g) + 0x27b70a85 + W[32];
- t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2;
- t1 = g + e1(d) + Ch(d,e,f) + 0x2e1b2138 + W[33];
- t2 = e0(h) + Maj(h,a,b); c+=t1; g=t1+t2;
- t1 = f + e1(c) + Ch(c,d,e) + 0x4d2c6dfc + W[34];
- t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2;
- t1 = e + e1(b) + Ch(b,c,d) + 0x53380d13 + W[35];
- t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2;
- t1 = d + e1(a) + Ch(a,b,c) + 0x650a7354 + W[36];
- t2 = e0(e) + Maj(e,f,g); h+=t1; d=t1+t2;
- t1 = c + e1(h) + Ch(h,a,b) + 0x766a0abb + W[37];
- t2 = e0(d) + Maj(d,e,f); g+=t1; c=t1+t2;
- t1 = b + e1(g) + Ch(g,h,a) + 0x81c2c92e + W[38];
- t2 = e0(c) + Maj(c,d,e); f+=t1; b=t1+t2;
- t1 = a + e1(f) + Ch(f,g,h) + 0x92722c85 + W[39];
- t2 = e0(b) + Maj(b,c,d); e+=t1; a=t1+t2;
-
- t1 = h + e1(e) + Ch(e,f,g) + 0xa2bfe8a1 + W[40];
- t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2;
- t1 = g + e1(d) + Ch(d,e,f) + 0xa81a664b + W[41];
- t2 = e0(h) + Maj(h,a,b); c+=t1; g=t1+t2;
- t1 = f + e1(c) + Ch(c,d,e) + 0xc24b8b70 + W[42];
- t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2;
- t1 = e + e1(b) + Ch(b,c,d) + 0xc76c51a3 + W[43];
- t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2;
- t1 = d + e1(a) + Ch(a,b,c) + 0xd192e819 + W[44];
- t2 = e0(e) + Maj(e,f,g); h+=t1; d=t1+t2;
- t1 = c + e1(h) + Ch(h,a,b) + 0xd6990624 + W[45];
- t2 = e0(d) + Maj(d,e,f); g+=t1; c=t1+t2;
- t1 = b + e1(g) + Ch(g,h,a) + 0xf40e3585 + W[46];
- t2 = e0(c) + Maj(c,d,e); f+=t1; b=t1+t2;
- t1 = a + e1(f) + Ch(f,g,h) + 0x106aa070 + W[47];
- t2 = e0(b) + Maj(b,c,d); e+=t1; a=t1+t2;
-
- t1 = h + e1(e) + Ch(e,f,g) + 0x19a4c116 + W[48];
- t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2;
- t1 = g + e1(d) + Ch(d,e,f) + 0x1e376c08 + W[49];
- t2 = e0(h) + Maj(h,a,b); c+=t1; g=t1+t2;
- t1 = f + e1(c) + Ch(c,d,e) + 0x2748774c + W[50];
- t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2;
- t1 = e + e1(b) + Ch(b,c,d) + 0x34b0bcb5 + W[51];
- t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2;
- t1 = d + e1(a) + Ch(a,b,c) + 0x391c0cb3 + W[52];
- t2 = e0(e) + Maj(e,f,g); h+=t1; d=t1+t2;
- t1 = c + e1(h) + Ch(h,a,b) + 0x4ed8aa4a + W[53];
- t2 = e0(d) + Maj(d,e,f); g+=t1; c=t1+t2;
- t1 = b + e1(g) + Ch(g,h,a) + 0x5b9cca4f + W[54];
- t2 = e0(c) + Maj(c,d,e); f+=t1; b=t1+t2;
- t1 = a + e1(f) + Ch(f,g,h) + 0x682e6ff3 + W[55];
- t2 = e0(b) + Maj(b,c,d); e+=t1; a=t1+t2;
-
- t1 = h + e1(e) + Ch(e,f,g) + 0x748f82ee + W[56];
- t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2;
- t1 = g + e1(d) + Ch(d,e,f) + 0x78a5636f + W[57];
- t2 = e0(h) + Maj(h,a,b); c+=t1; g=t1+t2;
- t1 = f + e1(c) + Ch(c,d,e) + 0x84c87814 + W[58];
- t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2;
- t1 = e + e1(b) + Ch(b,c,d) + 0x8cc70208 + W[59];
- t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2;
- t1 = d + e1(a) + Ch(a,b,c) + 0x90befffa + W[60];
- t2 = e0(e) + Maj(e,f,g); h+=t1; d=t1+t2;
- t1 = c + e1(h) + Ch(h,a,b) + 0xa4506ceb + W[61];
- t2 = e0(d) + Maj(d,e,f); g+=t1; c=t1+t2;
- t1 = b + e1(g) + Ch(g,h,a) + 0xbef9a3f7 + W[62];
- t2 = e0(c) + Maj(c,d,e); f+=t1; b=t1+t2;
- t1 = a + e1(f) + Ch(f,g,h) + 0xc67178f2 + W[63];
- t2 = e0(b) + Maj(b,c,d); e+=t1; a=t1+t2;
-
- state[0] += a; state[1] += b; state[2] += c; state[3] += d;
- state[4] += e; state[5] += f; state[6] += g; state[7] += h;
-
- /* clear any sensitive info... */
- a = b = c = d = e = f = g = h = t1 = t2 = 0;
- memset(W, 0, 64 * sizeof(u32));
-}
- /*}}}*/
-SHA256Summation::SHA256Summation() /*{{{*/
-{
- Sum.state[0] = H0;
- Sum.state[1] = H1;
- Sum.state[2] = H2;
- Sum.state[3] = H3;
- Sum.state[4] = H4;
- Sum.state[5] = H5;
- Sum.state[6] = H6;
- Sum.state[7] = H7;
- Sum.count[0] = Sum.count[1] = 0;
- memset(Sum.buf, 0, sizeof(Sum.buf));
- Done = false;
-}
- /*}}}*/
-bool SHA256Summation::Add(const u8 *data, unsigned long len) /*{{{*/
-{
- struct sha256_ctx *sctx = &Sum;
- unsigned int i, index, part_len;
-
- if (Done) return false;
-
- /* Compute number of bytes mod 128 */
- index = (unsigned int)((sctx->count[0] >> 3) & 0x3f);
-
- /* Update number of bits */
- if ((sctx->count[0] += (len << 3)) < (len << 3)) {
- sctx->count[1]++;
- sctx->count[1] += (len >> 29);
- }
-
- part_len = 64 - index;
-
- /* Transform as many times as possible. */
- if (len >= part_len) {
- memcpy(&sctx->buf[index], data, part_len);
- sha256_transform(sctx->state, sctx->buf);
-
- for (i = part_len; i + 63 < len; i += 64)
- sha256_transform(sctx->state, &data[i]);
- index = 0;
- } else {
- i = 0;
- }
-
- /* Buffer remaining input */
- memcpy(&sctx->buf[index], &data[i], len-i);
-
- return true;
-}
- /*}}}*/
-SHA256SumValue SHA256Summation::Result() /*{{{*/
-{
- struct sha256_ctx *sctx = &Sum;
- if (!Done) {
- u8 bits[8];
- unsigned int index, pad_len, t;
- static const u8 padding[64] = { 0x80, };
-
- /* Save number of bits */
- t = sctx->count[0];
- bits[7] = t; t >>= 8;
- bits[6] = t; t >>= 8;
- bits[5] = t; t >>= 8;
- bits[4] = t;
- t = sctx->count[1];
- bits[3] = t; t >>= 8;
- bits[2] = t; t >>= 8;
- bits[1] = t; t >>= 8;
- bits[0] = t;
-
- /* Pad out to 56 mod 64. */
- index = (sctx->count[0] >> 3) & 0x3f;
- pad_len = (index < 56) ? (56 - index) : ((64+56) - index);
- Add(padding, pad_len);
-
- /* Append length (before padding) */
- Add(bits, 8);
- }
-
- Done = true;
-
- /* Store state in digest */
-
- SHA256SumValue res;
- u8 *out = res.Sum;
-
- int i, j;
- unsigned int t;
- for (i = j = 0; i < 8; i++, j += 4) {
- t = sctx->state[i];
- out[j+3] = t; t >>= 8;
- out[j+2] = t; t >>= 8;
- out[j+1] = t; t >>= 8;
- out[j ] = t;
- }
-
- return res;
-}
- /*}}}*/
-// SHA256SumValue::SHA256SumValue - Constructs the sum from a string /*{{{*/
-// ---------------------------------------------------------------------
-/* The string form of a SHA256 is a 64 character hex number */
-SHA256SumValue::SHA256SumValue(string Str)
-{
- memset(Sum,0,sizeof(Sum));
- Set(Str);
-}
- /*}}}*/
-// SHA256SumValue::SHA256SumValue - Default constructor /*{{{*/
-// ---------------------------------------------------------------------
-/* Sets the value to 0 */
-SHA256SumValue::SHA256SumValue()
-{
- memset(Sum,0,sizeof(Sum));
-}
- /*}}}*/
-// SHA256SumValue::Set - Set the sum from a string /*{{{*/
-// ---------------------------------------------------------------------
-/* Converts the hex string into a set of chars */
-bool SHA256SumValue::Set(string Str)
-{
- return Hex2Num(Str,Sum,sizeof(Sum));
-}
- /*}}}*/
-// SHA256SumValue::Value - Convert the number into a string /*{{{*/
-// ---------------------------------------------------------------------
-/* Converts the set of chars into a hex string in lower case */
-string SHA256SumValue::Value() const
-{
- char Conv[16] =
- { '0','1','2','3','4','5','6','7','8','9','a','b',
- 'c','d','e','f'
- };
- char Result[65];
- Result[64] = 0;
-
- // Convert each char into two letters
- int J = 0;
- int I = 0;
- for (; I != 64; J++,I += 2)
- {
- Result[I] = Conv[Sum[J] >> 4];
- Result[I + 1] = Conv[Sum[J] & 0xF];
- }
-
- return string(Result);
-}
- /*}}}*/
-// SHA256SumValue::operator == - Comparator /*{{{*/
-// ---------------------------------------------------------------------
-/* Call memcmp on the buffer */
-bool SHA256SumValue::operator == (const SHA256SumValue & rhs) const
-{
- return memcmp(Sum,rhs.Sum,sizeof(Sum)) == 0;
-}
- /*}}}*/
-// SHA256Summation::AddFD - Add content of file into the checksum /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-bool SHA256Summation::AddFD(int Fd,unsigned long Size)
-{
- unsigned char Buf[64 * 64];
- int Res = 0;
- int ToEOF = (Size == 0);
- while (Size != 0 || ToEOF)
- {
- unsigned n = sizeof(Buf);
- if (!ToEOF) n = min(Size,(unsigned long)n);
- Res = read(Fd,Buf,n);
- if (Res < 0 || (!ToEOF && (unsigned) Res != n)) // error, or short read
- return false;
- if (ToEOF && Res == 0) // EOF
- break;
- Size -= Res;
- Add(Buf,Res);
- }
- return true;
-}
- /*}}}*/
-
diff --git a/apt-pkg/contrib/sha256.h b/apt-pkg/contrib/sha256.h
index 5934b5641..fe2b30ac2 100644
--- a/apt-pkg/contrib/sha256.h
+++ b/apt-pkg/contrib/sha256.h
@@ -1,72 +1,8 @@
-// -*- mode: cpp; mode: fold -*-
-// Description /*{{{*/
-// $Id: sha1.h,v 1.3 2001/05/07 05:05:47 jgg Exp $
-/* ######################################################################
-
- SHA256SumValue - Storage for a SHA-256 hash.
- SHA256Summation - SHA-256 Secure Hash Algorithm.
-
- This is a C++ interface to a set of SHA256Sum functions, that mirrors
- the equivalent MD5 & SHA1 classes.
-
- ##################################################################### */
- /*}}}*/
#ifndef APTPKG_SHA256_H
#define APTPKG_SHA256_H
-#include <string>
-#include <cstring>
-#include <algorithm>
-#include <stdint.h>
-
-using std::string;
-using std::min;
-
-class SHA256Summation;
-
-class SHA256SumValue
-{
- friend class SHA256Summation;
- unsigned char Sum[32];
-
- public:
-
- // Accessors
- bool operator ==(const SHA256SumValue &rhs) const;
- string Value() const;
- inline void Value(unsigned char S[32])
- {for (int I = 0; I != sizeof(Sum); I++) S[I] = Sum[I];};
- inline operator string() const {return Value();};
- bool Set(string Str);
- inline void Set(unsigned char S[32])
- {for (int I = 0; I != sizeof(Sum); I++) Sum[I] = S[I];};
-
- SHA256SumValue(string Str);
- SHA256SumValue();
-};
-
-struct sha256_ctx {
- uint32_t count[2];
- uint32_t state[8];
- uint8_t buf[128];
-};
-
-class SHA256Summation
-{
- struct sha256_ctx Sum;
-
- bool Done;
-
- public:
+#include "sha2.h"
- bool Add(const unsigned char *inbuf,unsigned long inlen);
- inline bool Add(const char *Data) {return Add((unsigned char *)Data,strlen(Data));};
- bool AddFD(int Fd,unsigned long Size);
- inline bool Add(const unsigned char *Beg,const unsigned char *End)
- {return Add(Beg,End-Beg);};
- SHA256SumValue Result();
-
- SHA256Summation();
-};
+#warn "This header is deprecated, please include sha2.h instead"
#endif
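
sha256.h is reduced to a compatibility shim: it pulls in sha2.h and emits a deprecation warning, so existing includes keep building while new code should include sha2.h directly:

    // Preferred include after this change:
    #include <apt-pkg/sha2.h>   // SHA256Summation, SHA512Summation, sum value typedefs
    // Including <apt-pkg/sha256.h> still works, but now triggers the warning above.
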
diff --git a/apt-pkg/contrib/sha2_internal.cc b/apt-pkg/contrib/sha2_internal.cc
new file mode 100644
index 000000000..10b82dec4
--- /dev/null
+++ b/apt-pkg/contrib/sha2_internal.cc
@@ -0,0 +1,1065 @@
+/*
+ * FILE: sha2.c
+ * AUTHOR: Aaron D. Gifford - http://www.aarongifford.com/
+ *
+ * Copyright (c) 2000-2001, Aaron D. Gifford
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holder nor the names of contributors
+ * may be used to endorse or promote products derived from this software
+ * without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTOR(S) ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTOR(S) BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ *
+ * $Id: sha2.c,v 1.1 2001/11/08 00:01:51 adg Exp adg $
+ */
+
+#include <string.h> /* memcpy()/memset() or bcopy()/bzero() */
+#include <assert.h> /* assert() */
+#include "sha2_internal.h"
+
+/*
+ * ASSERT NOTE:
+ * Some sanity checking code is included using assert(). On my FreeBSD
+ * system, this additional code can be removed by compiling with NDEBUG
+ * defined. Check your own system's manpage on assert() to see how to
+ * compile WITHOUT the sanity checking code on your system.
+ *
+ * UNROLLED TRANSFORM LOOP NOTE:
+ * You can define SHA2_UNROLL_TRANSFORM to use the unrolled transform
+ * loop version for the hash transform rounds (defined using macros
+ * later in this file). Either define on the command line, for example:
+ *
+ * cc -DSHA2_UNROLL_TRANSFORM -o sha2 sha2.c sha2prog.c
+ *
+ * or define below:
+ *
+ * #define SHA2_UNROLL_TRANSFORM
+ *
+ */
+
+
+/*** SHA-256/384/512 Machine Architecture Definitions *****************/
+/*
+ * BYTE_ORDER NOTE:
+ *
+ * Please make sure that your system defines BYTE_ORDER. If your
+ * architecture is little-endian, make sure it also defines
+ * LITTLE_ENDIAN and that the two (BYTE_ORDER and LITTLE_ENDIAN) are
+ * equivalent.
+ *
+ * If your system does not define the above, then you can do so by
+ * hand like this:
+ *
+ * #define LITTLE_ENDIAN 1234
+ * #define BIG_ENDIAN 4321
+ *
+ * And for little-endian machines, add:
+ *
+ * #define BYTE_ORDER LITTLE_ENDIAN
+ *
+ * Or for big-endian machines:
+ *
+ * #define BYTE_ORDER BIG_ENDIAN
+ *
+ * The FreeBSD machine this was written on defines BYTE_ORDER
+ * appropriately by including <sys/types.h> (which in turn includes
+ * <machine/endian.h> where the appropriate definitions are actually
+ * made).
+ */
+#if !defined(BYTE_ORDER) || (BYTE_ORDER != LITTLE_ENDIAN && BYTE_ORDER != BIG_ENDIAN)
+#error Define BYTE_ORDER to be equal to either LITTLE_ENDIAN or BIG_ENDIAN
+#endif
+
+/*
+ * Define the following sha2_* types to types of the correct length on
+ * the native architecture. Most BSD systems and Linux define u_intXX_t
+ * types. Machines with very recent ANSI C headers can use the
+ * uintXX_t definitions from inttypes.h by defining SHA2_USE_INTTYPES_H
+ * during compile or in the sha.h header file.
+ *
+ * Machines that support neither u_intXX_t nor inttypes.h's uintXX_t
+ * will need to define these three typedefs below (and the appropriate
+ * ones in sha.h too) by hand according to their system architecture.
+ *
+ * Thank you, Jun-ichiro itojun Hagino, for suggesting using u_intXX_t
+ * types and pointing out recent ANSI C support for uintXX_t in inttypes.h.
+ */
+#ifdef SHA2_USE_INTTYPES_H
+
+typedef uint8_t sha2_byte; /* Exactly 1 byte */
+typedef uint32_t sha2_word32; /* Exactly 4 bytes */
+typedef uint64_t sha2_word64; /* Exactly 8 bytes */
+
+#else /* SHA2_USE_INTTYPES_H */
+
+typedef u_int8_t sha2_byte; /* Exactly 1 byte */
+typedef u_int32_t sha2_word32; /* Exactly 4 bytes */
+typedef u_int64_t sha2_word64; /* Exactly 8 bytes */
+
+#endif /* SHA2_USE_INTTYPES_H */
+
+
+/*** SHA-256/384/512 Various Length Definitions ***********************/
+/* NOTE: Most of these are in sha2.h */
+#define SHA256_SHORT_BLOCK_LENGTH (SHA256_BLOCK_LENGTH - 8)
+#define SHA384_SHORT_BLOCK_LENGTH (SHA384_BLOCK_LENGTH - 16)
+#define SHA512_SHORT_BLOCK_LENGTH (SHA512_BLOCK_LENGTH - 16)
+
+
+/*** ENDIAN REVERSAL MACROS *******************************************/
+#if BYTE_ORDER == LITTLE_ENDIAN
+#define REVERSE32(w,x) { \
+ sha2_word32 tmp = (w); \
+ tmp = (tmp >> 16) | (tmp << 16); \
+ (x) = ((tmp & 0xff00ff00UL) >> 8) | ((tmp & 0x00ff00ffUL) << 8); \
+}
+#define REVERSE64(w,x) { \
+ sha2_word64 tmp = (w); \
+ tmp = (tmp >> 32) | (tmp << 32); \
+ tmp = ((tmp & 0xff00ff00ff00ff00ULL) >> 8) | \
+ ((tmp & 0x00ff00ff00ff00ffULL) << 8); \
+ (x) = ((tmp & 0xffff0000ffff0000ULL) >> 16) | \
+ ((tmp & 0x0000ffff0000ffffULL) << 16); \
+}
+#endif /* BYTE_ORDER == LITTLE_ENDIAN */
+
+/*
+ * Macro for incrementally adding the unsigned 64-bit integer n to the
+ * unsigned 128-bit integer (represented using a two-element array of
+ * 64-bit words):
+ */
+#define ADDINC128(w,n) { \
+ (w)[0] += (sha2_word64)(n); \
+ if ((w)[0] < (n)) { \
+ (w)[1]++; \
+ } \
+}
+
+/*
+ * Macros for copying blocks of memory and for zeroing out ranges
+ * of memory. Using these macros makes it easy to switch from
+ * using memset()/memcpy() and using bzero()/bcopy().
+ *
+ * Please define either SHA2_USE_MEMSET_MEMCPY or define
+ * SHA2_USE_BZERO_BCOPY depending on which function set you
+ * choose to use:
+ */
+#if !defined(SHA2_USE_MEMSET_MEMCPY) && !defined(SHA2_USE_BZERO_BCOPY)
+/* Default to memset()/memcpy() if no option is specified */
+#define SHA2_USE_MEMSET_MEMCPY 1
+#endif
+#if defined(SHA2_USE_MEMSET_MEMCPY) && defined(SHA2_USE_BZERO_BCOPY)
+/* Abort with an error if BOTH options are defined */
+#error Define either SHA2_USE_MEMSET_MEMCPY or SHA2_USE_BZERO_BCOPY, not both!
+#endif
+
+#ifdef SHA2_USE_MEMSET_MEMCPY
+#define MEMSET_BZERO(p,l) memset((p), 0, (l))
+#define MEMCPY_BCOPY(d,s,l) memcpy((d), (s), (l))
+#endif
+#ifdef SHA2_USE_BZERO_BCOPY
+#define MEMSET_BZERO(p,l) bzero((p), (l))
+#define MEMCPY_BCOPY(d,s,l) bcopy((s), (d), (l))
+#endif
+
+
+/*** THE SIX LOGICAL FUNCTIONS ****************************************/
+/*
+ * Bit shifting and rotation (used by the six SHA-XYZ logical functions):
+ *
+ * NOTE: The naming of R and S appears backwards here (R is a SHIFT and
+ * S is a ROTATION) because the SHA-256/384/512 description document
+ * (see http://csrc.nist.gov/cryptval/shs/sha256-384-512.pdf) uses this
+ * same "backwards" definition.
+ */
+/* Shift-right (used in SHA-256, SHA-384, and SHA-512): */
+#define R(b,x) ((x) >> (b))
+/* 32-bit Rotate-right (used in SHA-256): */
+#define S32(b,x) (((x) >> (b)) | ((x) << (32 - (b))))
+/* 64-bit Rotate-right (used in SHA-384 and SHA-512): */
+#define S64(b,x) (((x) >> (b)) | ((x) << (64 - (b))))
+
+/* Two of six logical functions used in SHA-256, SHA-384, and SHA-512: */
+#define Ch(x,y,z) (((x) & (y)) ^ ((~(x)) & (z)))
+#define Maj(x,y,z) (((x) & (y)) ^ ((x) & (z)) ^ ((y) & (z)))
+
+/* Four of six logical functions used in SHA-256: */
+#define Sigma0_256(x) (S32(2, (x)) ^ S32(13, (x)) ^ S32(22, (x)))
+#define Sigma1_256(x) (S32(6, (x)) ^ S32(11, (x)) ^ S32(25, (x)))
+#define sigma0_256(x) (S32(7, (x)) ^ S32(18, (x)) ^ R(3 , (x)))
+#define sigma1_256(x) (S32(17, (x)) ^ S32(19, (x)) ^ R(10, (x)))
+
+/* Four of six logical functions used in SHA-384 and SHA-512: */
+#define Sigma0_512(x) (S64(28, (x)) ^ S64(34, (x)) ^ S64(39, (x)))
+#define Sigma1_512(x) (S64(14, (x)) ^ S64(18, (x)) ^ S64(41, (x)))
+#define sigma0_512(x) (S64( 1, (x)) ^ S64( 8, (x)) ^ R( 7, (x)))
+#define sigma1_512(x) (S64(19, (x)) ^ S64(61, (x)) ^ R( 6, (x)))
+
+/*** INTERNAL FUNCTION PROTOTYPES *************************************/
+/* NOTE: These should not be accessed directly from outside this
+ * library -- they are intended for private internal visibility/use
+ * only.
+ */
+void SHA512_Last(SHA512_CTX*);
+void SHA256_Transform(SHA256_CTX*, const sha2_word32*);
+void SHA512_Transform(SHA512_CTX*, const sha2_word64*);
+
+
+/*** SHA-XYZ INITIAL HASH VALUES AND CONSTANTS ************************/
+/* Hash constant words K for SHA-256: */
+const static sha2_word32 K256[64] = {
+ 0x428a2f98UL, 0x71374491UL, 0xb5c0fbcfUL, 0xe9b5dba5UL,
+ 0x3956c25bUL, 0x59f111f1UL, 0x923f82a4UL, 0xab1c5ed5UL,
+ 0xd807aa98UL, 0x12835b01UL, 0x243185beUL, 0x550c7dc3UL,
+ 0x72be5d74UL, 0x80deb1feUL, 0x9bdc06a7UL, 0xc19bf174UL,
+ 0xe49b69c1UL, 0xefbe4786UL, 0x0fc19dc6UL, 0x240ca1ccUL,
+ 0x2de92c6fUL, 0x4a7484aaUL, 0x5cb0a9dcUL, 0x76f988daUL,
+ 0x983e5152UL, 0xa831c66dUL, 0xb00327c8UL, 0xbf597fc7UL,
+ 0xc6e00bf3UL, 0xd5a79147UL, 0x06ca6351UL, 0x14292967UL,
+ 0x27b70a85UL, 0x2e1b2138UL, 0x4d2c6dfcUL, 0x53380d13UL,
+ 0x650a7354UL, 0x766a0abbUL, 0x81c2c92eUL, 0x92722c85UL,
+ 0xa2bfe8a1UL, 0xa81a664bUL, 0xc24b8b70UL, 0xc76c51a3UL,
+ 0xd192e819UL, 0xd6990624UL, 0xf40e3585UL, 0x106aa070UL,
+ 0x19a4c116UL, 0x1e376c08UL, 0x2748774cUL, 0x34b0bcb5UL,
+ 0x391c0cb3UL, 0x4ed8aa4aUL, 0x5b9cca4fUL, 0x682e6ff3UL,
+ 0x748f82eeUL, 0x78a5636fUL, 0x84c87814UL, 0x8cc70208UL,
+ 0x90befffaUL, 0xa4506cebUL, 0xbef9a3f7UL, 0xc67178f2UL
+};
+
+/* Initial hash value H for SHA-256: */
+const static sha2_word32 sha256_initial_hash_value[8] = {
+ 0x6a09e667UL,
+ 0xbb67ae85UL,
+ 0x3c6ef372UL,
+ 0xa54ff53aUL,
+ 0x510e527fUL,
+ 0x9b05688cUL,
+ 0x1f83d9abUL,
+ 0x5be0cd19UL
+};
+
+/* Hash constant words K for SHA-384 and SHA-512: */
+const static sha2_word64 K512[80] = {
+ 0x428a2f98d728ae22ULL, 0x7137449123ef65cdULL,
+ 0xb5c0fbcfec4d3b2fULL, 0xe9b5dba58189dbbcULL,
+ 0x3956c25bf348b538ULL, 0x59f111f1b605d019ULL,
+ 0x923f82a4af194f9bULL, 0xab1c5ed5da6d8118ULL,
+ 0xd807aa98a3030242ULL, 0x12835b0145706fbeULL,
+ 0x243185be4ee4b28cULL, 0x550c7dc3d5ffb4e2ULL,
+ 0x72be5d74f27b896fULL, 0x80deb1fe3b1696b1ULL,
+ 0x9bdc06a725c71235ULL, 0xc19bf174cf692694ULL,
+ 0xe49b69c19ef14ad2ULL, 0xefbe4786384f25e3ULL,
+ 0x0fc19dc68b8cd5b5ULL, 0x240ca1cc77ac9c65ULL,
+ 0x2de92c6f592b0275ULL, 0x4a7484aa6ea6e483ULL,
+ 0x5cb0a9dcbd41fbd4ULL, 0x76f988da831153b5ULL,
+ 0x983e5152ee66dfabULL, 0xa831c66d2db43210ULL,
+ 0xb00327c898fb213fULL, 0xbf597fc7beef0ee4ULL,
+ 0xc6e00bf33da88fc2ULL, 0xd5a79147930aa725ULL,
+ 0x06ca6351e003826fULL, 0x142929670a0e6e70ULL,
+ 0x27b70a8546d22ffcULL, 0x2e1b21385c26c926ULL,
+ 0x4d2c6dfc5ac42aedULL, 0x53380d139d95b3dfULL,
+ 0x650a73548baf63deULL, 0x766a0abb3c77b2a8ULL,
+ 0x81c2c92e47edaee6ULL, 0x92722c851482353bULL,
+ 0xa2bfe8a14cf10364ULL, 0xa81a664bbc423001ULL,
+ 0xc24b8b70d0f89791ULL, 0xc76c51a30654be30ULL,
+ 0xd192e819d6ef5218ULL, 0xd69906245565a910ULL,
+ 0xf40e35855771202aULL, 0x106aa07032bbd1b8ULL,
+ 0x19a4c116b8d2d0c8ULL, 0x1e376c085141ab53ULL,
+ 0x2748774cdf8eeb99ULL, 0x34b0bcb5e19b48a8ULL,
+ 0x391c0cb3c5c95a63ULL, 0x4ed8aa4ae3418acbULL,
+ 0x5b9cca4f7763e373ULL, 0x682e6ff3d6b2b8a3ULL,
+ 0x748f82ee5defb2fcULL, 0x78a5636f43172f60ULL,
+ 0x84c87814a1f0ab72ULL, 0x8cc702081a6439ecULL,
+ 0x90befffa23631e28ULL, 0xa4506cebde82bde9ULL,
+ 0xbef9a3f7b2c67915ULL, 0xc67178f2e372532bULL,
+ 0xca273eceea26619cULL, 0xd186b8c721c0c207ULL,
+ 0xeada7dd6cde0eb1eULL, 0xf57d4f7fee6ed178ULL,
+ 0x06f067aa72176fbaULL, 0x0a637dc5a2c898a6ULL,
+ 0x113f9804bef90daeULL, 0x1b710b35131c471bULL,
+ 0x28db77f523047d84ULL, 0x32caab7b40c72493ULL,
+ 0x3c9ebe0a15c9bebcULL, 0x431d67c49c100d4cULL,
+ 0x4cc5d4becb3e42b6ULL, 0x597f299cfc657e2aULL,
+ 0x5fcb6fab3ad6faecULL, 0x6c44198c4a475817ULL
+};
+
+/* Initial hash value H for SHA-384 */
+const static sha2_word64 sha384_initial_hash_value[8] = {
+ 0xcbbb9d5dc1059ed8ULL,
+ 0x629a292a367cd507ULL,
+ 0x9159015a3070dd17ULL,
+ 0x152fecd8f70e5939ULL,
+ 0x67332667ffc00b31ULL,
+ 0x8eb44a8768581511ULL,
+ 0xdb0c2e0d64f98fa7ULL,
+ 0x47b5481dbefa4fa4ULL
+};
+
+/* Initial hash value H for SHA-512 */
+const static sha2_word64 sha512_initial_hash_value[8] = {
+ 0x6a09e667f3bcc908ULL,
+ 0xbb67ae8584caa73bULL,
+ 0x3c6ef372fe94f82bULL,
+ 0xa54ff53a5f1d36f1ULL,
+ 0x510e527fade682d1ULL,
+ 0x9b05688c2b3e6c1fULL,
+ 0x1f83d9abfb41bd6bULL,
+ 0x5be0cd19137e2179ULL
+};
+
+/*
+ * Constant used by SHA256/384/512_End() functions for converting the
+ * digest to a readable hexadecimal character string:
+ */
+static const char *sha2_hex_digits = "0123456789abcdef";
+
+
+/*** SHA-256: *********************************************************/
+void SHA256_Init(SHA256_CTX* context) {
+ if (context == (SHA256_CTX*)0) {
+ return;
+ }
+ MEMCPY_BCOPY(context->state, sha256_initial_hash_value, SHA256_DIGEST_LENGTH);
+ MEMSET_BZERO(context->buffer, SHA256_BLOCK_LENGTH);
+ context->bitcount = 0;
+}
+
+#ifdef SHA2_UNROLL_TRANSFORM
+
+/* Unrolled SHA-256 round macros: */
+
+#if BYTE_ORDER == LITTLE_ENDIAN
+
+#define ROUND256_0_TO_15(a,b,c,d,e,f,g,h) \
+ REVERSE32(*data++, W256[j]); \
+ T1 = (h) + Sigma1_256(e) + Ch((e), (f), (g)) + \
+ K256[j] + W256[j]; \
+ (d) += T1; \
+ (h) = T1 + Sigma0_256(a) + Maj((a), (b), (c)); \
+ j++
+
+
+#else /* BYTE_ORDER == LITTLE_ENDIAN */
+
+#define ROUND256_0_TO_15(a,b,c,d,e,f,g,h) \
+ T1 = (h) + Sigma1_256(e) + Ch((e), (f), (g)) + \
+ K256[j] + (W256[j] = *data++); \
+ (d) += T1; \
+ (h) = T1 + Sigma0_256(a) + Maj((a), (b), (c)); \
+ j++
+
+#endif /* BYTE_ORDER == LITTLE_ENDIAN */
+
+#define ROUND256(a,b,c,d,e,f,g,h) \
+ s0 = W256[(j+1)&0x0f]; \
+ s0 = sigma0_256(s0); \
+ s1 = W256[(j+14)&0x0f]; \
+ s1 = sigma1_256(s1); \
+ T1 = (h) + Sigma1_256(e) + Ch((e), (f), (g)) + K256[j] + \
+ (W256[j&0x0f] += s1 + W256[(j+9)&0x0f] + s0); \
+ (d) += T1; \
+ (h) = T1 + Sigma0_256(a) + Maj((a), (b), (c)); \
+ j++
+
+void SHA256_Transform(SHA256_CTX* context, const sha2_word32* data) {
+ sha2_word32 a, b, c, d, e, f, g, h, s0, s1;
+ sha2_word32 T1, *W256;
+ int j;
+
+ W256 = (sha2_word32*)context->buffer;
+
+ /* Initialize registers with the prev. intermediate value */
+ a = context->state[0];
+ b = context->state[1];
+ c = context->state[2];
+ d = context->state[3];
+ e = context->state[4];
+ f = context->state[5];
+ g = context->state[6];
+ h = context->state[7];
+
+ j = 0;
+ do {
+ /* Rounds 0 to 15 (unrolled): */
+ ROUND256_0_TO_15(a,b,c,d,e,f,g,h);
+ ROUND256_0_TO_15(h,a,b,c,d,e,f,g);
+ ROUND256_0_TO_15(g,h,a,b,c,d,e,f);
+ ROUND256_0_TO_15(f,g,h,a,b,c,d,e);
+ ROUND256_0_TO_15(e,f,g,h,a,b,c,d);
+ ROUND256_0_TO_15(d,e,f,g,h,a,b,c);
+ ROUND256_0_TO_15(c,d,e,f,g,h,a,b);
+ ROUND256_0_TO_15(b,c,d,e,f,g,h,a);
+ } while (j < 16);
+
+ /* Now for the remaining rounds to 64: */
+ do {
+ ROUND256(a,b,c,d,e,f,g,h);
+ ROUND256(h,a,b,c,d,e,f,g);
+ ROUND256(g,h,a,b,c,d,e,f);
+ ROUND256(f,g,h,a,b,c,d,e);
+ ROUND256(e,f,g,h,a,b,c,d);
+ ROUND256(d,e,f,g,h,a,b,c);
+ ROUND256(c,d,e,f,g,h,a,b);
+ ROUND256(b,c,d,e,f,g,h,a);
+ } while (j < 64);
+
+ /* Compute the current intermediate hash value */
+ context->state[0] += a;
+ context->state[1] += b;
+ context->state[2] += c;
+ context->state[3] += d;
+ context->state[4] += e;
+ context->state[5] += f;
+ context->state[6] += g;
+ context->state[7] += h;
+
+ /* Clean up */
+ a = b = c = d = e = f = g = h = T1 = 0;
+}
+
+#else /* SHA2_UNROLL_TRANSFORM */
+
+void SHA256_Transform(SHA256_CTX* context, const sha2_word32* data) {
+ sha2_word32 a, b, c, d, e, f, g, h, s0, s1;
+ sha2_word32 T1, T2, *W256;
+ int j;
+
+ W256 = (sha2_word32*)context->buffer;
+
+ /* Initialize registers with the prev. intermediate value */
+ a = context->state[0];
+ b = context->state[1];
+ c = context->state[2];
+ d = context->state[3];
+ e = context->state[4];
+ f = context->state[5];
+ g = context->state[6];
+ h = context->state[7];
+
+ j = 0;
+ do {
+#if BYTE_ORDER == LITTLE_ENDIAN
+ /* Copy data while converting to host byte order */
+ REVERSE32(*data++,W256[j]);
+ /* Apply the SHA-256 compression function to update a..h */
+ T1 = h + Sigma1_256(e) + Ch(e, f, g) + K256[j] + W256[j];
+#else /* BYTE_ORDER == LITTLE_ENDIAN */
+ /* Apply the SHA-256 compression function to update a..h with copy */
+ T1 = h + Sigma1_256(e) + Ch(e, f, g) + K256[j] + (W256[j] = *data++);
+#endif /* BYTE_ORDER == LITTLE_ENDIAN */
+ T2 = Sigma0_256(a) + Maj(a, b, c);
+ h = g;
+ g = f;
+ f = e;
+ e = d + T1;
+ d = c;
+ c = b;
+ b = a;
+ a = T1 + T2;
+
+ j++;
+ } while (j < 16);
+
+ do {
+ /* Part of the message block expansion: */
+ s0 = W256[(j+1)&0x0f];
+ s0 = sigma0_256(s0);
+ s1 = W256[(j+14)&0x0f];
+ s1 = sigma1_256(s1);
+
+ /* Apply the SHA-256 compression function to update a..h */
+ T1 = h + Sigma1_256(e) + Ch(e, f, g) + K256[j] +
+ (W256[j&0x0f] += s1 + W256[(j+9)&0x0f] + s0);
+ T2 = Sigma0_256(a) + Maj(a, b, c);
+ h = g;
+ g = f;
+ f = e;
+ e = d + T1;
+ d = c;
+ c = b;
+ b = a;
+ a = T1 + T2;
+
+ j++;
+ } while (j < 64);
+
+ /* Compute the current intermediate hash value */
+ context->state[0] += a;
+ context->state[1] += b;
+ context->state[2] += c;
+ context->state[3] += d;
+ context->state[4] += e;
+ context->state[5] += f;
+ context->state[6] += g;
+ context->state[7] += h;
+
+ /* Clean up */
+ a = b = c = d = e = f = g = h = T1 = T2 = 0;
+}
+
+#endif /* SHA2_UNROLL_TRANSFORM */
+
+void SHA256_Update(SHA256_CTX* context, const sha2_byte *data, size_t len) {
+ unsigned int freespace, usedspace;
+
+ if (len == 0) {
+ /* Calling with no data is valid - we do nothing */
+ return;
+ }
+
+ /* Sanity check: */
+ assert(context != (SHA256_CTX*)0 && data != (sha2_byte*)0);
+
+ usedspace = (context->bitcount >> 3) % SHA256_BLOCK_LENGTH;
+ if (usedspace > 0) {
+ /* Calculate how much free space is available in the buffer */
+ freespace = SHA256_BLOCK_LENGTH - usedspace;
+
+ if (len >= freespace) {
+ /* Fill the buffer completely and process it */
+ MEMCPY_BCOPY(&context->buffer[usedspace], data, freespace);
+ context->bitcount += freespace << 3;
+ len -= freespace;
+ data += freespace;
+ SHA256_Transform(context, (sha2_word32*)context->buffer);
+ } else {
+ /* The buffer is not yet full */
+ MEMCPY_BCOPY(&context->buffer[usedspace], data, len);
+ context->bitcount += len << 3;
+ /* Clean up: */
+ usedspace = freespace = 0;
+ return;
+ }
+ }
+ while (len >= SHA256_BLOCK_LENGTH) {
+ /* Process as many complete blocks as we can */
+ SHA256_Transform(context, (sha2_word32*)data);
+ context->bitcount += SHA256_BLOCK_LENGTH << 3;
+ len -= SHA256_BLOCK_LENGTH;
+ data += SHA256_BLOCK_LENGTH;
+ }
+ if (len > 0) {
+ /* There's left-overs, so save 'em */
+ MEMCPY_BCOPY(context->buffer, data, len);
+ context->bitcount += len << 3;
+ }
+ /* Clean up: */
+ usedspace = freespace = 0;
+}
+
+void SHA256_Final(sha2_byte digest[], SHA256_CTX* context) {
+ sha2_word32 *d = (sha2_word32*)digest;
+ unsigned int usedspace;
+
+ /* Sanity check: */
+ assert(context != (SHA256_CTX*)0);
+
+ /* If no digest buffer is passed, we don't bother doing this: */
+ if (digest != (sha2_byte*)0) {
+ usedspace = (context->bitcount >> 3) % SHA256_BLOCK_LENGTH;
+#if BYTE_ORDER == LITTLE_ENDIAN
+ /* Convert FROM host byte order */
+ REVERSE64(context->bitcount,context->bitcount);
+#endif
+ if (usedspace > 0) {
+ /* Begin padding with a 1 bit: */
+ context->buffer[usedspace++] = 0x80;
+
+ if (usedspace <= SHA256_SHORT_BLOCK_LENGTH) {
+ /* Set-up for the last transform: */
+ MEMSET_BZERO(&context->buffer[usedspace], SHA256_SHORT_BLOCK_LENGTH - usedspace);
+ } else {
+ if (usedspace < SHA256_BLOCK_LENGTH) {
+ MEMSET_BZERO(&context->buffer[usedspace], SHA256_BLOCK_LENGTH - usedspace);
+ }
+ /* Do second-to-last transform: */
+ SHA256_Transform(context, (sha2_word32*)context->buffer);
+
+ /* And set-up for the last transform: */
+ MEMSET_BZERO(context->buffer, SHA256_SHORT_BLOCK_LENGTH);
+ }
+ } else {
+ /* Set-up for the last transform: */
+ MEMSET_BZERO(context->buffer, SHA256_SHORT_BLOCK_LENGTH);
+
+ /* Begin padding with a 1 bit: */
+ *context->buffer = 0x80;
+ }
+ /* Set the bit count: */
+ *(sha2_word64*)&context->buffer[SHA256_SHORT_BLOCK_LENGTH] = context->bitcount;
+
+ /* Final transform: */
+ SHA256_Transform(context, (sha2_word32*)context->buffer);
+
+#if BYTE_ORDER == LITTLE_ENDIAN
+ {
+ /* Convert TO host byte order */
+ int j;
+ for (j = 0; j < 8; j++) {
+ REVERSE32(context->state[j],context->state[j]);
+ *d++ = context->state[j];
+ }
+ }
+#else
+ MEMCPY_BCOPY(d, context->state, SHA256_DIGEST_LENGTH);
+#endif
+ }
+
+ /* Clean up state data: */
+ MEMSET_BZERO(context, sizeof(*context));
+ usedspace = 0;
+}
+
+char *SHA256_End(SHA256_CTX* context, char buffer[]) {
+ sha2_byte digest[SHA256_DIGEST_LENGTH], *d = digest;
+ int i;
+
+ /* Sanity check: */
+ assert(context != (SHA256_CTX*)0);
+
+ if (buffer != (char*)0) {
+ SHA256_Final(digest, context);
+
+ for (i = 0; i < SHA256_DIGEST_LENGTH; i++) {
+ *buffer++ = sha2_hex_digits[(*d & 0xf0) >> 4];
+ *buffer++ = sha2_hex_digits[*d & 0x0f];
+ d++;
+ }
+ *buffer = (char)0;
+ } else {
+ MEMSET_BZERO(context, sizeof(*context));
+ }
+ MEMSET_BZERO(digest, SHA256_DIGEST_LENGTH);
+ return buffer;
+}
+
+char* SHA256_Data(const sha2_byte* data, size_t len, char digest[SHA256_DIGEST_STRING_LENGTH]) {
+ SHA256_CTX context;
+
+ SHA256_Init(&context);
+ SHA256_Update(&context, data, len);
+ return SHA256_End(&context, digest);
+}
+
+
+/*** SHA-512: *********************************************************/
+void SHA512_Init(SHA512_CTX* context) {
+ if (context == (SHA512_CTX*)0) {
+ return;
+ }
+ MEMCPY_BCOPY(context->state, sha512_initial_hash_value, SHA512_DIGEST_LENGTH);
+ MEMSET_BZERO(context->buffer, SHA512_BLOCK_LENGTH);
+ context->bitcount[0] = context->bitcount[1] = 0;
+}
+
+#ifdef SHA2_UNROLL_TRANSFORM
+
+/* Unrolled SHA-512 round macros: */
+#if BYTE_ORDER == LITTLE_ENDIAN
+
+#define ROUND512_0_TO_15(a,b,c,d,e,f,g,h) \
+ REVERSE64(*data++, W512[j]); \
+ T1 = (h) + Sigma1_512(e) + Ch((e), (f), (g)) + \
+ K512[j] + W512[j]; \
+ (d) += T1, \
+ (h) = T1 + Sigma0_512(a) + Maj((a), (b), (c)), \
+ j++
+
+
+#else /* BYTE_ORDER == LITTLE_ENDIAN */
+
+#define ROUND512_0_TO_15(a,b,c,d,e,f,g,h) \
+ T1 = (h) + Sigma1_512(e) + Ch((e), (f), (g)) + \
+ K512[j] + (W512[j] = *data++); \
+ (d) += T1; \
+ (h) = T1 + Sigma0_512(a) + Maj((a), (b), (c)); \
+ j++
+
+#endif /* BYTE_ORDER == LITTLE_ENDIAN */
+
+#define ROUND512(a,b,c,d,e,f,g,h) \
+ s0 = W512[(j+1)&0x0f]; \
+ s0 = sigma0_512(s0); \
+ s1 = W512[(j+14)&0x0f]; \
+ s1 = sigma1_512(s1); \
+ T1 = (h) + Sigma1_512(e) + Ch((e), (f), (g)) + K512[j] + \
+ (W512[j&0x0f] += s1 + W512[(j+9)&0x0f] + s0); \
+ (d) += T1; \
+ (h) = T1 + Sigma0_512(a) + Maj((a), (b), (c)); \
+ j++
+
+void SHA512_Transform(SHA512_CTX* context, const sha2_word64* data) {
+ sha2_word64 a, b, c, d, e, f, g, h, s0, s1;
+ sha2_word64 T1, *W512 = (sha2_word64*)context->buffer;
+ int j;
+
+ /* Initialize registers with the prev. intermediate value */
+ a = context->state[0];
+ b = context->state[1];
+ c = context->state[2];
+ d = context->state[3];
+ e = context->state[4];
+ f = context->state[5];
+ g = context->state[6];
+ h = context->state[7];
+
+ j = 0;
+ do {
+ ROUND512_0_TO_15(a,b,c,d,e,f,g,h);
+ ROUND512_0_TO_15(h,a,b,c,d,e,f,g);
+ ROUND512_0_TO_15(g,h,a,b,c,d,e,f);
+ ROUND512_0_TO_15(f,g,h,a,b,c,d,e);
+ ROUND512_0_TO_15(e,f,g,h,a,b,c,d);
+ ROUND512_0_TO_15(d,e,f,g,h,a,b,c);
+ ROUND512_0_TO_15(c,d,e,f,g,h,a,b);
+ ROUND512_0_TO_15(b,c,d,e,f,g,h,a);
+ } while (j < 16);
+
+ /* Now for the remaining rounds up to 79: */
+ do {
+ ROUND512(a,b,c,d,e,f,g,h);
+ ROUND512(h,a,b,c,d,e,f,g);
+ ROUND512(g,h,a,b,c,d,e,f);
+ ROUND512(f,g,h,a,b,c,d,e);
+ ROUND512(e,f,g,h,a,b,c,d);
+ ROUND512(d,e,f,g,h,a,b,c);
+ ROUND512(c,d,e,f,g,h,a,b);
+ ROUND512(b,c,d,e,f,g,h,a);
+ } while (j < 80);
+
+ /* Compute the current intermediate hash value */
+ context->state[0] += a;
+ context->state[1] += b;
+ context->state[2] += c;
+ context->state[3] += d;
+ context->state[4] += e;
+ context->state[5] += f;
+ context->state[6] += g;
+ context->state[7] += h;
+
+ /* Clean up */
+ a = b = c = d = e = f = g = h = T1 = 0;
+}
+
+#else /* SHA2_UNROLL_TRANSFORM */
+
+void SHA512_Transform(SHA512_CTX* context, const sha2_word64* data) {
+ sha2_word64 a, b, c, d, e, f, g, h, s0, s1;
+ sha2_word64 T1, T2, *W512 = (sha2_word64*)context->buffer;
+ int j;
+
+ /* Initialize registers with the prev. intermediate value */
+ a = context->state[0];
+ b = context->state[1];
+ c = context->state[2];
+ d = context->state[3];
+ e = context->state[4];
+ f = context->state[5];
+ g = context->state[6];
+ h = context->state[7];
+
+ j = 0;
+ do {
+#if BYTE_ORDER == LITTLE_ENDIAN
+ /* Convert TO host byte order */
+ REVERSE64(*data++, W512[j]);
+ /* Apply the SHA-512 compression function to update a..h */
+ T1 = h + Sigma1_512(e) + Ch(e, f, g) + K512[j] + W512[j];
+#else /* BYTE_ORDER == LITTLE_ENDIAN */
+ /* Apply the SHA-512 compression function to update a..h with copy */
+ T1 = h + Sigma1_512(e) + Ch(e, f, g) + K512[j] + (W512[j] = *data++);
+#endif /* BYTE_ORDER == LITTLE_ENDIAN */
+ T2 = Sigma0_512(a) + Maj(a, b, c);
+ h = g;
+ g = f;
+ f = e;
+ e = d + T1;
+ d = c;
+ c = b;
+ b = a;
+ a = T1 + T2;
+
+ j++;
+ } while (j < 16);
+
+ do {
+ /* Part of the message block expansion: */
+ s0 = W512[(j+1)&0x0f];
+ s0 = sigma0_512(s0);
+ s1 = W512[(j+14)&0x0f];
+ s1 = sigma1_512(s1);
+
+ /* Apply the SHA-512 compression function to update a..h */
+ T1 = h + Sigma1_512(e) + Ch(e, f, g) + K512[j] +
+ (W512[j&0x0f] += s1 + W512[(j+9)&0x0f] + s0);
+ T2 = Sigma0_512(a) + Maj(a, b, c);
+ h = g;
+ g = f;
+ f = e;
+ e = d + T1;
+ d = c;
+ c = b;
+ b = a;
+ a = T1 + T2;
+
+ j++;
+ } while (j < 80);
+
+ /* Compute the current intermediate hash value */
+ context->state[0] += a;
+ context->state[1] += b;
+ context->state[2] += c;
+ context->state[3] += d;
+ context->state[4] += e;
+ context->state[5] += f;
+ context->state[6] += g;
+ context->state[7] += h;
+
+ /* Clean up */
+ a = b = c = d = e = f = g = h = T1 = T2 = 0;
+}
+
+#endif /* SHA2_UNROLL_TRANSFORM */
+
+void SHA512_Update(SHA512_CTX* context, const sha2_byte *data, size_t len) {
+ unsigned int freespace, usedspace;
+
+ if (len == 0) {
+ /* Calling with no data is valid - we do nothing */
+ return;
+ }
+
+ /* Sanity check: */
+ assert(context != (SHA512_CTX*)0 && data != (sha2_byte*)0);
+
+ usedspace = (context->bitcount[0] >> 3) % SHA512_BLOCK_LENGTH;
+ if (usedspace > 0) {
+ /* Calculate how much free space is available in the buffer */
+ freespace = SHA512_BLOCK_LENGTH - usedspace;
+
+ if (len >= freespace) {
+ /* Fill the buffer completely and process it */
+ MEMCPY_BCOPY(&context->buffer[usedspace], data, freespace);
+ ADDINC128(context->bitcount, freespace << 3);
+ len -= freespace;
+ data += freespace;
+ SHA512_Transform(context, (sha2_word64*)context->buffer);
+ } else {
+ /* The buffer is not yet full */
+ MEMCPY_BCOPY(&context->buffer[usedspace], data, len);
+ ADDINC128(context->bitcount, len << 3);
+ /* Clean up: */
+ usedspace = freespace = 0;
+ return;
+ }
+ }
+ while (len >= SHA512_BLOCK_LENGTH) {
+ /* Process as many complete blocks as we can */
+ SHA512_Transform(context, (sha2_word64*)data);
+ ADDINC128(context->bitcount, SHA512_BLOCK_LENGTH << 3);
+ len -= SHA512_BLOCK_LENGTH;
+ data += SHA512_BLOCK_LENGTH;
+ }
+ if (len > 0) {
+ /* There's left-overs, so save 'em */
+ MEMCPY_BCOPY(context->buffer, data, len);
+ ADDINC128(context->bitcount, len << 3);
+ }
+ /* Clean up: */
+ usedspace = freespace = 0;
+}
+
+void SHA512_Last(SHA512_CTX* context) {
+ unsigned int usedspace;
+
+ usedspace = (context->bitcount[0] >> 3) % SHA512_BLOCK_LENGTH;
+#if BYTE_ORDER == LITTLE_ENDIAN
+ /* Convert FROM host byte order */
+ REVERSE64(context->bitcount[0],context->bitcount[0]);
+ REVERSE64(context->bitcount[1],context->bitcount[1]);
+#endif
+ if (usedspace > 0) {
+ /* Begin padding with a 1 bit: */
+ context->buffer[usedspace++] = 0x80;
+
+ if (usedspace <= SHA512_SHORT_BLOCK_LENGTH) {
+ /* Set-up for the last transform: */
+ MEMSET_BZERO(&context->buffer[usedspace], SHA512_SHORT_BLOCK_LENGTH - usedspace);
+ } else {
+ if (usedspace < SHA512_BLOCK_LENGTH) {
+ MEMSET_BZERO(&context->buffer[usedspace], SHA512_BLOCK_LENGTH - usedspace);
+ }
+ /* Do second-to-last transform: */
+ SHA512_Transform(context, (sha2_word64*)context->buffer);
+
+ /* And set-up for the last transform: */
+ MEMSET_BZERO(context->buffer, SHA512_BLOCK_LENGTH - 2);
+ }
+ } else {
+ /* Prepare for final transform: */
+ MEMSET_BZERO(context->buffer, SHA512_SHORT_BLOCK_LENGTH);
+
+ /* Begin padding with a 1 bit: */
+ *context->buffer = 0x80;
+ }
+ /* Store the length of input data (in bits): */
+ *(sha2_word64*)&context->buffer[SHA512_SHORT_BLOCK_LENGTH] = context->bitcount[1];
+ *(sha2_word64*)&context->buffer[SHA512_SHORT_BLOCK_LENGTH+8] = context->bitcount[0];
+
+ /* Final transform: */
+ SHA512_Transform(context, (sha2_word64*)context->buffer);
+}
+
+void SHA512_Final(sha2_byte digest[], SHA512_CTX* context) {
+ sha2_word64 *d = (sha2_word64*)digest;
+
+ /* Sanity check: */
+ assert(context != (SHA512_CTX*)0);
+
+ /* If no digest buffer is passed, we don't bother doing this: */
+ if (digest != (sha2_byte*)0) {
+ SHA512_Last(context);
+
+ /* Save the hash data for output: */
+#if BYTE_ORDER == LITTLE_ENDIAN
+ {
+ /* Convert TO host byte order */
+ int j;
+ for (j = 0; j < 8; j++) {
+ REVERSE64(context->state[j],context->state[j]);
+ *d++ = context->state[j];
+ }
+ }
+#else
+ MEMCPY_BCOPY(d, context->state, SHA512_DIGEST_LENGTH);
+#endif
+ }
+
+ /* Zero out state data */
+ MEMSET_BZERO(context, sizeof(*context));
+}
+
+char *SHA512_End(SHA512_CTX* context, char buffer[]) {
+ sha2_byte digest[SHA512_DIGEST_LENGTH], *d = digest;
+ int i;
+
+ /* Sanity check: */
+ assert(context != (SHA512_CTX*)0);
+
+ if (buffer != (char*)0) {
+ SHA512_Final(digest, context);
+
+ for (i = 0; i < SHA512_DIGEST_LENGTH; i++) {
+ *buffer++ = sha2_hex_digits[(*d & 0xf0) >> 4];
+ *buffer++ = sha2_hex_digits[*d & 0x0f];
+ d++;
+ }
+ *buffer = (char)0;
+ } else {
+ MEMSET_BZERO(context, sizeof(*context));
+ }
+ MEMSET_BZERO(digest, SHA512_DIGEST_LENGTH);
+ return buffer;
+}
+
+char* SHA512_Data(const sha2_byte* data, size_t len, char digest[SHA512_DIGEST_STRING_LENGTH]) {
+ SHA512_CTX context;
+
+ SHA512_Init(&context);
+ SHA512_Update(&context, data, len);
+ return SHA512_End(&context, digest);
+}
+
+
+/*** SHA-384: *********************************************************/
+void SHA384_Init(SHA384_CTX* context) {
+ if (context == (SHA384_CTX*)0) {
+ return;
+ }
+ MEMCPY_BCOPY(context->state, sha384_initial_hash_value, SHA512_DIGEST_LENGTH);
+ MEMSET_BZERO(context->buffer, SHA384_BLOCK_LENGTH);
+ context->bitcount[0] = context->bitcount[1] = 0;
+}
+
+void SHA384_Update(SHA384_CTX* context, const sha2_byte* data, size_t len) {
+ SHA512_Update((SHA512_CTX*)context, data, len);
+}
+
+void SHA384_Final(sha2_byte digest[], SHA384_CTX* context) {
+ sha2_word64 *d = (sha2_word64*)digest;
+
+ /* Sanity check: */
+ assert(context != (SHA384_CTX*)0);
+
+ /* If no digest buffer is passed, we don't bother doing this: */
+ if (digest != (sha2_byte*)0) {
+ SHA512_Last((SHA512_CTX*)context);
+
+ /* Save the hash data for output: */
+#if BYTE_ORDER == LITTLE_ENDIAN
+ {
+ /* Convert TO host byte order */
+ int j;
+ for (j = 0; j < 6; j++) {
+ REVERSE64(context->state[j],context->state[j]);
+ *d++ = context->state[j];
+ }
+ }
+#else
+ MEMCPY_BCOPY(d, context->state, SHA384_DIGEST_LENGTH);
+#endif
+ }
+
+ /* Zero out state data */
+	MEMSET_BZERO(context, sizeof(*context));
+}
+
+char *SHA384_End(SHA384_CTX* context, char buffer[]) {
+ sha2_byte digest[SHA384_DIGEST_LENGTH], *d = digest;
+ int i;
+
+ /* Sanity check: */
+ assert(context != (SHA384_CTX*)0);
+
+ if (buffer != (char*)0) {
+ SHA384_Final(digest, context);
+
+ for (i = 0; i < SHA384_DIGEST_LENGTH; i++) {
+ *buffer++ = sha2_hex_digits[(*d & 0xf0) >> 4];
+ *buffer++ = sha2_hex_digits[*d & 0x0f];
+ d++;
+ }
+ *buffer = (char)0;
+ } else {
+	MEMSET_BZERO(context, sizeof(*context));
+ }
+ MEMSET_BZERO(digest, SHA384_DIGEST_LENGTH);
+ return buffer;
+}
+
+char* SHA384_Data(const sha2_byte* data, size_t len, char digest[SHA384_DIGEST_STRING_LENGTH]) {
+ SHA384_CTX context;
+
+ SHA384_Init(&context);
+ SHA384_Update(&context, data, len);
+ return SHA384_End(&context, digest);
+}
+
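As a quick orientation (not part of the patch itself), a minimal sketch of driving the internal C API above directly; apt's own code presumably goes through the new C++ hash wrappers instead, and the byte type and length macros come from sha2_internal.h:

    #include "sha2_internal.h"
    #include <cstdio>
    #include <cstring>

    int main()
    {
       // Sketch only: incrementally hash a buffer and print its hex digest.
       const char *msg = "The quick brown fox jumps over the lazy dog";
       SHA512_CTX ctx;
       char hex[SHA512_DIGEST_STRING_LENGTH];

       SHA512_Init(&ctx);
       SHA512_Update(&ctx, (const u_int8_t *)msg, strlen(msg)); // may be called repeatedly
       SHA512_End(&ctx, hex);  // writes 128 hex characters plus a trailing NUL into hex
       printf("%s\n", hex);
       return 0;
    }

SHA384 works the same way through the SHA384_* entry points with SHA384_DIGEST_STRING_LENGTH.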
diff --git a/apt-pkg/contrib/sha2_internal.h b/apt-pkg/contrib/sha2_internal.h
new file mode 100644
index 000000000..bf759ad45
--- /dev/null
+++ b/apt-pkg/contrib/sha2_internal.h
@@ -0,0 +1,197 @@
+/*
+ * FILE: sha2.h
+ * AUTHOR: Aaron D. Gifford - http://www.aarongifford.com/
+ *
+ * Copyright (c) 2000-2001, Aaron D. Gifford
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holder nor the names of contributors
+ * may be used to endorse or promote products derived from this software
+ * without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTOR(S) ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTOR(S) BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ *
+ * $Id: sha2.h,v 1.1 2001/11/08 00:02:01 adg Exp adg $
+ */
+
+#ifndef __SHA2_H__
+#define __SHA2_H__
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+/*
+ * Import u_intXX_t size_t type definitions from system headers. You
+ * may need to change this, or define these things yourself in this
+ * file.
+ */
+#include <sys/types.h>
+
+#ifdef SHA2_USE_INTTYPES_H
+
+#include <inttypes.h>
+
+#endif /* SHA2_USE_INTTYPES_H */
+
+
+/*** SHA-256/384/512 Various Length Definitions ***********************/
+#define SHA256_BLOCK_LENGTH 64
+#define SHA256_DIGEST_LENGTH 32
+#define SHA256_DIGEST_STRING_LENGTH (SHA256_DIGEST_LENGTH * 2 + 1)
+#define SHA384_BLOCK_LENGTH 128
+#define SHA384_DIGEST_LENGTH 48
+#define SHA384_DIGEST_STRING_LENGTH (SHA384_DIGEST_LENGTH * 2 + 1)
+#define SHA512_BLOCK_LENGTH 128
+#define SHA512_DIGEST_LENGTH 64
+#define SHA512_DIGEST_STRING_LENGTH (SHA512_DIGEST_LENGTH * 2 + 1)
+
+
+/*** SHA-256/384/512 Context Structures *******************************/
+/* NOTE: If your architecture does not define either u_intXX_t types or
+ * uintXX_t (from inttypes.h), you may need to define things by hand
+ * for your system:
+ */
+#if 0
+typedef unsigned char u_int8_t; /* 1-byte (8-bits) */
+typedef unsigned int u_int32_t; /* 4-bytes (32-bits) */
+typedef unsigned long long u_int64_t; /* 8-bytes (64-bits) */
+#endif
+/*
+ * Most BSD systems already define u_intXX_t types, as does Linux.
+ * Some systems, however, like Compaq's Tru64 Unix instead can use
+ * uintXX_t types defined by very recent ANSI C standards and included
+ * in the file:
+ *
+ * #include <inttypes.h>
+ *
+ * If you choose to use <inttypes.h> then please define:
+ *
+ * #define SHA2_USE_INTTYPES_H
+ *
+ * Or on the command line during compile:
+ *
+ * cc -DSHA2_USE_INTTYPES_H ...
+ */
+#ifdef SHA2_USE_INTTYPES_H
+
+typedef struct _SHA256_CTX {
+ uint32_t state[8];
+ uint64_t bitcount;
+ uint8_t buffer[SHA256_BLOCK_LENGTH];
+} SHA256_CTX;
+typedef struct _SHA512_CTX {
+ uint64_t state[8];
+ uint64_t bitcount[2];
+ uint8_t buffer[SHA512_BLOCK_LENGTH];
+} SHA512_CTX;
+
+#else /* SHA2_USE_INTTYPES_H */
+
+typedef struct _SHA256_CTX {
+ u_int32_t state[8];
+ u_int64_t bitcount;
+ u_int8_t buffer[SHA256_BLOCK_LENGTH];
+} SHA256_CTX;
+typedef struct _SHA512_CTX {
+ u_int64_t state[8];
+ u_int64_t bitcount[2];
+ u_int8_t buffer[SHA512_BLOCK_LENGTH];
+} SHA512_CTX;
+
+#endif /* SHA2_USE_INTTYPES_H */
+
+typedef SHA512_CTX SHA384_CTX;
+
+
+/*** SHA-256/384/512 Function Prototypes ******************************/
+#ifndef NOPROTO
+#ifdef SHA2_USE_INTTYPES_H
+
+void SHA256_Init(SHA256_CTX *);
+void SHA256_Update(SHA256_CTX*, const uint8_t*, size_t);
+void SHA256_Final(uint8_t[SHA256_DIGEST_LENGTH], SHA256_CTX*);
+char* SHA256_End(SHA256_CTX*, char[SHA256_DIGEST_STRING_LENGTH]);
+char* SHA256_Data(const uint8_t*, size_t, char[SHA256_DIGEST_STRING_LENGTH]);
+
+void SHA384_Init(SHA384_CTX*);
+void SHA384_Update(SHA384_CTX*, const uint8_t*, size_t);
+void SHA384_Final(uint8_t[SHA384_DIGEST_LENGTH], SHA384_CTX*);
+char* SHA384_End(SHA384_CTX*, char[SHA384_DIGEST_STRING_LENGTH]);
+char* SHA384_Data(const uint8_t*, size_t, char[SHA384_DIGEST_STRING_LENGTH]);
+
+void SHA512_Init(SHA512_CTX*);
+void SHA512_Update(SHA512_CTX*, const uint8_t*, size_t);
+void SHA512_Final(uint8_t[SHA512_DIGEST_LENGTH], SHA512_CTX*);
+char* SHA512_End(SHA512_CTX*, char[SHA512_DIGEST_STRING_LENGTH]);
+char* SHA512_Data(const uint8_t*, size_t, char[SHA512_DIGEST_STRING_LENGTH]);
+
+#else /* SHA2_USE_INTTYPES_H */
+
+void SHA256_Init(SHA256_CTX *);
+void SHA256_Update(SHA256_CTX*, const u_int8_t*, size_t);
+void SHA256_Final(u_int8_t[SHA256_DIGEST_LENGTH], SHA256_CTX*);
+char* SHA256_End(SHA256_CTX*, char[SHA256_DIGEST_STRING_LENGTH]);
+char* SHA256_Data(const u_int8_t*, size_t, char[SHA256_DIGEST_STRING_LENGTH]);
+
+void SHA384_Init(SHA384_CTX*);
+void SHA384_Update(SHA384_CTX*, const u_int8_t*, size_t);
+void SHA384_Final(u_int8_t[SHA384_DIGEST_LENGTH], SHA384_CTX*);
+char* SHA384_End(SHA384_CTX*, char[SHA384_DIGEST_STRING_LENGTH]);
+char* SHA384_Data(const u_int8_t*, size_t, char[SHA384_DIGEST_STRING_LENGTH]);
+
+void SHA512_Init(SHA512_CTX*);
+void SHA512_Update(SHA512_CTX*, const u_int8_t*, size_t);
+void SHA512_Final(u_int8_t[SHA512_DIGEST_LENGTH], SHA512_CTX*);
+char* SHA512_End(SHA512_CTX*, char[SHA512_DIGEST_STRING_LENGTH]);
+char* SHA512_Data(const u_int8_t*, size_t, char[SHA512_DIGEST_STRING_LENGTH]);
+
+#endif /* SHA2_USE_INTTYPES_H */
+
+#else /* NOPROTO */
+
+void SHA256_Init();
+void SHA256_Update();
+void SHA256_Final();
+char* SHA256_End();
+char* SHA256_Data();
+
+void SHA384_Init();
+void SHA384_Update();
+void SHA384_Final();
+char* SHA384_End();
+char* SHA384_Data();
+
+void SHA512_Init();
+void SHA512_Update();
+void SHA512_Final();
+char* SHA512_End();
+char* SHA512_Data();
+
+#endif /* NOPROTO */
+
+#ifdef __cplusplus
+}
+#endif /* __cplusplus */
+
+#endif /* __SHA2_H__ */
+
diff --git a/apt-pkg/contrib/strutl.cc b/apt-pkg/contrib/strutl.cc
index 987f4c3a4..072dda3ac 100644
--- a/apt-pkg/contrib/strutl.cc
+++ b/apt-pkg/contrib/strutl.cc
@@ -692,14 +692,16 @@ int StringToBool(const string &Text,int Default)
year 2000 complient and timezone neutral */
string TimeRFC1123(time_t Date)
{
- struct tm Conv = *gmtime(&Date);
- char Buf[300];
+ struct tm Conv;
+ if (gmtime_r(&Date, &Conv) == NULL)
+ return "";
+ char Buf[300];
const char *Day[] = {"Sun","Mon","Tue","Wed","Thu","Fri","Sat"};
const char *Month[] = {"Jan","Feb","Mar","Apr","May","Jun","Jul",
"Aug","Sep","Oct","Nov","Dec"};
- sprintf(Buf,"%s, %02i %s %i %02i:%02i:%02i GMT",Day[Conv.tm_wday],
+ snprintf(Buf, sizeof(Buf), "%s, %02i %s %i %02i:%02i:%02i GMT",Day[Conv.tm_wday],
Conv.tm_mday,Month[Conv.tm_mon],Conv.tm_year+1900,Conv.tm_hour,
Conv.tm_min,Conv.tm_sec);
return Buf;
@@ -968,6 +970,23 @@ bool StrToNum(const char *Str,unsigned long &Res,unsigned Len,unsigned Base)
return true;
}
/*}}}*/
+// Base256ToNum - Convert a fixed length binary to a number /*{{{*/
+// ---------------------------------------------------------------------
+/* This is used in decoding the base-256 encoded fixed-length fields in
+   tar files */
+bool Base256ToNum(const char *Str,unsigned long &Res,unsigned int Len)
+{
+ if ((Str[0] & 0x80) == 0)
+ return false;
+ else
+ {
+ Res = Str[0] & 0x7F;
+ for(unsigned int i = 1; i < Len; ++i)
+      Res = (Res<<8) + (unsigned char)Str[i]; // cast avoids sign-extension of high bytes
+ return true;
+ }
+}
+ /*}}}*/
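A rough usage sketch for the new helper (the byte values are invented for illustration): a 12-byte tar size field whose first byte carries the 0x80 marker decodes into a plain unsigned long.

    #include <apt-pkg/strutl.h>
    #include <cassert>

    void Base256Example()
    {
       // 0x80 flags base-256 encoding; the remaining bytes are big-endian.
       char const field[12] = { (char)0x80, 0,0,0,0,0,0,0,0, 0x01, 0x00, 0x00 };
       unsigned long size = 0;
       bool const ok = Base256ToNum(field, size, sizeof(field));
       assert(ok == true && size == 65536);  // 0x010000
    }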
// HexDigit - Convert a hex character into an integer /*{{{*/
// ---------------------------------------------------------------------
/* Helper for Hex2Num */
@@ -1174,6 +1193,15 @@ char *safe_snprintf(char *Buffer,char *End,const char *Format,...)
return Buffer + Did;
}
/*}}}*/
+// StripEpoch - Remove the version "epoch" from a version string /*{{{*/
+// ---------------------------------------------------------------------
+string StripEpoch(const string &VerStr)
+{
+ size_t i = VerStr.find(":");
+ if (i == string::npos)
+ return VerStr;
+ return VerStr.substr(i+1);
+}
+									/*}}}*/
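For illustration only (version strings made up), the helper drops everything up to and including the first colon:

    #include <apt-pkg/strutl.h>
    #include <cassert>

    void StripEpochExample()
    {
       assert(StripEpoch("1:7.6.q-16") == "7.6.q-16"); // epoch removed
       assert(StripEpoch("2.7.1-1") == "2.7.1-1");     // no epoch, unchanged
    }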
// tolower_ascii - tolower() function that ignores the locale /*{{{*/
// ---------------------------------------------------------------------
diff --git a/apt-pkg/contrib/strutl.h b/apt-pkg/contrib/strutl.h
index a457ff047..89cbf0370 100644
--- a/apt-pkg/contrib/strutl.h
+++ b/apt-pkg/contrib/strutl.h
@@ -52,6 +52,7 @@ string LookupTag(const string &Message,const char *Tag,const char *Default = 0);
int StringToBool(const string &Text,int Default = -1);
bool ReadMessages(int Fd, vector<string> &List);
bool StrToNum(const char *Str,unsigned long &Res,unsigned Len,unsigned Base = 0);
+bool Base256ToNum(const char *Str,unsigned long &Res,unsigned int Len);
bool Hex2Num(const string &Str,unsigned char *Num,unsigned int Length);
bool TokSplitString(char Tok,char *Input,char **List,
unsigned long ListMax);
@@ -61,6 +62,7 @@ void strprintf(string &out,const char *format,...) __like_printf(2);
char *safe_snprintf(char *Buffer,char *End,const char *Format,...) __like_printf(3);
bool CheckDomainList(const string &Host, const string &List);
int tolower_ascii(int const c) __attrib_const __hot;
+string StripEpoch(const string &VerStr);
#define APT_MKSTRCMP(name,func) \
inline int name(const char *A,const char *B) {return func(A,A+strlen(A),B,B+strlen(B));}; \
diff --git a/apt-pkg/contrib/weakptr.h b/apt-pkg/contrib/weakptr.h
index 5158e393c..8de727d89 100644
--- a/apt-pkg/contrib/weakptr.h
+++ b/apt-pkg/contrib/weakptr.h
@@ -22,6 +22,8 @@
#define WEAK_POINTER_H
#include <set>
+#include <stddef.h>
+
/**
* Class for objects providing support for weak pointers.
*
diff --git a/apt-pkg/deb/debindexfile.cc b/apt-pkg/deb/debindexfile.cc
index af1209ccb..1e8c04033 100644
--- a/apt-pkg/deb/debindexfile.cc
+++ b/apt-pkg/deb/debindexfile.cc
@@ -66,7 +66,10 @@ pkgSrcRecords::Parser *debSourcesIndex::CreateSrcParser() const
string SourcesURI = _config->FindDir("Dir::State::lists") +
URItoFileName(IndexURI("Sources"));
string SourcesURIgzip = SourcesURI + ".gz";
- if (!FileExists(SourcesURI) && FileExists(SourcesURIgzip))
+
+ if (!FileExists(SourcesURI) && !FileExists(SourcesURIgzip))
+ return NULL;
+ else if (!FileExists(SourcesURI) && FileExists(SourcesURIgzip))
SourcesURI = SourcesURIgzip;
return new debSrcRecordParser(SourcesURI,this);
@@ -324,8 +327,14 @@ bool debPackagesIndex::Merge(pkgCacheGenerator &Gen,OpProgress *Prog) const
return _error->Error("Problem with MergeList %s",PackageFile.c_str());
// Check the release file
- string ReleaseFile = debReleaseIndex(URI,Dist).MetaIndexFile("Release");
+ string ReleaseFile = debReleaseIndex(URI,Dist).MetaIndexFile("InRelease");
+ bool releaseExists = false;
if (FileExists(ReleaseFile) == true)
+ releaseExists = true;
+ else
+ ReleaseFile = debReleaseIndex(URI,Dist).MetaIndexFile("Release");
+
+ if (releaseExists == true || FileExists(ReleaseFile) == true)
{
FileFd Rel(ReleaseFile,FileFd::ReadOnly);
if (_error->PendingError() == true)
diff --git a/apt-pkg/deb/debindexfile.h b/apt-pkg/deb/debindexfile.h
index 0f8d4433f..678c22473 100644
--- a/apt-pkg/deb/debindexfile.h
+++ b/apt-pkg/deb/debindexfile.h
@@ -24,8 +24,10 @@ class debStatusIndex : public pkgIndexFile
{
/** \brief dpointer placeholder (for later in case we need it) */
void *d;
+
+ protected:
string File;
-
+
public:
virtual const Type *GetType() const;
@@ -38,6 +40,7 @@ class debStatusIndex : public pkgIndexFile
virtual bool HasPackages() const {return true;};
virtual unsigned long Size() const;
virtual bool Merge(pkgCacheGenerator &Gen,OpProgress *Prog) const;
+ bool Merge(pkgCacheGenerator &Gen,OpProgress *Prog, unsigned long const Flag) const;
virtual pkgCache::PkgFileIterator FindInCache(pkgCache &Cache) const;
debStatusIndex(string File);
diff --git a/apt-pkg/deb/deblistparser.cc b/apt-pkg/deb/deblistparser.cc
index 1b3bfd6ae..a94b79f05 100644
--- a/apt-pkg/deb/deblistparser.cc
+++ b/apt-pkg/deb/deblistparser.cc
@@ -67,23 +67,12 @@ string debListParser::Package() {
/*}}}*/
// ListParser::Architecture - Return the package arch /*{{{*/
// ---------------------------------------------------------------------
-/* This will return the Architecture of the package this section describes
- Note that architecture "all" packages will get the architecture of the
- Packages file parsed here. */
+/* This will return the Architecture of the package this section describes */
string debListParser::Architecture() {
- string const Result = Section.FindS("Architecture");
- if (Result.empty() == true || Result == "all")
- {
- if (Arch.empty() == true)
- /* FIXME: this is a problem for installed arch all
- packages as we don't know from which arch this
- package was installed - and therefore which
- dependency this package resolves. */
- return _config->Find("APT::Architecture");
- else
- return Arch;
- }
- return Result;
+ std::string const Arch = Section.FindS("Architecture");
+ if (Arch.empty() == true)
+ return _config->Find("APT::Architecture");
+ return Arch;
}
/*}}}*/
// ListParser::ArchitectureAll /*{{{*/
@@ -112,29 +101,35 @@ bool debListParser::NewVersion(pkgCache::VerIterator &Ver)
Ver->Section = UniqFindTagWrite("Section");
// Parse multi-arch
- if (Section.FindS("Architecture") == "all")
- /* Arch all packages can't have a Multi-Arch field,
- but we need a special treatment for them nonetheless */
- Ver->MultiArch = pkgCache::Version::All;
- else
- {
- string const MultiArch = Section.FindS("Multi-Arch");
- if (MultiArch.empty() == true)
- Ver->MultiArch = pkgCache::Version::None;
- else if (MultiArch == "same")
- Ver->MultiArch = pkgCache::Version::Same;
- else if (MultiArch == "foreign")
- Ver->MultiArch = pkgCache::Version::Foreign;
- else if (MultiArch == "allowed")
- Ver->MultiArch = pkgCache::Version::Allowed;
- else
+ string const MultiArch = Section.FindS("Multi-Arch");
+ if (MultiArch.empty() == true)
+ Ver->MultiArch = pkgCache::Version::None;
+ else if (MultiArch == "same") {
+ // Parse multi-arch
+ if (ArchitectureAll() == true)
{
- _error->Warning("Unknown Multi-Arch type »%s« for package »%s«",
- MultiArch.c_str(), Section.FindS("Package").c_str());
+ /* Arch all packages can't be Multi-Arch: same */
+ _error->Warning("Architecture: all package '%s' can't be Multi-Arch: same",
+ Section.FindS("Package").c_str());
Ver->MultiArch = pkgCache::Version::None;
}
+ else
+ Ver->MultiArch = pkgCache::Version::Same;
+ }
+ else if (MultiArch == "foreign")
+ Ver->MultiArch = pkgCache::Version::Foreign;
+ else if (MultiArch == "allowed")
+ Ver->MultiArch = pkgCache::Version::Allowed;
+ else
+ {
+ _error->Warning("Unknown Multi-Arch type '%s' for package '%s'",
+ MultiArch.c_str(), Section.FindS("Package").c_str());
+ Ver->MultiArch = pkgCache::Version::None;
}
+ if (ArchitectureAll() == true)
+ Ver->MultiArch |= pkgCache::Version::All;
+
// Archive Size
Ver->Size = Section.FindULL("Size");
// Unpacked Size (in K)
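A hedged summary of the MultiArch values the new parsing produces (the stanza field values are invented for illustration):

    // Multi-Arch: same,    Architecture: amd64  ->  Version::Same
    // Multi-Arch: foreign, Architecture: all    ->  Version::Foreign | Version::All
    // Multi-Arch: same,    Architecture: all    ->  Version::None | Version::All  (plus a warning)
    // (field absent),      Architecture: all    ->  Version::None | Version::All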
@@ -150,24 +145,6 @@ bool debListParser::NewVersion(pkgCache::VerIterator &Ver)
Ver->Priority = pkgCache::State::Extra;
}
- if (Ver->MultiArch == pkgCache::Version::All)
- {
- /* We maintain a "pseudo" arch=all package for architecture all versions
- on which these versions can depend on. This pseudo package is many used
- for downloading/installing: The other pseudo-packages will degenerate
- to a NOP in the download/install step - this package will ensure that
- it is downloaded only one time and installed only one time -- even if
- the architecture bound versions coming in and out on regular basis. */
- if (strcmp(Ver.Arch(true),"all") == 0)
- return true;
- else if (MultiArchEnabled == true)
- {
- // our pseudo packages have no size to not confuse the fetcher
- Ver->Size = 0;
- Ver->InstalledSize = 0;
- }
- }
-
if (ParseDepends(Ver,"Depends",pkgCache::Dep::Depends) == false)
return false;
if (ParseDepends(Ver,"Pre-Depends",pkgCache::Dep::PreDepends) == false)
@@ -302,18 +279,18 @@ unsigned short debListParser::VersionHash()
/* Strip out any spaces from the text, this undoes dpkgs reformatting
of certain fields. dpkg also has the rather interesting notion of
reformatting depends operators < -> <= */
- char *I = S;
+ char *J = S;
for (; Start != End; Start++)
{
if (isspace(*Start) == 0)
- *I++ = tolower_ascii(*Start);
+ *J++ = tolower_ascii(*Start);
if (*Start == '<' && Start[1] != '<' && Start[1] != '=')
- *I++ = '=';
+ *J++ = '=';
if (*Start == '>' && Start[1] != '>' && Start[1] != '=')
- *I++ = '=';
+ *J++ = '=';
}
- Result = AddCRC16(Result,S,I - S);
+ Result = AddCRC16(Result,S,J - S);
}
return Result;
@@ -505,7 +482,7 @@ const char *debListParser::ParseDepends(const char *Start,const char *Stop,
// Parse off the package name
const char *I = Start;
for (;I != Stop && isspace(*I) == 0 && *I != '(' && *I != ')' &&
- *I != ',' && *I != '|'; I++);
+ *I != ',' && *I != '|' && *I != '[' && *I != ']'; I++);
// Malformed, no '('
if (I != Stop && *I == ')')
@@ -644,13 +621,13 @@ bool debListParser::ParseDepends(pkgCache::VerIterator &Ver,
return true;
string Package;
- string const pkgArch = Ver.Arch(true);
+ string const pkgArch = Ver.Arch();
string Version;
unsigned int Op;
while (1)
{
- Start = ParseDepends(Start,Stop,Package,Version,Op);
+ Start = ParseDepends(Start,Stop,Package,Version,Op,false,!MultiArchEnabled);
if (Start == 0)
return _error->Error("Problem parsing dependency %s",Tag);
@@ -683,7 +660,7 @@ bool debListParser::ParseProvides(pkgCache::VerIterator &Ver)
{
string Package;
string Version;
- string const Arch = Ver.Arch(true);
+ string const Arch = Ver.Arch();
unsigned int Op;
while (1)
@@ -703,27 +680,28 @@ bool debListParser::ParseProvides(pkgCache::VerIterator &Ver)
}
}
- if (Ver->MultiArch == pkgCache::Version::Allowed)
+ if (MultiArchEnabled == false)
+ return true;
+ else if ((Ver->MultiArch & pkgCache::Version::Allowed) == pkgCache::Version::Allowed)
{
string const Package = string(Ver.ParentPkg().Name()).append(":").append("any");
- NewProvides(Ver, Package, "any", Ver.VerStr());
+ return NewProvidesAllArch(Ver, Package, Ver.VerStr());
}
+ else if ((Ver->MultiArch & pkgCache::Version::Foreign) == pkgCache::Version::Foreign)
+ return NewProvidesAllArch(Ver, Ver.ParentPkg().Name(), Ver.VerStr());
- if (Ver->MultiArch != pkgCache::Version::Foreign)
- return true;
-
- if (MultiArchEnabled == false)
- return true;
-
- string const Package = Ver.ParentPkg().Name();
- string const Version = Ver.VerStr();
+ return true;
+}
+ /*}}}*/
+// ListParser::NewProvides - add provides for all architectures /*{{{*/
+bool debListParser::NewProvidesAllArch(pkgCache::VerIterator &Ver, string const &Package,
+ string const &Version) {
for (std::vector<string>::const_iterator a = Architectures.begin();
a != Architectures.end(); ++a)
{
if (NewProvides(Ver, Package, *a, Version) == false)
return false;
}
-
return true;
}
/*}}}*/
@@ -768,7 +746,7 @@ bool debListParser::Step()
if (Architecture == Arch)
return true;
- if (Architecture == "all")
+ if (Architecture == "all" && Arch == _config->Find("APT::Architecture"))
return true;
}
@@ -783,45 +761,89 @@ bool debListParser::Step()
bool debListParser::LoadReleaseInfo(pkgCache::PkgFileIterator &FileI,
FileFd &File, string component)
{
- pkgTagFile Tags(&File, File.Size() + 256); // XXX
- pkgTagSection Section;
- if (Tags.Step(Section) == false)
- return false;
-
- // FIXME: Do we need it now for multi-arch?
- // mvo: I don't think we need to fill that in (it's unused since apt-0.6)
-// FileI->Architecture = WriteUniqString(Arch);
-
// apt-secure does no longer download individual (per-section) Release
// file. to provide Component pinning we use the section name now
FileI->Component = WriteUniqString(component);
- const char *Start;
- const char *Stop;
- if (Section.Find("Suite",Start,Stop) == true)
- FileI->Archive = WriteUniqString(Start,Stop - Start);
- if (Section.Find("Component",Start,Stop) == true)
- FileI->Component = WriteUniqString(Start,Stop - Start);
- if (Section.Find("Version",Start,Stop) == true)
- FileI->Version = WriteUniqString(Start,Stop - Start);
- if (Section.Find("Origin",Start,Stop) == true)
- FileI->Origin = WriteUniqString(Start,Stop - Start);
- if (Section.Find("Codename",Start,Stop) == true)
- FileI->Codename = WriteUniqString(Start,Stop - Start);
- if (Section.Find("Label",Start,Stop) == true)
- FileI->Label = WriteUniqString(Start,Stop - Start);
- if (Section.Find("Architecture",Start,Stop) == true)
- FileI->Architecture = WriteUniqString(Start,Stop - Start);
-
- if (Section.FindFlag("NotAutomatic",FileI->Flags,
- pkgCache::Flag::NotAutomatic) == false)
- _error->Warning("Bad NotAutomatic flag");
- if (Section.FindFlag("ButAutomaticUpgrades",FileI->Flags,
- pkgCache::Flag::ButAutomaticUpgrades) == false)
- _error->Warning("Bad ButAutomaticUpgrades flag");
- // overrule the NotAutomatic setting if needed as they are both present for compatibility
- else if ((FileI->Flags & pkgCache::Flag::ButAutomaticUpgrades) == pkgCache::Flag::ButAutomaticUpgrades)
- FileI->Flags &= ~pkgCache::Flag::NotAutomatic;
+ FILE* release = fdopen(dup(File.Fd()), "r");
+ if (release == NULL)
+ return false;
+
+ char buffer[101];
+ bool gpgClose = false;
+ while (fgets(buffer, sizeof(buffer), release) != NULL)
+ {
+ size_t len = 0;
+
+ // Skip empty lines
+      for (; buffer[len] == '\r' || buffer[len] == '\n'; ++len);
+ if (buffer[len] == '\0')
+ continue;
+
+      // only evaluate the first GPG section
+ if (strncmp("-----", buffer, 5) == 0)
+ {
+ if (gpgClose == true)
+ break;
+ gpgClose = true;
+ continue;
+ }
+
+      // separate the tag from the data
+ for (; buffer[len] != ':' && buffer[len] != '\0'; ++len);
+ if (buffer[len] == '\0')
+ continue;
+ char* dataStart = buffer + len;
+ for (++dataStart; *dataStart == ' '; ++dataStart);
+ char* dataEnd = dataStart;
+ for (++dataEnd; *dataEnd != '\0'; ++dataEnd);
+
+      // which data storage needs to be updated
+ map_ptrloc* writeTo = NULL;
+ if (buffer[0] == ' ')
+ ;
+ #define APT_PARSER_WRITETO(X, Y) else if (strncmp(Y, buffer, len) == 0) writeTo = &X;
+ APT_PARSER_WRITETO(FileI->Archive, "Suite")
+ APT_PARSER_WRITETO(FileI->Component, "Component")
+ APT_PARSER_WRITETO(FileI->Version, "Version")
+ APT_PARSER_WRITETO(FileI->Origin, "Origin")
+ APT_PARSER_WRITETO(FileI->Codename, "Codename")
+ APT_PARSER_WRITETO(FileI->Label, "Label")
+ #undef APT_PARSER_WRITETO
+ #define APT_PARSER_FLAGIT(X) else if (strncmp(#X, buffer, len) == 0) \
+ pkgTagSection::FindFlag(FileI->Flags, pkgCache::Flag:: X, dataStart, dataEnd-1);
+ APT_PARSER_FLAGIT(NotAutomatic)
+ APT_PARSER_FLAGIT(ButAutomaticUpgrades)
+ #undef APT_PARSER_FLAGIT
+
+ // load all data from the line and save it
+ string data;
+ if (writeTo != NULL)
+ data.append(dataStart, dataEnd);
+ if (sizeof(buffer) - 1 == (dataEnd - buffer))
+ {
+ while (fgets(buffer, sizeof(buffer), release) != NULL)
+ {
+ if (writeTo != NULL)
+ data.append(buffer);
+ if (strlen(buffer) != sizeof(buffer) - 1)
+ break;
+ }
+ }
+ if (writeTo != NULL)
+ {
+	 // strip trailing whitespace (CR/LF/space) from the data line
+ for (std::string::reverse_iterator s = data.rbegin();
+ s != data.rend(); ++s)
+ {
+ if (*s != '\r' && *s != '\n' && *s != ' ')
+ break;
+ *s = '\0';
+ }
+ *writeTo = WriteUniqString(data);
+ }
+ }
+ fclose(release);
return !_error->PendingError();
}
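For reference, a minimal Release header of the kind the loop above consumes (values are illustrative; only the listed tags plus the NotAutomatic/ButAutomaticUpgrades flags are stored):

    Origin: Debian
    Label: Debian
    Suite: unstable
    Codename: sid
    Version: 7.0
    Component: main
    NotAutomatic: no

Lines longer than the 100-byte read buffer are re-read by the inner fgets() loop, and in a clearsigned InRelease file only the first PGP-delimited block is evaluated.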
diff --git a/apt-pkg/deb/deblistparser.h b/apt-pkg/deb/deblistparser.h
index 54da938ec..41d712fbf 100644
--- a/apt-pkg/deb/deblistparser.h
+++ b/apt-pkg/deb/deblistparser.h
@@ -25,11 +25,12 @@ class debListParser : public pkgCacheGenerator::ListParser
const char *Str;
unsigned char Val;
};
-
+
private:
/** \brief dpointer placeholder (for later in case we need it) */
void *d;
-
+
+ protected:
pkgTagFile Tags;
pkgTagSection Section;
unsigned long iOffset;
@@ -38,10 +39,11 @@ class debListParser : public pkgCacheGenerator::ListParser
bool MultiArchEnabled;
unsigned long UniqFindTagWrite(const char *Tag);
- bool ParseStatus(pkgCache::PkgIterator &Pkg,pkgCache::VerIterator &Ver);
+ virtual bool ParseStatus(pkgCache::PkgIterator &Pkg,pkgCache::VerIterator &Ver);
bool ParseDepends(pkgCache::VerIterator &Ver,const char *Tag,
unsigned int Type);
bool ParseProvides(pkgCache::VerIterator &Ver);
+ bool NewProvidesAllArch(pkgCache::VerIterator &Ver, string const &Package, string const &Version);
static bool GrabWord(string Word,WordList *List,unsigned char &Out);
public:
@@ -71,7 +73,7 @@ class debListParser : public pkgCacheGenerator::ListParser
static const char *ParseDepends(const char *Start,const char *Stop,
string &Package,string &Ver,unsigned int &Op,
bool const &ParseArchFlags = false,
- bool const &StripMultiArch = false);
+ bool const &StripMultiArch = true);
static const char *ConvertRelation(const char *I,unsigned int &Op);
debListParser(FileFd *File, string const &Arch = "");
diff --git a/apt-pkg/deb/debmetaindex.cc b/apt-pkg/deb/debmetaindex.cc
index 717d0bcde..a91cc34e9 100644
--- a/apt-pkg/deb/debmetaindex.cc
+++ b/apt-pkg/deb/debmetaindex.cc
@@ -119,6 +119,29 @@ string debReleaseIndex::SourceIndexURI(const char *Type, const string &Section)
return URI + "dists/" + Dist + "/" + SourceIndexURISuffix(Type, Section);
}
+string debReleaseIndex::TranslationIndexURISuffix(const char *Type, const string &Section) const
+{
+ string Res ="";
+ if (Dist[Dist.size() - 1] != '/')
+ Res += Section + "/i18n/";
+ return Res + Type;
+}
+
+string debReleaseIndex::TranslationIndexURI(const char *Type, const string &Section) const
+{
+ string Res;
+ if (Dist[Dist.size() - 1] == '/')
+ {
+ if (Dist != "/")
+ Res = URI + Dist;
+ else
+ Res = URI;
+ return Res + Type;
+ }
+ else
+ return URI + "dists/" + Dist + "/" + TranslationIndexURISuffix(Type, Section);
+}
+
debReleaseIndex::debReleaseIndex(string const &URI, string const &Dist) {
this->URI = URI;
this->Dist = Dist;
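To make the two URI schemes above concrete (host and suite names are made up):

    // dists-style source, e.g. "deb http://example.org/debian unstable main":
    //   TranslationIndexURI("de", "main")
    //     -> http://example.org/debian/dists/unstable/main/i18n/de
    // flat repository, e.g. "deb http://example.org/repo ./":
    //   TranslationIndexURI("de", "")
    //     -> http://example.org/repo/./de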
@@ -155,6 +178,7 @@ vector <struct IndexTarget *>* debReleaseIndex::ComputeIndexTargets() const {
if (IndexTargets->empty() == false && ArchEntries.size() == 1)
return IndexTargets;
+ std::set<std::string> sections;
for (map<string, vector<debSectionEntry const*> >::const_iterator a = ArchEntries.begin();
a != ArchEntries.end(); ++a) {
if (a->first == "source")
@@ -167,6 +191,40 @@ vector <struct IndexTarget *>* debReleaseIndex::ComputeIndexTargets() const {
Target->URI = IndexURI(Target->ShortDesc.c_str(), (*I)->Section, a->first);
Target->Description = Info (Target->ShortDesc.c_str(), (*I)->Section, a->first);
IndexTargets->push_back (Target);
+ sections.insert((*I)->Section);
+ }
+ }
+
+ std::vector<std::string> const lang = APT::Configuration::getLanguages(true);
+ if (lang.empty() == true)
+ return IndexTargets;
+
+ // get the Translations:
+	// - if it's a dists-style repository, get the i18n/Index first
+	// - if it's flat, try to acquire the files by guessing
+ if (Dist[Dist.size() - 1] == '/') {
+ for (std::set<std::string>::const_iterator s = sections.begin();
+ s != sections.end(); ++s) {
+ for (std::vector<std::string>::const_iterator l = lang.begin();
+ l != lang.end(); l++) {
+ if (*l == "none") continue;
+ IndexTarget * Target = new OptionalIndexTarget();
+ Target->ShortDesc = "Translation-" + *l;
+ Target->MetaKey = TranslationIndexURISuffix(l->c_str(), *s);
+ Target->URI = TranslationIndexURI(l->c_str(), *s);
+ Target->Description = Info (Target->ShortDesc.c_str(), *s);
+ IndexTargets->push_back(Target);
+ }
+ }
+ } else {
+ for (std::set<std::string>::const_iterator s = sections.begin();
+ s != sections.end(); ++s) {
+ IndexTarget * Target = new OptionalIndexTarget();
+ Target->ShortDesc = "TranslationIndex";
+ Target->MetaKey = TranslationIndexURISuffix("Index", *s);
+ Target->URI = TranslationIndexURI("Index", *s);
+ Target->Description = Info (Target->ShortDesc.c_str(), *s);
+ IndexTargets->push_back (Target);
}
}
@@ -182,58 +240,34 @@ bool debReleaseIndex::GetIndexes(pkgAcquire *Owner, bool const &GetAll) const
new pkgAcqIndex(Owner, (*Target)->URI, (*Target)->Description,
(*Target)->ShortDesc, HashString());
}
- // this is normally created in pkgAcqMetaSig, but if we run
- // in --print-uris mode, we add it here
- new pkgAcqMetaIndex(Owner, MetaIndexURI("Release"),
- MetaIndexInfo("Release"), "Release",
- MetaIndexURI("Release.gpg"),
- ComputeIndexTargets(),
- new indexRecords (Dist));
-
}
- new pkgAcqMetaSig(Owner, MetaIndexURI("Release.gpg"),
- MetaIndexInfo("Release.gpg"), "Release.gpg",
- MetaIndexURI("Release"), MetaIndexInfo("Release"), "Release",
- ComputeIndexTargets(),
- new indexRecords (Dist));
-
- // Queue the translations
- std::vector<std::string> const lang = APT::Configuration::getLanguages(true);
- map<string, set<string> > sections;
- for (map<string, vector<debSectionEntry const*> >::const_iterator a = ArchEntries.begin();
- a != ArchEntries.end(); ++a) {
- if (a->first == "source")
- continue;
- for (vector<debSectionEntry const*>::const_iterator I = a->second.begin();
- I != a->second.end(); I++)
- sections[(*I)->Section].insert(lang.begin(), lang.end());
- }
-
- for (map<string, set<string> >::const_iterator s = sections.begin();
- s != sections.end(); ++s)
- for (set<string>::const_iterator l = s->second.begin();
- l != s->second.end(); l++) {
- if (*l == "none") continue;
- debTranslationsIndex i = debTranslationsIndex(URI,Dist,s->first,(*l).c_str());
- i.GetIndexes(Owner);
- }
+ new pkgAcqMetaClearSig(Owner, MetaIndexURI("InRelease"),
+ MetaIndexInfo("InRelease"), "InRelease",
+ MetaIndexURI("Release"), MetaIndexInfo("Release"), "Release",
+ MetaIndexURI("Release.gpg"), MetaIndexInfo("Release.gpg"), "Release.gpg",
+ ComputeIndexTargets(),
+ new indexRecords (Dist));
return true;
}
bool debReleaseIndex::IsTrusted() const
{
- string VerifiedSigFile = _config->FindDir("Dir::State::lists") +
- URItoFileName(MetaIndexURI("Release")) + ".gpg";
-
if(_config->FindB("APT::Authentication::TrustCDROM", false))
if(URI.substr(0,strlen("cdrom:")) == "cdrom:")
return true;
-
+
+ string VerifiedSigFile = _config->FindDir("Dir::State::lists") +
+ URItoFileName(MetaIndexURI("Release")) + ".gpg";
+
if (FileExists(VerifiedSigFile))
return true;
- return false;
+
+ VerifiedSigFile = _config->FindDir("Dir::State::lists") +
+ URItoFileName(MetaIndexURI("InRelease"));
+
+ return FileExists(VerifiedSigFile);
}
vector <pkgIndexFile *> *debReleaseIndex::GetIndexFiles() {
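Concretely, IsTrusted() now accepts either verification artefact in the lists directory (paths are illustrative, assuming the default Dir::State::lists):

    /var/lib/apt/lists/example.org_debian_dists_unstable_Release.gpg
    /var/lib/apt/lists/example.org_debian_dists_unstable_InRelease

so a repository fetched through a clearsigned InRelease file counts as trusted even without a detached Release.gpg.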
diff --git a/apt-pkg/deb/debmetaindex.h b/apt-pkg/deb/debmetaindex.h
index ffcc7c4bb..0aaf7f14a 100644
--- a/apt-pkg/deb/debmetaindex.h
+++ b/apt-pkg/deb/debmetaindex.h
@@ -39,6 +39,8 @@ class debReleaseIndex : public metaIndex {
string IndexURISuffix(const char *Type, string const &Section, string const &Arch="native") const;
string SourceIndexURI(const char *Type, const string &Section) const;
string SourceIndexURISuffix(const char *Type, const string &Section) const;
+ string TranslationIndexURI(const char *Type, const string &Section) const;
+ string TranslationIndexURISuffix(const char *Type, const string &Section) const;
virtual vector <pkgIndexFile *> *GetIndexFiles();
virtual bool IsTrusted() const;
diff --git a/apt-pkg/deb/debrecords.cc b/apt-pkg/deb/debrecords.cc
index ec9e395ef..1ca9ae1d2 100644
--- a/apt-pkg/deb/debrecords.cc
+++ b/apt-pkg/deb/debrecords.cc
@@ -77,7 +77,7 @@ string debRecordParser::SHA1Hash()
return Section.FindS("SHA1");
}
/*}}}*/
-// RecordParser::SHA1Hash - Return the archive hash /*{{{*/
+// RecordParser::SHA256Hash - Return the archive hash /*{{{*/
// ---------------------------------------------------------------------
/* */
string debRecordParser::SHA256Hash()
@@ -85,6 +85,14 @@ string debRecordParser::SHA256Hash()
return Section.FindS("SHA256");
}
/*}}}*/
+// RecordParser::SHA512Hash - Return the archive hash /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+string debRecordParser::SHA512Hash()
+{
+ return Section.FindS("SHA512");
+}
+ /*}}}*/
// RecordParser::Maintainer - Return the maintainer email /*{{{*/
// ---------------------------------------------------------------------
/* */
diff --git a/apt-pkg/deb/debrecords.h b/apt-pkg/deb/debrecords.h
index bbcb5640d..9692ac94c 100644
--- a/apt-pkg/deb/debrecords.h
+++ b/apt-pkg/deb/debrecords.h
@@ -39,6 +39,7 @@ class debRecordParser : public pkgRecords::Parser
virtual string MD5Hash();
virtual string SHA1Hash();
virtual string SHA256Hash();
+ virtual string SHA512Hash();
virtual string SourcePkg();
virtual string SourceVer();
diff --git a/apt-pkg/deb/debsrcrecords.cc b/apt-pkg/deb/debsrcrecords.cc
index 21336e1af..749305005 100644
--- a/apt-pkg/deb/debsrcrecords.cc
+++ b/apt-pkg/deb/debsrcrecords.cc
@@ -14,6 +14,7 @@
#include <apt-pkg/error.h>
#include <apt-pkg/strutl.h>
#include <apt-pkg/configuration.h>
+#include <apt-pkg/aptconfiguration.h>
using std::max;
/*}}}*/
@@ -111,7 +112,9 @@ bool debSrcRecordParser::Files(vector<pkgSrcRecords::File> &List)
string Base = Sect.FindS("Directory");
if (Base.empty() == false && Base[Base.length()-1] != '/')
Base += '/';
-
+
+ std::vector<std::string> const compExts = APT::Configuration::getCompressorExtensions();
+
// Iterate over the entire list grabbing each triplet
const char *C = Files.c_str();
while (*C != 0)
@@ -144,7 +147,8 @@ bool debSrcRecordParser::Files(vector<pkgSrcRecords::File> &List)
}
F.Type = string(F.Path,Tmp+1,Pos-Tmp);
- if (F.Type == "gz" || F.Type == "bz2" || F.Type == "lzma" || F.Type == "tar")
+ if (std::find(compExts.begin(), compExts.end(), std::string(".").append(F.Type)) != compExts.end() ||
+ F.Type == "tar")
{
Pos = Tmp-1;
continue;
diff --git a/apt-pkg/deb/dpkgpm.cc b/apt-pkg/deb/dpkgpm.cc
index 55525db85..cd7c4e5d5 100644
--- a/apt-pkg/deb/dpkgpm.cc
+++ b/apt-pkg/deb/dpkgpm.cc
@@ -324,6 +324,15 @@ bool pkgDPkgPM::RunScriptsWithPkgs(const char *Cnf)
SetCloseExec(STDIN_FILENO,false);
SetCloseExec(STDERR_FILENO,false);
+ if (_config->FindDir("DPkg::Chroot-Directory","/") != "/")
+ {
+ std::cerr << "Chrooting into "
+ << _config->FindDir("DPkg::Chroot-Directory")
+ << std::endl;
+ if (chroot(_config->FindDir("DPkg::Chroot-Directory","/").c_str()) != 0)
+ _exit(100);
+ }
+
const char *Args[4];
Args[0] = "/bin/sh";
Args[1] = "-c";
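The new chroot hook is driven purely by configuration; a sketch of what a caller might set (the path is illustrative):

    // apt.conf snippet
    DPkg::Chroot-Directory "/srv/chroots/sid";

With this set, the hook scripts spawned above (e.g. DPkg::Pre-Install-Pkgs) run inside the chroot, and a failing chroot(2) call makes the child exit with status 100.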
@@ -338,7 +347,6 @@ bool pkgDPkgPM::RunScriptsWithPkgs(const char *Cnf)
return _error->Errno("fdopen","Faild to open new FD");
// Feed it the filenames.
- bool Die = false;
if (Version <= 1)
{
for (vector<Item>::iterator I = List.begin(); I != List.end(); I++)
@@ -355,14 +363,11 @@ bool pkgDPkgPM::RunScriptsWithPkgs(const char *Cnf)
into the pipe. */
fprintf(F,"%s\n",I->File.c_str());
if (ferror(F) != 0)
- {
- Die = true;
break;
- }
}
}
else
- Die = !SendV2Pkgs(F);
+ SendV2Pkgs(F);
fclose(F);
@@ -402,8 +407,9 @@ void pkgDPkgPM::DoTerminalPty(int master)
{
// this happens when the child is about to exit, we
// give it time to actually exit, otherwise we run
- // into a race
- usleep(500000);
+ // into a race so we sleep for half a second.
+ struct timespec sleepfor = { 0, 500000000 };
+ nanosleep(&sleepfor, NULL);
return;
}
if(len <= 0)
@@ -901,7 +907,10 @@ bool pkgDPkgPM::Go(int OutStatusFd)
// Generate the argument list
const char *Args[MaxArgs + 50];
-
+ // keep track of allocated strings for multiarch package names
+ char *Packages[MaxArgs + 50];
+ unsigned int pkgcount = 0;
+
// Now check if we are within the MaxArgs limit
//
// this code below is problematic, because it may happen that
@@ -1009,16 +1018,27 @@ bool pkgDPkgPM::Go(int OutStatusFd)
}
else
{
+ string const nativeArch = _config->Find("APT::Architecture");
+ unsigned long const oldSize = I->Op == Item::Configure ? Size : 0;
for (;I != J && Size < MaxArgBytes; I++)
{
if((*I).Pkg.end() == true)
continue;
if (I->Op == Item::Configure && disappearedPkgs.find(I->Pkg.Name()) != disappearedPkgs.end())
continue;
- Args[n++] = I->Pkg.Name();
+ if (I->Pkg.Arch() == nativeArch || !strcmp(I->Pkg.Arch(), "all"))
+ Args[n++] = I->Pkg.Name();
+ else
+ {
+ Packages[pkgcount] = strdup(I->Pkg.FullName(false).c_str());
+ Args[n++] = Packages[pkgcount++];
+ }
Size += strlen(Args[n-1]);
- }
- }
+ }
+	 // skip configure action if all scheduled packages disappeared
+ if (oldSize == Size)
+ continue;
+ }
Args[n] = 0;
J = I;
@@ -1165,6 +1185,11 @@ bool pkgDPkgPM::Go(int OutStatusFd)
sigemptyset(&sigmask);
sigprocmask(SIG_BLOCK,&sigmask,&original_sigmask);
+ /* clean up the temporary allocation for multiarch package names in
+ the parent, so we don't leak memory when we return. */
+ for (unsigned int i = 0; i < pkgcount; i++)
+ free(Packages[i]);
+
// the result of the waitpid call
int res;
int select_ret;
@@ -1246,7 +1271,7 @@ bool pkgDPkgPM::Go(int OutStatusFd)
strprintf(d->dpkg_error, "Sub-process %s exited unexpectedly",Args[0]);
if(d->dpkg_error.size() > 0)
- _error->Error(d->dpkg_error.c_str());
+ _error->Error("%s", d->dpkg_error.c_str());
if(stopOnError)
{
@@ -1431,7 +1456,7 @@ void pkgDPkgPM::WriteApportReport(const char *pkgpath, const char *errormsg)
{
while( fgets(buf, sizeof(buf), log) != NULL)
fprintf(report, " %s", buf);
- fclose(log);
+ pclose(log);
}
}
@@ -1447,7 +1472,7 @@ void pkgDPkgPM::WriteApportReport(const char *pkgpath, const char *errormsg)
{
while( fgets(buf, sizeof(buf), log) != NULL)
fprintf(report, " %s", buf);
- fclose(log);
+ pclose(log);
}
}
diff --git a/apt-pkg/depcache.cc b/apt-pkg/depcache.cc
index 23abc76c1..1d905df2e 100644
--- a/apt-pkg/depcache.cc
+++ b/apt-pkg/depcache.cc
@@ -10,6 +10,7 @@
// Include Files /*{{{*/
#include <apt-pkg/depcache.h>
#include <apt-pkg/version.h>
+#include <apt-pkg/versionmatch.h>
#include <apt-pkg/error.h>
#include <apt-pkg/sptr.h>
#include <apt-pkg/algorithms.h>
@@ -69,7 +70,7 @@ void pkgDepCache::ActionGroup::release()
cache.MarkAndSweep();
}
- released = false;
+ released = true;
}
}
@@ -166,7 +167,7 @@ bool pkgDepCache::readStateFile(OpProgress *Prog) /*{{{*/
{
FileFd state_file;
string const state = _config->FindFile("Dir::State::extended_states");
- if(FileExists(state)) {
+ if(RealFileExists(state)) {
state_file.Open(state, FileFd::ReadOnly);
int const file_size = state_file.Size();
if(Prog != NULL)
@@ -225,7 +226,7 @@ bool pkgDepCache::writeStateFile(OpProgress *prog, bool InstalledOnly) /*{{{*/
string const state = _config->FindFile("Dir::State::extended_states");
// if it does not exist, create a empty one
- if(!FileExists(state))
+ if(!RealFileExists(state))
{
StateFile.Open(state, FileFd::WriteAtomic);
StateFile.Close();
@@ -338,8 +339,7 @@ bool pkgDepCache::CheckDep(DepIterator Dep,int Type,PkgIterator &Res)
/* Check simple depends. A depends -should- never self match but
we allow it anyhow because dpkg does. Technically it is a packaging
bug. Conflicts may never self match */
- if (Dep.TargetPkg()->Group != Dep.ParentPkg()->Group ||
- (Dep->Type != Dep::Conflicts && Dep->Type != Dep::DpkgBreaks && Dep->Type != Dep::Obsoletes))
+ if (Dep.TargetPkg() != Dep.ParentPkg() || Dep.IsNegative() == false)
{
PkgIterator Pkg = Dep.TargetPkg();
// Check the base package
@@ -369,8 +369,7 @@ bool pkgDepCache::CheckDep(DepIterator Dep,int Type,PkgIterator &Res)
{
/* Provides may never be applied against the same package (or group)
if it is a conflicts. See the comment above. */
- if (P.OwnerPkg()->Group == Pkg->Group &&
- (Dep->Type == Dep::Conflicts || Dep->Type == Dep::DpkgBreaks))
+ if (P.OwnerPkg()->Group == Pkg->Group && Dep.IsNegative() == true)
continue;
// Check if the provides is a hit
@@ -407,60 +406,8 @@ bool pkgDepCache::CheckDep(DepIterator Dep,int Type,PkgIterator &Res)
/*}}}*/
// DepCache::AddSizes - Add the packages sizes to the counters /*{{{*/
// ---------------------------------------------------------------------
-/* Call with Mult = -1 to preform the inverse opration
- The Mult increases the complexity of the calulations here and is unused -
- or do we really have a usecase for removing the size of a package two
- times? So let us replace it with a simple bool and be done with it… */
-__deprecated void pkgDepCache::AddSizes(const PkgIterator &Pkg,signed long Mult)
-{
- StateCache &P = PkgState[Pkg->ID];
-
- if (Pkg->VersionList == 0)
- return;
-
- if (Pkg.State() == pkgCache::PkgIterator::NeedsConfigure &&
- P.Keep() == true)
- return;
-
- // Compute the size data
- if (P.NewInstall() == true)
- {
- iUsrSize += (signed long long)(Mult*P.InstVerIter(*this)->InstalledSize);
- iDownloadSize += (signed long long)(Mult*P.InstVerIter(*this)->Size);
- return;
- }
-
- // Upgrading
- if (Pkg->CurrentVer != 0 &&
- (P.InstallVer != (Version *)Pkg.CurrentVer() ||
- (P.iFlags & ReInstall) == ReInstall) && P.InstallVer != 0)
- {
- iUsrSize += (signed long long)(Mult*((signed long long)P.InstVerIter(*this)->InstalledSize -
- (signed long long)Pkg.CurrentVer()->InstalledSize));
- iDownloadSize += (signed long long)(Mult*P.InstVerIter(*this)->Size);
- return;
- }
-
- // Reinstall
- if (Pkg.State() == pkgCache::PkgIterator::NeedsUnpack &&
- P.Delete() == false)
- {
- iDownloadSize += (signed long long)(Mult*P.InstVerIter(*this)->Size);
- return;
- }
-
- // Removing
- if (Pkg->CurrentVer != 0 && P.InstallVer == 0)
- {
- iUsrSize -= (signed long long)(Mult*Pkg.CurrentVer()->InstalledSize);
- return;
- }
-}
- /*}}}*/
-// DepCache::AddSizes - Add the packages sizes to the counters /*{{{*/
-// ---------------------------------------------------------------------
/* Call with Inverse = true to preform the inverse opration */
-void pkgDepCache::AddSizes(const PkgIterator &Pkg, bool const &Inverse)
+void pkgDepCache::AddSizes(const PkgIterator &Pkg, bool const Inverse)
{
StateCache &P = PkgState[Pkg->ID];
@@ -531,8 +478,9 @@ void pkgDepCache::AddSizes(const PkgIterator &Pkg, bool const &Inverse)
calld Remove/Add itself. Remember, dependencies can be circular so
while processing a dep for Pkg it is possible that Add/Remove
will be called on Pkg */
-void pkgDepCache::AddStates(const PkgIterator &Pkg,int Add)
+void pkgDepCache::AddStates(const PkgIterator &Pkg, bool const Invert)
{
+ signed char const Add = (Invert == false) ? 1 : -1;
StateCache &State = PkgState[Pkg->ID];
// The Package is broken (either minimal dep or policy dep)
@@ -548,8 +496,8 @@ void pkgDepCache::AddStates(const PkgIterator &Pkg,int Add)
// Not installed
if (Pkg->CurrentVer == 0)
{
- if (State.Mode == ModeDelete &&
- (State.iFlags | Purge) == Purge && Pkg.Purge() == false)
+ if (State.Mode == ModeDelete &&
+ (State.iFlags & Purge) == Purge && Pkg.Purge() == false)
iDelCount += Add;
if (State.Mode == ModeInstall)
@@ -593,9 +541,7 @@ void pkgDepCache::BuildGroupOrs(VerIterator const &V)
/* Invert for Conflicts. We have to do this twice to get the
right sense for a conflicts group */
- if (D->Type == Dep::Conflicts ||
- D->Type == Dep::DpkgBreaks ||
- D->Type == Dep::Obsoletes)
+ if (D.IsNegative() == true)
State = ~State;
// Add to the group if we are within an or..
@@ -606,9 +552,7 @@ void pkgDepCache::BuildGroupOrs(VerIterator const &V)
Group = 0;
// Invert for Conflicts
- if (D->Type == Dep::Conflicts ||
- D->Type == Dep::DpkgBreaks ||
- D->Type == Dep::Obsoletes)
+ if (D.IsNegative() == true)
State = ~State;
}
}
@@ -704,107 +648,6 @@ void pkgDepCache::UpdateVerState(PkgIterator Pkg)
}
}
/*}}}*/
-// DepCache::RemovePseudoInstalledPkg - MultiArch helper for Update() /*{{{*/
-// ---------------------------------------------------------------------
-/* We "install" arch all packages for all archs if it is installed. Many
- of these will be broken. This method will look at these broken Pkg and
- "remove" it. */
-bool pkgDepCache::RemovePseudoInstalledPkg(PkgIterator &Pkg, std::set<unsigned long> &recheck) {
- if (unlikely(Pkg->CurrentVer == 0))
- return false;
-
- VerIterator V = Pkg.CurrentVer();
- if (V->MultiArch != Version::All)
- return false;
-
- // Never ever kill an "all" package - they have no dependency so they can't be broken
- if (strcmp(Pkg.Arch(),"all") == 0)
- return false;
-
- unsigned char const CurDepState = VersionState(V.DependsList(),DepInstall,DepInstMin,DepInstPolicy);
- if ((CurDepState & DepInstMin) == DepInstMin) {
- // okay, the package isn't broken, but is the package also required?
- // If it has no real dependencies, no installed rdepends and doesn't
- // provide something of value, we will kill it as not required.
- // These pseudopackages have otherwise interesting effects if they get
- // a new dependency in a newer version…
- for (pkgCache::DepIterator D = V.DependsList();
- D.end() != true; ++D)
- if (D.IsCritical() == true && D.ParentPkg()->Group != Pkg->Group)
- return false;
- for (DepIterator D = Pkg.RevDependsList(); D.end() != true; ++D)
- {
- if (D.IsCritical() == false)
- continue;
- PkgIterator const P = D.ParentPkg();
- if (P->Group == Pkg->Group)
- continue;
- if (P->CurrentVer != 0)
- return false;
- }
- for (PrvIterator Prv = V.ProvidesList(); Prv.end() != true; Prv++)
- for (DepIterator d = Prv.ParentPkg().RevDependsList();
- d.end() != true; ++d)
- {
- PkgIterator const P = d.ParentPkg();
- if (P->CurrentVer != 0 &&
- P->Group != Pkg->Group)
- return false;
- }
- }
-
- // Dependencies for this arch all package are not statisfied
- // so we installed it only for our convenience: get right of it now.
- RemoveSizes(Pkg);
- RemoveStates(Pkg);
-
- Pkg->CurrentVer = 0;
- PkgState[Pkg->ID].InstallVer = 0;
-
- AddStates(Pkg);
- Update(Pkg);
- AddSizes(Pkg);
-
- // After the remove previously satisfied pseudo pkg could be now
- // no longer satisfied, so we need to recheck the reverse dependencies
- for (DepIterator d = Pkg.RevDependsList(); d.end() != true; ++d)
- {
- PkgIterator const P = d.ParentPkg();
- if (P->CurrentVer != 0)
- recheck.insert(P.Index());
- }
-
- for (DepIterator d = V.DependsList(); d.end() != true; ++d)
- {
- PkgIterator const P = d.TargetPkg();
- for (PrvIterator Prv = P.ProvidesList(); Prv.end() != true; ++Prv)
- {
- PkgIterator const O = Prv.OwnerPkg();
- if (O->CurrentVer != 0)
- recheck.insert(O.Index());
- }
-
- if (P->CurrentVer != 0)
- recheck.insert(P.Index());
- }
-
- for (PrvIterator Prv = V.ProvidesList(); Prv.end() != true; Prv++)
- {
- for (DepIterator d = Prv.ParentPkg().RevDependsList();
- d.end() != true; ++d)
- {
- PkgIterator const P = d.ParentPkg();
- if (P->CurrentVer == 0)
- continue;
-
- recheck.insert(P.Index());
- }
- }
-
-
- return true;
-}
- /*}}}*/
// DepCache::Update - Figure out all the state information /*{{{*/
// ---------------------------------------------------------------------
/* This will figure out the state of all the packages and all the
@@ -819,12 +662,8 @@ void pkgDepCache::Update(OpProgress *Prog)
iBrokenCount = 0;
iBadCount = 0;
- std::set<unsigned long> recheck;
-
// Perform the depends pass
int Done = 0;
- bool const checkMultiArch = APT::Configuration::getArchitectures().size() > 1;
- unsigned long killed = 0;
for (PkgIterator I = PkgBegin(); I.end() != true; I++,Done++)
{
if (Prog != 0 && Done%20 == 0)
@@ -846,9 +685,7 @@ void pkgDepCache::Update(OpProgress *Prog)
Group = 0;
// Invert for Conflicts
- if (D->Type == Dep::Conflicts ||
- D->Type == Dep::DpkgBreaks ||
- D->Type == Dep::Obsoletes)
+ if (D.IsNegative() == true)
State = ~State;
}
}
@@ -857,69 +694,6 @@ void pkgDepCache::Update(OpProgress *Prog)
AddSizes(I);
UpdateVerState(I);
AddStates(I);
-
- if (checkMultiArch != true || I->CurrentVer == 0)
- continue;
-
- VerIterator const V = I.CurrentVer();
- if (V->MultiArch != Version::All)
- continue;
-
- recheck.insert(I.Index());
- --Done; // no progress if we need to recheck the package
- }
-
- if (checkMultiArch == true) {
- /* FIXME: recheck breaks proper progress reporting as we don't know
- how many packages we need to recheck. To lower the effect
- a bit we increase with a kill, but we should do something more clever… */
- while(recheck.empty() == false)
- for (std::set<unsigned long>::const_iterator p = recheck.begin();
- p != recheck.end();) {
- if (Prog != 0 && Done%20 == 0)
- Prog->Progress(Done);
- PkgIterator P = PkgIterator(*Cache, Cache->PkgP + *p);
- if (RemovePseudoInstalledPkg(P, recheck) == true) {
- ++killed;
- ++Done;
- }
- recheck.erase(p++);
- }
-
- /* Okay, we have killed a great amount of pseudopackages -
- we have killed so many that we have now arch "all" packages
- without an installed pseudo package, but we NEED an installed
- pseudo package, so we will search now for a pseudo package
- we can install without breaking everything. */
- for (GrpIterator G = Cache->GrpBegin(); G.end() != true; ++G)
- {
- PkgIterator P = G.FindPkg("all");
- if (P.end() == true)
- continue;
- if (P->CurrentVer == 0)
- continue;
- bool installed = false;
- for (P = G.FindPkg("any"); P.end() != true; P = G.NextPkg(P))
- {
- if (strcmp(P.Arch(), "all") == 0)
- continue;
- if (P->CurrentVer == 0)
- continue;
- installed = true;
- break;
- }
- if (installed == false)
- recheck.insert(G.Index());
- }
-
- while (recheck.empty() != true)
- {
- std::set<unsigned long>::const_iterator g = recheck.begin();
- unsigned long const G = *g;
- recheck.erase(g);
- if (unlikely(ReInstallPseudoForGroup(G, recheck) == false))
- _error->Warning(_("Internal error, group '%s' has no installable pseudo package"), GrpIterator(*Cache, Cache->GrpP + G).Name());
- }
}
if (Prog != 0)
@@ -928,80 +702,6 @@ void pkgDepCache::Update(OpProgress *Prog)
readStateFile(Prog);
}
/*}}}*/
-// DepCache::ReInstallPseudoForGroup - MultiArch helper for Update() /*{{{*/
-// ---------------------------------------------------------------------
-/* RemovePseudoInstalledPkg() is very successful. It even kills packages
- to an amount that no pseudo package is left, but we need a pseudo package
- for upgrading senarios so we need to reinstall one pseudopackage which
- doesn't break everything. Thankfully we can't have architecture depending
- negative dependencies so this problem is already eliminated */
-bool pkgDepCache::ReInstallPseudoForGroup(pkgCache::PkgIterator const &P, std::set<unsigned long> &recheck)
-{
- if (P->CurrentVer != 0)
- return true;
- // recursive call for packages which provide this package
- for (pkgCache::PrvIterator Prv = P.ProvidesList(); Prv.end() != true; ++Prv)
- ReInstallPseudoForGroup(Prv.OwnerPkg(), recheck);
- // check if we actually need to look at this group
- unsigned long const G = P->Group;
- std::set<unsigned long>::const_iterator Pi = recheck.find(G);
- if (Pi == recheck.end())
- return true;
- recheck.erase(Pi); // remove here, so we can't fall into an endless loop
- if (unlikely(ReInstallPseudoForGroup(G, recheck) == false))
- {
- recheck.insert(G);
- return false;
- }
- return true;
-}
-bool pkgDepCache::ReInstallPseudoForGroup(unsigned long const &G, std::set<unsigned long> &recheck)
-{
- std::vector<std::string> static const Archs = APT::Configuration::getArchitectures();
- pkgCache::GrpIterator Grp(*Cache, Cache->GrpP + G);
- if (unlikely(Grp.end() == true))
- return false;
- for (std::vector<std::string>::const_iterator a = Archs.begin();
- a != Archs.end(); ++a)
- {
- pkgCache::PkgIterator P = Grp.FindPkg(*a);
- if (P.end() == true)
- continue;
- pkgCache::VerIterator allV = Grp.FindPkg("all").CurrentVer();
- for (VerIterator V = P.VersionList(); V.end() != true; ++V)
- {
- // search for the same version as the all package
- if (allV->Hash != V->Hash || strcmp(allV.VerStr(),V.VerStr()) != 0)
- continue;
- unsigned char const CurDepState = VersionState(V.DependsList(),DepInstall,DepInstMin,DepInstPolicy);
- // If it is broken, try to install dependencies first before retry
- if ((CurDepState & DepInstMin) != DepInstMin)
- {
- for (pkgCache::DepIterator D = V.DependsList(); D.end() != true; ++D)
- {
- if (D->Type != pkgCache::Dep::PreDepends && D->Type != pkgCache::Dep::Depends)
- continue;
- ReInstallPseudoForGroup(D.TargetPkg(), recheck);
- }
- unsigned char const CurDepState = VersionState(V.DependsList(),DepInstall,DepInstMin,DepInstPolicy);
- // if package ist still broken… try another arch
- if ((CurDepState & DepInstMin) != DepInstMin)
- break;
- }
- // dependencies satisfied: reinstall the package
- RemoveSizes(P);
- RemoveStates(P);
- P->CurrentVer = V.Index();
- PkgState[P->ID].InstallVer = V;
- AddStates(P);
- Update(P);
- AddSizes(P);
- return true;
- }
- }
- return false;
-}
- /*}}}*/
// DepCache::Update - Update the deps list of a package /*{{{*/
// ---------------------------------------------------------------------
/* This is a helper for update that only does the dep portion of the scan.
@@ -1015,9 +715,7 @@ void pkgDepCache::Update(DepIterator D)
State = DependencyState(D);
// Invert for Conflicts
- if (D->Type == Dep::Conflicts ||
- D->Type == Dep::DpkgBreaks ||
- D->Type == Dep::Obsoletes)
+ if (D.IsNegative() == true)
State = ~State;
RemoveStates(D.ParentPkg());
@@ -1057,37 +755,33 @@ void pkgDepCache::Update(PkgIterator const &Pkg)
// DepCache::MarkKeep - Put the package in the keep state /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgDepCache::MarkKeep(PkgIterator const &Pkg, bool Soft, bool FromUser,
+bool pkgDepCache::MarkKeep(PkgIterator const &Pkg, bool Soft, bool FromUser,
unsigned long Depth)
{
- // Simplifies other routines.
- if (Pkg.end() == true)
- return;
+ if (IsModeChangeOk(ModeKeep, Pkg, Depth, FromUser) == false)
+ return false;
/* Reject an attempt to keep a non-source broken installed package, those
must be upgraded */
if (Pkg.State() == PkgIterator::NeedsUnpack &&
Pkg.CurrentVer().Downloadable() == false)
- return;
-
- /** \todo Can this be moved later in the method? */
- ActionGroup group(*this);
+ return false;
/* We changed the soft state all the time so the UI is a bit nicer
to use */
StateCache &P = PkgState[Pkg->ID];
+
+ // Check that it is not already kept
+ if (P.Mode == ModeKeep)
+ return true;
+
if (Soft == true)
P.iFlags |= AutoKept;
else
P.iFlags &= ~AutoKept;
-
- // Check that it is not already kept
- if (P.Mode == ModeKeep)
- return;
- // We dont even try to keep virtual packages..
- if (Pkg->VersionList == 0)
- return;
+ ActionGroup group(*this);
+
#if 0 // reseting the autoflag here means we lose the
// auto-mark information if a user selects a package for removal
// but changes his mind then and sets it for keep again
@@ -1112,41 +806,37 @@ void pkgDepCache::MarkKeep(PkgIterator const &Pkg, bool Soft, bool FromUser,
P.InstallVer = Pkg.CurrentVer();
AddStates(Pkg);
-
Update(Pkg);
-
AddSizes(Pkg);
+
+ return true;
}
/*}}}*/
// DepCache::MarkDelete - Put the package in the delete state /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgDepCache::MarkDelete(PkgIterator const &Pkg, bool rPurge,
+bool pkgDepCache::MarkDelete(PkgIterator const &Pkg, bool rPurge,
unsigned long Depth, bool FromUser)
{
- // Simplifies other routines.
- if (Pkg.end() == true)
- return;
+ if (IsModeChangeOk(ModeDelete, Pkg, Depth, FromUser) == false)
+ return false;
- ActionGroup group(*this);
+ StateCache &P = PkgState[Pkg->ID];
// Check that it is not already marked for delete
- StateCache &P = PkgState[Pkg->ID];
- P.iFlags &= ~(AutoKept | Purge);
- if (rPurge == true)
- P.iFlags |= Purge;
-
if ((P.Mode == ModeDelete || P.InstallVer == 0) &&
(Pkg.Purge() == true || rPurge == false))
- return;
-
- // We dont even try to delete virtual packages..
- if (Pkg->VersionList == 0)
- return;
+ return true;
- // check if we are allowed to install the package
+ // check if we are allowed to remove the package
if (IsDeleteOk(Pkg,rPurge,Depth,FromUser) == false)
- return;
+ return false;
+
+ P.iFlags &= ~(AutoKept | Purge);
+ if (rPurge == true)
+ P.iFlags |= Purge;
+
+ ActionGroup group(*this);
if (DebugMarker == true)
std::clog << OutputInDepth(Depth) << (rPurge ? "MarkPurge " : "MarkDelete ") << Pkg << " FU=" << FromUser << std::endl;
@@ -1164,35 +854,17 @@ void pkgDepCache::MarkDelete(PkgIterator const &Pkg, bool rPurge,
Update(Pkg);
AddSizes(Pkg);
- // if we remove the pseudo package, we also need to remove the "real"
- if (Pkg->CurrentVer != 0 && Pkg.CurrentVer().Pseudo() == true)
- MarkDelete(Pkg.Group().FindPkg("all"), rPurge, Depth+1, FromUser);
- else if (rPurge == true && Pkg->CurrentVer == 0 &&
- Pkg->CurrentState != pkgCache::State::NotInstalled &&
- strcmp(Pkg.Arch(), "all") != 0)
- {
- PkgIterator const allPkg = Pkg.Group().FindPkg("all");
- if (allPkg.end() == false && allPkg->CurrentVer == 0 &&
- allPkg->CurrentState != pkgCache::State::NotInstalled)
- MarkDelete(allPkg, rPurge, Depth+1, FromUser);
- }
+ return true;
}
/*}}}*/
// DepCache::IsDeleteOk - check if it is ok to remove this package /*{{{*/
// ---------------------------------------------------------------------
-/* The default implementation just honors dpkg hold
- But an application using this library can override this method
- to control the MarkDelete behaviour */
+/* The default implementation tries to prevent deletion of install requests.
+ dpkg holds are enforced by the private IsModeChangeOk */
bool pkgDepCache::IsDeleteOk(PkgIterator const &Pkg,bool rPurge,
unsigned long Depth, bool FromUser)
{
- if (FromUser == false && Pkg->SelectedState == pkgCache::State::Hold && _config->FindB("APT::Ignore-Hold",false) == false)
- {
- if (DebugMarker == true)
- std::clog << OutputInDepth(Depth) << "Hold prevents MarkDelete of " << Pkg << " FU=" << FromUser << std::endl;
- return false;
- }
- else if (FromUser == false && Pkg->CurrentVer == 0)
+ if (FromUser == false && Pkg->CurrentVer == 0)
{
StateCache &P = PkgState[Pkg->ID];
if (P.InstallVer != 0 && P.Status == 2 && (P.Flags & Flag::Auto) != Flag::Auto)
@@ -1205,45 +877,94 @@ bool pkgDepCache::IsDeleteOk(PkgIterator const &Pkg,bool rPurge,
return true;
}
/*}}}*/
+// DepCache::IsModeChangeOk - check if it is ok to change the mode /*{{{*/
+// ---------------------------------------------------------------------
+/* this is used by all Mark methods on the very first line to check sanity
+ and prevents mode changes for packages on hold, for example.
+ If you want to check Mode-specific stuff you can use the virtual public
+ Is<Mode>Ok methods instead */
+char const* PrintMode(char const mode)
+{
+ switch (mode)
+ {
+ case pkgDepCache::ModeInstall: return "Install";
+ case pkgDepCache::ModeKeep: return "Keep";
+ case pkgDepCache::ModeDelete: return "Delete";
+ default: return "UNKNOWN";
+ }
+}
+bool pkgDepCache::IsModeChangeOk(ModeList const mode, PkgIterator const &Pkg,
+ unsigned long const Depth, bool const FromUser)
+{
+ // we are not trying too hard…
+ if (unlikely(Depth > 100))
+ return false;
+
+ // general sanity
+ if (unlikely(Pkg.end() == true || Pkg->VersionList == 0))
+ return false;
+
+ // the user is always right
+ if (FromUser == true)
+ return true;
+
+ StateCache &P = PkgState[Pkg->ID];
+
+ // if previous state was set by user only user can reset it
+ if ((P.iFlags & Protected) == Protected)
+ {
+ if (unlikely(DebugMarker == true) && P.Mode != mode)
+ std::clog << OutputInDepth(Depth) << "Ignore Mark" << PrintMode(mode)
+ << " of " << Pkg << " as its mode (" << PrintMode(P.Mode)
+ << ") is protected" << std::endl;
+ return false;
+ }
+ // enforce dpkg holds
+ else if (mode != ModeKeep && Pkg->SelectedState == pkgCache::State::Hold &&
+ _config->FindB("APT::Ignore-Hold",false) == false)
+ {
+ if (unlikely(DebugMarker == true) && P.Mode != mode)
+ std::clog << OutputInDepth(Depth) << "Hold prevents Mark" << PrintMode(mode)
+ << " of " << Pkg << std::endl;
+ return false;
+ }
+
+ return true;
+}
+ /*}}}*/
// DepCache::MarkInstall - Put the package in the install state /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst,
+bool pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst,
unsigned long Depth, bool FromUser,
bool ForceImportantDeps)
{
- if (Depth > 100)
- return;
-
- // Simplifies other routines.
- if (Pkg.end() == true)
- return;
-
- ActionGroup group(*this);
+ if (IsModeChangeOk(ModeInstall, Pkg, Depth, FromUser) == false)
+ return false;
+
+ StateCache &P = PkgState[Pkg->ID];
+
+ // See if there is even any possible installation candidate
+ if (P.CandidateVer == 0)
+ return false;
/* Check that it is not already marked for install and that it can be
installed */
- StateCache &P = PkgState[Pkg->ID];
- P.iFlags &= ~AutoKept;
if ((P.InstPolicyBroken() == false && P.InstBroken() == false) &&
(P.Mode == ModeInstall ||
P.CandidateVer == (Version *)Pkg.CurrentVer()))
{
if (P.CandidateVer == (Version *)Pkg.CurrentVer() && P.InstallVer == 0)
- MarkKeep(Pkg, false, FromUser, Depth+1);
- return;
+ return MarkKeep(Pkg, false, FromUser, Depth+1);
+ return true;
}
- // See if there is even any possible instalation candidate
- if (P.CandidateVer == 0)
- return;
- // We dont even try to install virtual packages..
- if (Pkg->VersionList == 0)
- return;
-
// check if we are allowed to install the package
if (IsInstallOk(Pkg,AutoInst,Depth,FromUser) == false)
- return;
+ return false;
+
+ ActionGroup group(*this);
+ P.iFlags &= ~AutoKept;
/* Target the candidate version and remove the autoflag. We reset the
autoflag below if this was called recursively. Otherwise the user
@@ -1256,9 +977,10 @@ void pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst,
if(FromUser)
{
- // Set it to manual if it's a new install or cancelling the
- // removal of a garbage package.
- if(P.Status == 2 || (!Pkg.CurrentVer().end() && !P.Marked))
+ // Set it to manual if it's a new install or already installed,
+ // but only if it's not marked by the autoremover (aptitude depends on this behavior)
+ // or if we do automatic installation (aptitude never does it)
+ if(P.Status == 2 || (Pkg->CurrentVer != 0 && (AutoInst == true || P.Marked == false)))
P.Flags &= ~Flag::Auto;
}
else
@@ -1274,12 +996,8 @@ void pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst,
Update(Pkg);
AddSizes(Pkg);
- // always trigger the install of the all package for a pseudo package
- if (P.CandidateVerIter(*Cache).Pseudo() == true)
- MarkInstall(Pkg.Group().FindPkg("all"), AutoInst, Depth, FromUser, ForceImportantDeps);
-
- if (AutoInst == false)
- return;
+ if (AutoInst == false || _config->Find("APT::Solver", "internal") != "internal")
+ return true;
if (DebugMarker == true)
std::clog << OutputInDepth(Depth) << "MarkInstall " << Pkg << " FU=" << FromUser << std::endl;
@@ -1310,7 +1028,22 @@ void pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst,
*/
if (IsImportantDep(Start) == false)
continue;
-
+
+ /* If we are in an or group locate the first or that can
+ succeed. We have already cached this.. */
+ for (; Ors > 1 && (DepState[Start->ID] & DepCVer) != DepCVer; --Ors)
+ ++Start;
+ if (Ors == 1 && (DepState[Start->ID] &DepCVer) != DepCVer && Start.IsNegative() == false)
+ {
+ if(DebugAutoInstall == true)
+ std::clog << OutputInDepth(Depth) << Start << " can't be satisfied!" << std::endl;
+ if (Start.IsCritical() == false)
+ continue;
+ // if the dependency was critical, we can't install it, so remove it again
+ MarkDelete(Pkg,false,Depth + 1, false);
+ return false;
+ }
+
/* Check if any ImportantDep() (but not Critical) were added
* since we installed the package. Also check for deps that
* were satisfied in the past: for instance, if a version
@@ -1318,59 +1051,49 @@ void pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst,
* package should follow that Recommends rather than causing the
* dependency to be removed. (bug #470115)
*/
- bool isNewImportantDep = false;
- bool isPreviouslySatisfiedImportantDep = false;
- if(!ForceImportantDeps && !Start.IsCritical())
+ if (Pkg->CurrentVer != 0 && ForceImportantDeps == false && Start.IsCritical() == false)
{
- bool found=false;
- VerIterator instVer = Pkg.CurrentVer();
- if(!instVer.end())
+ bool isNewImportantDep = true;
+ bool isPreviouslySatisfiedImportantDep = false;
+ for (DepIterator D = Pkg.CurrentVer().DependsList(); D.end() != true; ++D)
+ {
+ //FIXME: Should we handle or-group better here?
+ // We do not check if the package we look for is part of the same or-group
+ // we might find while searching, but could that really be a problem?
+ if (D.IsCritical() == true || IsImportantDep(D) == false ||
+ Start.TargetPkg() != D.TargetPkg())
+ continue;
+
+ isNewImportantDep = false;
+
+ while ((D->CompareOp & Dep::Or) != 0)
+ ++D;
+
+ isPreviouslySatisfiedImportantDep = (((*this)[D] & DepGNow) != 0);
+ if (isPreviouslySatisfiedImportantDep == true)
+ break;
+ }
+
+ if(isNewImportantDep == true)
+ {
+ if (DebugAutoInstall == true)
+ std::clog << OutputInDepth(Depth) << "new important dependency: "
+ << Start.TargetPkg().FullName() << std::endl;
+ }
+ else if(isPreviouslySatisfiedImportantDep == true)
{
- for (DepIterator D = instVer.DependsList(); D.end() != true; D++)
- {
- //FIXME: deal better with or-groups(?)
- DepIterator LocalStart = D;
-
- if(IsImportantDep(D) && !D.IsCritical() &&
- Start.TargetPkg() == D.TargetPkg())
- {
- if(!isPreviouslySatisfiedImportantDep)
- {
- DepIterator D2 = D;
- while((D2->CompareOp & Dep::Or) != 0)
- ++D2;
-
- isPreviouslySatisfiedImportantDep =
- (((*this)[D2] & DepGNow) != 0);
- }
-
- found=true;
- }
- }
- // this is a new dep if it was not found to be already
- // a important dep of the installed pacakge
- isNewImportantDep = !found;
+ if (DebugAutoInstall == true)
+ std::clog << OutputInDepth(Depth) << "previously satisfied important dependency on "
+ << Start.TargetPkg().FullName() << std::endl;
+ }
+ else
+ {
+ if (DebugAutoInstall == true)
+ std::clog << OutputInDepth(Depth) << "ignore old unsatisfied important dependency on "
+ << Start.TargetPkg().FullName() << std::endl;
+ continue;
}
}
- if(isNewImportantDep)
- if(DebugAutoInstall == true)
- std::clog << OutputInDepth(Depth) << "new important dependency: "
- << Start.TargetPkg().Name() << std::endl;
- if(isPreviouslySatisfiedImportantDep)
- if(DebugAutoInstall == true)
- std::clog << OutputInDepth(Depth) << "previously satisfied important dependency on "
- << Start.TargetPkg().Name() << std::endl;
-
- // skip important deps if the package is already installed
- if (Pkg->CurrentVer != 0 && Start.IsCritical() == false
- && !isNewImportantDep && !isPreviouslySatisfiedImportantDep
- && !ForceImportantDeps)
- continue;
-
- /* If we are in an or group locate the first or that can
- succeed. We have already cached this.. */
- for (; Ors > 1 && (DepState[Start->ID] & DepCVer) != DepCVer; Ors--)
- Start++;
/* This bit is for processing the possibilty of an install/upgrade
fixing the problem */
@@ -1436,8 +1159,7 @@ void pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst,
/* For conflicts we just de-install the package and mark as auto,
Conflicts may not have or groups. For dpkg's Breaks we try to
upgrade the package. */
- if (Start->Type == Dep::Conflicts || Start->Type == Dep::Obsoletes ||
- Start->Type == Dep::DpkgBreaks)
+ if (Start.IsNegative() == true)
{
for (Version **I = List; *I != 0; I++)
{
@@ -1450,30 +1172,26 @@ void pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst,
continue;
if (PkgState[Pkg->ID].CandidateVer != *I &&
- Start->Type == Dep::DpkgBreaks)
- MarkInstall(Pkg,true,Depth + 1, false, ForceImportantDeps);
- else
- MarkDelete(Pkg,false,Depth + 1, false);
+ Start->Type == Dep::DpkgBreaks &&
+ MarkInstall(Pkg,true,Depth + 1, false, ForceImportantDeps) == true)
+ continue;
+ else if (MarkDelete(Pkg,false,Depth + 1, false) == false)
+ break;
}
continue;
}
}
+
+ return Dep.end() == true;
}
/*}}}*/
// DepCache::IsInstallOk - check if it is ok to install this package /*{{{*/
// ---------------------------------------------------------------------
-/* The default implementation just honors dpkg hold
- But an application using this library can override this method
- to control the MarkInstall behaviour */
+/* The default implementation does nothing.
+ dpkg holds are enforced by the private IsModeChangeOk */
bool pkgDepCache::IsInstallOk(PkgIterator const &Pkg,bool AutoInst,
unsigned long Depth, bool FromUser)
{
- if (FromUser == false && Pkg->SelectedState == pkgCache::State::Hold && _config->FindB("APT::Ignore-Hold",false) == false)
- {
- if (DebugMarker == true)
- std::clog << OutputInDepth(Depth) << "Hold prevents MarkInstall of " << Pkg << " FU=" << FromUser << std::endl;
- return false;
- }
return true;
}
/*}}}*/
@@ -1498,27 +1216,25 @@ void pkgDepCache::SetReInstall(PkgIterator const &Pkg,bool To)
AddStates(Pkg);
AddSizes(Pkg);
-
- if (unlikely(Pkg.CurrentVer().end() == true) || Pkg.CurrentVer().Pseudo() == false)
- return;
-
- SetReInstall(Pkg.Group().FindPkg("all"), To);
}
/*}}}*/
// DepCache::SetCandidateVersion - Change the candidate version /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgDepCache::SetCandidateVersion(VerIterator TargetVer, bool const &Pseudo)
+void pkgDepCache::SetCandidateVersion(VerIterator TargetVer)
{
- ActionGroup group(*this);
-
pkgCache::PkgIterator Pkg = TargetVer.ParentPkg();
StateCache &P = PkgState[Pkg->ID];
+ if (P.CandidateVer == TargetVer)
+ return;
+
+ ActionGroup group(*this);
+
RemoveSizes(Pkg);
RemoveStates(Pkg);
- if (P.CandidateVer == P.InstallVer)
+ if (P.CandidateVer == P.InstallVer && P.Install() == true)
P.InstallVer = (Version *)TargetVer;
P.CandidateVer = (Version *)TargetVer;
P.Update(Pkg,*this);
@@ -1527,29 +1243,172 @@ void pkgDepCache::SetCandidateVersion(VerIterator TargetVer, bool const &Pseudo)
Update(Pkg);
AddSizes(Pkg);
- if (TargetVer.Pseudo() == false || Pseudo == false)
- return;
+}
+ /*}}}*/
+// DepCache::SetCandidateRelease - Change the candidate version /*{{{*/
+// ---------------------------------------------------------------------
+/* changes the candidate of a package and walks over all its dependencies
+ to check if it needs to change the candidate of the dependency, too,
+ to reach an installable version state */
+bool pkgDepCache::SetCandidateRelease(pkgCache::VerIterator TargetVer,
+ std::string const &TargetRel)
+{
+ std::list<std::pair<pkgCache::VerIterator, pkgCache::VerIterator> > Changed;
+ return SetCandidateRelease(TargetVer, TargetRel, Changed);
+}
+bool pkgDepCache::SetCandidateRelease(pkgCache::VerIterator TargetVer,
+ std::string const &TargetRel,
+ std::list<std::pair<pkgCache::VerIterator, pkgCache::VerIterator> > &Changed)
+{
+ ActionGroup group(*this);
+ SetCandidateVersion(TargetVer);
+
+ if (TargetRel == "installed" || TargetRel == "candidate") // neither makes sense in this context
+ return true;
- // the version was pseudo: set all other pseudos also
- pkgCache::GrpIterator Grp = Pkg.Group();
- for (Pkg = Grp.FindPkg("any"); Pkg.end() == false; ++Pkg)
+ pkgVersionMatch Match(TargetRel, pkgVersionMatch::Release);
+ // save the position of the last element we will not undo - if we have to
+ std::list<std::pair<pkgCache::VerIterator, pkgCache::VerIterator> >::iterator newChanged = --(Changed.end());
+
+ for (pkgCache::DepIterator D = TargetVer.DependsList(); D.end() == false; ++D)
{
- StateCache &P = PkgState[Pkg->ID];
- if (TargetVer.SimilarVer(P.CandidateVerIter(*this)) == true ||
- (P.CandidateVerIter(*this).Pseudo() == false &&
- strcmp(Pkg.Arch(), "all") != 0))
+ if (D->Type != pkgCache::Dep::PreDepends && D->Type != pkgCache::Dep::Depends &&
+ ((D->Type != pkgCache::Dep::Recommends && D->Type != pkgCache::Dep::Suggests) ||
+ IsImportantDep(D) == false))
continue;
- for (pkgCache::VerIterator Ver = Pkg.VersionList(); Ver.end() == false; ++Ver)
+ // walk over an or-group and check if we need to do anything
+ // for simplicity, a dependency which is not in an or-group is handled as an or-group with a single member
+ pkgCache::DepIterator Start = D;
+ bool itsFine = false;
+ for (bool stillOr = true; stillOr == true; ++Start)
{
- if (TargetVer.SimilarVer(Ver) == false)
+ stillOr = (Start->CompareOp & Dep::Or) == Dep::Or;
+ pkgCache::PkgIterator const P = Start.TargetPkg();
+ // virtual packages can't be a solution
+ if (P.end() == true || (P->ProvidesList == 0 && P->VersionList == 0))
continue;
- SetCandidateVersion(Ver, false);
+ pkgCache::VerIterator const Cand = PkgState[P->ID].CandidateVerIter(*this);
+ // no versioned dependency - but is it installable?
+ if (Start.TargetVer() == 0 || Start.TargetVer()[0] == '\0')
+ {
+ // Check if one of the providers is installable
+ if (P->ProvidesList != 0)
+ {
+ pkgCache::PrvIterator Prv = P.ProvidesList();
+ for (; Prv.end() == false; ++Prv)
+ {
+ pkgCache::VerIterator const C = PkgState[Prv.OwnerPkg()->ID].CandidateVerIter(*this);
+ if (C.end() == true || C != Prv.OwnerVer() ||
+ (VersionState(C.DependsList(), DepInstall, DepCandMin, DepCandPolicy) & DepCandMin) != DepCandMin)
+ continue;
+ break;
+ }
+ if (Prv.end() == true)
+ continue;
+ }
+ // no providers, so check if we have an installable candidate version
+ else if (Cand.end() == true ||
+ (VersionState(Cand.DependsList(), DepInstall, DepCandMin, DepCandPolicy) & DepCandMin) != DepCandMin)
+ continue;
+ itsFine = true;
+ break;
+ }
+ if (Cand.end() == true)
+ continue;
+ // check if the current candidate is enough for the versioned dependency - and installable?
+ if (VS().CheckDep(P.CandVersion(), Start->CompareOp, Start.TargetVer()) == true &&
+ (VersionState(Cand.DependsList(), DepInstall, DepCandMin, DepCandPolicy) & DepCandMin) == DepCandMin)
+ {
+ itsFine = true;
+ break;
+ }
+ }
+
+ if (itsFine == true) {
+ // something in the or-group was fine, skip all other members
+ for (; (D->CompareOp & Dep::Or) == Dep::Or; ++D);
+ continue;
+ }
+
+ // walk again over the or-group and check each if a candidate switch would help
+ itsFine = false;
+ for (bool stillOr = true; stillOr == true; ++D)
+ {
+ stillOr = (D->CompareOp & Dep::Or) == Dep::Or;
+ // changing candidate will not help if the dependency is not versioned
+ if (D.TargetVer() == 0 || D.TargetVer()[0] == '\0')
+ {
+ if (stillOr == true)
+ continue;
+ break;
+ }
+
+ pkgCache::VerIterator V;
+ if (TargetRel == "newest")
+ V = D.TargetPkg().VersionList();
+ else
+ V = Match.Find(D.TargetPkg());
+
+ // check if the version from this release could satisfy the dependency
+ if (V.end() == true || VS().CheckDep(V.VerStr(), D->CompareOp, D.TargetVer()) == false)
+ {
+ if (stillOr == true)
+ continue;
+ break;
+ }
+
+ pkgCache::VerIterator oldCand = PkgState[D.TargetPkg()->ID].CandidateVerIter(*this);
+ if (V == oldCand)
+ {
+ // Did we already touch this Version? If so, its versioned dependencies are okay, no need to check again
+ for (std::list<std::pair<pkgCache::VerIterator, pkgCache::VerIterator> >::const_iterator c = Changed.begin();
+ c != Changed.end(); ++c)
+ {
+ if (c->first->ParentPkg != V->ParentPkg)
+ continue;
+ itsFine = true;
+ break;
+ }
+ }
+
+ if (itsFine == false)
+ {
+ // change the candidate
+ Changed.push_back(make_pair(oldCand, TargetVer));
+ if (SetCandidateRelease(V, TargetRel, Changed) == false)
+ {
+ if (stillOr == false)
+ break;
+ // undo the candidate changing
+ SetCandidateVersion(oldCand);
+ Changed.pop_back();
+ continue;
+ }
+ itsFine = true;
+ }
+
+ // something in the or-group was fine, skip all other members
+ for (; (D->CompareOp & Dep::Or) == Dep::Or; ++D);
break;
}
+
+ if (itsFine == false && (D->Type == pkgCache::Dep::PreDepends || D->Type == pkgCache::Dep::Depends))
+ {
+ // undo all changes which didn't lead to a solution
+ for (std::list<std::pair<pkgCache::VerIterator, pkgCache::VerIterator> >::const_iterator c = ++newChanged;
+ c != Changed.end(); ++c)
+ SetCandidateVersion(c->first);
+ Changed.erase(newChanged, Changed.end());
+ return false;
+ }
}
+ return true;
}
-
+ /*}}}*/
+// DepCache::MarkAuto - set the Auto flag for a package /*{{{*/
+// ---------------------------------------------------------------------
+/* */
void pkgDepCache::MarkAuto(const PkgIterator &Pkg, bool Auto)
{
StateCache &state = PkgState[Pkg->ID];
@@ -1668,6 +1527,12 @@ bool pkgDepCache::Policy::IsImportantDep(DepIterator const &Dep)
return false;
}
/*}}}*/
+// Policy::GetPriority - Get the priority of the package pin /*{{{*/
+signed short pkgDepCache::Policy::GetPriority(pkgCache::PkgIterator const &Pkg)
+{ return 0; };
+signed short pkgDepCache::Policy::GetPriority(pkgCache::PkgFileIterator const &File)
+{ return 0; };
+ /*}}}*/
pkgDepCache::InRootSetFunc *pkgDepCache::GetRootSetFunc() /*{{{*/
{
DefaultRootSetFunc *f = new DefaultRootSetFunc;
@@ -1693,6 +1558,9 @@ bool pkgDepCache::MarkFollowsSuggests()
// pkgDepCache::MarkRequired - the main mark algorithm /*{{{*/
bool pkgDepCache::MarkRequired(InRootSetFunc &userFunc)
{
+ if (_config->Find("APT::Solver", "internal") != "internal")
+ return true;
+
bool follow_recommends;
bool follow_suggests;
bool debug_autoremove = _config->FindB("Debug::pkgAutoRemove",false);
@@ -1752,10 +1620,11 @@ void pkgDepCache::MarkPackage(const pkgCache::PkgIterator &pkg,
return;
VerIterator const currver = pkg.CurrentVer();
- VerIterator const candver = state.CandidateVerIter(*this);
VerIterator const instver = state.InstVerIter(*this);
#if 0
+ VerIterator const candver = state.CandidateVerIter(*this);
+
// If a package was garbage-collected but is now being marked, we
// should re-select it
// For cases when a pkg is set to upgrade and this trigger the
@@ -1798,28 +1667,6 @@ void pkgDepCache::MarkPackage(const pkgCache::PkgIterator &pkg,
if(ver.end() == true)
return;
- // If the version belongs to a Multi-Arch all package
- // we will mark all others in this Group with this version also
- if (ver->MultiArch == pkgCache::Version::All &&
- strcmp(ver.Arch(true), "all") == 0)
- {
- GrpIterator G = pkg.Group();
- const char* const VerStr = ver.VerStr();
- for (PkgIterator P = G.FindPkg("any");
- P.end() != true; P = G.NextPkg(P))
- {
- for (VerIterator V = P.VersionList();
- V.end() != true; ++V)
- {
- if (ver->Hash != V->Hash ||
- strcmp(VerStr, V.VerStr()) != 0)
- continue;
- MarkPackage(P, V, follow_recommends, follow_suggests);
- break;
- }
- }
- }
-
for(DepIterator d = ver.DependsList(); !d.end(); ++d)
{
if(d->Type == Dep::Depends ||
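
The MarkKeep/MarkDelete/MarkInstall methods above now report success or failure instead of returning void, and explicit user decisions can be shielded from later automatic changes with the new Protected iFlag. A minimal sketch of how a front-end might use this, assuming an already initialised pkgDepCache; the function and message below are illustrative and not part of this commit:

   // Illustrative sketch only: react to the bool return of MarkInstall and
   // protect the user decision afterwards ("Cache" is a ready pkgDepCache).
   bool RequestInstall(pkgDepCache &Cache, pkgCache::PkgIterator const &Pkg)
   {
      // MarkInstall now reports failure (hold, depth limit, no candidate, …)
      if (Cache.MarkInstall(Pkg, true, 0, true) == false)
         return _error->Error("Marking %s for installation was rejected",
                              Pkg.FullName().c_str());
      // shield this explicit user decision against later automatic mode changes
      Cache.MarkProtected(Pkg);
      return true;
   }
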
diff --git a/apt-pkg/depcache.h b/apt-pkg/depcache.h
index 2d3dbdf77..2942558b0 100644
--- a/apt-pkg/depcache.h
+++ b/apt-pkg/depcache.h
@@ -119,7 +119,7 @@ class pkgDepCache : protected pkgCache::Namespace
DepCandPolicy = (1 << 4), DepCandMin = (1 << 5)};
// These flags are used in StateCache::iFlags
- enum InternalFlags {AutoKept = (1 << 0), Purge = (1 << 1), ReInstall = (1 << 2)};
+ enum InternalFlags {AutoKept = (1 << 0), Purge = (1 << 1), ReInstall = (1 << 2), Protected = (1 << 3)};
enum VersionTypes {NowVersion, InstallVersion, CandidateVersion};
enum ModeList {ModeDelete = 0, ModeKeep = 1, ModeInstall = 2};
@@ -232,6 +232,7 @@ class pkgDepCache : protected pkgCache::Namespace
inline bool NewInstall() const {return Status == 2 && Mode == ModeInstall;};
inline bool Delete() const {return Mode == ModeDelete;};
inline bool Keep() const {return Mode == ModeKeep;};
+ inline bool Protect() const {return (iFlags & Protected) == Protected;};
inline bool Upgrade() const {return Status > 0 && Mode == ModeInstall;};
inline bool Upgradable() const {return Status >= 1;};
inline bool Downgrade() const {return Status < 0 && Mode == ModeInstall;};
@@ -258,7 +259,9 @@ class pkgDepCache : protected pkgCache::Namespace
virtual VerIterator GetCandidateVer(PkgIterator const &Pkg);
virtual bool IsImportantDep(DepIterator const &Dep);
-
+ virtual signed short GetPriority(PkgIterator const &Pkg);
+ virtual signed short GetPriority(PkgFileIterator const &File);
+
virtual ~Policy() {};
};
@@ -313,11 +316,10 @@ class pkgDepCache : protected pkgCache::Namespace
void Update(PkgIterator const &P);
// Count manipulators
- void AddSizes(const PkgIterator &Pkg, bool const &Invert = false);
+ void AddSizes(const PkgIterator &Pkg, bool const Invert = false);
inline void RemoveSizes(const PkgIterator &Pkg) {AddSizes(Pkg, true);};
- void AddSizes(const PkgIterator &Pkg,signed long Mult) __deprecated;
- void AddStates(const PkgIterator &Pkg,int Add = 1);
- inline void RemoveStates(const PkgIterator &Pkg) {AddStates(Pkg,-1);};
+ void AddStates(const PkgIterator &Pkg, bool const Invert = false);
+ inline void RemoveStates(const PkgIterator &Pkg) {AddStates(Pkg,true);};
public:
@@ -386,16 +388,36 @@ class pkgDepCache : protected pkgCache::Namespace
/** \name State Manipulators
*/
// @{
- void MarkKeep(PkgIterator const &Pkg, bool Soft = false,
+ bool MarkKeep(PkgIterator const &Pkg, bool Soft = false,
bool FromUser = true, unsigned long Depth = 0);
- void MarkDelete(PkgIterator const &Pkg, bool Purge = false,
+ bool MarkDelete(PkgIterator const &Pkg, bool Purge = false,
unsigned long Depth = 0, bool FromUser = true);
- void MarkInstall(PkgIterator const &Pkg,bool AutoInst = true,
+ bool MarkInstall(PkgIterator const &Pkg,bool AutoInst = true,
unsigned long Depth = 0, bool FromUser = true,
bool ForceImportantDeps = false);
+ void MarkProtected(PkgIterator const &Pkg) { PkgState[Pkg->ID].iFlags |= Protected; };
void SetReInstall(PkgIterator const &Pkg,bool To);
- void SetCandidateVersion(VerIterator TargetVer, bool const &Pseudo = true);
+ void SetCandidateVersion(VerIterator TargetVer);
+ bool SetCandidateRelease(pkgCache::VerIterator TargetVer,
+ std::string const &TargetRel);
+ /** Set the candidate version for dependencies too if needed.
+ *
+ * Sets not only the candidate version as SetCandidateVersion does,
+ * but walks also down the dependency tree and checks if it is required
+ * to set the candidate of the dependency to a version from the given
+ * release, too.
+ *
+ * \param TargetVer new candidate version of the package
+ * \param TargetRel try to switch to this release if needed
+ * \param[out] Changed a list of pairs consisting of the \b old
+ * version of the changed package and the version which
+ * required the switch of this dependency
+ * \return \b true if the switch was successful, \b false otherwise
+ */
+ bool SetCandidateRelease(pkgCache::VerIterator TargetVer,
+ std::string const &TargetRel,
+ std::list<std::pair<pkgCache::VerIterator, pkgCache::VerIterator> > &Changed);
/** Set the "is automatically installed" flag of Pkg. */
void MarkAuto(const PkgIterator &Pkg, bool Auto);
@@ -461,10 +483,8 @@ class pkgDepCache : protected pkgCache::Namespace
virtual ~pkgDepCache();
private:
- // Helper for Update(OpProgress) to remove pseudoinstalled arch all packages
- bool RemovePseudoInstalledPkg(PkgIterator &Pkg, std::set<unsigned long> &recheck);
- bool ReInstallPseudoForGroup(unsigned long const &Grp, std::set<unsigned long> &recheck);
- bool ReInstallPseudoForGroup(pkgCache::PkgIterator const &P, std::set<unsigned long> &recheck);
+ bool IsModeChangeOk(ModeList const mode, PkgIterator const &Pkg,
+ unsigned long const Depth, bool const FromUser);
};
#endif
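
The new SetCandidateRelease overloads declared above switch not only the candidate of the target version but, if required, of its dependency chain as well, reporting the switched packages in the Changed list. A hedged usage sketch, assuming a ready pkgDepCache and a version iterator; the release name "experimental" is only an example:

   // Illustrative sketch only: switch a package (and, if needed, its
   // dependencies) to the candidates of a given release.
   void SwitchToRelease(pkgDepCache &Cache, pkgCache::VerIterator const &Ver)
   {
      std::list<std::pair<pkgCache::VerIterator, pkgCache::VerIterator> > Changed;
      if (Cache.SetCandidateRelease(Ver, "experimental", Changed) == false)
         return;
      // Changed pairs the old candidate of a switched package with the
      // version which required the switch (see the doxygen comment above)
      for (std::list<std::pair<pkgCache::VerIterator, pkgCache::VerIterator> >::const_iterator
             c = Changed.begin(); c != Changed.end(); ++c)
         std::clog << "Switched candidate of " << c->first.ParentPkg().FullName()
                   << " for " << c->second.ParentPkg().FullName() << std::endl;
   }
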
diff --git a/apt-pkg/edsp.cc b/apt-pkg/edsp.cc
new file mode 100644
index 000000000..4d2230613
--- /dev/null
+++ b/apt-pkg/edsp.cc
@@ -0,0 +1,564 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+/* ######################################################################
+ Set of methods to help writing and reading everything needed for EDSP
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#include <apt-pkg/edsp.h>
+#include <apt-pkg/error.h>
+#include <apt-pkg/configuration.h>
+#include <apt-pkg/version.h>
+#include <apt-pkg/policy.h>
+#include <apt-pkg/tagfile.h>
+
+#include <apti18n.h>
+#include <limits>
+
+#include <stdio.h>
+ /*}}}*/
+
+// we could use pkgCache::DepType and ::Priority, but these would be localized strings…
+const char * const EDSP::PrioMap[] = {0, "important", "required", "standard",
+ "optional", "extra"};
+const char * const EDSP::DepMap[] = {"", "Depends", "Pre-Depends", "Suggests",
+ "Recommends" , "Conflicts", "Replaces",
+ "Obsoletes", "Breaks", "Enhances"};
+
+// EDSP::WriteScenario - to the given file descriptor /*{{{*/
+bool EDSP::WriteScenario(pkgDepCache &Cache, FILE* output, OpProgress *Progress)
+{
+ if (Progress != NULL)
+ Progress->SubProgress(Cache.Head().VersionCount, _("Send scenario to solver"));
+ unsigned long p = 0;
+ for (pkgCache::PkgIterator Pkg = Cache.PkgBegin(); Pkg.end() == false; ++Pkg)
+ for (pkgCache::VerIterator Ver = Pkg.VersionList(); Ver.end() == false; ++Ver, ++p)
+ {
+ WriteScenarioVersion(Cache, output, Pkg, Ver);
+ WriteScenarioDependency(Cache, output, Pkg, Ver);
+ fprintf(output, "\n");
+ if (Progress != NULL && p % 100 == 0)
+ Progress->Progress(p);
+ }
+ return true;
+}
+ /*}}}*/
+// EDSP::WriteLimitedScenario - to the given file descriptor /*{{{*/
+bool EDSP::WriteLimitedScenario(pkgDepCache &Cache, FILE* output,
+ APT::PackageSet const &pkgset,
+ OpProgress *Progress)
+{
+ if (Progress != NULL)
+ Progress->SubProgress(Cache.Head().VersionCount, _("Send scenario to solver"));
+ unsigned long p = 0;
+ for (APT::PackageSet::const_iterator Pkg = pkgset.begin(); Pkg != pkgset.end(); ++Pkg, ++p)
+ for (pkgCache::VerIterator Ver = Pkg.VersionList(); Ver.end() == false; ++Ver)
+ {
+ WriteScenarioVersion(Cache, output, Pkg, Ver);
+ WriteScenarioLimitedDependency(Cache, output, Pkg, Ver, pkgset);
+ fprintf(output, "\n");
+ if (Progress != NULL && p % 100 == 0)
+ Progress->Progress(p);
+ }
+ if (Progress != NULL)
+ Progress->Done();
+ return true;
+}
+ /*}}}*/
+// EDSP::WriteScenarioVersion /*{{{*/
+void EDSP::WriteScenarioVersion(pkgDepCache &Cache, FILE* output, pkgCache::PkgIterator const &Pkg,
+ pkgCache::VerIterator const &Ver)
+{
+ fprintf(output, "Package: %s\n", Pkg.Name());
+ fprintf(output, "Architecture: %s\n", Ver.Arch());
+ fprintf(output, "Version: %s\n", Ver.VerStr());
+ if (Pkg.CurrentVer() == Ver)
+ fprintf(output, "Installed: yes\n");
+ if (Pkg->SelectedState == pkgCache::State::Hold ||
+ (Cache[Pkg].Keep() == true && Cache[Pkg].Protect() == true))
+ fprintf(output, "Hold: yes\n");
+ fprintf(output, "APT-ID: %d\n", Ver->ID);
+ fprintf(output, "Priority: %s\n", PrioMap[Ver->Priority]);
+ if ((Pkg->Flags & pkgCache::Flag::Essential) == pkgCache::Flag::Essential)
+ fprintf(output, "Essential: yes\n");
+ fprintf(output, "Section: %s\n", Ver.Section());
+ if ((Ver->MultiArch & pkgCache::Version::Allowed) == pkgCache::Version::Allowed)
+ fprintf(output, "Multi-Arch: allowed\n");
+ else if ((Ver->MultiArch & pkgCache::Version::Foreign) == pkgCache::Version::Foreign)
+ fprintf(output, "Multi-Arch: foreign\n");
+ else if ((Ver->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same)
+ fprintf(output, "Multi-Arch: same\n");
+ signed short Pin = std::numeric_limits<signed short>::min();
+ for (pkgCache::VerFileIterator File = Ver.FileList(); File.end() == false; ++File) {
+ signed short const p = Cache.GetPolicy().GetPriority(File.File());
+ if (Pin < p)
+ Pin = p;
+ }
+ fprintf(output, "APT-Pin: %d\n", Pin);
+ if (Cache.GetCandidateVer(Pkg) == Ver)
+ fprintf(output, "APT-Candidate: yes\n");
+ if ((Cache[Pkg].Flags & pkgCache::Flag::Auto) == pkgCache::Flag::Auto)
+ fprintf(output, "APT-Automatic: yes\n");
+}
+ /*}}}*/
+// EDSP::WriteScenarioDependency /*{{{*/
+void EDSP::WriteScenarioDependency(pkgDepCache &Cache, FILE* output, pkgCache::PkgIterator const &Pkg,
+ pkgCache::VerIterator const &Ver)
+{
+ std::string dependencies[pkgCache::Dep::Enhances + 1];
+ bool orGroup = false;
+ for (pkgCache::DepIterator Dep = Ver.DependsList(); Dep.end() == false; ++Dep)
+ {
+ // Ignore implicit dependencies for multiarch here
+ if (strcmp(Pkg.Arch(), Dep.TargetPkg().Arch()) != 0)
+ continue;
+ if (orGroup == false)
+ dependencies[Dep->Type].append(", ");
+ dependencies[Dep->Type].append(Dep.TargetPkg().Name());
+ if (Dep->Version != 0)
+ dependencies[Dep->Type].append(" (").append(pkgCache::CompTypeDeb(Dep->CompareOp)).append(" ").append(Dep.TargetVer()).append(")");
+ if ((Dep->CompareOp & pkgCache::Dep::Or) == pkgCache::Dep::Or)
+ {
+ dependencies[Dep->Type].append(" | ");
+ orGroup = true;
+ }
+ else
+ orGroup = false;
+ }
+ for (int i = 1; i < pkgCache::Dep::Enhances + 1; ++i)
+ if (dependencies[i].empty() == false)
+ fprintf(output, "%s: %s\n", DepMap[i], dependencies[i].c_str()+2);
+ string provides;
+ for (pkgCache::PrvIterator Prv = Ver.ProvidesList(); Prv.end() == false; ++Prv)
+ {
+ // Ignore implicit provides for multiarch here
+ if (strcmp(Pkg.Arch(), Prv.ParentPkg().Arch()) != 0 || strcmp(Pkg.Name(),Prv.Name()) == 0)
+ continue;
+ provides.append(", ").append(Prv.Name());
+ }
+ if (provides.empty() == false)
+ fprintf(output, "Provides: %s\n", provides.c_str()+2);
+}
+ /*}}}*/
+// EDSP::WriteScenarioLimitedDependency /*{{{*/
+void EDSP::WriteScenarioLimitedDependency(pkgDepCache &Cache, FILE* output,
+ pkgCache::PkgIterator const &Pkg,
+ pkgCache::VerIterator const &Ver,
+ APT::PackageSet const &pkgset)
+{
+ std::string dependencies[pkgCache::Dep::Enhances + 1];
+ bool orGroup = false;
+ for (pkgCache::DepIterator Dep = Ver.DependsList(); Dep.end() == false; ++Dep)
+ {
+ // Ignore implicit dependencies for multiarch here
+ if (strcmp(Pkg.Arch(), Dep.TargetPkg().Arch()) != 0)
+ continue;
+ if (orGroup == false)
+ {
+ if (pkgset.find(Dep.TargetPkg()) == pkgset.end())
+ continue;
+ dependencies[Dep->Type].append(", ");
+ }
+ else if (pkgset.find(Dep.TargetPkg()) == pkgset.end())
+ {
+ if ((Dep->CompareOp & pkgCache::Dep::Or) == pkgCache::Dep::Or)
+ continue;
+ dependencies[Dep->Type].erase(dependencies[Dep->Type].end()-3, dependencies[Dep->Type].end());
+ orGroup = false;
+ continue;
+ }
+ dependencies[Dep->Type].append(Dep.TargetPkg().Name());
+ if (Dep->Version != 0)
+ dependencies[Dep->Type].append(" (").append(pkgCache::CompTypeDeb(Dep->CompareOp)).append(" ").append(Dep.TargetVer()).append(")");
+ if ((Dep->CompareOp & pkgCache::Dep::Or) == pkgCache::Dep::Or)
+ {
+ dependencies[Dep->Type].append(" | ");
+ orGroup = true;
+ }
+ else
+ orGroup = false;
+ }
+ for (int i = 1; i < pkgCache::Dep::Enhances + 1; ++i)
+ if (dependencies[i].empty() == false)
+ fprintf(output, "%s: %s\n", DepMap[i], dependencies[i].c_str()+2);
+ string provides;
+ for (pkgCache::PrvIterator Prv = Ver.ProvidesList(); Prv.end() == false; ++Prv)
+ {
+ // Ignore implicit provides for multiarch here
+ if (strcmp(Pkg.Arch(), Prv.ParentPkg().Arch()) != 0 || strcmp(Pkg.Name(),Prv.Name()) == 0)
+ continue;
+ if (pkgset.find(Prv.ParentPkg()) == pkgset.end())
+ continue;
+ provides.append(", ").append(Prv.Name());
+ }
+ if (provides.empty() == false)
+ fprintf(output, "Provides: %s\n", provides.c_str()+2);
+}
+ /*}}}*/
+// EDSP::WriteRequest - to the given file descriptor /*{{{*/
+bool EDSP::WriteRequest(pkgDepCache &Cache, FILE* output, bool const Upgrade,
+ bool const DistUpgrade, bool const AutoRemove,
+ OpProgress *Progress)
+{
+ if (Progress != NULL)
+ Progress->SubProgress(Cache.Head().PackageCount, _("Send request to solver"));
+ unsigned long p = 0;
+ string del, inst;
+ for (pkgCache::PkgIterator Pkg = Cache.PkgBegin(); Pkg.end() == false; ++Pkg, ++p)
+ {
+ if (Progress != NULL && p % 100 == 0)
+ Progress->Progress(p);
+ string* req;
+ if (Cache[Pkg].Delete() == true)
+ req = &del;
+ else if (Cache[Pkg].NewInstall() == true || Cache[Pkg].Upgrade() == true)
+ req = &inst;
+ else
+ continue;
+ req->append(" ").append(Pkg.FullName());
+ }
+ fprintf(output, "Request: EDSP 0.4\n");
+ if (del.empty() == false)
+ fprintf(output, "Remove: %s\n", del.c_str()+1);
+ if (inst.empty() == false)
+ fprintf(output, "Install: %s\n", inst.c_str()+1);
+ if (Upgrade == true)
+ fprintf(output, "Upgrade: yes\n");
+ if (DistUpgrade == true)
+ fprintf(output, "Dist-Upgrade: yes\n");
+ if (AutoRemove == true)
+ fprintf(output, "Autoremove: yes\n");
+ if (_config->FindB("APT::Solver::Strict-Pinning", true) == false)
+ fprintf(output, "Strict-Pinning: no\n");
+ string solverpref("APT::Solver::");
+ solverpref.append(_config->Find("APT::Solver", "internal")).append("::Preferences");
+ if (_config->Exists(solverpref) == true)
+ fprintf(output, "Preferences: %s\n", _config->Find(solverpref,"").c_str());
+ fprintf(output, "\n");
+
+ return true;
+}
+ /*}}}*/
+// EDSP::ReadResponse - from the given file descriptor /*{{{*/
+bool EDSP::ReadResponse(int const input, pkgDepCache &Cache, OpProgress *Progress) {
+ /* We build a map from id to mmap offset here
+ In theory we could use the offset as ID, but then VersionCount
+ couldn't be used to create other version mappings anymore and it
+ would be too easy for a (buggy) solver to segfault APT… */
+ unsigned long long const VersionCount = Cache.Head().VersionCount;
+ unsigned long VerIdx[VersionCount];
+ for (pkgCache::PkgIterator P = Cache.PkgBegin(); P.end() == false; ++P) {
+ for (pkgCache::VerIterator V = P.VersionList(); V.end() == false; ++V)
+ VerIdx[V->ID] = V.Index();
+ Cache[P].Marked = true;
+ Cache[P].Garbage = false;
+ }
+
+ FileFd in;
+ in.OpenDescriptor(input, FileFd::ReadOnly);
+ pkgTagFile response(&in, 100);
+ pkgTagSection section;
+
+ while (response.Step(section) == true) {
+ std::string type;
+ if (section.Exists("Install") == true)
+ type = "Install";
+ else if (section.Exists("Remove") == true)
+ type = "Remove";
+ else if (section.Exists("Progress") == true) {
+ if (Progress != NULL) {
+ string msg = section.FindS("Message");
+ if (msg.empty() == true)
+ msg = _("Prepare for receiving solution");
+ Progress->SubProgress(100, msg, section.FindI("Percentage", 0));
+ }
+ continue;
+ } else if (section.Exists("Error") == true) {
+ std::string msg = SubstVar(SubstVar(section.FindS("Message"), "\n .\n", "\n\n"), "\n ", "\n");
+ if (msg.empty() == true) {
+ msg = _("External solver failed without a proper error message");
+ _error->Error(msg.c_str());
+ } else
+ _error->Error("External solver failed with: %s", msg.substr(0,msg.find('\n')).c_str());
+ if (Progress != NULL)
+ Progress->Done();
+ std::cerr << "The solver encountered an error of type: " << section.FindS("Error") << std::endl;
+ std::cerr << "The following information might help you to understand what is wrong:" << std::endl;
+ std::cerr << msg << std::endl << std::endl;
+ return false;
+ } else if (section.Exists("Autoremove") == true)
+ type = "Autoremove";
+ else
+ continue;
+
+ size_t const id = section.FindULL(type.c_str(), VersionCount);
+ if (id == VersionCount) {
+ _error->Warning("Unable to parse %s request with id value '%s'!", type.c_str(), section.FindS(type.c_str()).c_str());
+ continue;
+ } else if (id > Cache.Head().VersionCount) {
+ _error->Warning("ID value '%s' in %s request stanza is to high to refer to a known version!", section.FindS(type.c_str()).c_str(), type.c_str());
+ continue;
+ }
+
+ pkgCache::VerIterator Ver(Cache.GetCache(), Cache.GetCache().VerP + VerIdx[id]);
+ Cache.SetCandidateVersion(Ver);
+ if (type == "Install")
+ Cache.MarkInstall(Ver.ParentPkg(), false, 0, false);
+ else if (type == "Remove")
+ Cache.MarkDelete(Ver.ParentPkg(), false);
+ else if (type == "Autoremove") {
+ Cache[Ver.ParentPkg()].Marked = false;
+ Cache[Ver.ParentPkg()].Garbage = true;
+ }
+ }
+ return true;
+}
+ /*}}}*/
+// EDSP::ReadLine - first line from the given file descriptor /*{{{*/
+// ---------------------------------------------------------------------
+/* Little helper method to read a complete line into a string. Similar to
+ fgets but we need to use the low-level read() here as otherwise the
+ listparser will be confused later on as mixing of fgets and read isn't
+ a supported action according to the manpages and results are undefined */
+bool EDSP::ReadLine(int const input, std::string &line) {
+ char one;
+ ssize_t data = 0;
+ line.erase();
+ line.reserve(100);
+ while ((data = read(input, &one, sizeof(one))) != -1) {
+ if (data != 1)
+ continue;
+ if (one == '\n')
+ return true;
+ if (one == '\r')
+ continue;
+ if (line.empty() == true && isblank(one) != 0)
+ continue;
+ line += one;
+ }
+ return false;
+}
+ /*}}}*/
+// EDSP::StringToBool - convert yes/no to bool /*{{{*/
+// ---------------------------------------------------------------------
+/* we are not as lazy as we are in the global StringToBool as we really
+ only accept yes/no here - but we will ignore leading spaces */
+bool EDSP::StringToBool(char const *answer, bool const defValue) {
+ for (; isspace(*answer) != 0; ++answer);
+ if (strncasecmp(answer, "yes", 3) == 0)
+ return true;
+ else if (strncasecmp(answer, "no", 2) == 0)
+ return false;
+ else
+ _error->Warning("Value '%s' is not a boolean 'yes' or 'no'!", answer);
+ return defValue;
+}
+ /*}}}*/
+// EDSP::ReadRequest - first stanza from the given file descriptor /*{{{*/
+bool EDSP::ReadRequest(int const input, std::list<std::string> &install,
+ std::list<std::string> &remove, bool &upgrade,
+ bool &distUpgrade, bool &autoRemove)
+{
+ install.clear();
+ remove.clear();
+ upgrade = false;
+ distUpgrade = false;
+ autoRemove = false;
+ std::string line;
+ while (ReadLine(input, line) == true)
+ {
+ // Skip empty lines before request
+ if (line.empty() == true)
+ continue;
+ // The first Tag must be a request, so search for it
+ if (line.compare(0, 8, "Request:") != 0)
+ continue;
+
+ while (ReadLine(input, line) == true)
+ {
+ // empty lines are the end of the request
+ if (line.empty() == true)
+ return true;
+
+ std::list<std::string> *request = NULL;
+ if (line.compare(0, 8, "Install:") == 0)
+ {
+ line.erase(0, 8);
+ request = &install;
+ }
+ else if (line.compare(0, 7, "Remove:") == 0)
+ {
+ line.erase(0, 7);
+ request = &remove;
+ }
+ else if (line.compare(0, 8, "Upgrade:") == 0)
+ upgrade = EDSP::StringToBool(line.c_str() + 9, false);
+ else if (line.compare(0, 13, "Dist-Upgrade:") == 0)
+ distUpgrade = EDSP::StringToBool(line.c_str() + 14, false);
+ else if (line.compare(0, 11, "Autoremove:") == 0)
+ autoRemove = EDSP::StringToBool(line.c_str() + 12, false);
+ else
+ _error->Warning("Unknown line in EDSP Request stanza: %s", line.c_str());
+
+ if (request == NULL)
+ continue;
+ size_t end = line.length();
+ do {
+ size_t begin = line.rfind(' ');
+ if (begin == std::string::npos)
+ {
+ request->push_back(line.substr(0, end));
+ break;
+ }
+ else if (begin < end)
+ request->push_back(line.substr(begin + 1, end));
+ line.erase(begin);
+ end = line.find_last_not_of(' ');
+ } while (end != std::string::npos);
+ }
+ }
+ return false;
+}
+ /*}}}*/
+// EDSP::ApplyRequest - first stanza from the given file descriptor /*{{{*/
+bool EDSP::ApplyRequest(std::list<std::string> const &install,
+ std::list<std::string> const &remove,
+ pkgDepCache &Cache)
+{
+ for (std::list<std::string>::const_iterator i = install.begin();
+ i != install.end(); ++i) {
+ pkgCache::PkgIterator P = Cache.FindPkg(*i);
+ if (P.end() == true)
+ _error->Warning("Package %s is not known, so can't be installed", i->c_str());
+ else
+ Cache.MarkInstall(P, false);
+ }
+
+ for (std::list<std::string>::const_iterator i = remove.begin();
+ i != remove.end(); ++i) {
+ pkgCache::PkgIterator P = Cache.FindPkg(*i);
+ if (P.end() == true)
+ _error->Warning("Package %s is not known, so can't be installed", i->c_str());
+ else
+ Cache.MarkDelete(P);
+ }
+ return true;
+}
+ /*}}}*/
+// EDSP::WriteSolution - to the given file descriptor /*{{{*/
+bool EDSP::WriteSolution(pkgDepCache &Cache, FILE* output)
+{
+ bool const Debug = _config->FindB("Debug::EDSP::WriteSolution", false);
+ for (pkgCache::PkgIterator Pkg = Cache.PkgBegin(); Pkg.end() == false; ++Pkg)
+ {
+ if (Cache[Pkg].Delete() == true)
+ {
+ fprintf(output, "Remove: %d\n", Pkg.CurrentVer()->ID);
+ if (Debug == true)
+ fprintf(output, "Package: %s\nVersion: %s\n", Pkg.FullName().c_str(), Pkg.CurrentVer().VerStr());
+ }
+ else if (Cache[Pkg].NewInstall() == true || Cache[Pkg].Upgrade() == true)
+ {
+ fprintf(output, "Install: %d\n", Cache.GetCandidateVer(Pkg)->ID);
+ if (Debug == true)
+ fprintf(output, "Package: %s\nVersion: %s\n", Pkg.FullName().c_str(), Cache.GetCandidateVer(Pkg).VerStr());
+ }
+ else if (Cache[Pkg].Garbage == true)
+ {
+ fprintf(output, "Autoremove: %d\n", Pkg.CurrentVer()->ID);
+ if (Debug == true)
+ fprintf(output, "Package: %s\nVersion: %s\n", Pkg.FullName().c_str(), Pkg.CurrentVer().VerStr());
+ fprintf(stderr, "Autoremove: %s\nVersion: %s\n", Pkg.FullName().c_str(), Pkg.CurrentVer().VerStr());
+ }
+ else
+ continue;
+ fprintf(output, "\n");
+ }
+
+ return true;
+}
+ /*}}}*/
+// EDSP::WriteProgress - pulse to the given file descriptor /*{{{*/
+bool EDSP::WriteProgress(unsigned short const percent, const char* const message, FILE* output) {
+ fprintf(output, "Progress: %s\n", TimeRFC1123(time(NULL)).c_str());
+ fprintf(output, "Percentage: %d\n", percent);
+ fprintf(output, "Message: %s\n\n", message);
+ fflush(output);
+ return true;
+}
+ /*}}}*/
+// EDSP::WriteError - format an error message to be sent to the file descriptor /*{{{*/
+bool EDSP::WriteError(char const * const uuid, std::string const &message, FILE* output) {
+ fprintf(output, "Error: %s\n", uuid);
+ fprintf(output, "Message: %s\n\n", SubstVar(SubstVar(message, "\n\n", "\n.\n"), "\n", "\n ").c_str());
+ return true;
+}
+ /*}}}*/
+// EDSP::ExecuteSolver - fork requested solver and setup ipc pipes {{{*/
+bool EDSP::ExecuteSolver(const char* const solver, int *solver_in, int *solver_out) {
+ std::vector<std::string> const solverDirs = _config->FindVector("Dir::Bin::Solvers");
+ std::string file;
+ for (std::vector<std::string>::const_iterator dir = solverDirs.begin();
+ dir != solverDirs.end(); ++dir) {
+ file = flCombine(*dir, solver);
+ if (RealFileExists(file.c_str()) == true)
+ break;
+ file.clear();
+ }
+
+ if (file.empty() == true)
+ return _error->Error("Can't call external solver '%s' as it is not in a configured directory!", solver);
+ int external[4] = {-1, -1, -1, -1};
+ if (pipe(external) != 0 || pipe(external + 2) != 0)
+ return _error->Errno("Resolve", "Can't create needed IPC pipes for EDSP");
+ for (int i = 0; i < 4; ++i)
+ SetCloseExec(external[i], true);
+
+ pid_t Solver = ExecFork();
+ if (Solver == 0) {
+ dup2(external[0], STDIN_FILENO);
+ dup2(external[3], STDOUT_FILENO);
+ const char* calling[2] = { file.c_str(), 0 };
+ execv(calling[0], (char**) calling);
+ std::cerr << "Failed to execute solver '" << solver << "'!" << std::endl;
+ _exit(100);
+ }
+ close(external[0]);
+ close(external[3]);
+
+ if (WaitFd(external[1], true, 5) == false)
+ return _error->Errno("Resolve", "Timed out while Waiting on availability of solver stdin");
+
+ *solver_in = external[1];
+ *solver_out = external[2];
+ return true;
+}
+ /*}}}*/
+// EDSP::ResolveExternal - resolve problems by asking external for help {{{*/
+bool EDSP::ResolveExternal(const char* const solver, pkgDepCache &Cache,
+ bool const upgrade, bool const distUpgrade,
+ bool const autoRemove, OpProgress *Progress) {
+ int solver_in, solver_out;
+ if (EDSP::ExecuteSolver(solver, &solver_in, &solver_out) == false)
+ return false;
+
+ FILE* output = fdopen(solver_in, "w");
+ if (output == NULL)
+ return _error->Errno("Resolve", "fdopen on solver stdin failed");
+
+ if (Progress != NULL)
+ Progress->OverallProgress(0, 100, 5, _("Execute external solver"));
+ EDSP::WriteRequest(Cache, output, upgrade, distUpgrade, autoRemove, Progress);
+ if (Progress != NULL)
+ Progress->OverallProgress(5, 100, 20, _("Execute external solver"));
+ EDSP::WriteScenario(Cache, output, Progress);
+ fclose(output);
+
+ if (Progress != NULL)
+ Progress->OverallProgress(25, 100, 75, _("Execute external solver"));
+ if (EDSP::ReadResponse(solver_out, Cache, Progress) == false)
+ return false;
+
+ return true;
+}
+ /*}}}*/
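
From the library side, EDSP::ResolveExternal wraps the whole exchange shown above (request, scenario, response). A hedged sketch of how it could be invoked on a prepared cache; the solver name "dump" is merely an example and has to live in one of the Dir::Bin::Solvers directories:

   // Illustrative sketch only: hand a prepared pkgDepCache to an external solver.
   bool SolveExternally(pkgDepCache &Cache)
   {
      OpProgress Progress;
      if (EDSP::ResolveExternal("dump", Cache, false /*upgrade*/,
                                false /*distUpgrade*/, false /*autoRemove*/,
                                &Progress) == false)
         return _error->Error("External solver could not find a solution");
      return true;
   }
   // WriteRequest (called internally) emits a stanza of the form:
   //   Request: EDSP 0.4
   //   Install: pkgA pkgB
   //   Remove: pkgC
   //   Upgrade: yes
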
diff --git a/apt-pkg/edsp.h b/apt-pkg/edsp.h
new file mode 100644
index 000000000..743c3f5d1
--- /dev/null
+++ b/apt-pkg/edsp.h
@@ -0,0 +1,222 @@
+// -*- mode: cpp; mode: fold -*-
+/** Description \file edsp.h {{{
+ ######################################################################
+ Set of methods to help writing and reading everything needed for EDSP
+ with the notable exception of reading a scenario for conversion into
+ a Cache as this is handled by the edsp interface for listparser and friends
+ ##################################################################### */
+ /*}}}*/
+#ifndef PKGLIB_EDSP_H
+#define PKGLIB_EDSP_H
+
+#include <apt-pkg/depcache.h>
+#include <apt-pkg/cacheset.h>
+#include <apt-pkg/progress.h>
+
+#include <string>
+
+class EDSP /*{{{*/
+{
+ // we could use pkgCache::DepType and ::Priority, but these would be localized strings…
+ static const char * const PrioMap[];
+ static const char * const DepMap[];
+
+ bool static ReadLine(int const input, std::string &line);
+ bool static StringToBool(char const *answer, bool const defValue);
+
+ void static WriteScenarioVersion(pkgDepCache &Cache, FILE* output,
+ pkgCache::PkgIterator const &Pkg,
+ pkgCache::VerIterator const &Ver);
+ void static WriteScenarioDependency(pkgDepCache &Cache, FILE* output,
+ pkgCache::PkgIterator const &Pkg,
+ pkgCache::VerIterator const &Ver);
+ void static WriteScenarioLimitedDependency(pkgDepCache &Cache, FILE* output,
+ pkgCache::PkgIterator const &Pkg,
+ pkgCache::VerIterator const &Ver,
+ APT::PackageSet const &pkgset);
+public:
+ /** \brief creates the EDSP request stanza
+ *
+ * In the EDSP protocol the first thing sent to the resolver is a stanza
+ * encoding the request. This method will write this stanza by looking at
+ * the given Cache and requests the installation of all packages which were
+ * marked for installation in it (equally for remove).
+ *
+ * \param Cache in which the request is encoded
+ * \param output is written to this "file"
+ * \param upgrade is true if it is a request like apt-get upgrade
+ * \param distUpgrade is true if it is a request like apt-get dist-upgrade
+ * \param autoRemove is true if removal of unneeded packages should be performed
+ * \param Progress is an instance to report progress to
+ *
+ * \return true if request was composed successfully, otherwise false
+ */
+ bool static WriteRequest(pkgDepCache &Cache, FILE* output,
+ bool const upgrade = false,
+ bool const distUpgrade = false,
+ bool const autoRemove = false,
+ OpProgress *Progress = NULL);
+
+ /** \brief creates the scenario representing the package universe
+ *
+ * After the request all known information about a package is sent
+ * to the solver. The output looks similar to a Packages or status file
+ *
+ * All packages and versions included in this Cache are sent, even if
+ * it doesn't make sense from an APT resolver point of view, like versions
+ * with a negative pin, to enable the solver to propose even that as a
+ * solution or at least to be able to give a hint what can be done to
+ * satisfy a request.
+ *
+ * \param Cache is the known package universe
+ * \param output is written to this "file"
+ * \param Progress is an instance to report progress to
+ *
+ * \return true if universe was composed successfully, otherwise false
+ */
+ bool static WriteScenario(pkgDepCache &Cache, FILE* output, OpProgress *Progress = NULL);
+
+ /** \brief creates a limited scenario representing the package universe
+ *
+ * This method works like #WriteScenario, but instead of sending the
+ * complete universe to the solver it sends only the packages included
+ * in the pkgset, and these will only have dependencies on packages
+ * which are in the given set. All other dependencies will be removed,
+ * so that this method can also be used to create testcases
+ *
+ * \param Cache is the known package universe
+ * \param output is written to this "file"
+ * \param pkgset is a set of packages the universe should be limited to
+ * \param Progress is an instance to report progress to
+ *
+ * \return true if universe was composed successfully, otherwise false
+ */
+ bool static WriteLimitedScenario(pkgDepCache &Cache, FILE* output,
+ APT::PackageSet const &pkgset,
+ OpProgress *Progress = NULL);
+
+ /** \brief waits and acts on the information returned from the solver
+ *
+ * This method takes care of interpreting whatever the solver sends
+ * through the standard output like a solution, progress or an error.
+ * The main thread should hand control over to this method to
+ * wait for the solver to finish the given task
+ *
+ * \param input file descriptor with the response from the solver
+ * \param Cache the solution should be applied on if any
+ * \param Progress is an instance to report progress to
+ *
+ * \return true if a solution is found and applied correctly, otherwise false
+ */
+ bool static ReadResponse(int const input, pkgDepCache &Cache, OpProgress *Progress = NULL);
+
+ /** \brief search and read the request stanza for action later
+ *
+ * This method will ignore the input up to the point it finds the
+ * Request: line as an indicator for the Request stanza.
+ * The request is then stored in the parameters install and remove,
+ * as the cache isn't built yet because the scenario follows the request.
+ *
+ * \param input file descriptor with the edsp input for the solver
+ * \param[out] install is a list which gets populated with requested installs
+ * \param[out] remove is a list which gets populated with requested removals
+ * \param[out] upgrade is true if it is a request like apt-get upgrade
+ * \param[out] distUpgrade is true if it is a request like apt-get dist-upgrade
+ * \param[out] autoRemove is true if removal of unneeded packages should be performed
+ *
+ * \return true if the request could be found and worked on, otherwise false
+ */
+ bool static ReadRequest(int const input, std::list<std::string> &install,
+ std::list<std::string> &remove, bool &upgrade,
+ bool &distUpgrade, bool &autoRemove);
+
+ /** \brief takes the request lists and applies it on the cache
+ *
+ * The lists as created by #ReadRequest will be used to find the
+ * packages in question and mark them for install/remove.
+ * No solving is done and no auto-install/-remove.
+ *
+ * \param install is a list of packages to mark for installation
+ * \param remove is a list of packages to mark for removal
+ * \param Cache is where the markers should be set
+ *
+ * \return false if the request couldn't be applied, true otherwise
+ */
+ bool static ApplyRequest(std::list<std::string> const &install,
+ std::list<std::string> const &remove,
+ pkgDepCache &Cache);
+
+ /** \brief encodes the changes in the Cache as a EDSP solution
+ *
+ * The markers in the Cache are observed and sent to the given
+ * file. The solution isn't checked for consistency or the like,
+ * so even broken solutions can be written successfully,
+ * but the front-end receiving it will probably fail then.
+ *
+ * \param Cache which represents the solution
+ * \param output to write the stanzas forming the solution to
+ *
+ * \return true if solution could be written, otherwise false
+ */
+ bool static WriteSolution(pkgDepCache &Cache, FILE* output);
+
+ /** \brief sends a progress report
+ *
+ * \param percent of the solving completed
+ * \param message the solver wants the user to see
+ * \param output the front-end listens for progress report
+ */
+ bool static WriteProgress(unsigned short const percent, const char* const message, FILE* output);
+
+ /** \brief sends an error report
+ *
+ * Solvers are expected to execute successfully even if
+ * they were unable to calculate a solution for a given task.
+ * Obviously they can't send a solution through, so this
+ * methods deals with formatting an error message correctly
+ * so that the front-ends can receive and display it.
+ *
+ * The first line of the message should be a short description
+ * of the error so it can be used for dialog titles or the like
+ *
+ * \param uuid of this error message
+ * \param message is free-form text to describe the error
+ * \param output the front-end listens for error messages
+ */
+ bool static WriteError(char const * const uuid, std::string const &message, FILE* output);
+
+
+ /** \brief executes the given solver and returns the pipe ends
+ *
+ * The given solver is executed if it can be found in one of the
+ * configured directories and setup for it is performed.
+ *
+ * \param solver to execute
+ * \param[out] solver_in will be the stdin of the solver
+ * \param[out] solver_out will be the stdout of the solver
+ *
+ * \return true if the solver could be started and the pipes
+ * are set up correctly, otherwise false and the pipes are invalid
+ */
+ bool static ExecuteSolver(const char* const solver, int *solver_in, int *solver_out);
+
+ /** \brief call an external resolver to handle the request
+ *
+ * This method wraps all the methods above to call an external solver
+ *
+ * \param solver to execute
+ * \param Cache with the problem and as universe to work in
+ * \param upgrade is true if it is a request like apt-get upgrade
+ * \param distUpgrade is true if it is a request like apt-get dist-upgrade
+ * \param autoRemove is true if unneeded packages should be removed
+ * \param Progress is an instance to report progress to
+ *
+ * \return true if the solver has successfully solved the problem,
+ * otherwise false
+ */
+ bool static ResolveExternal(const char* const solver, pkgDepCache &Cache,
+ bool const upgrade, bool const distUpgrade,
+ bool const autoRemove, OpProgress *Progress = NULL);
+};
+ /*}}}*/
+#endif
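
A minimal sketch of how a front-end might drive the interface declared above, assuming a fully initialised pkgDepCache whose install/remove markers are already set; the solver name is illustrative and would be looked up under Dir::Bin::solvers:

   #include <apt-pkg/edsp.h>
   #include <apt-pkg/depcache.h>
   #include <apt-pkg/progress.h>

   // hypothetical caller: hand the marked cache to an external solver
   bool RunExternalSolver(pkgDepCache &Cache)
   {
      OpProgress Progress;
      return EDSP::ResolveExternal("mysolver", Cache,
                                   /*upgrade*/ false, /*distUpgrade*/ false,
                                   /*autoRemove*/ false, &Progress);
   }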
diff --git a/apt-pkg/edsp/edspindexfile.cc b/apt-pkg/edsp/edspindexfile.cc
new file mode 100644
index 000000000..f5881e663
--- /dev/null
+++ b/apt-pkg/edsp/edspindexfile.cc
@@ -0,0 +1,78 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+/* ######################################################################
+ The scenario file is designed to work as an intermediate file between
+ APT and the resolver. It is on purpose very similar to a dpkg status file
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#include <apt-pkg/edspindexfile.h>
+#include <apt-pkg/edsplistparser.h>
+#include <apt-pkg/sourcelist.h>
+#include <apt-pkg/configuration.h>
+#include <apt-pkg/progress.h>
+#include <apt-pkg/error.h>
+#include <apt-pkg/strutl.h>
+#include <apt-pkg/acquire-item.h>
+
+#include <sys/stat.h>
+ /*}}}*/
+
+// edspIndex::edspIndex - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+edspIndex::edspIndex(string File) : debStatusIndex(File)
+{
+}
+ /*}}}*/
+// edspIndex::Merge - Load the index file into a cache /*{{{*/
+bool edspIndex::Merge(pkgCacheGenerator &Gen,OpProgress *Prog) const
+{
+ FileFd Pkg;
+ if (File != "stdin")
+ Pkg.Open(File, FileFd::ReadOnly);
+ else
+ Pkg.OpenDescriptor(STDIN_FILENO, FileFd::ReadOnly);
+ if (_error->PendingError() == true)
+ return false;
+ edspListParser Parser(&Pkg);
+ if (_error->PendingError() == true)
+ return false;
+
+ if (Prog != NULL)
+ Prog->SubProgress(0,File);
+ if (Gen.SelectFile(File,string(),*this) == false)
+ return _error->Error("Problem with SelectFile %s",File.c_str());
+
+ // Store the IMS information
+ pkgCache::PkgFileIterator CFile = Gen.GetCurFile();
+ struct stat St;
+ if (fstat(Pkg.Fd(),&St) != 0)
+ return _error->Errno("fstat","Failed to stat");
+ CFile->Size = St.st_size;
+ CFile->mtime = St.st_mtime;
+ CFile->Archive = Gen.WriteUniqString("edsp::scenario");
+
+ if (Gen.MergeList(Parser) == false)
+ return _error->Error("Problem with MergeList %s",File.c_str());
+ return true;
+}
+ /*}}}*/
+// Index File types for APT /*{{{*/
+class edspIFType: public pkgIndexFile::Type
+{
+ public:
+ virtual pkgRecords::Parser *CreatePkgParser(pkgCache::PkgFileIterator File) const
+ {
+ // we don't have a record parser for this type as the file is not persistent
+ return NULL;
+ };
+ edspIFType() {Label = "EDSP scenario file";};
+};
+static edspIFType _apt_Universe;
+
+const pkgIndexFile::Type *edspIndex::GetType() const
+{
+ return &_apt_Universe;
+}
+ /*}}}*/
diff --git a/apt-pkg/edsp/edspindexfile.h b/apt-pkg/edsp/edspindexfile.h
new file mode 100644
index 000000000..87c06557c
--- /dev/null
+++ b/apt-pkg/edsp/edspindexfile.h
@@ -0,0 +1,25 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+/* ######################################################################
+ The scenario file is designed to work as an intermediate file between
+ APT and the resolver. It is on purpose very similar to a dpkg status file
+ ##################################################################### */
+ /*}}}*/
+#ifndef PKGLIB_EDSPINDEXFILE_H
+#define PKGLIB_EDSPINDEXFILE_H
+
+#include <apt-pkg/indexfile.h>
+#include <apt-pkg/debindexfile.h>
+
+class edspIndex : public debStatusIndex
+{
+ public:
+
+ virtual const Type *GetType() const;
+
+ virtual bool Merge(pkgCacheGenerator &Gen,OpProgress *Prog) const;
+
+ edspIndex(string File);
+};
+
+#endif
diff --git a/apt-pkg/edsp/edsplistparser.cc b/apt-pkg/edsp/edsplistparser.cc
new file mode 100644
index 000000000..3349e8cce
--- /dev/null
+++ b/apt-pkg/edsp/edsplistparser.cc
@@ -0,0 +1,90 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+/* ######################################################################
+
+ Package Cache Generator - Generator for the cache structure.
+
+ This builds the cache structure from the abstract package list parser.
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#include <apt-pkg/edsplistparser.h>
+#include <apt-pkg/error.h>
+#include <apt-pkg/configuration.h>
+#include <apt-pkg/strutl.h>
+#include <apt-pkg/md5.h>
+#include <apt-pkg/macros.h>
+ /*}}}*/
+
+// ListParser::edspListParser - Constructor /*{{{*/
+edspListParser::edspListParser(FileFd *File, string const &Arch) : debListParser(File, Arch)
+{}
+ /*}}}*/
+// ListParser::NewVersion - Fill in the version structure /*{{{*/
+bool edspListParser::NewVersion(pkgCache::VerIterator &Ver)
+{
+ Ver->ID = Section.FindI("APT-ID", Ver->ID);
+ return debListParser::NewVersion(Ver);
+}
+ /*}}}*/
+// ListParser::Description - Return the description string /*{{{*/
+// ---------------------------------------------------------------------
+/* Sorry, no description for the resolvers… */
+string edspListParser::Description()
+{
+ return "";
+}
+string edspListParser::DescriptionLanguage()
+{
+ return "";
+}
+MD5SumValue edspListParser::Description_md5()
+{
+ return MD5SumValue("");
+}
+ /*}}}*/
+// ListParser::VersionHash - Compute a unique hash for this version /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+unsigned short edspListParser::VersionHash()
+{
+ if (Section.Exists("APT-Hash") == true)
+ return Section.FindI("APT-Hash");
+ else if (Section.Exists("APT-ID") == true)
+ return Section.FindI("APT-ID");
+ return 0;
+}
+ /*}}}*/
+// ListParser::ParseStatus - Parse the status field /*{{{*/
+// ---------------------------------------------------------------------
+/* The Status: line here is not a normal dpkg one but just one which tells
+ us if the package is installed or not, where missing means not. */
+bool edspListParser::ParseStatus(pkgCache::PkgIterator &Pkg,
+ pkgCache::VerIterator &Ver)
+{
+ unsigned long state = 0;
+ if (Section.FindFlag("Hold",state,pkgCache::State::Hold) == false)
+ return false;
+ if (state != 0)
+ Pkg->SelectedState = pkgCache::State::Hold;
+
+ state = 0;
+ if (Section.FindFlag("Installed",state,pkgCache::State::Installed) == false)
+ return false;
+ if (state != 0)
+ {
+ Pkg->CurrentState = pkgCache::State::Installed;
+ Pkg->CurrentVer = Ver.Index();
+ }
+
+ return true;
+}
+ /*}}}*/
+// ListParser::LoadReleaseInfo - Load the release information /*{{{*/
+bool edspListParser::LoadReleaseInfo(pkgCache::PkgFileIterator &FileI,
+ FileFd &File, string component)
+{
+ return true;
+}
+ /*}}}*/
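
A sketch of the kind of scenario stanza the parser above consumes: the EDSP-specific fields mirror the parser (APT-ID, APT-Hash, Installed, Hold) and sit on top of the dpkg-status-like format it inherits from debListParser; the concrete record is illustrative only:

   Package: hypothetical-pkg
   Architecture: amd64
   Version: 1.0-1
   APT-ID: 42
   Installed: yes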
diff --git a/apt-pkg/edsp/edsplistparser.h b/apt-pkg/edsp/edsplistparser.h
new file mode 100644
index 000000000..ec9f09905
--- /dev/null
+++ b/apt-pkg/edsp/edsplistparser.h
@@ -0,0 +1,38 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+/* ######################################################################
+
+ EDSP Package List Parser - This implements the abstract parser
+ interface for the APT specific intermediate format which is passed
+ to external resolvers
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef PKGLIB_EDSPLISTPARSER_H
+#define PKGLIB_EDSPLISTPARSER_H
+
+#include <apt-pkg/deblistparser.h>
+#include <apt-pkg/pkgcachegen.h>
+#include <apt-pkg/indexfile.h>
+#include <apt-pkg/tagfile.h>
+
+class edspListParser : public debListParser
+{
+ public:
+ virtual bool NewVersion(pkgCache::VerIterator &Ver);
+ virtual string Description();
+ virtual string DescriptionLanguage();
+ virtual MD5SumValue Description_md5();
+ virtual unsigned short VersionHash();
+
+ bool LoadReleaseInfo(pkgCache::PkgFileIterator &FileI,FileFd &File,
+ string section);
+
+ edspListParser(FileFd *File, string const &Arch = "");
+
+ protected:
+ virtual bool ParseStatus(pkgCache::PkgIterator &Pkg,pkgCache::VerIterator &Ver);
+
+};
+
+#endif
diff --git a/apt-pkg/edsp/edspsystem.cc b/apt-pkg/edsp/edspsystem.cc
new file mode 100644
index 000000000..ac0bb8beb
--- /dev/null
+++ b/apt-pkg/edsp/edspsystem.cc
@@ -0,0 +1,124 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+/* ######################################################################
+
+ This system provides the abstraction to use the scenario file as the
+ only source of package information to be able to feed the created file
+ back to APT for its own consumption (eat your own dogfood).
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#include <apt-pkg/edspsystem.h>
+#include <apt-pkg/debversion.h>
+#include <apt-pkg/edspindexfile.h>
+#include <apt-pkg/configuration.h>
+#include <apt-pkg/error.h>
+#include <apt-pkg/fileutl.h>
+#include <apti18n.h>
+#include <sys/types.h>
+#include <unistd.h>
+#include <dirent.h>
+#include <errno.h>
+ /*}}}*/
+
+edspSystem edspSys;
+
+// System::edspSystem - Constructor /*{{{*/
+edspSystem::edspSystem()
+{
+ StatusFile = 0;
+
+ Label = "Debian APT solver interface";
+ VS = &debVS;
+}
+ /*}}}*/
+// System::~edspSystem - Destructor /*{{{*/
+edspSystem::~edspSystem()
+{
+ delete StatusFile;
+}
+ /*}}}*/
+// System::Lock - Get the lock /*{{{*/
+bool edspSystem::Lock()
+{
+ return true;
+}
+ /*}}}*/
+// System::UnLock - Drop a lock /*{{{*/
+bool edspSystem::UnLock(bool NoErrors)
+{
+ return true;
+}
+ /*}}}*/
+// System::CreatePM - Create the underlying package manager /*{{{*/
+// ---------------------------------------------------------------------
+/* we can't use edsp input as input for real installations - just a
+ simulation can work, but everything else will fail badly */
+pkgPackageManager *edspSystem::CreatePM(pkgDepCache *Cache) const
+{
+ return NULL;
+}
+ /*}}}*/
+// System::Initialize - Setup the configuration space.. /*{{{*/
+bool edspSystem::Initialize(Configuration &Cnf)
+{
+ Cnf.Set("Dir::State::extended_states", "/dev/null");
+ Cnf.Set("Dir::State::status","/dev/null");
+ Cnf.Set("Dir::State::lists","/dev/null");
+
+ Cnf.Set("Debug::NoLocking", "true");
+ Cnf.Set("APT::Get::Simulate", "true");
+
+ if (StatusFile) {
+ delete StatusFile;
+ StatusFile = 0;
+ }
+ return true;
+}
+ /*}}}*/
+// System::ArchiveSupported - Is a file format supported /*{{{*/
+bool edspSystem::ArchiveSupported(const char *Type)
+{
+ return false;
+}
+ /*}}}*/
+// System::Score - Determine if we should use the edsp system /*{{{*/
+signed edspSystem::Score(Configuration const &Cnf)
+{
+ if (Cnf.Find("edsp::scenario", "") == "stdin")
+ return 1000;
+ if (FileExists(Cnf.FindFile("edsp::scenario","")) == true)
+ return 1000;
+ return -1000;
+}
+ /*}}}*/
+// System::AddStatusFiles - Register the status files /*{{{*/
+bool edspSystem::AddStatusFiles(vector<pkgIndexFile *> &List)
+{
+ if (StatusFile == 0)
+ {
+ if (_config->Find("edsp::scenario", "") == "stdin")
+ StatusFile = new edspIndex("stdin");
+ else
+ StatusFile = new edspIndex(_config->FindFile("edsp::scenario"));
+ }
+ List.push_back(StatusFile);
+ return true;
+}
+ /*}}}*/
+// System::FindIndex - Get an index file for status files /*{{{*/
+bool edspSystem::FindIndex(pkgCache::PkgFileIterator File,
+ pkgIndexFile *&Found) const
+{
+ if (StatusFile == 0)
+ return false;
+ if (StatusFile->FindInCache(*File.Cache()) == File)
+ {
+ Found = StatusFile;
+ return true;
+ }
+
+ return false;
+}
+ /*}}}*/
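
Since Score() returns 1000 whenever edsp::scenario is "stdin" or names an existing file, setting that option is enough for the generic system selection to pick edspSys over the default system; a minimal sketch, assuming the usual pkgInitConfig/pkgInitSystem startup:

   _config->Set("edsp::scenario", "stdin");    // or a path to a scenario file
   pkgSystem *Sys = 0;
   if (pkgInitSystem(*_config, Sys) == false)  // selects the system with the best Score()
      return false;
   // Sys now points at edspSys; AddStatusFiles() registers the scenario as status file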
diff --git a/apt-pkg/edsp/edspsystem.h b/apt-pkg/edsp/edspsystem.h
new file mode 100644
index 000000000..bc5be61d1
--- /dev/null
+++ b/apt-pkg/edsp/edspsystem.h
@@ -0,0 +1,38 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: debsystem.h,v 1.4 2003/01/11 07:16:33 jgg Exp $
+/* ######################################################################
+
+ System - Debian version of the System Class
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef PKGLIB_EDSPSYSTEM_H
+#define PKGLIB_EDSPSYSTEM_H
+
+#include <apt-pkg/pkgsystem.h>
+
+class edspIndex;
+class edspSystem : public pkgSystem
+{
+ edspIndex *StatusFile;
+
+ public:
+
+ virtual bool Lock();
+ virtual bool UnLock(bool NoErrors = false);
+ virtual pkgPackageManager *CreatePM(pkgDepCache *Cache) const;
+ virtual bool Initialize(Configuration &Cnf);
+ virtual bool ArchiveSupported(const char *Type);
+ virtual signed Score(Configuration const &Cnf);
+ virtual bool AddStatusFiles(std::vector<pkgIndexFile *> &List);
+ virtual bool FindIndex(pkgCache::PkgFileIterator File,
+ pkgIndexFile *&Found) const;
+
+ edspSystem();
+ ~edspSystem();
+};
+
+extern edspSystem edspSys;
+
+#endif
diff --git a/apt-pkg/indexcopy.cc b/apt-pkg/indexcopy.cc
index f88d51fc5..064fb007c 100644
--- a/apt-pkg/indexcopy.cc
+++ b/apt-pkg/indexcopy.cc
@@ -75,7 +75,7 @@ bool IndexCopy::CopyPackages(string CDROM,string Name,vector<string> &List,
// Open the package file
FileFd Pkg;
- if (FileExists(*I + GetFileName()) == true)
+ if (RealFileExists(*I + GetFileName()) == true)
{
Pkg.Open(*I + GetFileName(),FileFd::ReadOnly);
FileSize = Pkg.Size();
@@ -532,7 +532,7 @@ bool SigVerify::Verify(string prefix, string file, indexRecords *MetaIndex)
// we skip non-existing files in the verifcation to support a cdrom
// with no Packages file (just a Package.gz), see LP: #255545
// (non-existing files are not considered a error)
- if(!FileExists(prefix+file))
+ if(!RealFileExists(prefix+file))
{
_error->Warning(_("Skipping nonexistent file %s"), string(prefix+file).c_str());
return true;
@@ -601,7 +601,7 @@ bool SigVerify::CopyAndVerify(string CDROM,string Name,vector<string> &SigList,
string const release = *I+"Release";
// a Release.gpg without a Release should never happen
- if(FileExists(release) == false)
+ if(RealFileExists(release) == false)
{
delete MetaIndex;
continue;
@@ -681,7 +681,7 @@ bool SigVerify::RunGPGV(std::string const &File, std::string const &FileGPG,
std::vector<string> keyrings;
if (DirectoryExists(trustedPath))
keyrings = GetListOfFilesInDir(trustedPath, "gpg", false, true);
- if (FileExists(trustedFile) == true)
+ if (RealFileExists(trustedFile) == true)
keyrings.push_back(trustedFile);
std::vector<const char *> Args;
@@ -722,7 +722,8 @@ bool SigVerify::RunGPGV(std::string const &File, std::string const &FileGPG,
}
Args.push_back(FileGPG.c_str());
- Args.push_back(File.c_str());
+ if (FileGPG != File)
+ Args.push_back(File.c_str());
Args.push_back(NULL);
if (Debug == true)
@@ -787,7 +788,7 @@ bool TranslationsCopy::CopyTranslations(string CDROM,string Name, /*{{{*/
// Open the package file
FileFd Pkg;
- if (FileExists(*I) == true)
+ if (RealFileExists(*I) == true)
{
Pkg.Open(*I,FileFd::ReadOnly);
FileSize = Pkg.Size();
diff --git a/apt-pkg/indexrecords.cc b/apt-pkg/indexrecords.cc
index eb9a36866..10e154ad2 100644
--- a/apt-pkg/indexrecords.cc
+++ b/apt-pkg/indexrecords.cc
@@ -55,14 +55,17 @@ bool indexRecords::Load(const string Filename) /*{{{*/
}
pkgTagSection Section;
- if (TagFile.Step(Section) == false)
- {
- strprintf(ErrorText, _("No sections in Release file %s"), Filename.c_str());
- return false;
- }
-
const char *Start, *End;
- Section.Get (Start, End, 0);
+ // Skip over sections beginning with ----- as this is an indicator for clearsigned files
+ do {
+ if (TagFile.Step(Section) == false)
+ {
+ strprintf(ErrorText, _("No sections in Release file %s"), Filename.c_str());
+ return false;
+ }
+
+ Section.Get (Start, End, 0);
+ } while (End - Start > 5 && strncmp(Start, "-----", 5) == 0);
Suite = Section.FindS("Suite");
Dist = Section.FindS("Codename");
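
The loop skips every leading stanza whose body starts with "-----", i.e. the armor header of a clearsigned Release (InRelease) file, so the fields below are read from the real Release stanza. The layout being skipped over looks roughly like this:

   -----BEGIN PGP SIGNED MESSAGE-----
   Hash: SHA256

   Suite: unstable
   Codename: sid
   ...
   -----BEGIN PGP SIGNATURE-----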
diff --git a/apt-pkg/init.cc b/apt-pkg/init.cc
index f0bad78df..aff585e3b 100644
--- a/apt-pkg/init.cc
+++ b/apt-pkg/init.cc
@@ -52,7 +52,7 @@ bool pkgInitConfig(Configuration &Cnf)
Cnf.Set("Dir::State::lists","lists/");
Cnf.Set("Dir::State::cdroms","cdroms.list");
Cnf.Set("Dir::State::mirrors","mirrors/");
-
+
// Cache
Cnf.Set("Dir::Cache","var/cache/apt/");
Cnf.Set("Dir::Cache::archives","archives/");
@@ -72,7 +72,9 @@ bool pkgInitConfig(Configuration &Cnf)
Cnf.Set("Dir::Etc::preferencesparts","preferences.d");
Cnf.Set("Dir::Etc::trusted", "trusted.gpg");
Cnf.Set("Dir::Etc::trustedparts","trusted.gpg.d");
+
Cnf.Set("Dir::Bin::methods","/usr/lib/apt/methods");
+ Cnf.Set("Dir::Bin::solvers::","/usr/lib/apt/solvers");
Cnf.Set("Dir::Media::MountPath","/media/apt");
// State
@@ -88,16 +90,19 @@ bool pkgInitConfig(Configuration &Cnf)
// Translation
Cnf.Set("APT::Acquire::Translation", "environment");
+ // Default cdrom mount point
+ Cnf.Set("Acquire::cdrom::mount", "/media/cdrom/");
+
bool Res = true;
// Read an alternate config file
const char *Cfg = getenv("APT_CONFIG");
if (Cfg != 0)
{
- if (FileExists(Cfg) == true)
+ if (RealFileExists(Cfg) == true)
Res &= ReadConfigFile(Cnf,Cfg);
else
- _error->WarningE("FileExists",_("Unable to read %s"),Cfg);
+ _error->WarningE("RealFileExists",_("Unable to read %s"),Cfg);
}
// Read the configuration parts dir
@@ -109,7 +114,7 @@ bool pkgInitConfig(Configuration &Cnf)
// Read the main config file
string FName = Cnf.FindFile("Dir::Etc::main");
- if (FileExists(FName) == true)
+ if (RealFileExists(FName) == true)
Res &= ReadConfigFile(Cnf,FName);
if (Res == false)
diff --git a/apt-pkg/makefile b/apt-pkg/makefile
index 4e5ec107f..b11e35250 100644
--- a/apt-pkg/makefile
+++ b/apt-pkg/makefile
@@ -3,7 +3,7 @@ BASE=..
SUBDIR=apt-pkg
# Header location
-SUBDIRS = deb contrib
+SUBDIRS = deb edsp contrib
HEADER_TARGETDIRS = apt-pkg
# Bring in the default rules
@@ -20,11 +20,15 @@ APT_DOMAIN:=libapt-pkg$(LIBAPTPKG_MAJOR)
# Source code for the contributed non-core things
SOURCE = contrib/mmap.cc contrib/error.cc contrib/strutl.cc \
contrib/configuration.cc contrib/progress.cc contrib/cmndline.cc \
- contrib/md5.cc contrib/sha1.cc contrib/sha256.cc contrib/hashes.cc \
+ contrib/md5.cc contrib/sha1.cc contrib/sha2.cc \
+ contrib/sha2_internal.cc\
+ contrib/hashes.cc \
contrib/cdromutl.cc contrib/crc-16.cc contrib/netrc.cc \
contrib/fileutl.cc
HEADERS = mmap.h error.h configuration.h fileutl.h cmndline.h netrc.h\
- md5.h crc-16.h cdromutl.h strutl.h sptr.h sha1.h sha256.h hashes.h \
+ md5.h crc-16.h cdromutl.h strutl.h sptr.h sha1.h sha2.h \
+ sha2_internal.h \
+ hashes.h hashsum_template.h\
macros.h weakptr.h
# Source code for the core main library
@@ -35,7 +39,7 @@ SOURCE+= pkgcache.cc version.cc depcache.cc \
srcrecords.cc cachefile.cc versionmatch.cc policy.cc \
pkgsystem.cc indexfile.cc pkgcachegen.cc acquire-item.cc \
indexrecords.cc vendor.cc vendorlist.cc cdrom.cc indexcopy.cc \
- aptconfiguration.cc cachefilter.cc cacheset.cc
+ aptconfiguration.cc cachefilter.cc cacheset.cc edsp.cc
HEADERS+= algorithms.h depcache.h pkgcachegen.h cacheiterators.h \
orderlist.h sourcelist.h packagemanager.h tagfile.h \
init.h pkgcache.h version.h progress.h pkgrecords.h \
@@ -43,7 +47,7 @@ HEADERS+= algorithms.h depcache.h pkgcachegen.h cacheiterators.h \
clean.h srcrecords.h cachefile.h versionmatch.h policy.h \
pkgsystem.h indexfile.h metaindex.h indexrecords.h vendor.h \
vendorlist.h cdrom.h indexcopy.h aptconfiguration.h \
- cachefilter.h cacheset.h
+ cachefilter.h cacheset.h edsp.h
# Source code for the debian specific components
# In theory the deb headers do not need to be exported..
@@ -53,6 +57,10 @@ SOURCE+= deb/deblistparser.cc deb/debrecords.cc deb/dpkgpm.cc \
HEADERS+= debversion.h debsrcrecords.h dpkgpm.h debrecords.h \
deblistparser.h debsystem.h debindexfile.h debmetaindex.h
+# Source code for the APT resolver interface specific components
+SOURCE+= edsp/edsplistparser.cc edsp/edspindexfile.cc edsp/edspsystem.cc
+HEADERS+= edsplistparser.h edspindexfile.h edspsystem.h
+
HEADERS := $(addprefix apt-pkg/,$(HEADERS))
include $(LIBRARY_H)
diff --git a/apt-pkg/orderlist.cc b/apt-pkg/orderlist.cc
index a53854a26..ba43bc757 100644
--- a/apt-pkg/orderlist.cc
+++ b/apt-pkg/orderlist.cc
@@ -128,10 +128,6 @@ bool pkgOrderList::IsMissing(PkgIterator Pkg)
if (FileList[Pkg->ID].empty() == false)
return false;
- // Missing Pseudo packages are missing if the real package is missing
- if (pkgCache::VerIterator(Cache, Cache[Pkg].CandidateVer).Pseudo() == true)
- return IsMissing(Pkg.Group().FindPkg("all"));
-
return true;
}
/*}}}*/
@@ -494,7 +490,7 @@ bool pkgOrderList::VisitRProvides(DepFunc F,VerIterator Ver)
bool Res = true;
for (PrvIterator P = Ver.ProvidesList(); P.end() == false; P++)
Res &= (this->*F)(P.ParentPkg().RevDependsList());
- return true;
+ return Res;
}
/*}}}*/
// OrderList::VisitProvides - Visit all of the providing packages /*{{{*/
@@ -511,15 +507,11 @@ bool pkgOrderList::VisitProvides(DepIterator D,bool Critical)
if (Cache[Pkg].Keep() == true && Pkg.State() == PkgIterator::NeedsNothing)
continue;
- if (D->Type != pkgCache::Dep::Conflicts &&
- D->Type != pkgCache::Dep::DpkgBreaks &&
- D->Type != pkgCache::Dep::Obsoletes &&
+ if (D.IsNegative() == false &&
Cache[Pkg].InstallVer != *I)
continue;
- if ((D->Type == pkgCache::Dep::Conflicts ||
- D->Type == pkgCache::Dep::DpkgBreaks ||
- D->Type == pkgCache::Dep::Obsoletes) &&
+ if (D.IsNegative() == true &&
(Version *)Pkg.CurrentVer() != *I)
continue;
@@ -651,9 +643,7 @@ bool pkgOrderList::DepUnPackCrit(DepIterator D)
{
/* Forward critical dependencies MUST be correct before the
package can be unpacked. */
- if (D->Type != pkgCache::Dep::Conflicts &&
- D->Type != pkgCache::Dep::DpkgBreaks &&
- D->Type != pkgCache::Dep::Obsoletes &&
+ if (D.IsNegative() == false &&
D->Type != pkgCache::Dep::PreDepends)
continue;
@@ -1081,9 +1071,7 @@ bool pkgOrderList::CheckDep(DepIterator D)
/* Conflicts requires that all versions are not present, depends
just needs one */
- if (D->Type != pkgCache::Dep::Conflicts &&
- D->Type != pkgCache::Dep::DpkgBreaks &&
- D->Type != pkgCache::Dep::Obsoletes)
+ if (D.IsNegative() == false)
{
/* Try to find something that does not have the after flag set
if at all possible */
diff --git a/apt-pkg/packagemanager.cc b/apt-pkg/packagemanager.cc
index cff34058c..1ae09347a 100644
--- a/apt-pkg/packagemanager.cc
+++ b/apt-pkg/packagemanager.cc
@@ -81,9 +81,6 @@ bool pkgPackageManager::GetArchives(pkgAcquire *Owner,pkgSourceList *Sources,
if (List->IsNow(Pkg) == false)
continue;
- if (pkgCache::VerIterator(Cache, Cache[Pkg].CandidateVer).Pseudo() == true)
- continue;
-
new pkgAcqArchive(Owner,Sources,Recs,Cache[Pkg].InstVerIter(Cache),
FileNames[Pkg->ID]);
}
@@ -281,9 +278,7 @@ bool pkgPackageManager::ConfigureAll()
{
PkgIterator Pkg(Cache,*I);
- if (ConfigurePkgs == true &&
- pkgCache::VerIterator(Cache, Cache[Pkg].CandidateVer).Pseudo() == false &&
- Configure(Pkg) == false)
+ if (ConfigurePkgs == true && Configure(Pkg) == false)
return false;
List->Flag(Pkg,pkgOrderList::Configured,pkgOrderList::States);
@@ -318,14 +313,23 @@ bool pkgPackageManager::SmartConfigure(PkgIterator Pkg)
{
PkgIterator Pkg(Cache,*I);
- if (ConfigurePkgs == true &&
- pkgCache::VerIterator(Cache, Cache[Pkg].CandidateVer).Pseudo() == false &&
- Configure(Pkg) == false)
+ if (ConfigurePkgs == true && Configure(Pkg) == false)
return false;
List->Flag(Pkg,pkgOrderList::Configured,pkgOrderList::States);
}
+ if ((Cache[Pkg].InstVerIter(Cache)->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same)
+ for (PkgIterator P = Pkg.Group().PackageList();
+ P.end() == false; P = Pkg.Group().NextPkg(P))
+ {
+ if (Pkg == P || List->IsFlag(P,pkgOrderList::Configured) == true ||
+ Cache[P].InstallVer == 0 || (P.CurrentVer() == Cache[P].InstallVer &&
+ (Cache[Pkg].iFlags & pkgDepCache::ReInstall) != pkgDepCache::ReInstall))
+ continue;
+ SmartConfigure(P);
+ }
+
// Sanity Check
if (List->IsFlag(Pkg,pkgOrderList::Configured) == false)
return _error->Error(_("Could not perform immediate configuration on '%s'. "
@@ -473,10 +477,7 @@ bool pkgPackageManager::SmartRemove(PkgIterator Pkg)
List->Flag(Pkg,pkgOrderList::Configured,pkgOrderList::States);
- if (pkgCache::VerIterator(Cache, Cache[Pkg].CandidateVer).Pseudo() == false)
- return Remove(Pkg,(Cache[Pkg].iFlags & pkgDepCache::Purge) == pkgDepCache::Purge);
- else
- return SmartRemove(Pkg.Group().FindPkg("all"));
+ return Remove(Pkg,(Cache[Pkg].iFlags & pkgDepCache::Purge) == pkgDepCache::Purge);
return true;
}
/*}}}*/
@@ -485,21 +486,28 @@ bool pkgPackageManager::SmartRemove(PkgIterator Pkg)
/* This performs the task of handling pre-depends. */
bool pkgPackageManager::SmartUnPack(PkgIterator Pkg)
{
+ return SmartUnPack(Pkg, true);
+}
+bool pkgPackageManager::SmartUnPack(PkgIterator Pkg, bool const Immediate)
+{
// Check if it is already unpacked
if (Pkg.State() == pkgCache::PkgIterator::NeedsConfigure &&
Cache[Pkg].Keep() == true)
{
List->Flag(Pkg,pkgOrderList::UnPacked,pkgOrderList::States);
- if (List->IsFlag(Pkg,pkgOrderList::Immediate) == true)
+ if (Immediate == true &&
+ List->IsFlag(Pkg,pkgOrderList::Immediate) == true)
if (SmartConfigure(Pkg) == false)
return _error->Error(_("Could not perform immediate configuration on already unpacked '%s'. "
"Please see man 5 apt.conf under APT::Immediate-Configure for details."),Pkg.Name());
return true;
}
+ VerIterator const instVer = Cache[Pkg].InstVerIter(Cache);
+
/* See if this packages install version has any predependencies
that are not met by 'now' packages. */
- for (DepIterator D = Cache[Pkg].InstVerIter(Cache).DependsList();
+ for (DepIterator D = instVer.DependsList();
D.end() == false; )
{
// Compute a single dependency element (glob or)
@@ -585,33 +593,32 @@ bool pkgPackageManager::SmartUnPack(PkgIterator Pkg)
// Check for reverse conflicts.
if (CheckRConflicts(Pkg,Pkg.RevDependsList(),
- Cache[Pkg].InstVerIter(Cache).VerStr()) == false)
+ instVer.VerStr()) == false)
return false;
- for (PrvIterator P = Cache[Pkg].InstVerIter(Cache).ProvidesList();
+ for (PrvIterator P = instVer.ProvidesList();
P.end() == false; P++)
CheckRConflicts(Pkg,P.ParentPkg().RevDependsList(),P.ProvideVersion());
- if (pkgCache::VerIterator(Cache, Cache[Pkg].CandidateVer).Pseudo() == false)
- {
- if(Install(Pkg,FileNames[Pkg->ID]) == false)
- return false;
- } else {
- // Pseudo packages will not be unpacked - instead we will do this
- // for the "real" package, but only once and if it is already
- // configured we don't need to unpack it again…
- PkgIterator const P = Pkg.Group().FindPkg("all");
- if (List->IsFlag(P,pkgOrderList::UnPacked) != true &&
- List->IsFlag(P,pkgOrderList::Configured) != true &&
- P.State() != pkgCache::PkgIterator::NeedsNothing) {
- if (SmartUnPack(P) == false)
- return false;
- }
- }
List->Flag(Pkg,pkgOrderList::UnPacked,pkgOrderList::States);
-
+
+ if ((instVer->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same)
+ for (PkgIterator P = Pkg.Group().PackageList();
+ P.end() == false; P = Pkg.Group().NextPkg(P))
+ {
+ if (Pkg == P || List->IsFlag(P,pkgOrderList::UnPacked) == true ||
+ Cache[P].InstallVer == 0 || (P.CurrentVer() == Cache[P].InstallVer &&
+ (Cache[Pkg].iFlags & pkgDepCache::ReInstall) != pkgDepCache::ReInstall))
+ continue;
+ SmartUnPack(P, false);
+ }
+
+ if(Install(Pkg,FileNames[Pkg->ID]) == false)
+ return false;
+
// Perform immedate configuration of the package.
- if (List->IsFlag(Pkg,pkgOrderList::Immediate) == true)
+ if (Immediate == true &&
+ List->IsFlag(Pkg,pkgOrderList::Immediate) == true)
if (SmartConfigure(Pkg) == false)
return _error->Error(_("Could not perform immediate configuration on '%s'. "
"Please see man 5 apt.conf under APT::Immediate-Configure for details. (%d)"),Pkg.Name(),2);
diff --git a/apt-pkg/packagemanager.h b/apt-pkg/packagemanager.h
index efd2cfac6..053b4dc13 100644
--- a/apt-pkg/packagemanager.h
+++ b/apt-pkg/packagemanager.h
@@ -69,7 +69,9 @@ class pkgPackageManager : protected pkgCache::Namespace
// Install helpers
bool ConfigureAll();
bool SmartConfigure(PkgIterator Pkg);
+ //FIXME: merge on abi break
bool SmartUnPack(PkgIterator Pkg);
+ bool SmartUnPack(PkgIterator Pkg, bool const Immediate);
bool SmartRemove(PkgIterator Pkg);
bool EarlyRemove(PkgIterator Pkg);
diff --git a/apt-pkg/pkgcache.cc b/apt-pkg/pkgcache.cc
index 616d400a2..951caeb78 100644
--- a/apt-pkg/pkgcache.cc
+++ b/apt-pkg/pkgcache.cc
@@ -211,12 +211,18 @@ pkgCache::PkgIterator pkgCache::SingleArchFindPkg(const string &Name)
// ---------------------------------------------------------------------
/* Returns 0 on error, pointer to the package otherwise */
pkgCache::PkgIterator pkgCache::FindPkg(const string &Name) {
- if (MultiArchCache() == false)
- return SingleArchFindPkg(Name);
size_t const found = Name.find(':');
if (found == string::npos)
- return FindPkg(Name, "native");
+ {
+ if (MultiArchCache() == false)
+ return SingleArchFindPkg(Name);
+ else
+ return FindPkg(Name, "native");
+ }
string const Arch = Name.substr(found+1);
+ /* Beware: This is special-cased to handle pkg:any in dependencies as
+ these are linked to virtual pkg:any named packages with all archs.
+ If you want any arch from a given pkg, use FindPkg(pkg,arch) */
if (Arch == "any")
return FindPkg(Name, "any");
return FindPkg(Name.substr(0, found), Arch);
@@ -228,7 +234,7 @@ pkgCache::PkgIterator pkgCache::FindPkg(const string &Name) {
pkgCache::PkgIterator pkgCache::FindPkg(const string &Name, string const &Arch) {
if (MultiArchCache() == false) {
if (Arch == "native" || Arch == "all" || Arch == "any" ||
- Arch == _config->Find("APT::Architecture"))
+ Arch == NativeArch())
return SingleArchFindPkg(Name);
else
return PkgIterator(*this,0);
@@ -317,22 +323,22 @@ pkgCache::PkgIterator pkgCache::GrpIterator::FindPkg(string Arch) const {
if (unlikely(IsGood() == false || S->FirstPackage == 0))
return PkgIterator(*Owner, 0);
- static string const myArch = _config->Find("APT::Architecture");
+ /* If we accept any package we simply return the "first"
+ package in this group (the last one added). */
+ if (Arch == "any")
+ return PkgIterator(*Owner, Owner->PkgP + S->FirstPackage);
+
+ char const* const myArch = Owner->NativeArch();
/* Most of the time the package for our native architecture is
the one we add at first to the cache, but this would be the
last one we check, so we do it now. */
- if (Arch == "native" || Arch == myArch) {
- Arch = myArch;
+ if (Arch == "native" || Arch == myArch || Arch == "all") {
pkgCache::Package *Pkg = Owner->PkgP + S->LastPackage;
- if (stringcasecmp(Arch, Owner->StrP + Pkg->Arch) == 0)
+ if (strcasecmp(myArch, Owner->StrP + Pkg->Arch) == 0)
return PkgIterator(*Owner, Pkg);
+ Arch = myArch;
}
- /* If we accept any package we simply return the "first"
- package in this group (the last one added). */
- if (Arch == "any")
- return PkgIterator(*Owner, Owner->PkgP + S->FirstPackage);
-
/* Iterate over the list to find the matching arch
unfortunately this list includes "package noise"
(= different packages with same calculated hash),
@@ -503,7 +509,8 @@ std::string pkgCache::PkgIterator::FullName(bool const &Pretty) const
{
string fullname = Name();
if (Pretty == false ||
- (strcmp(Arch(), "all") != 0 && _config->Find("APT::Architecture") != Arch()))
+ (strcmp(Arch(), "all") != 0 &&
+ strcmp(Owner->NativeArch(), Arch()) != 0))
return fullname.append(":").append(Arch());
return fullname;
}
@@ -514,15 +521,24 @@ std::string pkgCache::PkgIterator::FullName(bool const &Pretty) const
conflicts (including dpkg's Breaks fields). */
bool pkgCache::DepIterator::IsCritical() const
{
- if (S->Type == pkgCache::Dep::Conflicts ||
- S->Type == pkgCache::Dep::DpkgBreaks ||
- S->Type == pkgCache::Dep::Obsoletes ||
+ if (IsNegative() == true ||
S->Type == pkgCache::Dep::Depends ||
S->Type == pkgCache::Dep::PreDepends)
return true;
return false;
}
/*}}}*/
+// DepIterator::IsNegative - Returns true if the dep is a negative one /*{{{*/
+// ---------------------------------------------------------------------
+/* Some dependencies are positive like Depends and Recommends, others
+ are negative like Conflicts which can and should be handled differently */
+bool pkgCache::DepIterator::IsNegative() const
+{
+ return S->Type == Dep::DpkgBreaks ||
+ S->Type == Dep::Conflicts ||
+ S->Type == Dep::Obsoletes;
+}
+ /*}}}*/
// DepIterator::SmartTargetPkg - Resolve dep target pointers w/provides /*{{{*/
// ---------------------------------------------------------------------
/* This intellegently looks at dep target packages and tries to figure
@@ -600,9 +616,7 @@ pkgCache::Version **pkgCache::DepIterator::AllTargets() const
if (Owner->VS->CheckDep(I.VerStr(),S->CompareOp,TargetVer()) == false)
continue;
- if ((S->Type == pkgCache::Dep::Conflicts ||
- S->Type == pkgCache::Dep::DpkgBreaks ||
- S->Type == pkgCache::Dep::Obsoletes) &&
+ if (IsNegative() == true &&
ParentPkg() == I.ParentPkg())
continue;
@@ -617,9 +631,7 @@ pkgCache::Version **pkgCache::DepIterator::AllTargets() const
if (Owner->VS->CheckDep(I.ProvideVersion(),S->CompareOp,TargetVer()) == false)
continue;
- if ((S->Type == pkgCache::Dep::Conflicts ||
- S->Type == pkgCache::Dep::DpkgBreaks ||
- S->Type == pkgCache::Dep::Obsoletes) &&
+ if (IsNegative() == true &&
ParentPkg() == I.OwnerPkg())
continue;
@@ -737,23 +749,6 @@ bool pkgCache::VerIterator::Automatic() const
return false;
}
/*}}}*/
-// VerIterator::Pseudo - Check if this version is a pseudo one /*{{{*/
-// ---------------------------------------------------------------------
-/* Sometimes you have the need to express dependencies with versions
- which doesn't really exist or exist multiply times for "different"
- packages. We need these versions for dependency resolution but they
- are a problem everytime we need to download/install something. */
-bool pkgCache::VerIterator::Pseudo() const
-{
- if (S->MultiArch == pkgCache::Version::All &&
- strcmp(Arch(true),"all") != 0)
- {
- GrpIterator const Grp = ParentPkg().Group();
- return (Grp->LastPackage != Grp->FirstPackage);
- }
- return false;
-}
- /*}}}*/
// VerIterator::NewestFile - Return the newest file version relation /*{{{*/
// ---------------------------------------------------------------------
/* This looks at the version numbers associated with all of the sources
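
DepIterator::IsNegative() replaces the three-way type test that was repeated at every call site in the hunks above; a sketch of the idiom on both sides of the change:

   // before: spelled out wherever a negative dependency had to be detected
   if (D->Type != pkgCache::Dep::Conflicts &&
       D->Type != pkgCache::Dep::DpkgBreaks &&
       D->Type != pkgCache::Dep::Obsoletes)
      ; // positive dependency (Depends, PreDepends, Recommends, ...)

   // after: one helper on the iterator
   if (D.IsNegative() == false)
      ; // positive dependency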
diff --git a/apt-pkg/pkgcache.h b/apt-pkg/pkgcache.h
index 82a69b2ca..280f37bca 100644
--- a/apt-pkg/pkgcache.h
+++ b/apt-pkg/pkgcache.h
@@ -215,6 +215,7 @@ class pkgCache /*{{{*/
private:
bool MultiArchEnabled;
PkgIterator SingleArchFindPkg(const string &Name);
+ inline char const * const NativeArch() const;
};
/*}}}*/
// Header structure /*{{{*/
@@ -499,15 +500,18 @@ struct pkgCache::Version
map_ptrloc VerStr; // StringItem
/** \brief section this version is filled in */
map_ptrloc Section; // StringItem
+
+ /** \brief Multi-Arch capabilities of a package version */
+ enum VerMultiArch { None = 0, /*!< is the default and doesn't trigger special behaviour */
+ All = (1<<0), /*!< will cause that Ver.Arch() will report "all" */
+ Foreign = (1<<1), /*!< can satisfy dependencies in another architecture */
+ Same = (1<<2), /*!< can be co-installed with itself from other architectures */
+ Allowed = (1<<3) /*!< other packages are allowed to depend on this pkg:any */ };
/** \brief stores the MultiArch capabilities of this version
- None is the default and doesn't trigger special behaviour,
- Foreign means that this version can fulfill dependencies even
- if it is built for another architecture as the requester.
- Same indicates that builds for different architectures can
- be co-installed on the system and All is the marker for a
- version with the Architecture: all. */
- enum {None, All, Foreign, Same, Allowed} MultiArch;
+ Flags used are defined in pkgCache::Version::VerMultiArch
+ */
+ unsigned char MultiArch;
/** \brief references all the PackageFile's that this version came from
@@ -649,6 +653,11 @@ struct pkgCache::StringItem
map_ptrloc NextItem; // StringItem
};
/*}}}*/
+
+
+inline char const * const pkgCache::NativeArch() const
+ { return StrP + HeaderP->Architecture; };
+
#include <apt-pkg/cacheiterators.h>
inline pkgCache::GrpIterator pkgCache::GrpBegin()
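
With MultiArch now stored as a set of VerMultiArch bits rather than an exclusive enum, callers test a capability with a mask instead of an equality check, as the packagemanager.cc and pkgcachegen.cc hunks above already do; a minimal sketch:

   // sketch: query the Multi-Arch capability bits of a version
   static bool IsCoInstallable(pkgCache::VerIterator const &Ver)
   {
      return (Ver->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same;
   }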
diff --git a/apt-pkg/pkgcachegen.cc b/apt-pkg/pkgcachegen.cc
index ed35174bb..8e088ba68 100644
--- a/apt-pkg/pkgcachegen.cc
+++ b/apt-pkg/pkgcachegen.cc
@@ -178,23 +178,12 @@ bool pkgCacheGenerator::MergeList(ListParser &List,
if (PackageName.empty() == true)
return false;
- /* As we handle Arch all packages as architecture bounded
- we add all information to every (simulated) arch package */
- std::vector<string> genArch;
- if (List.ArchitectureAll() == true) {
- genArch = APT::Configuration::getArchitectures();
- if (genArch.size() != 1)
- genArch.push_back("all");
- } else
- genArch.push_back(List.Architecture());
-
- for (std::vector<string>::const_iterator arch = genArch.begin();
- arch != genArch.end(); ++arch)
- {
+ string const Arch = List.Architecture();
+
// Get a pointer to the package structure
pkgCache::PkgIterator Pkg;
Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
- if (NewPackage(Pkg, PackageName, *arch) == false)
+ if (NewPackage(Pkg, PackageName, Arch) == false)
return _error->Error(_("Error occurred while processing %s (NewPackage)"),PackageName.c_str());
Counter++;
if (Counter % 100 == 0 && Progress != 0)
@@ -351,7 +340,6 @@ bool pkgCacheGenerator::MergeList(ListParser &List,
if ((*LastDesc == 0 && _error->PendingError()) || NewFileDesc(Desc,List) == false)
return _error->Error(_("Error occurred while processing %s (NewFileDesc2)"),PackageName.c_str());
- }
}
FoundFileDeps |= List.HasFileDeps();
@@ -491,7 +479,8 @@ bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name
// Set the name, arch and the ID
Pkg->Name = Grp->Name;
Pkg->Group = Grp.Index();
- map_ptrloc const idxArch = WriteUniqString(Arch.c_str());
+ // all is mapped to the native architecture
+ map_ptrloc const idxArch = (Arch == "all") ? Cache.HeaderP->Architecture : WriteUniqString(Arch.c_str());
if (unlikely(idxArch == 0))
return false;
Pkg->Arch = idxArch;
@@ -638,27 +627,22 @@ bool pkgCacheGenerator::FinishCache(OpProgress *Progress)
Dynamic<pkgCache::PkgIterator> DynP(P);
for (; P.end() != true; P = G.NextPkg(P))
{
- if (strcmp(P.Arch(),"all") == 0)
- continue;
pkgCache::PkgIterator allPkg;
Dynamic<pkgCache::PkgIterator> DynallPkg(allPkg);
pkgCache::VerIterator V = P.VersionList();
Dynamic<pkgCache::VerIterator> DynV(V);
for (; V.end() != true; V++)
{
- string const Arch = V.Arch(true);
+ char const * const Arch = P.Arch();
map_ptrloc *OldDepLast = NULL;
/* MultiArch handling introduces a lot of implicit Dependencies:
- MultiArch: same → Co-Installable if they have the same version
- Architecture: all → Need to be Co-Installable for internal reasons
- All others conflict with all other group members */
- bool const coInstall = (V->MultiArch == pkgCache::Version::All ||
- V->MultiArch == pkgCache::Version::Same);
- if (V->MultiArch == pkgCache::Version::All && allPkg.end() == true)
- allPkg = G.FindPkg("all");
+ bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
for (vector<string>::const_iterator A = archs.begin(); A != archs.end(); ++A)
{
- if (*A == Arch)
+ if (Arch == 0 || *A == Arch)
continue;
/* We allow only one installed arch at the time
per group, therefore each group member conflicts
@@ -677,13 +661,6 @@ bool pkgCacheGenerator::FinishCache(OpProgress *Progress)
NewDepends(D, V, V.VerStr(),
pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
OldDepLast);
- if (V->MultiArch == pkgCache::Version::All)
- {
- // Depend on ${self}:all which does depend on nothing
- NewDepends(allPkg, V, V.VerStr(),
- pkgCache::Dep::Equals, pkgCache::Dep::Depends,
- OldDepLast);
- }
} else {
// Conflicts: ${self}:other
NewDepends(D, V, "",
@@ -806,7 +783,8 @@ bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator &Ver,
pkgCache &Cache = Owner->Cache;
// We do not add self referencing provides
- if (Ver.ParentPkg().Name() == PkgName && PkgArch == Ver.Arch(true))
+ if (Ver.ParentPkg().Name() == PkgName && (PkgArch == Ver.ParentPkg().Arch() ||
+ (PkgArch == "all" && strcmp((Cache.StrP + Cache.HeaderP->Architecture), Ver.ParentPkg().Arch()) == 0)))
return true;
// Get a structure
diff --git a/apt-pkg/pkgrecords.h b/apt-pkg/pkgrecords.h
index 93e342534..78e39e577 100644
--- a/apt-pkg/pkgrecords.h
+++ b/apt-pkg/pkgrecords.h
@@ -59,6 +59,7 @@ class pkgRecords::Parser /*{{{*/
virtual string MD5Hash() {return string();};
virtual string SHA1Hash() {return string();};
virtual string SHA256Hash() {return string();};
+ virtual string SHA512Hash() {return string();};
virtual string SourcePkg() {return string();};
virtual string SourceVer() {return string();};
diff --git a/apt-pkg/policy.cc b/apt-pkg/policy.cc
index 4f9d56775..4fc272a74 100644
--- a/apt-pkg/policy.cc
+++ b/apt-pkg/policy.cc
@@ -45,6 +45,8 @@ using namespace std;
file matches the V0 policy engine. */
pkgPolicy::pkgPolicy(pkgCache *Owner) : Pins(0), PFPriority(0), Cache(Owner)
{
+ if (Owner == 0 || &(Owner->Head()) == 0)
+ return;
PFPriority = new signed short[Owner->Head().PackageFileCount];
Pins = new Pin[Owner->Head().PackageCount];
@@ -150,13 +152,6 @@ pkgCache::VerIterator pkgPolicy::GetCandidateVer(pkgCache::PkgIterator const &Pk
{
/* Lets see if this version is the installed version */
bool instVer = (Pkg.CurrentVer() == Ver);
- if (Ver.Pseudo() == true && instVer == false)
- {
- pkgCache::PkgIterator const allPkg = Ver.ParentPkg().Group().FindPkg("all");
- if (allPkg->CurrentVer != 0 && allPkg.CurrentVer()->Hash == Ver->Hash &&
- strcmp(allPkg.CurVersion(), Ver.VerStr()) == 0)
- instVer = true;
- }
for (pkgCache::VerFileIterator VF = Ver.FileList(); VF.end() == false; VF++)
{
@@ -221,10 +216,22 @@ void pkgPolicy::CreatePin(pkgVersionMatch::MatchType Type,string Name,
P->Data = Data;
return;
}
+
+ // Allow pinning by wildcards
+ // TODO: Maybe we should always prefer specific pins over non-
+ // specific ones.
+ if (Name[0] == '/' || Name.find_first_of("*[?") != string::npos)
+ {
+ pkgVersionMatch match(Data, Type);
+ for (pkgCache::GrpIterator G = Cache->GrpBegin(); G.end() != true; ++G)
+ if (match.ExpressionMatches(Name, G.Name()))
+ CreatePin(Type, G.Name(), Data, Priority);
+ return;
+ }
// Get a spot to put the pin
pkgCache::GrpIterator Grp = Cache->FindGrp(Name);
- for (pkgCache::PkgIterator Pkg = Grp.FindPkg("any");
+ for (pkgCache::PkgIterator Pkg = Grp.PackageList();
Pkg.end() != true; Pkg = Grp.NextPkg(Pkg))
{
Pin *P = 0;
@@ -275,6 +282,10 @@ signed short pkgPolicy::GetPriority(pkgCache::PkgIterator const &Pkg)
return 0;
}
+signed short pkgPolicy::GetPriority(pkgCache::PkgFileIterator const &File)
+{
+ return PFPriority[File->ID];
+}
/*}}}*/
// PreferenceSection class - Overriding the default TrimRecord method /*{{{*/
// ---------------------------------------------------------------------
@@ -328,7 +339,7 @@ bool ReadPinFile(pkgPolicy &Plcy,string File)
if (File.empty() == true)
File = _config->FindFile("Dir::Etc::Preferences");
- if (FileExists(File) == false)
+ if (RealFileExists(File) == false)
return true;
FileFd Fd(File,FileFd::ReadOnly);
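
CreatePin() now expands names that start with '/' or contain glob characters over all group names and pins each match individually; a preferences stanza exercising it might look like this (pattern and values are illustrative):

   Package: gnome*
   Pin: release a=experimental
   Pin-Priority: 500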
diff --git a/apt-pkg/policy.h b/apt-pkg/policy.h
index f8b2678de..e7f36d618 100644
--- a/apt-pkg/policy.h
+++ b/apt-pkg/policy.h
@@ -69,14 +69,13 @@ class pkgPolicy : public pkgDepCache::Policy
// Things for manipulating pins
void CreatePin(pkgVersionMatch::MatchType Type,string Pkg,
string Data,signed short Priority);
- inline signed short GetPriority(pkgCache::PkgFileIterator const &File)
- {return PFPriority[File->ID];};
- signed short GetPriority(pkgCache::PkgIterator const &Pkg);
pkgCache::VerIterator GetMatch(pkgCache::PkgIterator const &Pkg);
// Things for the cache interface.
virtual pkgCache::VerIterator GetCandidateVer(pkgCache::PkgIterator const &Pkg);
- virtual bool IsImportantDep(pkgCache::DepIterator const &Dep) {return pkgDepCache::Policy::IsImportantDep(Dep);};
+ virtual signed short GetPriority(pkgCache::PkgIterator const &Pkg);
+ virtual signed short GetPriority(pkgCache::PkgFileIterator const &File);
+
bool InitDefaults();
pkgPolicy(pkgCache *Owner);
diff --git a/apt-pkg/sourcelist.cc b/apt-pkg/sourcelist.cc
index c3ec9865a..851eefdfe 100644
--- a/apt-pkg/sourcelist.cc
+++ b/apt-pkg/sourcelist.cc
@@ -197,7 +197,7 @@ bool pkgSourceList::ReadMainList()
string Main = _config->FindFile("Dir::Etc::sourcelist");
string Parts = _config->FindDir("Dir::Etc::sourceparts");
- if (FileExists(Main) == true)
+ if (RealFileExists(Main) == true)
Res &= ReadAppend(Main);
else if (DirectoryExists(Parts) == false)
// Only warn if there are no sources.list.d.
@@ -205,9 +205,9 @@ bool pkgSourceList::ReadMainList()
if (DirectoryExists(Parts) == true)
Res &= ReadSourceDir(Parts);
- else if (FileExists(Main) == false)
+ else if (RealFileExists(Main) == false)
// Only warn if there is no sources.list file.
- _error->WarningE("FileExists", _("Unable to read %s"), Main.c_str());
+ _error->WarningE("RealFileExists", _("Unable to read %s"), Main.c_str());
return Res;
}
diff --git a/apt-pkg/tagfile.cc b/apt-pkg/tagfile.cc
index ff6593e26..a8f04b23a 100644
--- a/apt-pkg/tagfile.cc
+++ b/apt-pkg/tagfile.cc
@@ -420,9 +420,13 @@ bool pkgTagSection::FindFlag(const char *Tag,unsigned long &Flags,
const char *Stop;
if (Find(Tag,Start,Stop) == false)
return true;
-
- switch (StringToBool(string(Start,Stop)))
- {
+ return FindFlag(Flags, Flag, Start, Stop);
+}
+bool const pkgTagSection::FindFlag(unsigned long &Flags, unsigned long Flag,
+ char const* Start, char const* Stop)
+{
+ switch (StringToBool(string(Start, Stop)))
+ {
case 0:
Flags &= ~Flag;
return true;
@@ -474,6 +478,7 @@ static const char *iTFRewritePackageOrder[] = {
"MD5Sum",
"SHA1",
"SHA256",
+ "SHA512",
"MSDOS-Filename", // Obsolete
"Description",
0};
diff --git a/apt-pkg/tagfile.h b/apt-pkg/tagfile.h
index f361a787f..23f5c57e6 100644
--- a/apt-pkg/tagfile.h
+++ b/apt-pkg/tagfile.h
@@ -59,6 +59,8 @@ class pkgTagSection
unsigned long long FindULL(const char *Tag, unsigned long long const &Default = 0) const;
bool FindFlag(const char *Tag,unsigned long &Flags,
unsigned long Flag) const;
+ bool static const FindFlag(unsigned long &Flags, unsigned long Flag,
+ const char* Start, const char* Stop);
bool Scan(const char *Start,unsigned long MaxLength);
inline unsigned long size() const {return Stop - Section;};
void Trim();
diff --git a/apt-pkg/vendor.h b/apt-pkg/vendor.h
index 2d39fd15f..df229737a 100644
--- a/apt-pkg/vendor.h
+++ b/apt-pkg/vendor.h
@@ -4,11 +4,12 @@
#include <vector>
#include <map>
+#include <apt-pkg/macros.h>
using std::string;
// A class representing a particular software provider.
-class Vendor
+class __deprecated Vendor
{
public:
struct Fingerprint
diff --git a/apt-pkg/vendorlist.cc b/apt-pkg/vendorlist.cc
index 589997081..48ac12cee 100644
--- a/apt-pkg/vendorlist.cc
+++ b/apt-pkg/vendorlist.cc
@@ -1,8 +1,13 @@
-#include <apt-pkg/vendorlist.h>
#include <apt-pkg/fileutl.h>
#include <apt-pkg/error.h>
#include <apti18n.h>
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
+
+#include <apt-pkg/vendorlist.h>
+
pkgVendorList::~pkgVendorList()
{
for (vector<const Vendor *>::const_iterator I = VendorList.begin();
@@ -21,11 +26,11 @@ bool pkgVendorList::ReadMainList()
Configuration Cnf;
string CnfFile = _config->FindDir("Dir::Etc::vendorparts");
- if (FileExists(CnfFile) == true)
+ if (DirectoryExists(CnfFile) == true)
if (ReadConfigDir(Cnf,CnfFile,true) == false)
return false;
CnfFile = _config->FindFile("Dir::Etc::vendorlist");
- if (FileExists(CnfFile) == true)
+ if (RealFileExists(CnfFile) == true)
if (ReadConfigFile(Cnf,CnfFile,true) == false)
return false;
@@ -143,3 +148,7 @@ const Vendor* pkgVendorList::FindVendor(const std::vector<string> GPGVOutput) /*
return NULL;
}
/*}}}*/
+
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic warning "-Wdeprecated-declarations"
+#endif
diff --git a/apt-pkg/vendorlist.h b/apt-pkg/vendorlist.h
index ff2f4ed5d..eaeecb173 100644
--- a/apt-pkg/vendorlist.h
+++ b/apt-pkg/vendorlist.h
@@ -17,12 +17,13 @@
#include <vector>
#include <apt-pkg/vendor.h>
#include <apt-pkg/configuration.h>
+#include <apt-pkg/macros.h>
using std::string;
using std::vector;
-class pkgVendorList
+class __deprecated pkgVendorList
{
protected:
vector<Vendor const *> VendorList;