Diffstat (limited to 'apt-pkg')
-rw-r--r--  apt-pkg/acquire-item.cc | 369
-rw-r--r--  apt-pkg/acquire-item.h | 269
-rw-r--r--  apt-pkg/acquire-method.cc | 48
-rw-r--r--  apt-pkg/acquire-method.h | 10
-rw-r--r--  apt-pkg/acquire-worker.cc | 40
-rw-r--r--  apt-pkg/acquire.cc | 36
-rw-r--r--  apt-pkg/acquire.h | 4
-rw-r--r--  apt-pkg/aptconfiguration.cc | 2
-rw-r--r--  apt-pkg/cachefilter.h | 70
-rw-r--r--  apt-pkg/cacheset.cc | 9
-rw-r--r--  apt-pkg/cacheset.h | 2
-rw-r--r--  apt-pkg/contrib/configuration.cc | 4
-rw-r--r--  apt-pkg/contrib/configuration.h | 6
-rw-r--r--  apt-pkg/contrib/fileutl.cc | 119
-rw-r--r--  apt-pkg/contrib/fileutl.h | 22
-rw-r--r--  apt-pkg/contrib/hashes.cc | 213
-rw-r--r--  apt-pkg/contrib/hashes.h | 165
-rw-r--r--  apt-pkg/contrib/macros.h | 2
-rw-r--r--  apt-pkg/contrib/netrc.cc | 12
-rw-r--r--  apt-pkg/contrib/netrc.h | 4
-rw-r--r--  apt-pkg/deb/debindexfile.cc | 173
-rw-r--r--  apt-pkg/deb/debindexfile.h | 53
-rw-r--r--  apt-pkg/deb/deblistparser.cc | 60
-rw-r--r--  apt-pkg/deb/deblistparser.h | 19
-rw-r--r--  apt-pkg/deb/debmetaindex.cc | 39
-rw-r--r--  apt-pkg/deb/debmetaindex.h | 27
-rw-r--r--  apt-pkg/deb/debrecords.cc | 101
-rw-r--r--  apt-pkg/deb/debrecords.h | 30
-rw-r--r--  apt-pkg/deb/debsrcrecords.cc | 161
-rw-r--r--  apt-pkg/deb/debsrcrecords.h | 7
-rw-r--r--  apt-pkg/deb/dpkgpm.cc | 2
-rw-r--r--  apt-pkg/indexcopy.cc | 8
-rw-r--r--  apt-pkg/indexfile.h | 1
-rw-r--r--  apt-pkg/indexrecords.cc | 33
-rw-r--r--  apt-pkg/indexrecords.h | 18
-rw-r--r--  apt-pkg/packagemanager.cc | 1
-rw-r--r--  apt-pkg/packagemanager.h | 5
-rw-r--r--  apt-pkg/pkgcache.cc | 34
-rw-r--r--  apt-pkg/pkgcache.h | 12
-rw-r--r--  apt-pkg/pkgcachegen.cc | 107
-rw-r--r--  apt-pkg/pkgcachegen.h | 7
-rw-r--r--  apt-pkg/pkgrecords.cc | 2
-rw-r--r--  apt-pkg/pkgrecords.h | 44
-rw-r--r--  apt-pkg/pkgsystem.h | 6
-rw-r--r--  apt-pkg/sourcelist.h | 12
-rw-r--r--  apt-pkg/srcrecords.h | 16
-rw-r--r--  apt-pkg/tagfile.cc | 231
-rw-r--r--  apt-pkg/tagfile.h | 68
48 files changed, 1860 insertions, 823 deletions
diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc
index 0178456a8..e45308557 100644
--- a/apt-pkg/acquire-item.cc
+++ b/apt-pkg/acquire-item.cc
@@ -50,12 +50,24 @@
using namespace std;
+static void printHashSumComparision(std::string const &URI, HashStringList const &Expected, HashStringList const &Actual) /*{{{*/
+{
+ if (_config->FindB("Debug::Acquire::HashSumMismatch", false) == false)
+ return;
+ std::cerr << std::endl << URI << ":" << std::endl << " Expected Hash: " << std::endl;
+ for (HashStringList::const_iterator hs = Expected.begin(); hs != Expected.end(); ++hs)
+ std::cerr << "\t- " << hs->toStr() << std::endl;
+ std::cerr << " Actual Hash: " << std::endl;
+ for (HashStringList::const_iterator hs = Actual.begin(); hs != Actual.end(); ++hs)
+ std::cerr << "\t- " << hs->toStr() << std::endl;
+}
+ /*}}}*/
+
// Acquire::Item::Item - Constructor /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-pkgAcquire::Item::Item(pkgAcquire *Owner) : Owner(Owner), FileSize(0),
- PartialSize(0), Mode(0), ID(0), Complete(false),
- Local(false), QueueCounter(0)
+pkgAcquire::Item::Item(pkgAcquire *Owner, HashStringList const &ExpectedHashes) :
+ Owner(Owner), FileSize(0), PartialSize(0), Mode(0), ID(0), Complete(false),
+ Local(false), QueueCounter(0), ExpectedAdditionalItems(0),
+ ExpectedHashes(ExpectedHashes)
{
Owner->Add(this);
Status = StatIdle;
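
The hunk above replaces the single expected hash with a HashStringList that every Item now carries, and adds a debug helper gated on Debug::Acquire::HashSumMismatch. The following caller-side sketch shows the list semantics the new code relies on; only the iterator, find(), usable() and VerifyFile() interface is visible in this diff, so push_back() as the populator is an assumption, and the digests and file path are placeholders.

#include <apt-pkg/hashes.h>
#include <apt-pkg/configuration.h>
#include <iostream>

int main()
{
   // enable the debug output produced by printHashSumComparision() above
   _config->Set("Debug::Acquire::HashSumMismatch", true);

   HashStringList expected;
   expected.push_back(HashString("SHA256", "deadbeef"));   // assumed populator, placeholder digest
   expected.push_back(HashString("SHA1", "cafebabe"));     // placeholder digest

   // usable() takes over from the old HashString::empty() checks;
   // VerifyFile() recomputes the file's digests and compares them to the list
   if (expected.usable() == true && expected.VerifyFile("/tmp/example.deb") == false)
      std::cerr << "hash sum mismatch for /tmp/example.deb" << std::endl;
   return 0;
}
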
@@ -117,7 +129,7 @@ void pkgAcquire::Item::Start(string /*Message*/,unsigned long long Size)
// Acquire::Item::Done - Item downloaded OK /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgAcquire::Item::Done(string Message,unsigned long long Size,string /*Hash*/,
+void pkgAcquire::Item::Done(string Message,unsigned long long Size,HashStringList const &/*Hash*/,
pkgAcquire::MethodConfig * /*Cnf*/)
{
// We just downloaded something..
@@ -228,8 +240,8 @@ void pkgAcquire::Item::ReportMirrorFailure(string FailCode)
possibly query additional files */
pkgAcqSubIndex::pkgAcqSubIndex(pkgAcquire *Owner, string const &URI,
string const &URIDesc, string const &ShortDesc,
- HashString const &ExpectedHash)
- : Item(Owner), ExpectedHash(ExpectedHash)
+ HashStringList const &ExpectedHashes)
+ : Item(Owner, ExpectedHashes)
{
/* XXX: Beware: Currently this class does nothing (of value) anymore ! */
Debug = _config->FindB("Debug::pkgAcquire::SubIndex",false);
@@ -251,7 +263,7 @@ pkgAcqSubIndex::pkgAcqSubIndex(pkgAcquire *Owner, string const &URI,
// AcqSubIndex::Custom600Headers - Insert custom request headers /*{{{*/
// ---------------------------------------------------------------------
/* The only header we use is the last-modified header. */
-string pkgAcqSubIndex::Custom600Headers()
+string pkgAcqSubIndex::Custom600Headers() const
{
string Final = _config->FindDir("Dir::State::lists");
Final += URItoFileName(Desc.URI);
@@ -274,7 +286,7 @@ void pkgAcqSubIndex::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)/*
// No good Index is provided
}
/*}}}*/
-void pkgAcqSubIndex::Done(string Message,unsigned long long Size,string Md5Hash, /*{{{*/
+void pkgAcqSubIndex::Done(string Message,unsigned long long Size,HashStringList const &Hashes, /*{{{*/
pkgAcquire::MethodConfig *Cnf)
{
if(Debug)
@@ -296,7 +308,7 @@ void pkgAcqSubIndex::Done(string Message,unsigned long long Size,string Md5Hash,
return;
}
- Item::Done(Message,Size,Md5Hash,Cnf);
+ Item::Done(Message, Size, Hashes, Cnf);
string FinalFile = _config->FindDir("Dir::State::lists")+URItoFileName(Desc.URI);
@@ -342,21 +354,22 @@ bool pkgAcqSubIndex::ParseIndex(string const &IndexFile) /*{{{*/
* the original packages file
*/
pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire *Owner,
- string URI,string URIDesc,string ShortDesc,
- HashString ExpectedHash)
- : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash),
- Description(URIDesc)
+ IndexTarget const * const Target,
+ HashStringList const &ExpectedHashes,
+ indexRecords *MetaIndexParser)
+ : pkgAcqBaseIndex(Owner, Target, ExpectedHashes, MetaIndexParser)
{
Debug = _config->FindB("Debug::pkgAcquire::Diffs",false);
- Desc.Description = URIDesc + "/DiffIndex";
+ RealURI = Target->URI;
Desc.Owner = this;
- Desc.ShortDesc = ShortDesc;
- Desc.URI = URI + ".diff/Index";
+ Desc.Description = Target->Description + "/DiffIndex";
+ Desc.ShortDesc = Target->ShortDesc;
+ Desc.URI = Target->URI + ".diff/Index";
DestFile = _config->FindDir("Dir::State::lists") + "partial/";
- DestFile += URItoFileName(URI) + string(".DiffIndex");
+ DestFile += URItoFileName(Target->URI) + string(".DiffIndex");
if(Debug)
std::clog << "pkgAcqDiffIndex: " << Desc.URI << std::endl;
@@ -389,7 +402,7 @@ pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire *Owner,
// AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/
// ---------------------------------------------------------------------
/* The only header we use is the last-modified header. */
-string pkgAcqDiffIndex::Custom600Headers()
+string pkgAcqDiffIndex::Custom600Headers() const
{
string Final = _config->FindDir("Dir::State::lists");
Final += URItoFileName(RealURI) + string(".IndexDiff");
@@ -442,8 +455,8 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile) /*{{{*/
std::clog << "Package file is up-to-date" << std::endl;
// list cleanup needs to know that this file as well as the already
// present index is ours, so we create an empty diff to save it for us
- new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc,
- ExpectedHash, ServerSha1, available_patches);
+ new pkgAcqIndexDiffs(Owner, Target, ExpectedHashes, MetaIndexParser,
+ ServerSha1, available_patches);
return true;
}
else
@@ -528,14 +541,19 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile) /*{{{*/
}
if (pdiff_merge == false)
- new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc,
- ExpectedHash, ServerSha1, available_patches);
- else
+ {
+ new pkgAcqIndexDiffs(Owner, Target, ExpectedHashes, MetaIndexParser,
+ ServerSha1, available_patches);
+ }
+ else
{
std::vector<pkgAcqIndexMergeDiffs*> *diffs = new std::vector<pkgAcqIndexMergeDiffs*>(available_patches.size());
for(size_t i = 0; i < available_patches.size(); ++i)
- (*diffs)[i] = new pkgAcqIndexMergeDiffs(Owner, RealURI, Description, Desc.ShortDesc, ExpectedHash,
- available_patches[i], diffs);
+ (*diffs)[i] = new pkgAcqIndexMergeDiffs(Owner, Target,
+ ExpectedHashes,
+ MetaIndexParser,
+ available_patches[i],
+ diffs);
}
Complete = false;
@@ -559,21 +577,20 @@ void pkgAcqDiffIndex::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)/
std::clog << "pkgAcqDiffIndex failed: " << Desc.URI << " with " << Message << std::endl
<< "Falling back to normal index file acquire" << std::endl;
- new pkgAcqIndex(Owner, RealURI, Description, Desc.ShortDesc,
- ExpectedHash);
+ new pkgAcqIndex(Owner, Target, ExpectedHashes, MetaIndexParser);
Complete = false;
Status = StatDone;
Dequeue();
}
/*}}}*/
-void pkgAcqDiffIndex::Done(string Message,unsigned long long Size,string Md5Hash, /*{{{*/
+void pkgAcqDiffIndex::Done(string Message,unsigned long long Size,HashStringList const &Hashes, /*{{{*/
pkgAcquire::MethodConfig *Cnf)
{
if(Debug)
std::clog << "pkgAcqDiffIndex::Done(): " << Desc.URI << std::endl;
- Item::Done(Message,Size,Md5Hash,Cnf);
+ Item::Done(Message, Size, Hashes, Cnf);
string FinalFile;
FinalFile = _config->FindDir("Dir::State::lists")+URItoFileName(RealURI);
@@ -603,22 +620,24 @@ void pkgAcqDiffIndex::Done(string Message,unsigned long long Size,string Md5Hash
* for each diff and the index
*/
pkgAcqIndexDiffs::pkgAcqIndexDiffs(pkgAcquire *Owner,
- string URI,string URIDesc,string ShortDesc,
- HashString ExpectedHash,
+ struct IndexTarget const * const Target,
+ HashStringList const &ExpectedHashes,
+ indexRecords *MetaIndexParser,
string ServerSha1,
vector<DiffInfo> diffs)
- : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash),
+ : pkgAcqBaseIndex(Owner, Target, ExpectedHashes, MetaIndexParser),
available_patches(diffs), ServerSha1(ServerSha1)
{
DestFile = _config->FindDir("Dir::State::lists") + "partial/";
- DestFile += URItoFileName(URI);
+ DestFile += URItoFileName(Target->URI);
Debug = _config->FindB("Debug::pkgAcquire::Diffs",false);
- Description = URIDesc;
+ RealURI = Target->URI;
Desc.Owner = this;
- Desc.ShortDesc = ShortDesc;
+ Description = Target->Description;
+ Desc.ShortDesc = Target->ShortDesc;
if(available_patches.empty() == true)
{
@@ -638,8 +657,7 @@ void pkgAcqIndexDiffs::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)
if(Debug)
std::clog << "pkgAcqIndexDiffs failed: " << Desc.URI << " with " << Message << std::endl
<< "Falling back to normal index file acquire" << std::endl;
- new pkgAcqIndex(Owner, RealURI, Description,Desc.ShortDesc,
- ExpectedHash);
+ new pkgAcqIndex(Owner, Target, ExpectedHashes, MetaIndexParser);
Finish();
}
/*}}}*/
@@ -653,7 +671,7 @@ void pkgAcqIndexDiffs::Finish(bool allDone)
DestFile = _config->FindDir("Dir::State::lists");
DestFile += URItoFileName(RealURI);
- if(!ExpectedHash.empty() && !ExpectedHash.VerifyFile(DestFile))
+ if(HashSums().usable() && !HashSums().VerifyFile(DestFile))
{
RenameOnError(HashSumMismatch);
Dequeue();
@@ -731,13 +749,13 @@ bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/
return true;
}
/*}}}*/
-void pkgAcqIndexDiffs::Done(string Message,unsigned long long Size,string Md5Hash, /*{{{*/
+void pkgAcqIndexDiffs::Done(string Message,unsigned long long Size, HashStringList const &Hashes, /*{{{*/
pkgAcquire::MethodConfig *Cnf)
{
if(Debug)
std::clog << "pkgAcqIndexDiffs::Done(): " << Desc.URI << std::endl;
- Item::Done(Message,Size,Md5Hash,Cnf);
+ Item::Done(Message, Size, Hashes, Cnf);
string FinalFile;
FinalFile = _config->FindDir("Dir::State::lists")+URItoFileName(RealURI);
@@ -779,8 +797,9 @@ void pkgAcqIndexDiffs::Done(string Message,unsigned long long Size,string Md5Has
// see if there is more to download
if(available_patches.empty() == false) {
- new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc,
- ExpectedHash, ServerSha1, available_patches);
+ new pkgAcqIndexDiffs(Owner, Target,
+ ExpectedHashes, MetaIndexParser,
+ ServerSha1, available_patches);
return Finish();
} else
return Finish(true);
@@ -789,22 +808,24 @@ void pkgAcqIndexDiffs::Done(string Message,unsigned long long Size,string Md5Has
/*}}}*/
// AcqIndexMergeDiffs::AcqIndexMergeDiffs - Constructor /*{{{*/
pkgAcqIndexMergeDiffs::pkgAcqIndexMergeDiffs(pkgAcquire *Owner,
- string const &URI, string const &URIDesc,
- string const &ShortDesc, HashString const &ExpectedHash,
- DiffInfo const &patch,
- std::vector<pkgAcqIndexMergeDiffs*> const * const allPatches)
- : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash),
- patch(patch),allPatches(allPatches), State(StateFetchDiff)
+ struct IndexTarget const * const Target,
+ HashStringList const &ExpectedHashes,
+ indexRecords *MetaIndexParser,
+ DiffInfo const &patch,
+ std::vector<pkgAcqIndexMergeDiffs*> const * const allPatches)
+ : pkgAcqBaseIndex(Owner, Target, ExpectedHashes, MetaIndexParser),
+ patch(patch), allPatches(allPatches), State(StateFetchDiff)
{
DestFile = _config->FindDir("Dir::State::lists") + "partial/";
- DestFile += URItoFileName(URI);
+ DestFile += URItoFileName(Target->URI);
Debug = _config->FindB("Debug::pkgAcquire::Diffs",false);
- Description = URIDesc;
+ RealURI = Target->URI;
Desc.Owner = this;
- Desc.ShortDesc = ShortDesc;
+ Description = Target->Description;
+ Desc.ShortDesc = Target->ShortDesc;
Desc.URI = RealURI + ".diff/" + patch.file + ".gz";
Desc.Description = Description + " " + patch.file + string(".pdiff");
@@ -835,17 +856,16 @@ void pkgAcqIndexMergeDiffs::Failed(string Message,pkgAcquire::MethodConfig * /*C
// first failure means we should fallback
State = StateErrorDiff;
std::clog << "Falling back to normal index file acquire" << std::endl;
- new pkgAcqIndex(Owner, RealURI, Description,Desc.ShortDesc,
- ExpectedHash);
+ new pkgAcqIndex(Owner, Target, ExpectedHashes, MetaIndexParser);
}
/*}}}*/
-void pkgAcqIndexMergeDiffs::Done(string Message,unsigned long long Size,string Md5Hash, /*{{{*/
+void pkgAcqIndexMergeDiffs::Done(string Message,unsigned long long Size,HashStringList const &Hashes, /*{{{*/
pkgAcquire::MethodConfig *Cnf)
{
if(Debug)
std::clog << "pkgAcqIndexMergeDiffs::Done(): " << Desc.URI << std::endl;
- Item::Done(Message,Size,Md5Hash,Cnf);
+ Item::Done(Message,Size,Hashes,Cnf);
string const FinalFile = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
@@ -881,7 +901,7 @@ void pkgAcqIndexMergeDiffs::Done(string Message,unsigned long long Size,string M
else if (State == StateApplyDiff)
{
// see if we really got the expected file
- if(!ExpectedHash.empty() && !ExpectedHash.VerifyFile(DestFile))
+ if(ExpectedHashes.usable() && !ExpectedHashes.VerifyFile(DestFile))
{
RenameOnError(HashSumMismatch);
return;
@@ -918,8 +938,8 @@ void pkgAcqIndexMergeDiffs::Done(string Message,unsigned long long Size,string M
instantiated to fetch the revision file */
pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner,
string URI,string URIDesc,string ShortDesc,
- HashString ExpectedHash, string comprExt)
- : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash)
+ HashStringList const &ExpectedHash, string comprExt)
+ : pkgAcqBaseIndex(Owner, NULL, ExpectedHash, NULL), RealURI(URI)
{
if(comprExt.empty() == true)
{
@@ -937,13 +957,15 @@ pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner,
Init(URI, URIDesc, ShortDesc);
}
pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner, IndexTarget const *Target,
- HashString const &ExpectedHash, indexRecords const *MetaIndexParser)
- : Item(Owner), RealURI(Target->URI), ExpectedHash(ExpectedHash)
+ HashStringList const &ExpectedHash,
+ indexRecords *MetaIndexParser)
+ : pkgAcqBaseIndex(Owner, Target, ExpectedHash, MetaIndexParser),
+ RealURI(Target->URI)
{
// autoselect the compression method
std::vector<std::string> types = APT::Configuration::getCompressionTypes();
CompressionExtension = "";
- if (ExpectedHash.empty() == false)
+ if (ExpectedHashes.usable())
{
for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t)
if (*t == "uncompressed" || MetaIndexParser->Exists(string(Target->MetaKey).append(".").append(*t)) == true)
@@ -976,10 +998,29 @@ void pkgAcqIndex::Init(string const &URI, string const &URIDesc, string const &S
DestFile += URItoFileName(URI);
std::string const comprExt = CompressionExtension.substr(0, CompressionExtension.find(' '));
+ std::string MetaKey;
if (comprExt == "uncompressed")
+ {
Desc.URI = URI;
+ if(Target)
+ MetaKey = string(Target->MetaKey);
+ }
else
+ {
Desc.URI = URI + '.' + comprExt;
+ if(Target)
+ MetaKey = string(Target->MetaKey) + '.' + comprExt;
+ }
+
+ // load the filesize
+ if(MetaIndexParser)
+ {
+ indexRecords::checkSum *Record = MetaIndexParser->Lookup(MetaKey);
+ if(Record)
+ FileSize = Record->Size;
+
+ InitByHashIfNeeded(MetaKey);
+ }
Desc.Description = URIDesc;
Desc.Owner = this;
@@ -988,10 +1029,42 @@ void pkgAcqIndex::Init(string const &URI, string const &URIDesc, string const &S
QueueURI(Desc);
}
/*}}}*/
+// AcqIndex::AdjustForByHash - modify URI for by-hash support /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+void pkgAcqIndex::InitByHashIfNeeded(const std::string MetaKey)
+{
+ // TODO:
+ // - (maybe?) add support for by-hash into the sources.list as flag
+ // - make apt-ftparchive generate the hashes (and expire?)
+ std::string HostKnob = "APT::Acquire::" + ::URI(Desc.URI).Host + "::By-Hash";
+ if(_config->FindB("APT::Acquire::By-Hash", false) == true ||
+ _config->FindB(HostKnob, false) == true ||
+ MetaIndexParser->GetSupportsAcquireByHash())
+ {
+ indexRecords::checkSum *Record = MetaIndexParser->Lookup(MetaKey);
+ if(Record)
+ {
+ // FIXME: should we really use the best hash here? or a fixed one?
+ const HashString *TargetHash = Record->Hashes.find("");
+ std::string ByHash = "/by-hash/" + TargetHash->HashType() + "/" + TargetHash->HashValue();
+ size_t trailing_slash = Desc.URI.find_last_of("/");
+ Desc.URI = Desc.URI.replace(
+ trailing_slash,
+ Desc.URI.substr(trailing_slash+1).size()+1,
+ ByHash);
+ } else {
+ _error->Warning(
+ "Fetching ByHash requested but can not find record for %s",
+ MetaKey.c_str());
+ }
+ }
+}
+ /*}}}*/
// AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/
// ---------------------------------------------------------------------
/* The only header we use is the last-modified header. */
-string pkgAcqIndex::Custom600Headers()
+string pkgAcqIndex::Custom600Headers() const
{
string Final = _config->FindDir("Dir::State::lists");
Final += URItoFileName(RealURI);
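
InitByHashIfNeeded() above is activated by APT::Acquire::By-Hash, the per-host APT::Acquire::<host>::By-Hash knob, or a Release file that advertises by-hash support, and it swaps the last URI component for a by-hash path. A standalone sketch of just that string transformation, with an illustrative mirror URL and a shortened digest:

#include <string>
#include <iostream>

// mirrors the std::string::replace() call in InitByHashIfNeeded() above
static std::string RewriteToByHash(std::string URI, std::string const &Type, std::string const &Value)
{
   std::string const ByHash = "/by-hash/" + Type + "/" + Value;
   size_t const trailing_slash = URI.find_last_of("/");
   // replace the final path component ("Packages.gz") with by-hash/<type>/<value>
   return URI.replace(trailing_slash, URI.substr(trailing_slash + 1).size() + 1, ByHash);
}

int main()
{
   std::cout << RewriteToByHash(
      "http://deb.example.org/dists/unstable/main/binary-amd64/Packages.gz",
      "SHA256", "0123abcd") << std::endl;
   // -> http://deb.example.org/dists/unstable/main/binary-amd64/by-hash/SHA256/0123abcd
   return 0;
}
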
@@ -1037,22 +1110,17 @@ void pkgAcqIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/
to the uncompressed version of the file. If this is so the file
is copied into the partial directory. In all other cases the file
is decompressed with a gzip uri. */
-void pkgAcqIndex::Done(string Message,unsigned long long Size,string Hash,
+void pkgAcqIndex::Done(string Message,unsigned long long Size,HashStringList const &Hashes,
pkgAcquire::MethodConfig *Cfg)
{
- Item::Done(Message,Size,Hash,Cfg);
+ Item::Done(Message,Size,Hashes,Cfg);
if (Decompression == true)
{
- if (_config->FindB("Debug::pkgAcquire::Auth", false))
- {
- std::cerr << std::endl << RealURI << ": Computed Hash: " << Hash;
- std::cerr << " Expected Hash: " << ExpectedHash.toStr() << std::endl;
- }
-
- if (!ExpectedHash.empty() && ExpectedHash.toStr() != Hash)
+ if (ExpectedHashes.usable() && ExpectedHashes != Hashes)
{
RenameOnError(HashSumMismatch);
+ printHashSumComparision(RealURI, ExpectedHashes, Hashes);
return;
}
@@ -1081,7 +1149,7 @@ void pkgAcqIndex::Done(string Message,unsigned long long Size,string Hash,
FinalFile += URItoFileName(RealURI);
Rename(DestFile,FinalFile);
chmod(FinalFile.c_str(),0644);
-
+
/* We restore the original name to DestFile so that the clean operation
will work OK */
DestFile = _config->FindDir("Dir::State::lists") + "partial/";
@@ -1090,6 +1158,7 @@ void pkgAcqIndex::Done(string Message,unsigned long long Size,string Hash,
// Remove the compressed version.
if (Erase == true)
unlink(DestFile.c_str());
+
return;
}
@@ -1173,18 +1242,22 @@ void pkgAcqIndex::Done(string Message,unsigned long long Size,string Hash,
/* The Translation file is added to the queue */
pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire *Owner,
string URI,string URIDesc,string ShortDesc)
- : pkgAcqIndex(Owner, URI, URIDesc, ShortDesc, HashString(), "")
+ : pkgAcqIndex(Owner, URI, URIDesc, ShortDesc, HashStringList(), "")
{
}
-pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire *Owner, IndexTarget const *Target,
- HashString const &ExpectedHash, indexRecords const *MetaIndexParser)
- : pkgAcqIndex(Owner, Target, ExpectedHash, MetaIndexParser)
+pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire *Owner, IndexTarget const * const Target,
+ HashStringList const &ExpectedHashes, indexRecords *MetaIndexParser)
+ : pkgAcqIndex(Owner, Target, ExpectedHashes, MetaIndexParser)
{
+ // load the filesize
+ indexRecords::checkSum *Record = MetaIndexParser->Lookup(string(Target->MetaKey));
+ if(Record)
+ FileSize = Record->Size;
}
/*}}}*/
// AcqIndexTrans::Custom600Headers - Insert custom request headers /*{{{*/
// ---------------------------------------------------------------------
-string pkgAcqIndexTrans::Custom600Headers()
+string pkgAcqIndexTrans::Custom600Headers() const
{
string Final = _config->FindDir("Dir::State::lists");
Final += URItoFileName(RealURI);
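
Both pkgAcqIndex::Init() and the pkgAcqIndexTrans constructor above now pre-seed FileSize from the Release file via indexRecords::Lookup(). A short read-only sketch of such a lookup, assuming an already-parsed indexRecords instance; the MetaKey is illustrative only.

#include <apt-pkg/indexrecords.h>
#include <apt-pkg/hashes.h>
#include <iostream>

static void PrintRecord(indexRecords *MetaIndexParser)
{
   indexRecords::checkSum *Record = MetaIndexParser->Lookup("main/binary-amd64/Packages");
   if (Record == NULL)
      return;   // the Release file has no entry for this key
   std::cout << Record->MetaKeyFilename << ": " << Record->Size << " bytes" << std::endl;
   for (HashStringList::const_iterator hs = Record->Hashes.begin(); hs != Record->Hashes.end(); ++hs)
      std::cout << "  " << hs->toStr() << std::endl;
}
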
@@ -1228,7 +1301,7 @@ pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire *Owner, /*{{{*/
string MetaIndexShortDesc,
const vector<IndexTarget*>* IndexTargets,
indexRecords* MetaIndexParser) :
- Item(Owner), RealURI(URI), MetaIndexURI(MetaIndexURI),
+ Item(Owner, HashStringList()), RealURI(URI), MetaIndexURI(MetaIndexURI),
MetaIndexURIDesc(MetaIndexURIDesc), MetaIndexShortDesc(MetaIndexShortDesc),
MetaIndexParser(MetaIndexParser), IndexTargets(IndexTargets)
{
@@ -1259,6 +1332,9 @@ pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire *Owner, /*{{{*/
Rename(Final,LastGoodSig);
}
+ // we expect the indextargets + one additional Release file
+ ExpectedAdditionalItems = IndexTargets->size() + 1;
+
QueueURI(Desc);
}
/*}}}*/
@@ -1278,7 +1354,7 @@ pkgAcqMetaSig::~pkgAcqMetaSig() /*{{{*/
// pkgAcqMetaSig::Custom600Headers - Insert custom request headers /*{{{*/
// ---------------------------------------------------------------------
/* The only header we use is the last-modified header. */
-string pkgAcqMetaSig::Custom600Headers()
+string pkgAcqMetaSig::Custom600Headers() const
{
struct stat Buf;
if (stat(LastGoodSig.c_str(),&Buf) != 0)
@@ -1287,10 +1363,10 @@ string pkgAcqMetaSig::Custom600Headers()
return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
}
-void pkgAcqMetaSig::Done(string Message,unsigned long long Size,string MD5,
+void pkgAcqMetaSig::Done(string Message,unsigned long long Size, HashStringList const &Hashes,
pkgAcquire::MethodConfig *Cfg)
{
- Item::Done(Message,Size,MD5,Cfg);
+ Item::Done(Message, Size, Hashes, Cfg);
string FileName = LookupTag(Message,"Filename");
if (FileName.empty() == true)
@@ -1311,6 +1387,9 @@ void pkgAcqMetaSig::Done(string Message,unsigned long long Size,string MD5,
Complete = true;
+ // at this point pkgAcqMetaIndex takes over
+ ExpectedAdditionalItems = 0;
+
// put the last known good file back on i-m-s hit (it will
// be re-verified again)
// Else do nothing, we have the new file in DestFile then
@@ -1328,6 +1407,9 @@ void pkgAcqMetaSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf)/*{{{*/
{
string Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
+ // at this point pkgAcqMetaIndex takes over
+ ExpectedAdditionalItems = 0;
+
// if we get a network error we fail gracefully
if(Status == StatTransientNetworkError)
{
@@ -1364,9 +1446,9 @@ void pkgAcqMetaSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf)/*{{{*/
pkgAcqMetaIndex::pkgAcqMetaIndex(pkgAcquire *Owner, /*{{{*/
string URI,string URIDesc,string ShortDesc,
string SigFile,
- const vector<struct IndexTarget*>* IndexTargets,
+ const vector<IndexTarget*>* IndexTargets,
indexRecords* MetaIndexParser) :
- Item(Owner), RealURI(URI), SigFile(SigFile), IndexTargets(IndexTargets),
+ Item(Owner, HashStringList()), RealURI(URI), SigFile(SigFile), IndexTargets(IndexTargets),
MetaIndexParser(MetaIndexParser), AuthPass(false), IMSHit(false)
{
DestFile = _config->FindDir("Dir::State::lists") + "partial/";
@@ -1378,13 +1460,16 @@ pkgAcqMetaIndex::pkgAcqMetaIndex(pkgAcquire *Owner, /*{{{*/
Desc.ShortDesc = ShortDesc;
Desc.URI = URI;
+ // we expect more items
+ ExpectedAdditionalItems = IndexTargets->size();
+
QueueURI(Desc);
}
/*}}}*/
// pkgAcqMetaIndex::Custom600Headers - Insert custom request headers /*{{{*/
// ---------------------------------------------------------------------
/* The only header we use is the last-modified header. */
-string pkgAcqMetaIndex::Custom600Headers()
+string pkgAcqMetaIndex::Custom600Headers() const
{
string Final = _config->FindDir("Dir::State::lists");
Final += URItoFileName(RealURI);
@@ -1396,10 +1481,10 @@ string pkgAcqMetaIndex::Custom600Headers()
return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
}
/*}}}*/
-void pkgAcqMetaIndex::Done(string Message,unsigned long long Size,string Hash, /*{{{*/
+void pkgAcqMetaIndex::Done(string Message,unsigned long long Size,HashStringList const &Hashes, /*{{{*/
pkgAcquire::MethodConfig *Cfg)
{
- Item::Done(Message,Size,Hash,Cfg);
+ Item::Done(Message,Size,Hashes,Cfg);
// MetaIndexes are done in two passes: one to download the
// metaindex with an appropriate method, and a second to verify it
@@ -1428,9 +1513,20 @@ void pkgAcqMetaIndex::Done(string Message,unsigned long long Size,string Hash, /
}
else
{
+ // FIXME: move this into pkgAcqMetaClearSig::Done on the next
+ // ABI break
+
+ // if we expect a ClearTextSignature (InRelease), ensure that
+ // this is what we get and if not fail to queue a
+ // Release/Release.gpg, see #346386
+ if (SigFile == DestFile && !StartsWithGPGClearTextSignature(DestFile))
+ {
+ Failed(Message, Cfg);
+ return;
+ }
+
// There was a signature file, so pass it to gpgv for
// verification
-
if (_config->FindB("Debug::pkgAcquire::Auth", false))
std::cerr << "Metaindex acquired, queueing gpg verification ("
<< SigFile << "," << DestFile << ")\n";
@@ -1556,11 +1652,13 @@ void pkgAcqMetaIndex::QueueIndexes(bool verify) /*{{{*/
}
}
- for (vector <struct IndexTarget*>::const_iterator Target = IndexTargets->begin();
+ // at this point the real Items are loaded in the fetcher
+ ExpectedAdditionalItems = 0;
+ for (vector <IndexTarget*>::const_iterator Target = IndexTargets->begin();
Target != IndexTargets->end();
++Target)
{
- HashString ExpectedIndexHash;
+ HashStringList ExpectedIndexHashes;
const indexRecords::checkSum *Record = MetaIndexParser->Lookup((*Target)->MetaKey);
bool compressedAvailable = false;
if (Record == NULL)
@@ -1584,14 +1682,16 @@ void pkgAcqMetaIndex::QueueIndexes(bool verify) /*{{{*/
}
else
{
- ExpectedIndexHash = Record->Hash;
+ ExpectedIndexHashes = Record->Hashes;
if (_config->FindB("Debug::pkgAcquire::Auth", false))
{
- std::cerr << "Queueing: " << (*Target)->URI << std::endl;
- std::cerr << "Expected Hash: " << ExpectedIndexHash.toStr() << std::endl;
+ std::cerr << "Queueing: " << (*Target)->URI << std::endl
+ << "Expected Hash:" << std::endl;
+ for (HashStringList::const_iterator hs = ExpectedIndexHashes.begin(); hs != ExpectedIndexHashes.end(); ++hs)
+ std::cerr << "\t- " << hs->toStr() << std::endl;
std::cerr << "For: " << Record->MetaKeyFilename << std::endl;
}
- if (verify == true && ExpectedIndexHash.empty() == true && (*Target)->IsOptional() == false)
+ if (verify == true && ExpectedIndexHashes.empty() == true && (*Target)->IsOptional() == false)
{
Status = StatAuthError;
strprintf(ErrorText, _("Unable to find hash sum for '%s' in Release file"), (*Target)->MetaKey.c_str());
@@ -1603,15 +1703,14 @@ void pkgAcqMetaIndex::QueueIndexes(bool verify) /*{{{*/
{
if ((*Target)->IsSubIndex() == true)
new pkgAcqSubIndex(Owner, (*Target)->URI, (*Target)->Description,
- (*Target)->ShortDesc, ExpectedIndexHash);
+ (*Target)->ShortDesc, ExpectedIndexHashes);
else if (transInRelease == false || Record != NULL || compressedAvailable == true)
{
if (_config->FindB("Acquire::PDiffs",true) == true && transInRelease == true &&
MetaIndexParser->Exists((*Target)->MetaKey + ".diff/Index") == true)
- new pkgAcqDiffIndex(Owner, (*Target)->URI, (*Target)->Description,
- (*Target)->ShortDesc, ExpectedIndexHash);
+ new pkgAcqDiffIndex(Owner, *Target, ExpectedIndexHashes, MetaIndexParser);
else
- new pkgAcqIndexTrans(Owner, *Target, ExpectedIndexHash, MetaIndexParser);
+ new pkgAcqIndexTrans(Owner, *Target, ExpectedIndexHashes, MetaIndexParser);
}
continue;
}
@@ -1622,10 +1721,9 @@ void pkgAcqMetaIndex::QueueIndexes(bool verify) /*{{{*/
instead, but passing the required info to it is to much hassle */
if(_config->FindB("Acquire::PDiffs",true) == true && (verify == false ||
MetaIndexParser->Exists((*Target)->MetaKey + ".diff/Index") == true))
- new pkgAcqDiffIndex(Owner, (*Target)->URI, (*Target)->Description,
- (*Target)->ShortDesc, ExpectedIndexHash);
+ new pkgAcqDiffIndex(Owner, *Target, ExpectedIndexHashes, MetaIndexParser);
else
- new pkgAcqIndex(Owner, *Target, ExpectedIndexHash, MetaIndexParser);
+ new pkgAcqIndex(Owner, *Target, ExpectedIndexHashes, MetaIndexParser);
}
}
/*}}}*/
@@ -1778,7 +1876,7 @@ pkgAcqMetaClearSig::pkgAcqMetaClearSig(pkgAcquire *Owner, /*{{{*/
string const &URI, string const &URIDesc, string const &ShortDesc,
string const &MetaIndexURI, string const &MetaIndexURIDesc, string const &MetaIndexShortDesc,
string const &MetaSigURI, string const &MetaSigURIDesc, string const &MetaSigShortDesc,
- const vector<struct IndexTarget*>* IndexTargets,
+ const vector<IndexTarget*>* IndexTargets,
indexRecords* MetaIndexParser) :
pkgAcqMetaIndex(Owner, URI, URIDesc, ShortDesc, "", IndexTargets, MetaIndexParser),
MetaIndexURI(MetaIndexURI), MetaIndexURIDesc(MetaIndexURIDesc), MetaIndexShortDesc(MetaIndexShortDesc),
@@ -1786,6 +1884,10 @@ pkgAcqMetaClearSig::pkgAcqMetaClearSig(pkgAcquire *Owner, /*{{{*/
{
SigFile = DestFile;
+ // index targets + (worst case:) Release/Release.gpg
+ ExpectedAdditionalItems = IndexTargets->size() + 2;
+
+
// keep the old InRelease around in case of transistent network errors
string const Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
if (RealFileExists(Final) == true)
@@ -1810,7 +1912,7 @@ pkgAcqMetaClearSig::~pkgAcqMetaClearSig() /*{{{*/
// pkgAcqMetaClearSig::Custom600Headers - Insert custom request headers /*{{{*/
// ---------------------------------------------------------------------
// FIXME: this can go away once the InRelease file is used widely
-string pkgAcqMetaClearSig::Custom600Headers()
+string pkgAcqMetaClearSig::Custom600Headers() const
{
string Final = _config->FindDir("Dir::State::lists");
Final += URItoFileName(RealURI);
@@ -1828,6 +1930,9 @@ string pkgAcqMetaClearSig::Custom600Headers()
/*}}}*/
void pkgAcqMetaClearSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/
{
+ // we failed, we will not get additional items from this method
+ ExpectedAdditionalItems = 0;
+
if (AuthPass == false)
{
// Remove the 'old' InRelease file if we try Release.gpg now as otherwise
@@ -1856,7 +1961,7 @@ void pkgAcqMetaClearSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*
pkgAcqArchive::pkgAcqArchive(pkgAcquire *Owner,pkgSourceList *Sources,
pkgRecords *Recs,pkgCache::VerIterator const &Version,
string &StoreFilename) :
- Item(Owner), Version(Version), Sources(Sources), Recs(Recs),
+ Item(Owner, HashStringList()), Version(Version), Sources(Sources), Recs(Recs),
StoreFilename(StoreFilename), Vf(Version.FileList()),
Trusted(false)
{
@@ -1941,7 +2046,6 @@ pkgAcqArchive::pkgAcqArchive(pkgAcquire *Owner,pkgSourceList *Sources,
checking later. */
bool pkgAcqArchive::QueueNext()
{
- string const ForceHash = _config->Find("Acquire::ForceHash");
for (; Vf.end() == false; ++Vf)
{
// Ignore not source sources
@@ -1962,31 +2066,10 @@ bool pkgAcqArchive::QueueNext()
pkgRecords::Parser &Parse = Recs->Lookup(Vf);
if (_error->PendingError() == true)
return false;
-
+
string PkgFile = Parse.FileName();
- if (ForceHash.empty() == false)
- {
- if(stringcasecmp(ForceHash, "sha512") == 0)
- ExpectedHash = HashString("SHA512", Parse.SHA512Hash());
- else if(stringcasecmp(ForceHash, "sha256") == 0)
- ExpectedHash = HashString("SHA256", Parse.SHA256Hash());
- else if (stringcasecmp(ForceHash, "sha1") == 0)
- ExpectedHash = HashString("SHA1", Parse.SHA1Hash());
- else
- ExpectedHash = HashString("MD5Sum", Parse.MD5Hash());
- }
- else
- {
- string Hash;
- if ((Hash = Parse.SHA512Hash()).empty() == false)
- ExpectedHash = HashString("SHA512", Hash);
- else if ((Hash = Parse.SHA256Hash()).empty() == false)
- ExpectedHash = HashString("SHA256", Hash);
- else if ((Hash = Parse.SHA1Hash()).empty() == false)
- ExpectedHash = HashString("SHA1", Hash);
- else
- ExpectedHash = HashString("MD5Sum", Parse.MD5Hash());
- }
+ ExpectedHashes = Parse.Hashes();
+
if (PkgFile.empty() == true)
return _error->Error(_("The package index files are corrupted. No Filename: "
"field for package %s."),
@@ -2073,10 +2156,10 @@ bool pkgAcqArchive::QueueNext()
// AcqArchive::Done - Finished fetching /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgAcqArchive::Done(string Message,unsigned long long Size,string CalcHash,
+void pkgAcqArchive::Done(string Message,unsigned long long Size, HashStringList const &CalcHashes,
pkgAcquire::MethodConfig *Cfg)
{
- Item::Done(Message,Size,CalcHash,Cfg);
+ Item::Done(Message, Size, CalcHashes, Cfg);
// Check the size
if (Size != Version->Size)
@@ -2084,11 +2167,12 @@ void pkgAcqArchive::Done(string Message,unsigned long long Size,string CalcHash,
RenameOnError(SizeMismatch);
return;
}
-
- // Check the hash
- if(ExpectedHash.toStr() != CalcHash)
+
+ // FIXME: could this empty() check impose *any* sort of security issue?
+ if(ExpectedHashes.usable() && ExpectedHashes != CalcHashes)
{
RenameOnError(HashSumMismatch);
+ printHashSumComparision(DestFile, ExpectedHashes, CalcHashes);
return;
}
@@ -2160,7 +2244,7 @@ void pkgAcqArchive::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
/*}}}*/
// AcqArchive::IsTrusted - Determine whether this archive comes from a trusted source /*{{{*/
// ---------------------------------------------------------------------
-APT_PURE bool pkgAcqArchive::IsTrusted()
+APT_PURE bool pkgAcqArchive::IsTrusted() const
{
return Trusted;
}
@@ -2179,11 +2263,11 @@ void pkgAcqArchive::Finished()
// AcqFile::pkgAcqFile - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* The file is added to the queue */
-pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI,string Hash,
+pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI, HashStringList const &Hashes,
unsigned long long Size,string Dsc,string ShortDesc,
const string &DestDir, const string &DestFilename,
bool IsIndexFile) :
- Item(Owner), ExpectedHash(Hash), IsIndexFile(IsIndexFile)
+ Item(Owner, Hashes), IsIndexFile(IsIndexFile)
{
Retries = _config->FindI("Acquire::Retries",0);
@@ -2220,15 +2304,16 @@ pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI,string Hash,
// AcqFile::Done - Item downloaded OK /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgAcqFile::Done(string Message,unsigned long long Size,string CalcHash,
+void pkgAcqFile::Done(string Message,unsigned long long Size,HashStringList const &CalcHashes,
pkgAcquire::MethodConfig *Cnf)
{
- Item::Done(Message,Size,CalcHash,Cnf);
+ Item::Done(Message,Size,CalcHashes,Cnf);
// Check the hash
- if(!ExpectedHash.empty() && ExpectedHash.toStr() != CalcHash)
+ if(ExpectedHashes.usable() && ExpectedHashes != CalcHashes)
{
RenameOnError(HashSumMismatch);
+ printHashSumComparision(DestFile, ExpectedHashes, CalcHashes);
return;
}
@@ -2299,7 +2384,7 @@ void pkgAcqFile::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
// AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/
// ---------------------------------------------------------------------
/* The only header we use is the last-modified header. */
-string pkgAcqFile::Custom600Headers()
+string pkgAcqFile::Custom600Headers() const
{
if (IsIndexFile)
return "\nIndex-File: true";
diff --git a/apt-pkg/acquire-item.h b/apt-pkg/acquire-item.h
index f48d2a0d7..b4cac2f04 100644
--- a/apt-pkg/acquire-item.h
+++ b/apt-pkg/acquire-item.h
@@ -46,6 +46,7 @@
class indexRecords;
class pkgRecords;
class pkgSourceList;
+class IndexTarget;
/** \brief Represents the process by which a pkgAcquire object should {{{
* retrieve a file or a collection of files.
@@ -166,6 +167,16 @@ class pkgAcquire::Item : public WeakPointable
* \sa pkgAcquire
*/
unsigned int QueueCounter;
+
+ /** \brief The number of additional fetch items that are expected
+ * once this item is done.
+ *
+ * Some items like pkgAcqMeta{Index,Sig} will queue additional
+ * items. This variable can be set by such an item if it knows in
+ * advance how many additional items to expect, allowing for more
+ * accurate progress reporting.
+ */
+ unsigned int ExpectedAdditionalItems;
/** \brief The name of the file into which the retrieved object
* will be written.
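
ExpectedAdditionalItems lets a frontend account for fetch items that a meta item will only queue later (the Release/InRelease items below set it). A rough progress sketch, assuming the frontend holds a pkgAcquire instance called Fetcher:

#include <apt-pkg/acquire.h>
#include <apt-pkg/acquire-item.h>

// upper-bound estimate of how many fetch items will exist once the
// meta items have queued everything they announced
static unsigned long EstimateTotalItems(pkgAcquire &Fetcher)
{
   unsigned long Total = 0;
   for (pkgAcquire::ItemIterator I = Fetcher.ItemsBegin(); I != Fetcher.ItemsEnd(); ++I)
      Total += 1 + (*I)->ExpectedAdditionalItems;
   return Total;
}
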
@@ -201,12 +212,12 @@ class pkgAcquire::Item : public WeakPointable
* \param Message Data from the acquire method. Use LookupTag()
* to parse it.
* \param Size The size of the object that was fetched.
- * \param Hash The HashSum of the object that was fetched.
+ * \param Hashes The HashSums of the object that was fetched.
* \param Cnf The method via which the object was fetched.
*
* \sa pkgAcqMethod
*/
- virtual void Done(std::string Message,unsigned long long Size,std::string Hash,
+ virtual void Done(std::string Message, unsigned long long Size, HashStringList const &Hashes,
pkgAcquire::MethodConfig *Cnf);
/** \brief Invoked when the worker starts to fetch this object.
@@ -228,34 +239,35 @@ class pkgAcquire::Item : public WeakPointable
* line, so they should (if nonempty) have a leading newline and
* no trailing newline.
*/
- virtual std::string Custom600Headers() {return std::string();};
+ virtual std::string Custom600Headers() const {return std::string();};
/** \brief A "descriptive" URI-like string.
*
* \return a URI that should be used to describe what is being fetched.
*/
- virtual std::string DescURI() = 0;
+ virtual std::string DescURI() const = 0;
/** \brief Short item description.
*
* \return a brief description of the object being fetched.
*/
- virtual std::string ShortDesc() {return DescURI();}
+ virtual std::string ShortDesc() const {return DescURI();}
/** \brief Invoked by the worker when the download is completely done. */
virtual void Finished() {};
- /** \brief HashSum
+ /** \brief HashSums
*
- * \return the HashSum of this object, if applicable; otherwise, an
- * empty string.
+ * \return the HashSums of this object, if applicable; otherwise, an
+ * empty list.
*/
- virtual std::string HashSum() {return std::string();};
+ HashStringList HashSums() const {return ExpectedHashes;};
+ std::string HashSum() const {HashStringList const hashes = HashSums(); HashString const * const hs = hashes.find(NULL); return hs != NULL ? hs->toStr() : ""; };
/** \return the acquire process with which this item is associated. */
- pkgAcquire *GetOwner() {return Owner;};
+ pkgAcquire *GetOwner() const {return Owner;};
/** \return \b true if this object is being fetched from a trusted source. */
- virtual bool IsTrusted() {return false;};
+ virtual bool IsTrusted() const {return false;};
// report mirror problems
/** \brief Report mirror problem
@@ -274,12 +286,10 @@ class pkgAcquire::Item : public WeakPointable
* process, but does not place it into any fetch queues (you must
* manually invoke QueueURI() to do so).
*
- * Initializes all fields of the item other than Owner to 0,
- * false, or the empty string.
- *
* \param Owner The new owner of this item.
+ * \param ExpectedHashes of the file represented by this item
*/
- Item(pkgAcquire *Owner);
+ Item(pkgAcquire *Owner, HashStringList const &ExpectedHashes);
/** \brief Remove this item from its owner's queue by invoking
* pkgAcquire::Remove.
@@ -299,6 +309,12 @@ class pkgAcquire::Item : public WeakPointable
* \param state respresenting the error we encountered
*/
bool RenameOnError(RenameOnErrorState const state);
+
+ /** \brief The HashSums the fetched item is supposed to have once it is done */
+ HashStringList ExpectedHashes;
+
+ /** \brief The item that is currently being downloaded. */
+ pkgAcquire::ItemDesc Desc;
};
/*}}}*/
/** \brief Information about an index patch (aka diff). */ /*{{{*/
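
Together with the HashSums()/HashSum() pair added a few hunks above, the shared ExpectedHashes member replaces the per-subclass hash fields: HashSums() exposes the full list, while HashSum() keeps the old single-string behaviour by picking the best entry via find(NULL). A small read-only sketch over any pkgAcquire::Item:

#include <apt-pkg/acquire-item.h>
#include <apt-pkg/hashes.h>
#include <iostream>

static void PrintExpectedHashes(pkgAcquire::Item const &Itm)
{
   HashStringList const hashes = Itm.HashSums();   // every expected hash of the item
   for (HashStringList::const_iterator hs = hashes.begin(); hs != hashes.end(); ++hs)
      std::cout << "  " << hs->toStr() << std::endl;
   std::cout << "best: " << Itm.HashSum() << std::endl;   // single "type:value" string, as before
}
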
@@ -325,20 +341,13 @@ class pkgAcqSubIndex : public pkgAcquire::Item
/** \brief If \b true, debugging information will be written to std::clog. */
bool Debug;
- /** \brief The item that is currently being downloaded. */
- pkgAcquire::ItemDesc Desc;
-
- /** \brief The Hash that this file should have after download
- */
- HashString ExpectedHash;
-
public:
// Specialized action members
virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf);
- virtual void Done(std::string Message,unsigned long long Size,std::string Md5Hash,
+ virtual void Done(std::string Message,unsigned long long Size, HashStringList const &Hashes,
pkgAcquire::MethodConfig *Cnf);
- virtual std::string DescURI() {return Desc.URI;};
- virtual std::string Custom600Headers();
+ virtual std::string DescURI() const {return Desc.URI;};
+ virtual std::string Custom600Headers() const;
virtual bool ParseIndex(std::string const &IndexFile);
/** \brief Create a new pkgAcqSubIndex.
@@ -351,10 +360,31 @@ class pkgAcqSubIndex : public pkgAcquire::Item
*
* \param ShortDesc A short description of the list file to download.
*
- * \param ExpectedHash The list file's MD5 signature.
+ * \param ExpectedHashes The list file's hashsums which are expected.
*/
pkgAcqSubIndex(pkgAcquire *Owner, std::string const &URI,std::string const &URIDesc,
- std::string const &ShortDesc, HashString const &ExpectedHash);
+ std::string const &ShortDesc, HashStringList const &ExpectedHashes);
+};
+ /*}}}*/
+
+/** \brief Common base class for all classes that deal with fetching {{{
+ indexes
+ */
+class pkgAcqBaseIndex : public pkgAcquire::Item
+{
+ protected:
+ /** \brief Pointer to the IndexTarget data
+ */
+ const struct IndexTarget * Target;
+ indexRecords *MetaIndexParser;
+
+ pkgAcqBaseIndex(pkgAcquire *Owner,
+ struct IndexTarget const * const Target,
+ HashStringList const &ExpectedHashes,
+ indexRecords *MetaIndexParser)
+ : Item(Owner, ExpectedHashes), Target(Target),
+ MetaIndexParser(MetaIndexParser) {};
+
};
/*}}}*/
/** \brief An item that is responsible for fetching an index file of {{{
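
pkgAcqBaseIndex above bundles the IndexTarget, the expected hashes and the Release parser that every index-fetching item needs. A hypothetical subclass (illustration only, not part of the patch) shows what a derived item still fills in itself:

#include <apt-pkg/acquire-item.h>
#include <apt-pkg/configuration.h>
#include <apt-pkg/strutl.h>

class pkgAcqExampleIndex : public pkgAcqBaseIndex   // hypothetical name
{
   public:
   pkgAcqExampleIndex(pkgAcquire *Owner, IndexTarget const * const Target,
                      HashStringList const &ExpectedHashes, indexRecords *MetaIndexParser)
      : pkgAcqBaseIndex(Owner, Target, ExpectedHashes, MetaIndexParser)
   {
      Desc.Owner = this;
      Desc.URI = Target->URI;
      Desc.Description = Target->Description;
      Desc.ShortDesc = Target->ShortDesc;
      DestFile = _config->FindDir("Dir::State::lists") + "partial/" + URItoFileName(Target->URI);
      QueueURI(Desc);
   }
   virtual std::string DescURI() const {return Desc.URI;};
};
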
@@ -366,25 +396,17 @@ class pkgAcqSubIndex : public pkgAcquire::Item
*
* \sa pkgAcqIndexDiffs, pkgAcqIndex
*/
-class pkgAcqDiffIndex : public pkgAcquire::Item
+class pkgAcqDiffIndex : public pkgAcqBaseIndex
{
protected:
/** \brief If \b true, debugging information will be written to std::clog. */
bool Debug;
- /** \brief The item that is currently being downloaded. */
- pkgAcquire::ItemDesc Desc;
-
/** \brief The URI of the index file to recreate at our end (either
* by downloading it or by applying partial patches).
*/
std::string RealURI;
- /** \brief The Hash that the real index file should have after
- * all patches have been applied.
- */
- HashString ExpectedHash;
-
/** \brief The index file which will be patched to generate the new
* file.
*/
@@ -398,10 +420,10 @@ class pkgAcqDiffIndex : public pkgAcquire::Item
public:
// Specialized action members
virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf);
- virtual void Done(std::string Message,unsigned long long Size,std::string Md5Hash,
+ virtual void Done(std::string Message,unsigned long long Size, HashStringList const &Hashes,
pkgAcquire::MethodConfig *Cnf);
- virtual std::string DescURI() {return RealURI + "Index";};
- virtual std::string Custom600Headers();
+ virtual std::string DescURI() const {return RealURI + "Index";};
+ virtual std::string Custom600Headers() const;
/** \brief Parse the Index file for a set of Packages diffs.
*
@@ -426,10 +448,12 @@ class pkgAcqDiffIndex : public pkgAcquire::Item
*
* \param ShortDesc A short description of the list file to download.
*
- * \param ExpectedHash The list file's MD5 signature.
+ * \param ExpectedHashes The list file's hashsums which are expected.
*/
- pkgAcqDiffIndex(pkgAcquire *Owner,std::string URI,std::string URIDesc,
- std::string ShortDesc, HashString ExpectedHash);
+ pkgAcqDiffIndex(pkgAcquire *Owner,
+ struct IndexTarget const * const Target,
+ HashStringList const &ExpectedHashes,
+ indexRecords *MetaIndexParser);
};
/*}}}*/
/** \brief An item that is responsible for fetching client-merge patches {{{
@@ -443,7 +467,7 @@ class pkgAcqDiffIndex : public pkgAcquire::Item
*
* \sa pkgAcqDiffIndex, pkgAcqIndex
*/
-class pkgAcqIndexMergeDiffs : public pkgAcquire::Item
+class pkgAcqIndexMergeDiffs : public pkgAcqBaseIndex
{
protected:
@@ -452,21 +476,11 @@ class pkgAcqIndexMergeDiffs : public pkgAcquire::Item
*/
bool Debug;
- /** \brief description of the item that is currently being
- * downloaded.
- */
- pkgAcquire::ItemDesc Desc;
-
/** \brief URI of the package index file that is being
* reconstructed.
*/
std::string RealURI;
- /** \brief HashSum of the package index file that is being
- * reconstructed.
- */
- HashString ExpectedHash;
-
/** \brief description of the file being downloaded. */
std::string Description;
@@ -499,10 +513,9 @@ class pkgAcqIndexMergeDiffs : public pkgAcquire::Item
* outright; its arguments are ignored.
*/
virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf);
-
- virtual void Done(std::string Message,unsigned long long Size,std::string Md5Hash,
- pkgAcquire::MethodConfig *Cnf);
- virtual std::string DescURI() {return RealURI + "Index";};
+ virtual void Done(std::string Message,unsigned long long Size, HashStringList const &Hashes,
+ pkgAcquire::MethodConfig *Cnf);
+ virtual std::string DescURI() const {return RealURI + "Index";};
/** \brief Create an index merge-diff item.
*
@@ -515,7 +528,7 @@ class pkgAcqIndexMergeDiffs : public pkgAcquire::Item
*
* \param ShortDesc A brief description of this item.
*
- * \param ExpectedHash The expected md5sum of the completely
+ * \param ExpectedHashes The expected hashsums of the completely
* reconstructed package index file; the index file will be tested
* against this value when it is entirely reconstructed.
*
@@ -525,9 +538,12 @@ class pkgAcqIndexMergeDiffs : public pkgAcquire::Item
* \param allPatches contains all related items so that each item can
* check if it was the last one to complete the download step
*/
- pkgAcqIndexMergeDiffs(pkgAcquire *Owner,std::string const &URI,std::string const &URIDesc,
- std::string const &ShortDesc, HashString const &ExpectedHash,
- DiffInfo const &patch, std::vector<pkgAcqIndexMergeDiffs*> const * const allPatches);
+ pkgAcqIndexMergeDiffs(pkgAcquire *Owner,
+ struct IndexTarget const * const Target,
+ HashStringList const &ExpectedHash,
+ indexRecords *MetaIndexParser,
+ DiffInfo const &patch,
+ std::vector<pkgAcqIndexMergeDiffs*> const * const allPatches);
};
/*}}}*/
/** \brief An item that is responsible for fetching server-merge patches {{{
@@ -541,7 +557,7 @@ class pkgAcqIndexMergeDiffs : public pkgAcquire::Item
*
* \sa pkgAcqDiffIndex, pkgAcqIndex
*/
-class pkgAcqIndexDiffs : public pkgAcquire::Item
+class pkgAcqIndexDiffs : public pkgAcqBaseIndex
{
private:
@@ -559,8 +575,8 @@ class pkgAcqIndexDiffs : public pkgAcquire::Item
/** \brief Handle tasks that must be performed after the item
* finishes downloading.
*
- * Dequeues the item and checks the resulting file's md5sum
- * against ExpectedHash after the last patch was applied.
+ * Dequeues the item and checks the resulting file's hashsums
+ * against ExpectedHashes after the last patch was applied.
* There is no need to check the md5/sha1 after a "normal"
* patch because QueueNextDiff() will check the sha1 later.
*
@@ -576,21 +592,11 @@ class pkgAcqIndexDiffs : public pkgAcquire::Item
*/
bool Debug;
- /** \brief A description of the item that is currently being
- * downloaded.
- */
- pkgAcquire::ItemDesc Desc;
-
/** \brief The URI of the package index file that is being
* reconstructed.
*/
std::string RealURI;
- /** \brief The HashSum of the package index file that is being
- * reconstructed.
- */
- HashString ExpectedHash;
-
/** A description of the file being downloaded. */
std::string Description;
@@ -632,9 +638,9 @@ class pkgAcqIndexDiffs : public pkgAcquire::Item
*/
virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf);
- virtual void Done(std::string Message,unsigned long long Size,std::string Md5Hash,
+ virtual void Done(std::string Message,unsigned long long Size, HashStringList const &Hashes,
pkgAcquire::MethodConfig *Cnf);
- virtual std::string DescURI() {return RealURI + "Index";};
+ virtual std::string DescURI() const {return RealURI + "Index";};
/** \brief Create an index diff item.
*
@@ -650,7 +656,7 @@ class pkgAcqIndexDiffs : public pkgAcquire::Item
*
* \param ShortDesc A brief description of this item.
*
- * \param ExpectedHash The expected md5sum of the completely
+ * \param ExpectedHashes The expected hashsums of the completely
* reconstructed package index file; the index file will be tested
* against this value when it is entirely reconstructed.
*
@@ -660,8 +666,10 @@ class pkgAcqIndexDiffs : public pkgAcquire::Item
* should be ordered so that each diff appears before any diff
* that depends on it.
*/
- pkgAcqIndexDiffs(pkgAcquire *Owner,std::string URI,std::string URIDesc,
- std::string ShortDesc, HashString ExpectedHash,
+ pkgAcqIndexDiffs(pkgAcquire *Owner,
+ struct IndexTarget const * const Target,
+ HashStringList const &ExpectedHash,
+ indexRecords *MetaIndexParser,
std::string ServerSha1,
std::vector<DiffInfo> diffs=std::vector<DiffInfo>());
};
@@ -673,7 +681,7 @@ class pkgAcqIndexDiffs : public pkgAcquire::Item
*
* \todo Why does pkgAcqIndex have protected members?
*/
-class pkgAcqIndex : public pkgAcquire::Item
+class pkgAcqIndex : public pkgAcqBaseIndex
{
protected:
@@ -695,33 +703,27 @@ class pkgAcqIndex : public pkgAcquire::Item
// the downloaded file contains the expected tag
bool Verify;
- /** \brief The download request that is currently being
- * processed.
- */
- pkgAcquire::ItemDesc Desc;
-
/** \brief The object that is actually being fetched (minus any
* compression-related extensions).
*/
std::string RealURI;
- /** \brief The expected hashsum of the decompressed index file. */
- HashString ExpectedHash;
-
/** \brief The compression-related file extensions that are being
* added to the downloaded file one by one if first fails (e.g., "gz bz2").
*/
std::string CompressionExtension;
+ /** \brief Do the changes needed to fetch via AptByHash (if needed) */
+ void InitByHashIfNeeded(const std::string MetaKey);
+
public:
// Specialized action members
virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf);
- virtual void Done(std::string Message,unsigned long long Size,std::string Md5Hash,
+ virtual void Done(std::string Message,unsigned long long Size, HashStringList const &Hashes,
pkgAcquire::MethodConfig *Cnf);
- virtual std::string Custom600Headers();
- virtual std::string DescURI() {return Desc.URI;};
- virtual std::string HashSum() {return ExpectedHash.toStr(); };
+ virtual std::string Custom600Headers() const;
+ virtual std::string DescURI() const {return Desc.URI;};
/** \brief Create a pkgAcqIndex.
*
@@ -734,7 +736,7 @@ class pkgAcqIndex : public pkgAcquire::Item
*
* \param ShortDesc A brief description of this index file.
*
- * \param ExpectedHash The expected hashsum of this index file.
+ * \param ExpectedHashes The expected hashsum of this index file.
*
* \param compressExt The compression-related extension with which
* this index file should be downloaded, or "" to autodetect
@@ -743,11 +745,14 @@ class pkgAcqIndex : public pkgAcquire::Item
* fallback is ".gz" or none.
*/
pkgAcqIndex(pkgAcquire *Owner,std::string URI,std::string URIDesc,
- std::string ShortDesc, HashString ExpectedHash,
+ std::string ShortDesc, HashStringList const &ExpectedHashes,
std::string compressExt="");
- pkgAcqIndex(pkgAcquire *Owner, struct IndexTarget const * const Target,
- HashString const &ExpectedHash, indexRecords const *MetaIndexParser);
- void Init(std::string const &URI, std::string const &URIDesc, std::string const &ShortDesc);
+ pkgAcqIndex(pkgAcquire *Owner,
+ IndexTarget const * const Target,
+ HashStringList const &ExpectedHash,
+ indexRecords *MetaIndexParser);
+ void Init(std::string const &URI, std::string const &URIDesc,
+ std::string const &ShortDesc);
};
/*}}}*/
/** \brief An acquire item that is responsible for fetching a {{{
@@ -762,7 +767,7 @@ class pkgAcqIndexTrans : public pkgAcqIndex
public:
virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf);
- virtual std::string Custom600Headers();
+ virtual std::string Custom600Headers() const;
/** \brief Create a pkgAcqIndexTrans.
*
@@ -777,8 +782,8 @@ class pkgAcqIndexTrans : public pkgAcqIndex
*/
pkgAcqIndexTrans(pkgAcquire *Owner,std::string URI,std::string URIDesc,
std::string ShortDesc);
- pkgAcqIndexTrans(pkgAcquire *Owner, struct IndexTarget const * const Target,
- HashString const &ExpectedHash, indexRecords const *MetaIndexParser);
+ pkgAcqIndexTrans(pkgAcquire *Owner, IndexTarget const * const Target,
+ HashStringList const &ExpectedHashes, indexRecords *MetaIndexParser);
};
/*}}}*/
/** \brief Information about an index file. */ /*{{{*/
@@ -846,9 +851,6 @@ class pkgAcqMetaSig : public pkgAcquire::Item
/** \brief The last good signature file */
std::string LastGoodSig;
- /** \brief The fetch request that is currently being processed. */
- pkgAcquire::ItemDesc Desc;
-
/** \brief The URI of the signature file. Unlike Desc.URI, this is
* never modified; it is used to determine the file that is being
* downloaded.
@@ -876,21 +878,21 @@ class pkgAcqMetaSig : public pkgAcquire::Item
*
* \todo Why a list of pointers instead of a list of structs?
*/
- const std::vector<struct IndexTarget*>* IndexTargets;
+ const std::vector<IndexTarget*>* IndexTargets;
public:
// Specialized action members
virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf);
- virtual void Done(std::string Message,unsigned long long Size,std::string Md5Hash,
+ virtual void Done(std::string Message,unsigned long long Size, HashStringList const &Hashes,
pkgAcquire::MethodConfig *Cnf);
- virtual std::string Custom600Headers();
- virtual std::string DescURI() {return RealURI; };
+ virtual std::string Custom600Headers() const;
+ virtual std::string DescURI() const {return RealURI; };
/** \brief Create a new pkgAcqMetaSig. */
pkgAcqMetaSig(pkgAcquire *Owner,std::string URI,std::string URIDesc, std::string ShortDesc,
std::string MetaIndexURI, std::string MetaIndexURIDesc, std::string MetaIndexShortDesc,
- const std::vector<struct IndexTarget*>* IndexTargets,
+ const std::vector<IndexTarget*>* IndexTargets,
indexRecords* MetaIndexParser);
virtual ~pkgAcqMetaSig();
};
@@ -908,9 +910,6 @@ class pkgAcqMetaSig : public pkgAcquire::Item
class pkgAcqMetaIndex : public pkgAcquire::Item
{
protected:
- /** \brief The fetch command that is currently being processed. */
- pkgAcquire::ItemDesc Desc;
-
/** \brief The URI that is actually being downloaded; never
* modified by pkgAcqMetaIndex.
*/
@@ -924,7 +923,7 @@ class pkgAcqMetaIndex : public pkgAcquire::Item
std::string SigFile;
/** \brief The index files to download. */
- const std::vector<struct IndexTarget*>* IndexTargets;
+ const std::vector<IndexTarget*>* IndexTargets;
/** \brief The parser for the meta-index file. */
indexRecords* MetaIndexParser;
@@ -978,16 +977,16 @@ class pkgAcqMetaIndex : public pkgAcquire::Item
// Specialized action members
virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf);
- virtual void Done(std::string Message,unsigned long long Size, std::string Hash,
+ virtual void Done(std::string Message,unsigned long long Size, HashStringList const &Hashes,
pkgAcquire::MethodConfig *Cnf);
- virtual std::string Custom600Headers();
- virtual std::string DescURI() {return RealURI; };
+ virtual std::string Custom600Headers() const;
+ virtual std::string DescURI() const {return RealURI; };
/** \brief Create a new pkgAcqMetaIndex. */
pkgAcqMetaIndex(pkgAcquire *Owner,
std::string URI,std::string URIDesc, std::string ShortDesc,
std::string SigFile,
- const std::vector<struct IndexTarget*>* IndexTargets,
+ const std::vector<IndexTarget*>* IndexTargets,
indexRecords* MetaIndexParser);
};
/*}}}*/
@@ -1014,14 +1013,14 @@ class pkgAcqMetaClearSig : public pkgAcqMetaIndex
public:
void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf);
- virtual std::string Custom600Headers();
+ virtual std::string Custom600Headers() const;
/** \brief Create a new pkgAcqMetaClearSig. */
pkgAcqMetaClearSig(pkgAcquire *Owner,
std::string const &URI, std::string const &URIDesc, std::string const &ShortDesc,
std::string const &MetaIndexURI, std::string const &MetaIndexURIDesc, std::string const &MetaIndexShortDesc,
std::string const &MetaSigURI, std::string const &MetaSigURIDesc, std::string const &MetaSigShortDesc,
- const std::vector<struct IndexTarget*>* IndexTargets,
+ const std::vector<IndexTarget*>* IndexTargets,
indexRecords* MetaIndexParser);
virtual ~pkgAcqMetaClearSig();
};
@@ -1037,9 +1036,6 @@ class pkgAcqArchive : public pkgAcquire::Item
/** \brief The package version being fetched. */
pkgCache::VerIterator Version;
- /** \brief The fetch command that is currently being processed. */
- pkgAcquire::ItemDesc Desc;
-
/** \brief The list of sources from which to pick archives to
* download this package from.
*/
@@ -1050,9 +1046,6 @@ class pkgAcqArchive : public pkgAcquire::Item
*/
pkgRecords *Recs;
- /** \brief The hashsum of this package. */
- HashString ExpectedHash;
-
/** \brief A location in which the actual filename of the package
* should be stored.
*/
@@ -1079,13 +1072,12 @@ class pkgAcqArchive : public pkgAcquire::Item
public:
virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf);
- virtual void Done(std::string Message,unsigned long long Size,std::string Hash,
+ virtual void Done(std::string Message,unsigned long long Size, HashStringList const &Hashes,
pkgAcquire::MethodConfig *Cnf);
- virtual std::string DescURI() {return Desc.URI;};
- virtual std::string ShortDesc() {return Desc.ShortDesc;};
+ virtual std::string DescURI() const {return Desc.URI;};
+ virtual std::string ShortDesc() const {return Desc.ShortDesc;};
virtual void Finished();
- virtual std::string HashSum() {return ExpectedHash.toStr(); };
- virtual bool IsTrusted();
+ virtual bool IsTrusted() const;
/** \brief Create a new pkgAcqArchive.
*
@@ -1118,12 +1110,6 @@ class pkgAcqArchive : public pkgAcquire::Item
*/
class pkgAcqFile : public pkgAcquire::Item
{
- /** \brief The currently active download process. */
- pkgAcquire::ItemDesc Desc;
-
- /** \brief The hashsum of the file to download, if it is known. */
- HashString ExpectedHash;
-
/** \brief How many times to retry the download, set from
* Acquire::Retries.
*/
@@ -1136,11 +1122,10 @@ class pkgAcqFile : public pkgAcquire::Item
// Specialized action members
virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf);
- virtual void Done(std::string Message,unsigned long long Size,std::string CalcHash,
+ virtual void Done(std::string Message,unsigned long long Size, HashStringList const &CalcHashes,
pkgAcquire::MethodConfig *Cnf);
- virtual std::string DescURI() {return Desc.URI;};
- virtual std::string HashSum() {return ExpectedHash.toStr(); };
- virtual std::string Custom600Headers();
+ virtual std::string DescURI() const {return Desc.URI;};
+ virtual std::string Custom600Headers() const;
/** \brief Create a new pkgAcqFile object.
*
@@ -1149,8 +1134,8 @@ class pkgAcqFile : public pkgAcquire::Item
*
* \param URI The URI to download.
*
- * \param Hash The hashsum of the file to download, if it is known;
- * otherwise "".
+ * \param Hashes The hashsums of the file to download, if they are known;
+ * otherwise an empty list.
*
* \param Size The size of the file to download, if it is known;
* otherwise 0.
@@ -1173,7 +1158,7 @@ class pkgAcqFile : public pkgAcquire::Item
* is the absolute name to which the file should be downloaded.
*/
- pkgAcqFile(pkgAcquire *Owner, std::string URI, std::string Hash, unsigned long long Size,
+ pkgAcqFile(pkgAcquire *Owner, std::string URI, HashStringList const &Hashes, unsigned long long Size,
std::string Desc, std::string ShortDesc,
const std::string &DestDir="", const std::string &DestFilename="",
bool IsIndexFile=false);
diff --git a/apt-pkg/acquire-method.cc b/apt-pkg/acquire-method.cc
index 746c553f1..e4a937d1d 100644
--- a/apt-pkg/acquire-method.cc
+++ b/apt-pkg/acquire-method.cc
@@ -147,6 +147,16 @@ void pkgAcqMethod::URIStart(FetchResult &Res)
// AcqMethod::URIDone - A URI is finished /*{{{*/
// ---------------------------------------------------------------------
/* */
+static void printHashStringList(HashStringList const * const list)
+{
+ for (HashStringList::const_iterator hash = list->begin(); hash != list->end(); ++hash)
+ {
+ // very old compatibility name for MD5Sum
+ if (hash->HashType() == "MD5Sum")
+ std::cout << "MD5-Hash: " << hash->HashValue() << "\n";
+ std::cout << hash->HashType() << "-Hash: " << hash->HashValue() << "\n";
+ }
+}
void pkgAcqMethod::URIDone(FetchResult &Res, FetchResult *Alt)
{
if (Queue == 0)
@@ -164,15 +174,8 @@ void pkgAcqMethod::URIDone(FetchResult &Res, FetchResult *Alt)
if (Res.LastModified != 0)
std::cout << "Last-Modified: " << TimeRFC1123(Res.LastModified) << "\n";
- if (Res.MD5Sum.empty() == false)
- std::cout << "MD5-Hash: " << Res.MD5Sum << "\n"
- << "MD5Sum-Hash: " << Res.MD5Sum << "\n";
- if (Res.SHA1Sum.empty() == false)
- std::cout << "SHA1-Hash: " << Res.SHA1Sum << "\n";
- if (Res.SHA256Sum.empty() == false)
- std::cout << "SHA256-Hash: " << Res.SHA256Sum << "\n";
- if (Res.SHA512Sum.empty() == false)
- std::cout << "SHA512-Hash: " << Res.SHA512Sum << "\n";
+ printHashStringList(&Res.Hashes);
+
if (UsedMirror.empty() == false)
std::cout << "UsedMirror: " << UsedMirror << "\n";
if (Res.GPGVOutput.empty() == false)
@@ -200,15 +203,8 @@ void pkgAcqMethod::URIDone(FetchResult &Res, FetchResult *Alt)
if (Alt->LastModified != 0)
std::cout << "Alt-Last-Modified: " << TimeRFC1123(Alt->LastModified) << "\n";
- if (Alt->MD5Sum.empty() == false)
- std::cout << "Alt-MD5-Hash: " << Alt->MD5Sum << "\n";
- if (Alt->SHA1Sum.empty() == false)
- std::cout << "Alt-SHA1-Hash: " << Alt->SHA1Sum << "\n";
- if (Alt->SHA256Sum.empty() == false)
- std::cout << "Alt-SHA256-Hash: " << Alt->SHA256Sum << "\n";
- if (Alt->SHA512Sum.empty() == false)
- std::cout << "Alt-SHA512-Hash: " << Alt->SHA512Sum << "\n";
-
+ printHashStringList(&Alt->Hashes);
+
if (Alt->IMSHit == true)
std::cout << "Alt-IMS-Hit: true\n";
}
@@ -355,6 +351,15 @@ int pkgAcqMethod::Run(bool Single)
Tmp->LastModified = 0;
Tmp->IndexFile = StringToBool(LookupTag(Message,"Index-File"),false);
Tmp->FailIgnore = StringToBool(LookupTag(Message,"Fail-Ignore"),false);
+ Tmp->ExpectedHashes = HashStringList();
+ for (char const * const * t = HashString::SupportedHashes(); *t != NULL; ++t)
+ {
+ std::string tag = "Expected-";
+ tag.append(*t);
+ std::string const hash = LookupTag(Message, tag.c_str());
+ if (hash.empty() == false)
+ Tmp->ExpectedHashes.push_back(HashString(*t, hash));
+ }
Tmp->Next = 0;
// Append it to the list
@@ -442,12 +447,9 @@ pkgAcqMethod::FetchResult::FetchResult() : LastModified(0),
// ---------------------------------------------------------------------
/* This hides the number of hashes we are supporting from the caller.
It just deals with the hash class. */
-void pkgAcqMethod::FetchResult::TakeHashes(Hashes &Hash)
+void pkgAcqMethod::FetchResult::TakeHashes(class Hashes &Hash)
{
- MD5Sum = Hash.MD5.Result();
- SHA1Sum = Hash.SHA1.Result();
- SHA256Sum = Hash.SHA256.Result();
- SHA512Sum = Hash.SHA512.Result();
+ Hashes = Hash.GetHashStringList();
}
/*}}}*/
void pkgAcqMethod::Dequeue() { /*{{{*/
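
With FetchResult carrying a HashStringList instead of four fixed checksum strings, a download
method only has to feed its data through a Hashes object and hand the result over with
TakeHashes(). A minimal sketch, assuming a hypothetical MyHttpMethod and omitting the actual
transfer loop:

#include <apt-pkg/acquire-method.h>
#include <apt-pkg/fileutl.h>
#include <apt-pkg/hashes.h>

// Sketch only: MyHttpMethod and its (elided) transfer loop are hypothetical.
class MyHttpMethod : public pkgAcqMethod
{
   public:
   MyHttpMethod() : pkgAcqMethod("1.0", SingleInstance | SendConfig) {}

   protected:
   virtual bool Fetch(FetchItem *Itm)
   {
      Hashes Hash;                              // hashes all supported algorithms
      FileFd To(Itm->DestFile, FileFd::WriteOnly | FileFd::Create);
      // ... stream the download, calling Hash.Add(buffer, length) for each block ...

      FetchResult Res;
      Res.Filename = Itm->DestFile;
      Res.TakeHashes(Hash);                     // fills Res.Hashes for URIDone()
      URIDone(Res);
      return true;
   }
};
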
diff --git a/apt-pkg/acquire-method.h b/apt-pkg/acquire-method.h
index 221ccf273..cbf79f860 100644
--- a/apt-pkg/acquire-method.h
+++ b/apt-pkg/acquire-method.h
@@ -20,6 +20,7 @@
#ifndef PKGLIB_ACQUIRE_METHOD_H
#define PKGLIB_ACQUIRE_METHOD_H
+#include <apt-pkg/hashes.h>
#include <apt-pkg/macros.h>
#include <stdarg.h>
@@ -33,7 +34,6 @@
#include <apt-pkg/strutl.h>
#endif
-class Hashes;
class pkgAcqMethod
{
protected:
@@ -47,14 +47,12 @@ class pkgAcqMethod
time_t LastModified;
bool IndexFile;
bool FailIgnore;
+ HashStringList ExpectedHashes;
};
struct FetchResult
{
- std::string MD5Sum;
- std::string SHA1Sum;
- std::string SHA256Sum;
- std::string SHA512Sum;
+ HashStringList Hashes;
std::vector<std::string> GPGVOutput;
time_t LastModified;
bool IMSHit;
@@ -62,7 +60,7 @@ class pkgAcqMethod
unsigned long long Size;
unsigned long long ResumePoint;
- void TakeHashes(Hashes &Hash);
+ void TakeHashes(class Hashes &Hash);
FetchResult();
};
diff --git a/apt-pkg/acquire-worker.cc b/apt-pkg/acquire-worker.cc
index 047a655ce..54be8e99f 100644
--- a/apt-pkg/acquire-worker.cc
+++ b/apt-pkg/acquire-worker.cc
@@ -326,25 +326,30 @@ bool pkgAcquire::Worker::RunMessages()
Owner->DestFile.c_str(), LookupTag(Message,"Size","0").c_str(),TotalSize);
// see if there is a hash to verify
- string RecivedHash;
- HashString expectedHash(Owner->HashSum());
- if(!expectedHash.empty())
+ HashStringList RecivedHashes;
+ HashStringList expectedHashes = Owner->HashSums();
+ for (HashStringList::const_iterator hs = expectedHashes.begin(); hs != expectedHashes.end(); ++hs)
{
- string hashTag = expectedHash.HashType()+"-Hash";
- string hashSum = LookupTag(Message, hashTag.c_str());
- if(!hashSum.empty())
- RecivedHash = expectedHash.HashType() + ":" + hashSum;
- if(_config->FindB("Debug::pkgAcquire::Auth", false) == true)
- {
- clog << "201 URI Done: " << Owner->DescURI() << endl
- << "RecivedHash: " << RecivedHash << endl
- << "ExpectedHash: " << expectedHash.toStr()
- << endl << endl;
- }
+ std::string const tagname = hs->HashType() + "-Hash";
+ std::string const hashsum = LookupTag(Message, tagname.c_str());
+ if (hashsum.empty() == false)
+ RecivedHashes.push_back(HashString(hs->HashType(), hashsum));
+ }
+
+ if(_config->FindB("Debug::pkgAcquire::Auth", false) == true)
+ {
+ std::clog << "201 URI Done: " << Owner->DescURI() << endl
+ << "RecivedHash:" << endl;
+ for (HashStringList::const_iterator hs = RecivedHashes.begin(); hs != RecivedHashes.end(); ++hs)
+ std::clog << "\t- " << hs->toStr() << std::endl;
+ std::clog << "ExpectedHash:" << endl;
+ for (HashStringList::const_iterator hs = expectedHashes.begin(); hs != expectedHashes.end(); ++hs)
+ std::clog << "\t- " << hs->toStr() << std::endl;
+ std::clog << endl;
}
- Owner->Done(Message, ServerSize, RecivedHash.c_str(), Config);
+ Owner->Done(Message, ServerSize, RecivedHashes, Config);
ItemDone();
-
+
// Log that we are done
if (Log != 0)
{
@@ -525,6 +530,9 @@ bool pkgAcquire::Worker::QueueItem(pkgAcquire::Queue::QItem *Item)
Message.reserve(300);
Message += "URI: " + Item->URI;
Message += "\nFilename: " + Item->Owner->DestFile;
+ HashStringList const hsl = Item->Owner->HashSums();
+ for (HashStringList::const_iterator hs = hsl.begin(); hs != hsl.end(); ++hs)
+ Message += "\nExpected-" + hs->HashType() + ": " + hs->HashValue();
Message += Item->Owner->Custom600Headers();
Message += "\n\n";
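
In message terms, the worker now advertises the expected checksums when queueing an item (an
Expected-SHA256: line next to URI: and Filename:, one per hash type the item knows about) and,
on 201 URI Done, collects every matching <Type>-Hash: field into RecivedHashes before handing
the whole list to Item::Done(); the field names shown here follow directly from the tag
construction in the code above.
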
diff --git a/apt-pkg/acquire.cc b/apt-pkg/acquire.cc
index a187a00ae..57cbba169 100644
--- a/apt-pkg/acquire.cc
+++ b/apt-pkg/acquire.cc
@@ -31,6 +31,7 @@
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
+#include <iomanip>
#include <dirent.h>
#include <sys/time.h>
@@ -821,7 +822,9 @@ bool pkgAcquireStatus::Pulse(pkgAcquire *Owner)
// Compute the total number of bytes to fetch
unsigned int Unknown = 0;
unsigned int Count = 0;
- for (pkgAcquire::ItemCIterator I = Owner->ItemsBegin(); I != Owner->ItemsEnd();
+ bool UnfetchedReleaseFiles = false;
+ for (pkgAcquire::ItemCIterator I = Owner->ItemsBegin();
+ I != Owner->ItemsEnd();
++I, ++Count)
{
TotalItems++;
@@ -832,6 +835,13 @@ bool pkgAcquireStatus::Pulse(pkgAcquire *Owner)
if ((*I)->Local == true)
continue;
+ // see if the method tells us to expect more
+ TotalItems += (*I)->ExpectedAdditionalItems;
+
+ // check if there are unfetched Release files
+ if ((*I)->Complete == false && (*I)->ExpectedAdditionalItems > 0)
+ UnfetchedReleaseFiles = true;
+
TotalBytes += (*I)->FileSize;
if ((*I)->Complete == true)
CurrentBytes += (*I)->FileSize;
@@ -843,6 +853,7 @@ bool pkgAcquireStatus::Pulse(pkgAcquire *Owner)
unsigned long long ResumeSize = 0;
for (pkgAcquire::Worker *I = Owner->WorkersBegin(); I != 0;
I = Owner->WorkerStep(I))
+ {
if (I->CurrentItem != 0 && I->CurrentItem->Owner->Complete == false)
{
CurrentBytes += I->CurrentSize;
@@ -853,6 +864,7 @@ bool pkgAcquireStatus::Pulse(pkgAcquire *Owner)
I->CurrentItem->Owner->Complete == false)
TotalBytes += I->CurrentSize;
}
+ }
// Normalize the figures and account for unknown size downloads
if (TotalBytes <= 0)
@@ -863,6 +875,12 @@ bool pkgAcquireStatus::Pulse(pkgAcquire *Owner)
// Wha?! Is not supposed to happen.
if (CurrentBytes > TotalBytes)
CurrentBytes = TotalBytes;
+
+ // debug
+ if (_config->FindB("Debug::acquire::progress", false) == true)
+ std::clog << " Bytes: "
+ << SizeToStr(CurrentBytes) << " / " << SizeToStr(TotalBytes)
+ << std::endl;
// Compute the CPS
struct timeval NewTime;
@@ -883,6 +901,14 @@ bool pkgAcquireStatus::Pulse(pkgAcquire *Owner)
Time = NewTime;
}
+ // calculate the percentage; while Release files are still expected the totals are unreliable, so report 0
+ if (TotalBytes > 0 && UnfetchedReleaseFiles)
+ Percent = 0;
+ else
+ // use both files and bytes because bytes can be unreliable
+ Percent = (0.8 * (CurrentBytes/float(TotalBytes)*100.0) +
+ 0.2 * (CurrentItems/float(TotalItems)*100.0));
+
int fd = _config->FindI("APT::Status-Fd",-1);
if(fd > 0)
{
@@ -900,13 +926,11 @@ bool pkgAcquireStatus::Pulse(pkgAcquire *Owner)
else
snprintf(msg,sizeof(msg), _("Retrieving file %li of %li"), i, TotalItems);
-
-
// build the status str
status << "dlstatus:" << i
- << ":" << (CurrentBytes/float(TotalBytes)*100.0)
- << ":" << msg
- << endl;
+ << ":" << std::setprecision(3) << Percent
+ << ":" << msg
+ << endl;
std::string const dlstatus = status.str();
FileFd::Write(fd, dlstatus.c_str(), dlstatus.size());
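
As a worked illustration with invented numbers: 30 MB of 100 MB fetched and 2 of 10 items
completed now yield Percent = 0.8 * (30/100 * 100) + 0.2 * (2/10 * 100) = 24 + 4 = 28, where
the old dlstatus line would have reported 30 from bytes alone; and as long as unfetched
Release files still announce ExpectedAdditionalItems, the byte total is not trustworthy, so
the percentage is pinned to 0.
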
diff --git a/apt-pkg/acquire.h b/apt-pkg/acquire.h
index ef16d8556..0113021b2 100644
--- a/apt-pkg/acquire.h
+++ b/apt-pkg/acquire.h
@@ -714,6 +714,10 @@ class pkgAcquireStatus
/** \brief The number of items that have been successfully downloaded. */
unsigned long CurrentItems;
+ /** \brief The estimated percentage of the download (0-100)
+ */
+ double Percent;
+
public:
/** \brief If \b true, the download scheduler should call Pulse()
diff --git a/apt-pkg/aptconfiguration.cc b/apt-pkg/aptconfiguration.cc
index 9982759c6..94b6bc246 100644
--- a/apt-pkg/aptconfiguration.cc
+++ b/apt-pkg/aptconfiguration.cc
@@ -540,7 +540,7 @@ std::string const Configuration::getBuildProfilesString() {
return "";
std::vector<std::string>::const_iterator p = profiles.begin();
std::string list = *p;
- for (; p != profiles.end(); ++p)
+ for (++p; p != profiles.end(); ++p)
list.append(",").append(*p);
return list;
}
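
The one-character loop change above removes a duplicated first element: with hypothetical
profiles {"stage1", "nocheck"} the old code seeded list with *p and then appended it again,
producing "stage1,stage1,nocheck", while starting the loop at ++p yields the intended
"stage1,nocheck".
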
diff --git a/apt-pkg/cachefilter.h b/apt-pkg/cachefilter.h
index 49d2855f5..d9b957c67 100644
--- a/apt-pkg/cachefilter.h
+++ b/apt-pkg/cachefilter.h
@@ -16,71 +16,14 @@
namespace APT {
namespace CacheFilter {
-#define PACKAGE_MATCHER_ABI_COMPAT 1
-#ifdef PACKAGE_MATCHER_ABI_COMPAT
-
-// PackageNameMatchesRegEx /*{{{*/
-class PackageNameMatchesRegEx {
- /** \brief dpointer placeholder (for later in case we need it) */
- void *d;
- regex_t* pattern;
-public:
- PackageNameMatchesRegEx(std::string const &Pattern);
- bool operator() (pkgCache::PkgIterator const &Pkg);
- bool operator() (pkgCache::GrpIterator const &Grp);
- ~PackageNameMatchesRegEx();
-};
- /*}}}*/
-// PackageNameMatchesFnmatch /*{{{*/
- class PackageNameMatchesFnmatch {
- /** \brief dpointer placeholder (for later in case we need it) */
- void *d;
- const std::string Pattern;
-public:
- PackageNameMatchesFnmatch(std::string const &Pattern)
- : Pattern(Pattern) {};
- bool operator() (pkgCache::PkgIterator const &Pkg);
- bool operator() (pkgCache::GrpIterator const &Grp);
- ~PackageNameMatchesFnmatch() {};
-};
- /*}}}*/
-// PackageArchitectureMatchesSpecification /*{{{*/
-/** \class PackageArchitectureMatchesSpecification
- \brief matching against architecture specification strings
-
- The strings are of the format \<kernel\>-\<cpu\> where either component,
- or the whole string, can be the wildcard "any" as defined in
- debian-policy §11.1 "Architecture specification strings".
-
- Examples: i386, mipsel, linux-any, any-amd64, any */
-class PackageArchitectureMatchesSpecification {
- std::string literal;
- std::string complete;
- bool isPattern;
- /** \brief dpointer placeholder (for later in case we need it) */
- void *d;
-public:
- /** \brief matching against architecture specification strings
- *
- * @param pattern is the architecture specification string
- * @param isPattern defines if the given \b pattern is a
- * architecture specification pattern to match others against
- * or if it is the fixed string and matched against patterns
- */
- PackageArchitectureMatchesSpecification(std::string const &pattern, bool const isPattern = true);
- bool operator() (char const * const &arch);
- bool operator() (pkgCache::PkgIterator const &Pkg);
- bool operator() (pkgCache::VerIterator const &Ver);
- ~PackageArchitectureMatchesSpecification();
-};
-
-#else
-
class PackageMatcher {
public:
- virtual bool operator() (pkgCache::PkgIterator const &Pkg) { return false; };
- virtual bool operator() (pkgCache::GrpIterator const &Grp) { return false; };
- virtual bool operator() (pkgCache::VerIterator const &Ver) { return false; };
+ virtual bool operator() (pkgCache::PkgIterator const &/*Pkg*/) {
+ return false; };
+ virtual bool operator() (pkgCache::GrpIterator const &/*Grp*/) {
+ return false; };
+ virtual bool operator() (pkgCache::VerIterator const &/*Ver*/) {
+ return false; };
virtual ~PackageMatcher() {};
};
@@ -139,7 +82,6 @@ public:
virtual bool operator() (pkgCache::VerIterator const &Ver);
virtual ~PackageArchitectureMatchesSpecification();
};
-#endif
/*}}}*/
}
}
diff --git a/apt-pkg/cacheset.cc b/apt-pkg/cacheset.cc
index 2ed6a96da..5d7f28515 100644
--- a/apt-pkg/cacheset.cc
+++ b/apt-pkg/cacheset.cc
@@ -321,7 +321,8 @@ bool PackageContainerInterface::FromString(PackageContainerInterface * const pci
if (FromGroup(pci, Cache, str, helper) == false &&
FromTask(pci, Cache, str, helper) == false &&
#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13)
- FromFnmatch(pci, Cache, str, helper) == false)
+ // FIXME: hm, hm, regexp/fnmatch incompatible?
+ FromFnmatch(pci, Cache, str, helper) == false &&
#endif
FromRegEx(pci, Cache, str, helper) == false)
{
@@ -610,7 +611,7 @@ void CacheSetHelper::canNotFindRegEx(PackageContainerInterface * const /*pci*/,
}
#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13)
// canNotFindFnmatch - handle the case no package is found by a fnmatch /*{{{*/
-void CacheSetHelper::canNotFindFnmatch(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern) {
+ void CacheSetHelper::canNotFindFnmatch(PackageContainerInterface * const /*pci*/, pkgCacheFile &/*Cache*/, std::string pattern) {
if (ShowError == true)
_error->Insert(ErrorType, _("Couldn't find any package by glob '%s'"), pattern.c_str());
}
@@ -676,8 +677,8 @@ APT_CONST void CacheSetHelper::showRegExSelection(pkgCache::PkgIterator const &/
/*}}}*/
#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13)
// showFnmatchSelection /*{{{*/
-APT_CONST void CacheSetHelper::showFnmatchSelection(pkgCache::PkgIterator const &pkg,
- std::string const &pattern) {
+APT_CONST void CacheSetHelper::showFnmatchSelection(pkgCache::PkgIterator const &/*pkg*/,
+ std::string const &/*pattern*/) {
}
/*}}}*/
#endif
diff --git a/apt-pkg/cacheset.h b/apt-pkg/cacheset.h
index 16a3daa42..dde4e221e 100644
--- a/apt-pkg/cacheset.h
+++ b/apt-pkg/cacheset.h
@@ -584,7 +584,7 @@ public: /*{{{*/
static VersionContainer FromString(pkgCacheFile &Cache, std::string const &pkg,
Version const &fallback, CacheSetHelper &helper,
- bool const onlyFromName = false) {
+ bool const /*onlyFromName = false*/) {
VersionContainer vercon;
VersionContainerInterface::FromString(&vercon, Cache, pkg, fallback, helper);
return vercon;
diff --git a/apt-pkg/contrib/configuration.cc b/apt-pkg/contrib/configuration.cc
index 00f6ad0f9..fbe180f8e 100644
--- a/apt-pkg/contrib/configuration.cc
+++ b/apt-pkg/contrib/configuration.cc
@@ -254,7 +254,9 @@ string Configuration::FindDir(const char *Name,const char *Default) const
// ---------------------------------------------------------------------
/* Returns a vector of config values under the given item */
#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR < 13)
-vector<string> Configuration::FindVector(const char *Name) const { return FindVector(Name, ""); }
+vector<string> Configuration::FindVector(const char *Name) const {
+ return FindVector(Name, "");
+}
#endif
vector<string> Configuration::FindVector(const char *Name, std::string const &Default) const
{
diff --git a/apt-pkg/contrib/configuration.h b/apt-pkg/contrib/configuration.h
index c256139f4..6345c8a5d 100644
--- a/apt-pkg/contrib/configuration.h
+++ b/apt-pkg/contrib/configuration.h
@@ -84,12 +84,10 @@ class Configuration
* \param Default list of values separated by commas */
std::vector<std::string> FindVector(const char *Name, std::string const &Default) const;
std::vector<std::string> FindVector(std::string const &Name, std::string const &Default) const { return FindVector(Name.c_str(), Default); };
-#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13)
- std::vector<std::string> FindVector(const char *Name) const { return FindVector(Name, ""); };
-#else
+#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR < 13)
std::vector<std::string> FindVector(const char *Name) const;
#endif
- std::vector<std::string> FindVector(std::string const &Name) const { return FindVector(Name.c_str(), ""); };
+ std::vector<std::string> FindVector(std::string const &Name="") const { return FindVector(Name.c_str(), ""); };
int FindI(const char *Name,int const &Default = 0) const;
int FindI(std::string const &Name,int const &Default = 0) const {return FindI(Name.c_str(),Default);};
bool FindB(const char *Name,bool const &Default = false) const;
diff --git a/apt-pkg/contrib/fileutl.cc b/apt-pkg/contrib/fileutl.cc
index da81edbcc..6b8f04dea 100644
--- a/apt-pkg/contrib/fileutl.cc
+++ b/apt-pkg/contrib/fileutl.cc
@@ -657,6 +657,22 @@ string flCombine(string Dir,string File)
return Dir + '/' + File;
}
/*}}}*/
+// flAbsPath - Return the absolute path of the filename /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+string flAbsPath(string File)
+{
+ char *p = realpath(File.c_str(), NULL);
+ if (p == NULL)
+ {
+ _error->Errno("realpath", "flAbsPath failed");
+ return "";
+ }
+ std::string AbsPath(p);
+ free(p);
+ return AbsPath;
+}
+ /*}}}*/
// SetCloseExec - Set the close on exec flag /*{{{*/
// ---------------------------------------------------------------------
/* */
@@ -837,6 +853,27 @@ bool ExecWait(pid_t Pid,const char *Name,bool Reap)
}
/*}}}*/
+
+// StartsWithGPGClearTextSignature - Check if a file is Pgp/GPG clearsigned /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool StartsWithGPGClearTextSignature(string const &FileName)
+{
+ static const char* SIGMSG = "-----BEGIN PGP SIGNED MESSAGE-----\n";
+ char buffer[strlen(SIGMSG)+1];
+ FILE* gpg = fopen(FileName.c_str(), "r");
+ if (gpg == NULL)
+ return false;
+
+ char const * const test = fgets(buffer, sizeof(buffer), gpg);
+ fclose(gpg);
+ if (test == NULL || strcmp(buffer, SIGMSG) != 0)
+ return false;
+
+ return true;
+}
+
+
class FileFdPrivate { /*{{{*/
public:
#ifdef HAVE_ZLIB
@@ -1914,7 +1951,6 @@ bool FileFd::Close()
{
if ((Flags & Compressed) != Compressed && iFd > 0 && close(iFd) != 0)
Res &= _error->Errno("close",_("Problem closing the file %s"), FileName.c_str());
-
if (d != NULL)
{
Res &= d->CloseDown(FileName);
@@ -2041,6 +2077,31 @@ std::string GetTempDir()
return string(tmpdir);
}
+FileFd* GetTempFile(std::string const &Prefix, bool ImmediateUnlink)
+{
+ char fn[512];
+ FileFd *Fd = new FileFd();
+
+ std::string tempdir = GetTempDir();
+ snprintf(fn, sizeof(fn), "%s/%s.XXXXXX",
+ tempdir.c_str(), Prefix.c_str());
+ int fd = mkstemp(fn);
+ if(ImmediateUnlink)
+ unlink(fn);
+ if (fd < 0)
+ {
+ _error->Errno("GetTempFile",_("Unable to mkstemp %s"), fn);
+ delete Fd;
+ return NULL;
+ }
+ if (!Fd->OpenDescriptor(fd, FileFd::WriteOnly, FileFd::None, true))
+ {
+ _error->Errno("GetTempFile",_("Unable to write to %s"),fn);
+ delete Fd;
+ return NULL;
+ }
+
+ return Fd;
+}
+
bool Rename(std::string From, std::string To)
{
if (rename(From.c_str(),To.c_str()) != 0)
@@ -2052,6 +2113,61 @@ bool Rename(std::string From, std::string To)
return true;
}
+bool Popen(const char* Args[], FileFd &Fd, pid_t &Child, FileFd::OpenMode Mode)
+{
+ int fd;
+ if (Mode != FileFd::ReadOnly && Mode != FileFd::WriteOnly)
+ return _error->Error("Popen supports ReadOnly (x)or WriteOnly mode only");
+
+ int Pipe[2] = {-1, -1};
+ if(pipe(Pipe) != 0)
+ {
+ return _error->Errno("pipe", _("Failed to create subprocess IPC"));
+ return NULL;
+ }
+ std::set<int> keep_fds;
+ keep_fds.insert(Pipe[0]);
+ keep_fds.insert(Pipe[1]);
+ Child = ExecFork(keep_fds);
+ if(Child < 0)
+ return _error->Errno("fork", "Failed to fork");
+ if(Child == 0)
+ {
+ if(Mode == FileFd::ReadOnly)
+ {
+ close(Pipe[0]);
+ fd = Pipe[1];
+ }
+ else if(Mode == FileFd::WriteOnly)
+ {
+ close(Pipe[1]);
+ fd = Pipe[0];
+ }
+
+ if(Mode == FileFd::ReadOnly)
+ {
+ dup2(fd, 1);
+ dup2(fd, 2);
+ } else if(Mode == FileFd::WriteOnly)
+ dup2(fd, 0);
+
+ execv(Args[0], (char**)Args);
+ _exit(100);
+ }
+ if(Mode == FileFd::ReadOnly)
+ {
+ close(Pipe[1]);
+ fd = Pipe[0];
+ } else if(Mode == FileFd::WriteOnly)
+ {
+ close(Pipe[0]);
+ fd = Pipe[1];
+ }
+ Fd.OpenDescriptor(fd, Mode, FileFd::None, true);
+
+ return true;
+}
+
bool DropPrivs()
{
if (getuid() != 0)
@@ -2065,5 +2181,6 @@ bool DropPrivs()
return _error->Errno("setgid", "Failed to setgid");
if (setuid(pw->pw_uid) != 0)
return _error->Errno("setuid", "Failed to setuid");
+
return true;
}
diff --git a/apt-pkg/contrib/fileutl.h b/apt-pkg/contrib/fileutl.h
index 683c04157..e04f75e2a 100644
--- a/apt-pkg/contrib/fileutl.h
+++ b/apt-pkg/contrib/fileutl.h
@@ -168,6 +168,8 @@ time_t GetModificationTime(std::string const &Path);
bool Rename(std::string From, std::string To);
std::string GetTempDir();
+FileFd* GetTempFile(std::string const &Prefix = "",
+ bool ImmediateUnlink = true);
/** \brief Ensure the existence of the given Path
*
@@ -191,6 +193,10 @@ pid_t ExecFork(std::set<int> keep_fds);
void MergeKeepFdsFromConfiguration(std::set<int> &keep_fds);
bool ExecWait(pid_t Pid,const char *Name,bool Reap = false);
+
+// check if the given file starts with a PGP cleartext signature
+bool StartsWithGPGClearTextSignature(std::string const &FileName);
+
// process releated
bool DropPrivs();
@@ -201,7 +207,23 @@ std::string flNoLink(std::string File);
std::string flExtension(std::string File);
std::string flCombine(std::string Dir,std::string File);
+/** \brief Takes a file path and returns the absolute path
+ */
+std::string flAbsPath(std::string File);
+
// simple c++ glob
std::vector<std::string> Glob(std::string const &pattern, int flags=0);
+/** \brief Popen() implementation that uses execv() instead of a shell
+ *
+ * \param Args the execv style command to run
+ * \param Fd a reference to the FileFd to use for input or output
+ * \param Child a reference to the integer that stores the child pid
+ * Note that you must call ExecWait() or similar to clean up
+ * \param Mode is either FileFd::ReadOnly or FileFd::WriteOnly
+ * \return true on success, false on failure with _error set
+ */
+bool Popen(const char* Args[], FileFd &Fd, pid_t &Child, FileFd::OpenMode Mode);
+
+
#endif
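
A rough usage sketch for the two new helpers, loosely modelled on how debDebPkgFileIndex::Merge()
combines them later in this series; the command, prefix and buffer size are placeholders:

#include <apt-pkg/error.h>
#include <apt-pkg/fileutl.h>
#include <apt-pkg/sptr.h>

// Sketch only: run a child without a shell, capture its stdout and park the
// result in an (already unlinked) temporary file.
static bool CaptureToTempFile()
{
   const char *Args[] = { "/usr/bin/dpkg", "--version", NULL };
   FileFd Pipe;
   pid_t Child;
   if (Popen(Args, Pipe, Child, FileFd::ReadOnly) == false)
      return _error->Error("Popen failed");

   char Buf[4096];
   unsigned long long Got = 0;
   bool const ReadOk = Pipe.Read(Buf, sizeof(Buf) - 1, &Got);
   ExecWait(Child, "dpkg");                     // always reap the child
   if (ReadOk == false)
      return false;

   SPtr<FileFd> Tmp = GetTempFile("example");   // unlinked right away by default
   if (Tmp == NULL)
      return false;
   return Tmp->Write(Buf, Got);
}
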
diff --git a/apt-pkg/contrib/hashes.cc b/apt-pkg/contrib/hashes.cc
index 15f83615d..199e395f6 100644
--- a/apt-pkg/contrib/hashes.cc
+++ b/apt-pkg/contrib/hashes.cc
@@ -27,7 +27,7 @@
#include <iostream>
/*}}}*/
-const char* HashString::_SupportedHashes[] =
+const char * HashString::_SupportedHashes[] =
{
"SHA512", "SHA256", "SHA1", "MD5Sum", NULL
};
@@ -42,11 +42,16 @@ HashString::HashString(std::string Type, std::string Hash) : Type(Type), Hash(Ha
HashString::HashString(std::string StringedHash) /*{{{*/
{
- // legacy: md5sum without "MD5Sum:" prefix
- if (StringedHash.find(":") == std::string::npos && StringedHash.size() == 32)
+ if (StringedHash.find(":") == std::string::npos)
{
- Type = "MD5Sum";
- Hash = StringedHash;
+ // legacy: md5sum without "MD5Sum:" prefix
+ if (StringedHash.size() == 32)
+ {
+ Type = "MD5Sum";
+ Hash = StringedHash;
+ }
+ if(_config->FindB("Debug::Hashes",false) == true)
+ std::clog << "HashString(string): invalid StringedHash " << StringedHash << std::endl;
return;
}
std::string::size_type pos = StringedHash.find(":");
@@ -82,25 +87,25 @@ std::string HashString::GetHashForFile(std::string filename) const /*{{{*/
std::string fileHash;
FileFd Fd(filename, FileFd::ReadOnly);
- if(Type == "MD5Sum")
+ if(strcasecmp(Type.c_str(), "MD5Sum") == 0)
{
MD5Summation MD5;
MD5.AddFD(Fd);
fileHash = (std::string)MD5.Result();
}
- else if (Type == "SHA1")
+ else if (strcasecmp(Type.c_str(), "SHA1") == 0)
{
SHA1Summation SHA1;
SHA1.AddFD(Fd);
fileHash = (std::string)SHA1.Result();
}
- else if (Type == "SHA256")
+ else if (strcasecmp(Type.c_str(), "SHA256") == 0)
{
SHA256Summation SHA256;
SHA256.AddFD(Fd);
fileHash = (std::string)SHA256.Result();
}
- else if (Type == "SHA512")
+ else if (strcasecmp(Type.c_str(), "SHA512") == 0)
{
SHA512Summation SHA512;
SHA512.AddFD(Fd);
@@ -111,26 +116,147 @@ std::string HashString::GetHashForFile(std::string filename) const /*{{{*/
return fileHash;
}
/*}}}*/
-const char** HashString::SupportedHashes()
+const char** HashString::SupportedHashes() /*{{{*/
{
return _SupportedHashes;
}
-
-APT_PURE bool HashString::empty() const
+ /*}}}*/
+APT_PURE bool HashString::empty() const /*{{{*/
{
return (Type.empty() || Hash.empty());
}
+ /*}}}*/
+std::string HashString::toStr() const /*{{{*/
+{
+ return Type + ":" + Hash;
+}
+ /*}}}*/
+APT_PURE bool HashString::operator==(HashString const &other) const /*{{{*/
+{
+ return (strcasecmp(Type.c_str(), other.Type.c_str()) == 0 && Hash == other.Hash);
+}
+APT_PURE bool HashString::operator!=(HashString const &other) const
+{
+ return !(*this == other);
+}
+ /*}}}*/
+
+bool HashStringList::usable() const /*{{{*/
+{
+ if (empty() == true)
+ return false;
+ std::string const forcedType = _config->Find("Acquire::ForceHash", "");
+ if (forcedType.empty() == true)
+ return true;
+ return find(forcedType) != NULL;
+}
+ /*}}}*/
+HashString const * HashStringList::find(char const * const type) const /*{{{*/
+{
+ if (type == NULL || type[0] == '\0')
+ {
+ std::string const forcedType = _config->Find("Acquire::ForceHash", "");
+ if (forcedType.empty() == false)
+ return find(forcedType.c_str());
+ for (char const * const * t = HashString::SupportedHashes(); *t != NULL; ++t)
+ for (std::vector<HashString>::const_iterator hs = list.begin(); hs != list.end(); ++hs)
+ if (strcasecmp(hs->HashType().c_str(), *t) == 0)
+ return &*hs;
+ return NULL;
+ }
+ for (std::vector<HashString>::const_iterator hs = list.begin(); hs != list.end(); ++hs)
+ if (strcasecmp(hs->HashType().c_str(), type) == 0)
+ return &*hs;
+ return NULL;
+}
+ /*}}}*/
+bool HashStringList::supported(char const * const type) /*{{{*/
+{
+ for (char const * const * t = HashString::SupportedHashes(); *t != NULL; ++t)
+ if (strcasecmp(*t, type) == 0)
+ return true;
+ return false;
+}
+ /*}}}*/
+bool HashStringList::push_back(const HashString &hashString) /*{{{*/
+{
+ if (hashString.HashType().empty() == true ||
+ hashString.HashValue().empty() == true ||
+ supported(hashString.HashType().c_str()) == false)
+ return false;
+
+ // ensure that each type is added only once
+ HashString const * const hs = find(hashString.HashType().c_str());
+ if (hs != NULL)
+ return *hs == hashString;
-std::string HashString::toStr() const
+ list.push_back(hashString);
+ return true;
+}
+ /*}}}*/
+bool HashStringList::VerifyFile(std::string filename) const /*{{{*/
{
- return Type + std::string(":") + Hash;
+ if (list.empty() == true)
+ return false;
+ HashString const * const hs = find(NULL);
+ if (hs == NULL || hs->VerifyFile(filename) == false)
+ return false;
+ return true;
}
+ /*}}}*/
+bool HashStringList::operator==(HashStringList const &other) const /*{{{*/
+{
+ std::string const forcedType = _config->Find("Acquire::ForceHash", "");
+ if (forcedType.empty() == false)
+ {
+ HashString const * const hs = find(forcedType);
+ HashString const * const ohs = other.find(forcedType);
+ if (hs == NULL || ohs == NULL)
+ return false;
+ return *hs == *ohs;
+ }
+ short matches = 0;
+ for (const_iterator hs = begin(); hs != end(); ++hs)
+ {
+ HashString const * const ohs = other.find(hs->HashType());
+ if (ohs == NULL)
+ continue;
+ if (*hs != *ohs)
+ return false;
+ ++matches;
+ }
+ if (matches == 0)
+ return false;
+ return true;
+}
+bool HashStringList::operator!=(HashStringList const &other) const
+{
+ return !(*this == other);
+}
+ /*}}}*/
-// Hashes::AddFD - Add the contents of the FD /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-bool Hashes::AddFD(int const Fd,unsigned long long Size, bool const addMD5,
- bool const addSHA1, bool const addSHA256, bool const addSHA512)
+// Hashes::Add* - Add the contents of data or FD /*{{{*/
+bool Hashes::Add(const unsigned char * const Data,unsigned long long const Size, unsigned int const Hashes)
+{
+ bool Res = true;
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
+ if ((Hashes & MD5SUM) == MD5SUM)
+ Res &= MD5.Add(Data, Size);
+ if ((Hashes & SHA1SUM) == SHA1SUM)
+ Res &= SHA1.Add(Data, Size);
+ if ((Hashes & SHA256SUM) == SHA256SUM)
+ Res &= SHA256.Add(Data, Size);
+ if ((Hashes & SHA512SUM) == SHA512SUM)
+ Res &= SHA512.Add(Data, Size);
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
+ return Res;
+}
+bool Hashes::AddFD(int const Fd,unsigned long long Size, unsigned int const Hashes)
{
unsigned char Buf[64*64];
bool const ToEOF = (Size == UntilEOF);
@@ -144,19 +270,12 @@ bool Hashes::AddFD(int const Fd,unsigned long long Size, bool const addMD5,
if (ToEOF && Res == 0) // EOF
break;
Size -= Res;
- if (addMD5 == true)
- MD5.Add(Buf,Res);
- if (addSHA1 == true)
- SHA1.Add(Buf,Res);
- if (addSHA256 == true)
- SHA256.Add(Buf,Res);
- if (addSHA512 == true)
- SHA512.Add(Buf,Res);
+ if (Add(Buf, Res, Hashes) == false)
+ return false;
}
return true;
}
-bool Hashes::AddFD(FileFd &Fd,unsigned long long Size, bool const addMD5,
- bool const addSHA1, bool const addSHA256, bool const addSHA512)
+bool Hashes::AddFD(FileFd &Fd,unsigned long long Size, unsigned int const Hashes)
{
unsigned char Buf[64*64];
bool const ToEOF = (Size == 0);
@@ -175,15 +294,35 @@ bool Hashes::AddFD(FileFd &Fd,unsigned long long Size, bool const addMD5,
else if (a == 0) // EOF
break;
Size -= a;
- if (addMD5 == true)
- MD5.Add(Buf, a);
- if (addSHA1 == true)
- SHA1.Add(Buf, a);
- if (addSHA256 == true)
- SHA256.Add(Buf, a);
- if (addSHA512 == true)
- SHA512.Add(Buf, a);
+ if (Add(Buf, a, Hashes) == false)
+ return false;
}
return true;
}
/*}}}*/
+HashStringList Hashes::GetHashStringList()
+{
+ HashStringList hashes;
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
+ hashes.push_back(HashString("MD5Sum", MD5.Result().Value()));
+ hashes.push_back(HashString("SHA1", SHA1.Result().Value()));
+ hashes.push_back(HashString("SHA256", SHA256.Result().Value()));
+ hashes.push_back(HashString("SHA512", SHA512.Result().Value()));
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
+ return hashes;
+}
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+ #pragma GCC diagnostic ignored "-Wsuggest-attribute=const"
+#endif
+Hashes::Hashes() {}
+Hashes::~Hashes() {}
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
diff --git a/apt-pkg/contrib/hashes.h b/apt-pkg/contrib/hashes.h
index 7a62f8a8f..caeba006d 100644
--- a/apt-pkg/contrib/hashes.h
+++ b/apt-pkg/contrib/hashes.h
@@ -17,6 +17,7 @@
#include <apt-pkg/md5.h>
#include <apt-pkg/sha1.h>
#include <apt-pkg/sha2.h>
+#include <apt-pkg/macros.h>
#include <cstring>
#include <string>
@@ -41,7 +42,7 @@ class HashString
protected:
std::string Type;
std::string Hash;
- static const char* _SupportedHashes[10];
+ static const char * _SupportedHashes[10];
// internal helper
std::string GetHashForFile(std::string filename) const;
@@ -52,7 +53,8 @@ class HashString
HashString();
// get hash type used
- std::string HashType() { return Type; };
+ std::string HashType() const { return Type; };
+ std::string HashValue() const { return Hash; };
// verify the given filename against the currently loaded hash
bool VerifyFile(std::string filename) const;
@@ -64,37 +66,160 @@ class HashString
// helper
std::string toStr() const; // convert to str as "type:hash"
bool empty() const;
+ bool operator==(HashString const &other) const;
+ bool operator!=(HashString const &other) const;
// return the list of hashes we support
static APT_CONST const char** SupportedHashes();
};
+class HashStringList
+{
+ public:
+ /** find best hash if no specific one is requested
+ *
+ * @param type of the checksum to return, can be \b NULL
+ * @return If type is \b NULL (or the empty string) it will
+ * return the 'best' hash; otherwise the hash which was
+ * specifically requested. If no hash is found \b NULL will be returned.
+ */
+ HashString const * find(char const * const type) const;
+ HashString const * find(std::string const &type) const { return find(type.c_str()); }
+ /** check if the given hash type is supported
+ *
+ * @param type to check
+ * @return true if supported, otherwise false
+ */
+ static APT_PURE bool supported(char const * const type);
+ /** add the given #HashString to the list
+ *
+ * @param hashString to add
+ * @return true if the hash was added because it is supported and
+ * no different hash of the same type was already included, otherwise false
+ */
+ bool push_back(const HashString &hashString);
+ /** @return size of the list of HashStrings */
+ size_t size() const { return list.size(); }
+
+ /** take the 'best' hash and verify file with it
+ *
+ * @param filename to verify
+ * @return true if the file matches the hashsum, otherwise false
+ */
+ bool VerifyFile(std::string filename) const;
+
+ /** is the list empty ?
+ *
+ * @return \b true if the list is empty, otherwise \b false
+ */
+ bool empty() const { return list.empty(); }
+
+ /** has the list at least one good entry
+ *
+ * similar to #empty, but handles forced hashes.
+ *
+ * @return if no hash is forced, the negation of #empty;
+ * if one is forced, \b true if that hash is available, \b false otherwise
+ */
+ bool usable() const;
+
+ typedef std::vector<HashString>::const_iterator const_iterator;
+
+ /** iterator to the first element */
+ const_iterator begin() const { return list.begin(); }
+
+ /** iterator to the end element */
+ const_iterator end() const { return list.end(); }
+
+ /** start fresh with a clear list */
+ void clear() { list.clear(); }
+
+ /** compare two HashStringLists for similarity.
+ *
+ * Two lists are similar if at least one hashtype is in both lists
+ * and the hashsum matches. All hashes are checked by default;
+ * if one doesn't match, false is returned regardless of how many
+ * matched before. If a hash is forced, only that hash is compared
+ * and all others are ignored.
+ */
+ bool operator==(HashStringList const &other) const;
+ bool operator!=(HashStringList const &other) const;
+
+ HashStringList() {}
+
+ // simplifying API-compatibility constructors
+ HashStringList(std::string const &hash) {
+ if (hash.empty() == false)
+ list.push_back(HashString(hash));
+ }
+ HashStringList(char const * const hash) {
+ if (hash != NULL && hash[0] != '\0')
+ list.push_back(HashString(hash));
+ }
+
+ private:
+ std::vector<HashString> list;
+};
+
class Hashes
{
+ /** \brief dpointer placeholder */
+ void *d;
+
public:
+ /* those will disappear in the future as it is hard to add new ones this way.
+ * Use Add* to build the results and get them via GetHashStringList() instead */
+ APT_DEPRECATED MD5Summation MD5;
+ APT_DEPRECATED SHA1Summation SHA1;
+ APT_DEPRECATED SHA256Summation SHA256;
+ APT_DEPRECATED SHA512Summation SHA512;
- MD5Summation MD5;
- SHA1Summation SHA1;
- SHA256Summation SHA256;
- SHA512Summation SHA512;
-
static const int UntilEOF = 0;
- inline bool Add(const unsigned char *Data,unsigned long long Size)
+ bool Add(const unsigned char * const Data, unsigned long long const Size, unsigned int const Hashes = ~0);
+ inline bool Add(const char * const Data)
+ {return Add((unsigned char const * const)Data,strlen(Data));};
+ inline bool Add(const unsigned char * const Beg,const unsigned char * const End)
+ {return Add(Beg,End-Beg);};
+
+ enum SupportedHashes { MD5SUM = (1 << 0), SHA1SUM = (1 << 1), SHA256SUM = (1 << 2),
+ SHA512SUM = (1 << 3) };
+ bool AddFD(int const Fd,unsigned long long Size = 0, unsigned int const Hashes = ~0);
+ bool AddFD(FileFd &Fd,unsigned long long Size = 0, unsigned int const Hashes = ~0);
+
+ HashStringList GetHashStringList();
+
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
+ Hashes();
+ virtual ~Hashes();
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
+
+ private:
+ APT_HIDDEN APT_CONST inline unsigned int boolsToFlag(bool const addMD5, bool const addSHA1, bool const addSHA256, bool const addSHA512)
{
- return MD5.Add(Data,Size) && SHA1.Add(Data,Size) && SHA256.Add(Data,Size) && SHA512.Add(Data,Size);
+ unsigned int Hashes = ~0;
+ if (addMD5 == false) Hashes &= ~MD5SUM;
+ if (addSHA1 == false) Hashes &= ~SHA1SUM;
+ if (addSHA256 == false) Hashes &= ~SHA256SUM;
+ if (addSHA512 == false) Hashes &= ~SHA512SUM;
+ return Hashes;
+ }
+
+ public:
+ APT_DEPRECATED bool AddFD(int const Fd, unsigned long long Size, bool const addMD5,
+ bool const addSHA1, bool const addSHA256, bool const addSHA512) {
+ return AddFD(Fd, Size, boolsToFlag(addMD5, addSHA1, addSHA256, addSHA512));
+ };
+
+ APT_DEPRECATED bool AddFD(FileFd &Fd, unsigned long long Size, bool const addMD5,
+ bool const addSHA1, bool const addSHA256, bool const addSHA512) {
+ return AddFD(Fd, Size, boolsToFlag(addMD5, addSHA1, addSHA256, addSHA512));
};
- inline bool Add(const char *Data) {return Add((unsigned char const *)Data,strlen(Data));};
- inline bool AddFD(int const Fd,unsigned long long Size = 0)
- { return AddFD(Fd, Size, true, true, true, true); };
- bool AddFD(int const Fd, unsigned long long Size, bool const addMD5,
- bool const addSHA1, bool const addSHA256, bool const addSHA512);
- inline bool AddFD(FileFd &Fd,unsigned long long Size = 0)
- { return AddFD(Fd, Size, true, true, true, true); };
- bool AddFD(FileFd &Fd, unsigned long long Size, bool const addMD5,
- bool const addSHA1, bool const addSHA256, bool const addSHA512);
- inline bool Add(const unsigned char *Beg,const unsigned char *End)
- {return Add(Beg,End-Beg);};
};
#endif
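
A minimal sketch of how the list-based interfaces are meant to be combined (the path is a
placeholder and no error reporting beyond the return value is attempted):

#include <apt-pkg/fileutl.h>
#include <apt-pkg/hashes.h>

// Sketch only: hash a file with every supported algorithm in one pass and
// compare the result with the hashes an index promised for it.
static bool VerifyDownload(std::string const &Path, HashStringList const &Expected)
{
   Hashes Hash;
   FileFd Fd(Path, FileFd::ReadOnly);
   if (Fd.IsOpen() == false || Hash.AddFD(Fd) == false)
      return false;

   // operator== needs at least one hash type present in both lists and all
   // overlapping types to agree; Acquire::ForceHash narrows it to that one type.
   return Hash.GetHashStringList() == Expected;
}

An expected list of this kind would typically be filled with push_back(HashString("SHA256", value))
while parsing a Release or Packages stanza and queried with find() when only one type matters.
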
diff --git a/apt-pkg/contrib/macros.h b/apt-pkg/contrib/macros.h
index 2d6448e5e..b268ce24c 100644
--- a/apt-pkg/contrib/macros.h
+++ b/apt-pkg/contrib/macros.h
@@ -138,7 +138,7 @@
// Non-ABI-Breaks should only increase RELEASE number.
// See also buildlib/libversion.mak
#define APT_PKG_MAJOR 4
-#define APT_PKG_MINOR 12
+#define APT_PKG_MINOR 13
#define APT_PKG_RELEASE 0
#endif
diff --git a/apt-pkg/contrib/netrc.cc b/apt-pkg/contrib/netrc.cc
index feaed67c8..1e3778f45 100644
--- a/apt-pkg/contrib/netrc.cc
+++ b/apt-pkg/contrib/netrc.cc
@@ -152,18 +152,6 @@ static int parsenetrc_string (char *host, std::string &login, std::string &passw
return retcode;
}
-// for some unknown reason this method is exported so keep a compatible interface for now …
-int parsenetrc (char *host, char *login, char *password, char *netrcfile = NULL)
-{
- std::string login_string, password_string;
- int const ret = parsenetrc_string(host, login_string, password_string, netrcfile);
- if (ret < 0)
- return ret;
- strncpy(login, login_string.c_str(), LOGINSIZE - 1);
- strncpy(password, password_string.c_str(), PASSWORDSIZE - 1);
- return ret;
-}
-
void maybe_add_auth (URI &Uri, string NetRCFile)
{
diff --git a/apt-pkg/contrib/netrc.h b/apt-pkg/contrib/netrc.h
index dbeb45386..b5b56f5d4 100644
--- a/apt-pkg/contrib/netrc.h
+++ b/apt-pkg/contrib/netrc.h
@@ -27,9 +27,5 @@
class URI;
-// FIXME: kill this export on the next ABI break - strongly doubt its in use anyway
-// outside of the apt itself, its really a internal interface
-APT_DEPRECATED int parsenetrc (char *host, char *login, char *password, char *filename);
-
void maybe_add_auth (URI &Uri, std::string NetRCFile);
#endif
diff --git a/apt-pkg/deb/debindexfile.cc b/apt-pkg/deb/debindexfile.cc
index a0dd15cd8..3bdc551b4 100644
--- a/apt-pkg/deb/debindexfile.cc
+++ b/apt-pkg/deb/debindexfile.cc
@@ -30,6 +30,7 @@
#include <apt-pkg/pkgcachegen.h>
#include <apt-pkg/pkgrecords.h>
#include <apt-pkg/srcrecords.h>
+#include <apt-pkg/sptr.h>
#include <stdio.h>
#include <iostream>
@@ -667,6 +668,133 @@ APT_CONST bool debStatusIndex::Exists() const
}
/*}}}*/
+// debDebPkgFileIndex - Single .deb file /*{{{*/
+// ---------------------------------------------------------------------
+debDebPkgFileIndex::debDebPkgFileIndex(std::string DebFile)
+ : pkgIndexFile(true), DebFile(DebFile)
+{
+ DebFileFullPath = flAbsPath(DebFile);
+}
+
+std::string debDebPkgFileIndex::ArchiveURI(std::string /*File*/) const
+{
+ return "file:" + DebFileFullPath;
+}
+
+bool debDebPkgFileIndex::Exists() const
+{
+ return FileExists(DebFile);
+}
+bool debDebPkgFileIndex::Merge(pkgCacheGenerator& Gen, OpProgress* Prog) const
+{
+ if(Prog)
+ Prog->SubProgress(0, "Reading deb file");
+
+ // get the control data out of the deb file via dpkg -I
+ // ... can I haz libdpkg?
+ std::string dpkg = _config->Find("Dir::Bin::dpkg","dpkg");
+ const char *Args[5] = {dpkg.c_str(),
+ "-I",
+ DebFile.c_str(),
+ "control",
+ NULL};
+ FileFd PipeFd;
+ pid_t Child;
+ if(Popen(Args, PipeFd, Child, FileFd::ReadOnly) == false)
+ return _error->Error("Popen failed");
+ // FIXME: static buffer
+ char buf[8*1024];
+ unsigned long long n = 0;
+ if(PipeFd.Read(buf, sizeof(buf)-1, &n) == false)
+ return _error->Errno("read", "Failed to read dpkg pipe");
+ ExecWait(Child, "Popen");
+
+ // now write the control data to a tempfile
+ SPtr<FileFd> DebControl = GetTempFile("deb-file-" + flNotDir(DebFile));
+ if(DebControl == NULL)
+ return false;
+ DebControl->Write(buf, n);
+ // append size of the file
+ FileFd Fd(DebFile, FileFd::ReadOnly);
+ string Size;
+ strprintf(Size, "Size: %llu\n", Fd.Size());
+ DebControl->Write(Size.c_str(), Size.size());
+ // and rewind for the listparser
+ DebControl->Seek(0);
+
+ // and give it to the list parser
+ debDebFileParser Parser(DebControl, DebFile);
+ if(Gen.SelectFile(DebFile, "local", *this) == false)
+ return _error->Error("Problem with SelectFile %s", DebFile.c_str());
+
+ pkgCache::PkgFileIterator File = Gen.GetCurFile();
+ File->Size = DebControl->Size();
+ File->mtime = DebControl->ModificationTime();
+
+ if (Gen.MergeList(Parser) == false)
+ return _error->Error("Problem with MergeLister for %s", DebFile.c_str());
+
+ return true;
+}
+pkgCache::PkgFileIterator debDebPkgFileIndex::FindInCache(pkgCache &Cache) const
+{
+ // FIXME: we could simply always return pkgCache::PkgFileIterator(Cache);
+ // to indicate it's never in the cache, which will force a Merge()
+ pkgCache::PkgFileIterator File = Cache.FileBegin();
+ for (; File.end() == false; ++File)
+ {
+ if (File.FileName() == NULL || DebFile != File.FileName())
+ continue;
+
+ return File;
+ }
+
+ return File;
+}
+unsigned long debDebPkgFileIndex::Size() const
+{
+ struct stat buf;
+ if(stat(DebFile.c_str(), &buf) != 0)
+ return 0;
+ return buf.st_size;
+}
+ /*}}}*/
+
+// debDscFileIndex stuff
+debDscFileIndex::debDscFileIndex(std::string &DscFile)
+ : pkgIndexFile(true), DscFile(DscFile)
+{
+}
+
+bool debDscFileIndex::Exists() const
+{
+ return FileExists(DscFile);
+}
+
+unsigned long debDscFileIndex::Size() const
+{
+ struct stat buf;
+ if(stat(DscFile.c_str(), &buf) == 0)
+ return buf.st_size;
+ return 0;
+}
+
+// DscFileIndex::CreateSrcParser - Get a parser for the .dsc file /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+pkgSrcRecords::Parser *debDscFileIndex::CreateSrcParser() const
+{
+ if (!FileExists(DscFile))
+ return NULL;
+
+ return new debDscRecordParser(DscFile,this);
+}
+ /*}}}*/
+
+
+
+
+// ---------------------------------------------------------------------
// Index File types for Debian /*{{{*/
class debIFTypeSrc : public pkgIndexFile::Type
{
@@ -699,10 +827,42 @@ class debIFTypeStatus : public pkgIndexFile::Type
};
debIFTypeStatus() {Label = "Debian dpkg status file";};
};
+class debIFTypeDebPkgFile : public pkgIndexFile::Type
+{
+ public:
+ virtual pkgRecords::Parser *CreatePkgParser(pkgCache::PkgFileIterator File) const
+ {
+ return new debDebFileRecordParser(File.FileName(),*File.Cache());
+ };
+ debIFTypeDebPkgFile() {Label = "deb Package file";};
+};
+class debIFTypeDscFile : public pkgIndexFile::Type
+{
+ public:
+ virtual pkgSrcRecords::Parser *CreateSrcPkgParser(std::string DscFile) const
+ {
+ return new debDscRecordParser(DscFile, NULL);
+ };
+ debIFTypeDscFile() {Label = "dsc File Source Index";};
+};
+class debIFTypeDebianSourceDir : public pkgIndexFile::Type
+{
+ public:
+ virtual pkgSrcRecords::Parser *CreateSrcPkgParser(std::string SourceDir) const
+ {
+ return new debDscRecordParser(SourceDir + string("/debian/control"), NULL);
+ };
+ debIFTypeDebianSourceDir() {Label = "debian/control File Source Index";};
+};
+
static debIFTypeSrc _apt_Src;
static debIFTypePkg _apt_Pkg;
static debIFTypeTrans _apt_Trans;
static debIFTypeStatus _apt_Status;
+static debIFTypeDebPkgFile _apt_DebPkgFile;
+// file based pseudo indexes
+static debIFTypeDscFile _apt_DscFile;
+static debIFTypeDebianSourceDir _apt_DebianSourceDir;
const pkgIndexFile::Type *debSourcesIndex::GetType() const
{
@@ -720,5 +880,16 @@ const pkgIndexFile::Type *debStatusIndex::GetType() const
{
return &_apt_Status;
}
-
+const pkgIndexFile::Type *debDebPkgFileIndex::GetType() const
+{
+ return &_apt_DebPkgFile;
+}
+const pkgIndexFile::Type *debDscFileIndex::GetType() const
+{
+ return &_apt_DscFile;
+}
+const pkgIndexFile::Type *debDebianSourceDirIndex::GetType() const
+{
+ return &_apt_DebianSourceDir;
+}
/*}}}*/
diff --git a/apt-pkg/deb/debindexfile.h b/apt-pkg/deb/debindexfile.h
index 017c69a0a..18322dc1b 100644
--- a/apt-pkg/deb/debindexfile.h
+++ b/apt-pkg/deb/debindexfile.h
@@ -164,4 +164,57 @@ class debSourcesIndex : public pkgIndexFile
virtual ~debSourcesIndex() {};
};
+class debDebPkgFileIndex : public pkgIndexFile
+{
+ private:
+ void *d;
+ std::string DebFile;
+ std::string DebFileFullPath;
+
+ public:
+ virtual const Type *GetType() const APT_CONST;
+
+ virtual std::string Describe(bool /*Short*/) const {
+ return DebFile;
+ }
+
+ // Interface for the Cache Generator
+ virtual bool Exists() const;
+ virtual bool HasPackages() const {
+ return true;
+ };
+ virtual unsigned long Size() const;
+ virtual bool Merge(pkgCacheGenerator &Gen,OpProgress *Prog) const;
+ virtual pkgCache::PkgFileIterator FindInCache(pkgCache &Cache) const;
+
+ // Interface for acquire
+ virtual std::string ArchiveURI(std::string /*File*/) const;
+
+ debDebPkgFileIndex(std::string DebFile);
+ virtual ~debDebPkgFileIndex() {};
+};
+
+class debDscFileIndex : public pkgIndexFile
+{
+ private:
+ std::string DscFile;
+ public:
+ virtual const Type *GetType() const APT_CONST;
+ virtual pkgSrcRecords::Parser *CreateSrcParser() const;
+ virtual bool Exists() const;
+ virtual bool HasPackages() const {return false;};
+ virtual unsigned long Size() const;
+ virtual std::string Describe(bool /*Short*/) const {
+ return DscFile;
+ };
+
+ debDscFileIndex(std::string &DscFile);
+ virtual ~debDscFileIndex() {};
+};
+
+class debDebianSourceDirIndex : public debDscFileIndex
+{
+ virtual const Type *GetType() const APT_CONST;
+};
+
#endif
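
A rough standalone illustration of the new .deb index class; the path is made up, and in normal
operation the class is instantiated through the deb-file source-list type added in
debmetaindex.cc below:

#include <apt-pkg/debindexfile.h>
#include <iostream>

// Sketch only: the .deb path is a placeholder.
int main()
{
   debDebPkgFileIndex Idx("/tmp/hello_1.0_amd64.deb");
   if (Idx.Exists() == false)
      return 1;
   // ArchiveURI() ignores its argument and returns "file:" plus the absolute
   // path resolved by flAbsPath() in the constructor.
   std::cout << Idx.ArchiveURI("") << " (" << Idx.Size() << " bytes)" << std::endl;
   return 0;
}
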
diff --git a/apt-pkg/deb/deblistparser.cc b/apt-pkg/deb/deblistparser.cc
index a1bcfb710..40d332196 100644
--- a/apt-pkg/deb/deblistparser.cc
+++ b/apt-pkg/deb/deblistparser.cc
@@ -194,35 +194,31 @@ bool debListParser::NewVersion(pkgCache::VerIterator &Ver)
/* This is to return the string describing the package in debian
form. If this returns the blank string then the entry is assumed to
only describe package properties */
-string debListParser::Description()
+string debListParser::Description(std::string const &lang)
{
- string const lang = DescriptionLanguage();
if (lang.empty())
return Section.FindS("Description");
else
return Section.FindS(string("Description-").append(lang).c_str());
}
- /*}}}*/
-// ListParser::DescriptionLanguage - Return the description lang string /*{{{*/
-// ---------------------------------------------------------------------
-/* This is to return the string describing the language of
- description. If this returns the blank string then the entry is
- assumed to describe original description. */
-string debListParser::DescriptionLanguage()
+ /*}}}*/
+// ListParser::AvailableDescriptionLanguages /*{{{*/
+std::vector<std::string> debListParser::AvailableDescriptionLanguages()
{
- if (Section.FindS("Description").empty() == false)
- return "";
-
- std::vector<string> const lang = APT::Configuration::getLanguages(true);
- for (std::vector<string>::const_iterator l = lang.begin();
- l != lang.end(); ++l)
- if (Section.FindS(string("Description-").append(*l).c_str()).empty() == false)
- return *l;
-
- return "";
+ std::vector<std::string> const understood = APT::Configuration::getLanguages();
+ std::vector<std::string> avail;
+ if (Section.Exists("Description") == true)
+ avail.push_back("");
+ for (std::vector<std::string>::const_iterator lang = understood.begin(); lang != understood.end(); ++lang)
+ {
+ std::string const tagname = "Description-" + *lang;
+ if (Section.Exists(tagname.c_str()) == true)
+ avail.push_back(*lang);
+ }
+ return avail;
}
- /*}}}*/
-// ListParser::Description - Return the description_md5 MD5SumValue /*{{{*/
+ /*}}}*/
+// ListParser::Description_md5 - Return the description_md5 MD5SumValue /*{{{*/
// ---------------------------------------------------------------------
/* This is to return the md5 string to allow the check if it is the right
description. If no Description-md5 is found in the section it will be
@@ -233,7 +229,7 @@ MD5SumValue debListParser::Description_md5()
string const value = Section.FindS("Description-md5");
if (value.empty() == true)
{
- std::string const desc = Description() + "\n";
+ std::string const desc = Description("") + "\n";
if (desc == "\n")
return MD5SumValue();
@@ -959,3 +955,23 @@ bool debListParser::SameVersion(unsigned short const Hash, /*{{{*/
}
/*}}}*/
#endif
+
+
+debDebFileParser::debDebFileParser(FileFd *File, std::string const &DebFile)
+ : debListParser(File, ""), DebFile(DebFile)
+{
+}
+
+bool debDebFileParser::UsePackage(pkgCache::PkgIterator &Pkg,
+ pkgCache::VerIterator &Ver)
+{
+ bool res = debListParser::UsePackage(Pkg, Ver);
+ // we use the full file path as a provides so that the file is found
+ // by its name
+ if(NewProvidesAllArch(Ver, DebFile, Ver.VerStr()) == false)
+ return false;
+ return res;
+}
+
+
+
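
To make the behavioural shift concrete with invented values: for a stanza carrying Description
and Description-de fields while the configured languages resolve to {de, fr},
AvailableDescriptionLanguages() returns {"", "de"}, i.e. the untranslated entry plus every
understood translation actually present, whereas the old DescriptionLanguage() reduced this to
a single best match.
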
diff --git a/apt-pkg/deb/deblistparser.h b/apt-pkg/deb/deblistparser.h
index 3b6963211..56a83b36e 100644
--- a/apt-pkg/deb/deblistparser.h
+++ b/apt-pkg/deb/deblistparser.h
@@ -56,7 +56,8 @@ class debListParser : public pkgCacheGenerator::ListParser
bool ParseProvides(pkgCache::VerIterator &Ver);
bool NewProvidesAllArch(pkgCache::VerIterator &Ver, std::string const &Package, std::string const &Version);
static bool GrabWord(std::string Word,WordList *List,unsigned char &Out);
-
+ APT_HIDDEN unsigned char ParseMultiArch(bool const showErrors);
+
public:
static unsigned char GetPrio(std::string Str);
@@ -67,8 +68,8 @@ class debListParser : public pkgCacheGenerator::ListParser
virtual bool ArchitectureAll();
virtual std::string Version();
virtual bool NewVersion(pkgCache::VerIterator &Ver);
- virtual std::string Description();
- virtual std::string DescriptionLanguage();
+ virtual std::string Description(std::string const &lang);
+ virtual std::vector<std::string> AvailableDescriptionLanguages();
virtual MD5SumValue Description_md5();
virtual unsigned short VersionHash();
#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13)
@@ -101,9 +102,17 @@ class debListParser : public pkgCacheGenerator::ListParser
debListParser(FileFd *File, std::string const &Arch = "");
virtual ~debListParser() {};
+};
- private:
- APT_HIDDEN unsigned char ParseMultiArch(bool const showErrors);
+class debDebFileParser : public debListParser
+{
+ private:
+ std::string DebFile;
+
+ public:
+ debDebFileParser(FileFd *File, std::string const &DebFile);
+ virtual bool UsePackage(pkgCache::PkgIterator &Pkg,
+ pkgCache::VerIterator &Ver);
};
class debTranslationsParser : public debListParser
diff --git a/apt-pkg/deb/debmetaindex.cc b/apt-pkg/deb/debmetaindex.cc
index 6fd12add8..73010e867 100644
--- a/apt-pkg/deb/debmetaindex.cc
+++ b/apt-pkg/deb/debmetaindex.cc
@@ -186,8 +186,8 @@ debReleaseIndex::~debReleaseIndex() {
delete *S;
}
-vector <struct IndexTarget *>* debReleaseIndex::ComputeIndexTargets() const {
- vector <struct IndexTarget *>* IndexTargets = new vector <IndexTarget *>;
+vector <IndexTarget *>* debReleaseIndex::ComputeIndexTargets() const {
+ vector <IndexTarget *>* IndexTargets = new vector <IndexTarget *>;
map<string, vector<debSectionEntry const*> >::const_iterator const src = ArchEntries.find("source");
if (src != ArchEntries.end()) {
@@ -255,10 +255,10 @@ bool debReleaseIndex::GetIndexes(pkgAcquire *Owner, bool const &GetAll) const
// special case for --print-uris
if (GetAll) {
- vector <struct IndexTarget *> *targets = ComputeIndexTargets();
- for (vector <struct IndexTarget*>::const_iterator Target = targets->begin(); Target != targets->end(); ++Target) {
+ vector <IndexTarget *> *targets = ComputeIndexTargets();
+ for (vector <IndexTarget*>::const_iterator Target = targets->begin(); Target != targets->end(); ++Target) {
new pkgAcqIndex(Owner, (*Target)->URI, (*Target)->Description,
- (*Target)->ShortDesc, HashString());
+ (*Target)->ShortDesc, HashStringList());
}
delete targets;
@@ -471,6 +471,15 @@ class debSLTypeDebian : public pkgSourceList::Type
}
};
+debDebFileMetaIndex::debDebFileMetaIndex(std::string const &DebFile)
+ : metaIndex(DebFile, "local-uri", "deb-dist"), DebFile(DebFile)
+{
+ DebIndex = new debDebPkgFileIndex(DebFile);
+ Indexes = new vector<pkgIndexFile *>();
+ Indexes->push_back(DebIndex);
+}
+
+
class debSLTypeDeb : public debSLTypeDebian
{
public:
@@ -507,5 +516,25 @@ class debSLTypeDebSrc : public debSLTypeDebian
}
};
+class debSLTypeDebFile : public pkgSourceList::Type
+{
+ public:
+
+ bool CreateItem(vector<metaIndex *> &List, string const &URI,
+ string const &/*Dist*/, string const &/*Section*/,
+ std::map<string, string> const &/*Options*/) const
+ {
+ metaIndex *mi = new debDebFileMetaIndex(URI);
+ List.push_back(mi);
+ return true;
+ }
+
+ debSLTypeDebFile()
+ {
+ Name = "deb-file";
+ Label = "Debian Deb File";
+ }
+};
debSLTypeDeb _apt_DebType;
debSLTypeDebSrc _apt_DebSrcType;
+debSLTypeDebFile _apt_DebFileType;
diff --git a/apt-pkg/deb/debmetaindex.h b/apt-pkg/deb/debmetaindex.h
index 2286fa8b2..7091c198f 100644
--- a/apt-pkg/deb/debmetaindex.h
+++ b/apt-pkg/deb/debmetaindex.h
@@ -18,6 +18,8 @@
class pkgAcquire;
class pkgIndexFile;
+class debDebPkgFileIndex;
+class IndexTarget;
class debReleaseIndex : public metaIndex {
public:
@@ -44,7 +46,7 @@ class debReleaseIndex : public metaIndex {
virtual std::string ArchiveURI(std::string const &File) const {return URI + File;};
virtual bool GetIndexes(pkgAcquire *Owner, bool const &GetAll=false) const;
- std::vector <struct IndexTarget *>* ComputeIndexTargets() const;
+ std::vector <IndexTarget *>* ComputeIndexTargets() const;
std::string Info(const char *Type, std::string const &Section, std::string const &Arch="") const;
std::string MetaIndexInfo(const char *Type) const;
@@ -71,4 +73,27 @@ class debReleaseIndex : public metaIndex {
void PushSectionEntry(const debSectionEntry *Entry);
};
+class debDebFileMetaIndex : public metaIndex
+{
+ private:
+ std::string DebFile;
+ debDebPkgFileIndex *DebIndex;
+ public:
+ virtual std::string ArchiveURI(std::string const& /*File*/) const {
+ return DebFile;
+ }
+ virtual bool GetIndexes(pkgAcquire* /*Owner*/, const bool& /*GetAll=false*/) const {
+ return true;
+ }
+ virtual std::vector<pkgIndexFile *> *GetIndexFiles() {
+ return Indexes;
+ }
+ virtual bool IsTrusted() const {
+ return true;
+ }
+ debDebFileMetaIndex(std::string const &DebFile);
+ virtual ~debDebFileMetaIndex() {};
+
+};
+
#endif
diff --git a/apt-pkg/deb/debrecords.cc b/apt-pkg/deb/debrecords.cc
index 6063db5a8..d2c04d8b2 100644
--- a/apt-pkg/deb/debrecords.cc
+++ b/apt-pkg/deb/debrecords.cc
@@ -73,36 +73,17 @@ string debRecordParser::Homepage()
return Section.FindS("Homepage");
}
/*}}}*/
-// RecordParser::MD5Hash - Return the archive hash /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-string debRecordParser::MD5Hash()
-{
- return Section.FindS("MD5Sum");
-}
- /*}}}*/
-// RecordParser::SHA1Hash - Return the archive hash /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-string debRecordParser::SHA1Hash()
-{
- return Section.FindS("SHA1");
-}
- /*}}}*/
-// RecordParser::SHA256Hash - Return the archive hash /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-string debRecordParser::SHA256Hash()
-{
- return Section.FindS("SHA256");
-}
- /*}}}*/
-// RecordParser::SHA512Hash - Return the archive hash /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-string debRecordParser::SHA512Hash()
+// RecordParser::Hashes - return the available archive hashes /*{{{*/
+HashStringList debRecordParser::Hashes() const
{
- return Section.FindS("SHA512");
+ HashStringList hashes;
+ for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type)
+ {
+ std::string const hash = Section.FindS(*type);
+ if (hash.empty() == false)
+ hashes.push_back(HashString(*type, hash));
+ }
+ return hashes;
}
/*}}}*/
// RecordParser::Maintainer - Return the maintainer email /*{{{*/
@@ -125,10 +106,12 @@ string debRecordParser::RecordField(const char *fieldName)
// RecordParser::ShortDesc - Return a 1 line description /*{{{*/
// ---------------------------------------------------------------------
/* */
-string debRecordParser::ShortDesc()
+string debRecordParser::ShortDesc(std::string const &lang)
{
- string Res = LongDesc();
- string::size_type Pos = Res.find('\n');
+ string const Res = LongDesc(lang);
+ if (Res.empty() == true)
+ return "";
+ string::size_type const Pos = Res.find('\n');
if (Pos == string::npos)
return Res;
return string(Res,0,Pos);
@@ -137,26 +120,44 @@ string debRecordParser::ShortDesc()
// RecordParser::LongDesc - Return a longer description /*{{{*/
// ---------------------------------------------------------------------
/* */
-string debRecordParser::LongDesc()
-{
- string orig, dest;
+string debRecordParser::LongDesc(std::string const &lang)
+{
+ string orig;
+ if (lang.empty() == true)
+ {
+ std::vector<string> const lang = APT::Configuration::getLanguages();
+ for (std::vector<string>::const_iterator l = lang.begin();
+ l != lang.end(); ++l)
+ {
+ std::string const tagname = "Description-" + *l;
+ orig = Section.FindS(tagname.c_str());
+ if (orig.empty() == false)
+ break;
+ else if (*l == "en")
+ {
+ orig = Section.FindS("Description");
+ if (orig.empty() == false)
+ break;
+ }
+ }
+ if (orig.empty() == true)
+ orig = Section.FindS("Description");
+ }
+ else
+ {
+ std::string const tagname = "Description-" + lang;
+ orig = Section.FindS(tagname.c_str());
+ if (orig.empty() == true && lang == "en")
+ orig = Section.FindS("Description");
+ }
- if (!Section.FindS("Description").empty())
- orig = Section.FindS("Description").c_str();
- else
- {
- std::vector<string> const lang = APT::Configuration::getLanguages();
- for (std::vector<string>::const_iterator l = lang.begin();
- orig.empty() && l != lang.end(); ++l)
- orig = Section.FindS(string("Description-").append(*l).c_str());
- }
+ char const * const codeset = nl_langinfo(CODESET);
+ if (strcmp(codeset,"UTF-8") != 0) {
+ string dest;
+ UTF8ToCodeset(codeset, orig, &dest);
+ return dest;
+ }
- char const * const codeset = nl_langinfo(CODESET);
- if (strcmp(codeset,"UTF-8") != 0) {
- UTF8ToCodeset(codeset, orig, &dest);
- orig = dest;
- }
-
return orig;
}
/*}}}*/
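A minimal consumer sketch (assumption, not from this commit) for the language-aware LongDesc() implemented above: an explicit language returns that translation or an empty string, while an empty language falls back to the best match according to the APT::Languages configuration.

#include <apt-pkg/pkgrecords.h>
#include <iostream>
#include <string>

// print the German description if available, plus the configured "best" one
static void ShowDescriptions(pkgRecords::Parser &P)
{
   std::string const german = P.LongDesc("de");
   if (german.empty() == false)
      std::cout << "Description-de:" << std::endl << german << std::endl;

   // empty language: pick the best translation as per APT::Languages
   std::cout << "Best available:" << std::endl << P.LongDesc("") << std::endl;
}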
diff --git a/apt-pkg/deb/debrecords.h b/apt-pkg/deb/debrecords.h
index bdac6c90b..2bd3f3c8f 100644
--- a/apt-pkg/deb/debrecords.h
+++ b/apt-pkg/deb/debrecords.h
@@ -29,31 +29,28 @@ class debRecordParser : public pkgRecords::Parser
{
/** \brief dpointer placeholder (for later in case we need it) */
void *d;
-
+
+ protected:
FileFd File;
pkgTagFile Tags;
pkgTagSection Section;
- protected:
-
virtual bool Jump(pkgCache::VerFileIterator const &Ver);
virtual bool Jump(pkgCache::DescFileIterator const &Desc);
- public:
+ public:
// These refer to the archive file for the Version
virtual std::string FileName();
- virtual std::string MD5Hash();
- virtual std::string SHA1Hash();
- virtual std::string SHA256Hash();
- virtual std::string SHA512Hash();
virtual std::string SourcePkg();
virtual std::string SourceVer();
-
+
+ virtual HashStringList Hashes() const;
+
// These are some general stats about the package
virtual std::string Maintainer();
- virtual std::string ShortDesc();
- virtual std::string LongDesc();
+ virtual std::string ShortDesc(std::string const &lang);
+ virtual std::string LongDesc(std::string const &lang);
virtual std::string Name();
virtual std::string Homepage();
@@ -66,4 +63,15 @@ class debRecordParser : public pkgRecords::Parser
virtual ~debRecordParser() {};
};
+// custom record parser that reads deb files directly
+class debDebFileRecordParser : public debRecordParser
+{
+ public:
+ virtual std::string FileName() {
+ return File.Name();
+ }
+ debDebFileRecordParser(std::string FileName,pkgCache &Cache)
+ : debRecordParser(FileName, Cache) {};
+};
+
#endif
diff --git a/apt-pkg/deb/debsrcrecords.cc b/apt-pkg/deb/debsrcrecords.cc
index a444cbe4d..97f43aca2 100644
--- a/apt-pkg/deb/debsrcrecords.cc
+++ b/apt-pkg/deb/debsrcrecords.cc
@@ -18,6 +18,8 @@
#include <apt-pkg/aptconfiguration.h>
#include <apt-pkg/srcrecords.h>
#include <apt-pkg/tagfile.h>
+#include <apt-pkg/hashes.h>
+#include <apt-pkg/gpgv.h>
#include <ctype.h>
#include <stdlib.h>
@@ -55,12 +57,13 @@ const char **debSrcRecordParser::Binaries()
char* binStartNext = strchrnul(bin, ',');
char* binEnd = binStartNext - 1;
for (; isspace(*binEnd) != 0; --binEnd)
- binEnd = '\0';
+ binEnd = 0;
StaticBinList.push_back(bin);
if (*binStartNext != ',')
break;
*binStartNext = '\0';
- for (bin = binStartNext + 1; isspace(*bin) != 0; ++bin);
+ for (bin = binStartNext + 1; isspace(*bin) != 0; ++bin)
+ ;
} while (*bin != '\0');
StaticBinList.push_back(NULL);
@@ -121,10 +124,6 @@ bool debSrcRecordParser::BuildDepends(std::vector<pkgSrcRecords::Parser::BuildDe
bool debSrcRecordParser::Files(std::vector<pkgSrcRecords::File> &List)
{
List.erase(List.begin(),List.end());
-
- string Files = Sect.FindS("Files");
- if (Files.empty() == true)
- return false;
// Stash the / terminated directory prefix
string Base = Sect.FindS("Directory");
@@ -133,51 +132,105 @@ bool debSrcRecordParser::Files(std::vector<pkgSrcRecords::File> &List)
std::vector<std::string> const compExts = APT::Configuration::getCompressorExtensions();
- // Iterate over the entire list grabbing each triplet
- const char *C = Files.c_str();
- while (*C != 0)
- {
- pkgSrcRecords::File F;
- string Size;
-
- // Parse each of the elements
- if (ParseQuoteWord(C,F.MD5Hash) == false ||
- ParseQuoteWord(C,Size) == false ||
- ParseQuoteWord(C,F.Path) == false)
- return _error->Error("Error parsing file record");
-
- // Parse the size and append the directory
- F.Size = atoi(Size.c_str());
- F.Path = Base + F.Path;
-
- // Try to guess what sort of file it is we are getting.
- string::size_type Pos = F.Path.length()-1;
- while (1)
+ for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type)
+ {
+ // derive field from checksum type
+ std::string checksumField("Checksums-");
+ if (strcmp(*type, "MD5Sum") == 0)
+ checksumField = "Files"; // historic name for MD5 checksums
+ else
+ checksumField.append(*type);
+
+ string const Files = Sect.FindS(checksumField.c_str());
+ if (Files.empty() == true)
+ continue;
+
+ // Iterate over the entire list grabbing each triplet
+ const char *C = Files.c_str();
+ while (*C != 0)
{
- string::size_type Tmp = F.Path.rfind('.',Pos);
- if (Tmp == string::npos)
- break;
- if (F.Type == "tar") {
- // source v3 has extension 'debian.tar.*' instead of 'diff.*'
- if (string(F.Path, Tmp+1, Pos-Tmp) == "debian")
- F.Type = "diff";
- break;
- }
- F.Type = string(F.Path,Tmp+1,Pos-Tmp);
-
- if (std::find(compExts.begin(), compExts.end(), std::string(".").append(F.Type)) != compExts.end() ||
- F.Type == "tar")
+ string hash, size, path;
+
+ // Parse each of the elements
+ if (ParseQuoteWord(C, hash) == false ||
+ ParseQuoteWord(C, size) == false ||
+ ParseQuoteWord(C, path) == false)
+ return _error->Error("Error parsing file record in %s of source package %s", checksumField.c_str(), Package().c_str());
+
+ HashString const hashString(*type, hash);
+ if (Base.empty() == false)
+ path = Base + path;
+
+ // look if we have a record for this file already
+ std::vector<pkgSrcRecords::File>::iterator file = List.begin();
+ for (; file != List.end(); ++file)
+ if (file->Path == path)
+ break;
+
+ // we have it already, store the new hash and be done
+ if (file != List.end())
{
- Pos = Tmp-1;
+#if __GNUC__ >= 4
+ // set for compatibility only, so warn users not us
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
+ if (checksumField == "Files")
+ file->MD5Hash = hash;
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
+ // an error here indicates that we have two different hashes for the same file
+ if (file->Hashes.push_back(hashString) == false)
+ return _error->Error("Error parsing checksum in %s of source package %s", checksumField.c_str(), Package().c_str());
continue;
}
-
- break;
+
+ // we haven't seen this file yet
+ pkgSrcRecords::File F;
+ F.Path = path;
+ F.Size = strtoull(size.c_str(), NULL, 10);
+ F.Hashes.push_back(hashString);
+
+#if __GNUC__ >= 4
+ // set for compatibility only, so warn users not us
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
+ if (checksumField == "Files")
+ F.MD5Hash = hash;
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
+
+ // Try to guess what sort of file it is we are getting.
+ string::size_type Pos = F.Path.length()-1;
+ while (1)
+ {
+ string::size_type Tmp = F.Path.rfind('.',Pos);
+ if (Tmp == string::npos)
+ break;
+ if (F.Type == "tar") {
+ // source v3 has extension 'debian.tar.*' instead of 'diff.*'
+ if (string(F.Path, Tmp+1, Pos-Tmp) == "debian")
+ F.Type = "diff";
+ break;
+ }
+ F.Type = string(F.Path,Tmp+1,Pos-Tmp);
+
+ if (std::find(compExts.begin(), compExts.end(), std::string(".").append(F.Type)) != compExts.end() ||
+ F.Type == "tar")
+ {
+ Pos = Tmp-1;
+ continue;
+ }
+
+ break;
+ }
+ List.push_back(F);
}
-
- List.push_back(F);
}
-
+
return true;
}
/*}}}*/
@@ -190,3 +243,21 @@ debSrcRecordParser::~debSrcRecordParser()
free(Buffer);
}
/*}}}*/
+
+
+debDscRecordParser::debDscRecordParser(std::string const &DscFile, pkgIndexFile const *Index)
+ : debSrcRecordParser(DscFile, Index)
+{
+ // support clear signed files
+ if (OpenMaybeClearSignedFile(DscFile, Fd) == false)
+ {
+ _error->Error("Failed to open %s", DscFile.c_str());
+ return;
+ }
+
+ // re-init to ensure the updated Fd is used
+ Tags.Init(&Fd);
+ // read the first (and only) record
+ Step();
+
+}
diff --git a/apt-pkg/deb/debsrcrecords.h b/apt-pkg/deb/debsrcrecords.h
index b65d1480b..a0a151875 100644
--- a/apt-pkg/deb/debsrcrecords.h
+++ b/apt-pkg/deb/debsrcrecords.h
@@ -26,6 +26,7 @@ class debSrcRecordParser : public pkgSrcRecords::Parser
/** \brief dpointer placeholder (for later in case we need it) */
void *d;
+ protected:
FileFd Fd;
pkgTagFile Tags;
pkgTagSection Sect;
@@ -60,4 +61,10 @@ class debSrcRecordParser : public pkgSrcRecords::Parser
virtual ~debSrcRecordParser();
};
+class debDscRecordParser : public debSrcRecordParser
+{
+ public:
+ debDscRecordParser(std::string const &DscFile, pkgIndexFile const *Index);
+};
+
#endif
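An illustrative sketch (not part of the patch) of what the reworked Files() above now produces: each pkgSrcRecords::File carries a HashStringList merged from the "Files" and "Checksums-*" fields, so a single path can list several hash types.

#include <apt-pkg/srcrecords.h>
#include <apt-pkg/hashes.h>
#include <iostream>
#include <vector>

// list every source file together with all hashes parsed for it
static void PrintSourceFiles(std::vector<pkgSrcRecords::File> const &List)
{
   for (std::vector<pkgSrcRecords::File>::const_iterator F = List.begin(); F != List.end(); ++F)
   {
      std::cout << F->Path << " (" << F->Size << " bytes)" << std::endl;
      for (HashStringList::const_iterator hs = F->Hashes.begin(); hs != F->Hashes.end(); ++hs)
         std::cout << "\t- " << hs->toStr() << std::endl;
   }
}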
diff --git a/apt-pkg/deb/dpkgpm.cc b/apt-pkg/deb/dpkgpm.cc
index 613a4de9f..76b708359 100644
--- a/apt-pkg/deb/dpkgpm.cc
+++ b/apt-pkg/deb/dpkgpm.cc
@@ -517,7 +517,7 @@ bool pkgDPkgPM::RunScriptsWithPkgs(const char *Cnf)
void pkgDPkgPM::DoStdin(int master)
{
unsigned char input_buf[256] = {0,};
- ssize_t len = read(0, input_buf, sizeof(input_buf));
+ ssize_t len = read(STDIN_FILENO, input_buf, sizeof(input_buf));
if (len)
FileFd::Write(master, input_buf, len);
else
diff --git a/apt-pkg/indexcopy.cc b/apt-pkg/indexcopy.cc
index 854ba1bd7..bb3b5d340 100644
--- a/apt-pkg/indexcopy.cc
+++ b/apt-pkg/indexcopy.cc
@@ -516,7 +516,7 @@ bool SigVerify::Verify(string prefix, string file, indexRecords *MetaIndex)
return false;
}
- if (!Record->Hash.VerifyFile(prefix+file))
+ if (!Record->Hashes.VerifyFile(prefix+file))
{
_error->Warning(_("Hash mismatch for: %s"),file.c_str());
return false;
@@ -524,8 +524,10 @@ bool SigVerify::Verify(string prefix, string file, indexRecords *MetaIndex)
if(Debug == true)
{
- cout << "File: " << prefix+file << endl;
- cout << "Expected Hash " << Record->Hash.toStr() << endl;
+ cout << "File: " << prefix+file << endl
+ << "Expected Hash " << endl;
+ for (HashStringList::const_iterator hs = Record->Hashes.begin(); hs != Record->Hashes.end(); ++hs)
+ std::cout << "\t- " << hs->toStr() << std::endl;
}
return true;
diff --git a/apt-pkg/indexfile.h b/apt-pkg/indexfile.h
index b5c9ac77e..817165f08 100644
--- a/apt-pkg/indexfile.h
+++ b/apt-pkg/indexfile.h
@@ -59,6 +59,7 @@ class pkgIndexFile
const char *Label;
virtual pkgRecords::Parser *CreatePkgParser(pkgCache::PkgFileIterator /*File*/) const {return 0;};
+ virtual pkgSrcRecords::Parser *CreateSrcPkgParser(std::string /*File*/) const {return 0;};
Type();
virtual ~Type() {};
};
diff --git a/apt-pkg/indexrecords.cc b/apt-pkg/indexrecords.cc
index 5353d1098..5b53e796d 100644
--- a/apt-pkg/indexrecords.cc
+++ b/apt-pkg/indexrecords.cc
@@ -37,6 +37,11 @@ APT_PURE string indexRecords::GetSuite() const
return this->Suite;
}
+APT_PURE bool indexRecords::GetSupportsAcquireByHash() const
+{
+ return this->SupportsAcquireByHash;
+}
+
APT_PURE bool indexRecords::CheckDist(const string MaybeDist) const
{
return (this->Dist == MaybeDist
@@ -53,7 +58,7 @@ APT_PURE time_t indexRecords::GetValidUntil() const
return this->ValidUntil;
}
-APT_PURE const indexRecords::checkSum *indexRecords::Lookup(const string MetaKey)
+APT_PURE indexRecords::checkSum *indexRecords::Lookup(const string MetaKey)
{
std::map<std::string, indexRecords::checkSum* >::const_iterator sum = Entries.find(MetaKey);
if (sum == Entries.end())
@@ -86,12 +91,14 @@ bool indexRecords::Load(const string Filename) /*{{{*/
strprintf(ErrorText, _("No sections in Release file %s"), Filename.c_str());
return false;
}
+ // FIXME: find better tag name
+ SupportsAcquireByHash = Section.FindB("Acquire-By-Hash", false);
Suite = Section.FindS("Suite");
Dist = Section.FindS("Codename");
- int i;
- for (i=0;HashString::SupportedHashes()[i] != NULL; i++)
+ bool FoundHashSum = false;
+ for (int i=0;HashString::SupportedHashes()[i] != NULL; i++)
{
if (!Section.Find(HashString::SupportedHashes()[i], Start, End))
continue;
@@ -103,16 +110,20 @@ bool indexRecords::Load(const string Filename) /*{{{*/
{
if (!parseSumData(Start, End, Name, Hash, Size))
return false;
- indexRecords::checkSum *Sum = new indexRecords::checkSum;
- Sum->MetaKeyFilename = Name;
- Sum->Hash = HashString(HashString::SupportedHashes()[i],Hash);
- Sum->Size = Size;
- Entries[Name] = Sum;
+
+ if (Entries.find(Name) == Entries.end())
+ {
+ indexRecords::checkSum *Sum = new indexRecords::checkSum;
+ Sum->MetaKeyFilename = Name;
+ Sum->Size = Size;
+ Entries[Name] = Sum;
+ }
+ Entries[Name]->Hashes.push_back(HashString(HashString::SupportedHashes()[i],Hash));
+ FoundHashSum = true;
}
- break;
}
- if(HashString::SupportedHashes()[i] == NULL)
+ if(FoundHashSum == false)
{
strprintf(ErrorText, _("No Hash entry in Release file %s"), Filename.c_str());
return false;
@@ -239,6 +250,6 @@ indexRecords::indexRecords()
}
indexRecords::indexRecords(const string ExpectedDist) :
- ExpectedDist(ExpectedDist), ValidUntil(0)
+ ExpectedDist(ExpectedDist), ValidUntil(0), SupportsAcquireByHash(false)
{
}
diff --git a/apt-pkg/indexrecords.h b/apt-pkg/indexrecords.h
index e31f889ad..bb0fd5564 100644
--- a/apt-pkg/indexrecords.h
+++ b/apt-pkg/indexrecords.h
@@ -26,12 +26,15 @@ class indexRecords
public:
struct checkSum;
std::string ErrorText;
+ // dpointer (for later)
+ void * d;
protected:
std::string Dist;
std::string Suite;
std::string ExpectedDist;
time_t ValidUntil;
+ bool SupportsAcquireByHash;
std::map<std::string,checkSum *> Entries;
@@ -41,7 +44,7 @@ class indexRecords
indexRecords(const std::string ExpectedDist);
// Lookup function
- virtual const checkSum *Lookup(const std::string MetaKey);
+ virtual checkSum *Lookup(const std::string MetaKey);
/** \brief tests if a checksum for this file is available */
bool Exists(std::string const &MetaKey) const;
std::vector<std::string> MetaKeys();
@@ -49,17 +52,28 @@ class indexRecords
virtual bool Load(std::string Filename);
std::string GetDist() const;
std::string GetSuite() const;
+ bool GetSupportsAcquireByHash() const;
time_t GetValidUntil() const;
virtual bool CheckDist(const std::string MaybeDist) const;
std::string GetExpectedDist() const;
virtual ~indexRecords(){};
};
+#if __GNUC__ >= 4
+ // ensure that con- & de-structor don't trigger this warning
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
struct indexRecords::checkSum
{
std::string MetaKeyFilename;
- HashString Hash;
+ HashStringList Hashes;
unsigned long long Size;
+
+ APT_DEPRECATED HashString Hash;
};
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
#endif
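A hedged usage sketch (function name and flow are illustrative, not from the commit) for the new checkSum layout: Lookup() now returns a mutable entry whose Hashes list can verify a downloaded file against every hash advertised in the Release file, and GetSupportsAcquireByHash() exposes the new Acquire-By-Hash flag.

#include <apt-pkg/indexrecords.h>
#include <cstddef>
#include <iostream>
#include <string>

// verify a downloaded index file against its Release file entry
static bool CheckIndexFile(indexRecords &Records, std::string const &MetaKey,
                           std::string const &LocalFile)
{
   indexRecords::checkSum *Record = Records.Lookup(MetaKey);
   if (Record == NULL)
      return false; // no entry for this index in the Release file

   if (Records.GetSupportsAcquireByHash() == true)
      std::cout << "repository supports Acquire-By-Hash" << std::endl;

   // checks the file against all hashes found in the Release file
   return Record->Hashes.VerifyFile(LocalFile);
}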
diff --git a/apt-pkg/packagemanager.cc b/apt-pkg/packagemanager.cc
index 393f836f7..d9df28ba3 100644
--- a/apt-pkg/packagemanager.cc
+++ b/apt-pkg/packagemanager.cc
@@ -28,6 +28,7 @@
#include <apt-pkg/pkgcache.h>
#include <apt-pkg/cacheiterators.h>
#include <apt-pkg/strutl.h>
+#include <apt-pkg/install-progress.h>
#include <stddef.h>
#include <list>
diff --git a/apt-pkg/packagemanager.h b/apt-pkg/packagemanager.h
index d72790b6e..558132ceb 100644
--- a/apt-pkg/packagemanager.h
+++ b/apt-pkg/packagemanager.h
@@ -44,6 +44,11 @@ class pkgDepCache;
class pkgSourceList;
class pkgOrderList;
class pkgRecords;
+namespace APT {
+ namespace Progress {
+ class PackageManager;
+ };
+};
class pkgPackageManager : protected pkgCache::Namespace
diff --git a/apt-pkg/pkgcache.cc b/apt-pkg/pkgcache.cc
index 58a63459f..4fbdc93d5 100644
--- a/apt-pkg/pkgcache.cc
+++ b/apt-pkg/pkgcache.cc
@@ -54,12 +54,8 @@ pkgCache::Header::Header()
/* Whenever the structures change the major version should be bumped,
whenever the generator changes the minor version should be bumped. */
- MajorVersion = 8;
-#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13)
+ MajorVersion = 9;
MinorVersion = 2;
-#else
- MinorVersion = 1;
-#endif
Dirty = false;
HeaderSz = sizeof(pkgCache::Header);
@@ -168,15 +164,23 @@ bool pkgCache::ReMap(bool const &Errorchecks)
if (Map.Size() < HeaderP->CacheFileSize)
return _error->Error(_("The package cache file is corrupted, it is too small"));
+ if (HeaderP->VerSysName == 0 || HeaderP->Architecture == 0 || HeaderP->Architectures == 0)
+ return _error->Error(_("The package cache file is corrupted"));
+
// Locate our VS..
- if (HeaderP->VerSysName == 0 ||
- (VS = pkgVersioningSystem::GetVS(StrP + HeaderP->VerSysName)) == 0)
+ if ((VS = pkgVersioningSystem::GetVS(StrP + HeaderP->VerSysName)) == 0)
return _error->Error(_("This APT does not support the versioning system '%s'"),StrP + HeaderP->VerSysName);
- // Chcek the arhcitecture
- if (HeaderP->Architecture == 0 ||
- _config->Find("APT::Architecture") != StrP + HeaderP->Architecture)
- return _error->Error(_("The package cache was built for a different architecture"));
+ // Check the architecture
+ std::vector<std::string> archs = APT::Configuration::getArchitectures();
+ std::vector<std::string>::const_iterator a = archs.begin();
+ std::string list = *a;
+ for (++a; a != archs.end(); ++a)
+ list.append(",").append(*a);
+ if (_config->Find("APT::Architecture") != StrP + HeaderP->Architecture ||
+ list != StrP + HeaderP->Architectures)
+ return _error->Error(_("The package cache was built for different architectures: %s vs %s"), StrP + HeaderP->Architectures, list.c_str());
+
return true;
}
/*}}}*/
@@ -210,7 +214,7 @@ pkgCache::PkgIterator pkgCache::SingleArchFindPkg(const string &Name)
{
// Look at the hash bucket
Package *Pkg = PkgP + HeaderP->PkgHashTable[Hash(Name)];
- for (; Pkg != PkgP; Pkg = PkgP + Pkg->NextPackage)
+ for (; Pkg != PkgP; Pkg = PkgP + Pkg->Next)
{
if (unlikely(Pkg->Name == 0))
continue;
@@ -366,7 +370,7 @@ pkgCache::PkgIterator pkgCache::GrpIterator::FindPkg(string Arch) const {
(= different packages with same calculated hash),
so we need to check the name also */
for (pkgCache::Package *Pkg = PackageList(); Pkg != Owner->PkgP;
- Pkg = Owner->PkgP + Pkg->NextPackage) {
+ Pkg = Owner->PkgP + Pkg->Next) {
if (S->Name == Pkg->Name &&
stringcasecmp(Arch, Owner->StrP + Pkg->Arch) == 0)
return PkgIterator(*Owner, Pkg);
@@ -415,7 +419,7 @@ pkgCache::PkgIterator pkgCache::GrpIterator::NextPkg(pkgCache::PkgIterator const
if (S->LastPackage == LastPkg.Index())
return PkgIterator(*Owner, 0);
- return PkgIterator(*Owner, Owner->PkgP + LastPkg->NextPackage);
+ return PkgIterator(*Owner, Owner->PkgP + LastPkg->Next);
}
/*}}}*/
// GrpIterator::operator ++ - Postfix incr /*{{{*/
@@ -442,7 +446,7 @@ void pkgCache::PkgIterator::operator ++(int)
{
// Follow the current links
if (S != Owner->PkgP)
- S = Owner->PkgP + S->NextPackage;
+ S = Owner->PkgP + S->Next;
// Follow the hash table
while (S == Owner->PkgP && (HashIndex+1) < (signed)_count(Owner->HeaderP->PkgHashTable))
diff --git a/apt-pkg/pkgcache.h b/apt-pkg/pkgcache.h
index 5e8a9630a..55f0187f9 100644
--- a/apt-pkg/pkgcache.h
+++ b/apt-pkg/pkgcache.h
@@ -138,7 +138,7 @@ class pkgCache /*{{{*/
/** \brief priority of a package version
Zero is used for unparsable or absent Priority fields. */
- enum VerPriority {Important=1,Required=2,Standard=3,Optional=4,Extra=5};
+ enum VerPriority {Required=1,Important=2,Standard=3,Optional=4,Extra=5};
enum PkgSelectedState {Unknown=0,Install=1,Hold=2,DeInstall=3,Purge=4};
enum PkgInstState {Ok=0,ReInstReq=1,HoldInst=2,HoldReInstReq=3};
enum PkgCurrentState {NotInstalled=0,UnPacked=1,HalfConfigured=2,
@@ -286,8 +286,10 @@ struct pkgCache::Header
map_ptrloc StringList;
/** \brief String representing the version system used */
map_ptrloc VerSysName;
- /** \brief Architecture(s) the cache was built against */
+ /** \brief native architecture the cache was built against */
map_ptrloc Architecture;
+ /** \brief all architectures the cache was built against */
+ map_ptrloc Architectures;
/** \brief The maximum size of a raw entry from the original Package file */
unsigned long MaxVerFileSize;
/** \brief The maximum size of a raw entry from the original Translation file */
@@ -314,8 +316,8 @@ struct pkgCache::Header
these packages are stored as a sequence in the list.
Beware: The Hashmethod assumes that the hash table sizes are equal */
- map_ptrloc PkgHashTable[2*1048];
- map_ptrloc GrpHashTable[2*1048];
+ map_ptrloc PkgHashTable[64*1048];
+ map_ptrloc GrpHashTable[64*1048];
/** \brief Size of the complete cache file */
unsigned long CacheFileSize;
@@ -388,7 +390,7 @@ struct pkgCache::Package
// Linked list
/** \brief Link to the next package in the same bucket */
- map_ptrloc NextPackage; // Package
+ map_ptrloc Next; // Package
/** \brief List of all dependencies on this package */
map_ptrloc RevDepends; // Dependency
/** \brief List of all "packages" this package provide */
diff --git a/apt-pkg/pkgcachegen.cc b/apt-pkg/pkgcachegen.cc
index 810f0b022..9615b4c22 100644
--- a/apt-pkg/pkgcachegen.cc
+++ b/apt-pkg/pkgcachegen.cc
@@ -74,13 +74,31 @@ pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) :
// Starting header
*Cache.HeaderP = pkgCache::Header();
map_ptrloc const idxVerSysName = WriteStringInMap(_system->VS->Label);
+ if (unlikely(idxVerSysName == 0))
+ return;
Cache.HeaderP->VerSysName = idxVerSysName;
// this pointer is set in ReMap, but we need it now for WriteUniqString
Cache.StringItemP = (pkgCache::StringItem *)Map.Data();
map_ptrloc const idxArchitecture = WriteUniqString(_config->Find("APT::Architecture"));
- Cache.HeaderP->Architecture = idxArchitecture;
- if (unlikely(idxVerSysName == 0 || idxArchitecture == 0))
+ if (unlikely(idxArchitecture == 0))
return;
+ Cache.HeaderP->Architecture = idxArchitecture;
+
+ std::vector<std::string> archs = APT::Configuration::getArchitectures();
+ if (archs.size() > 1)
+ {
+ std::vector<std::string>::const_iterator a = archs.begin();
+ std::string list = *a;
+ for (++a; a != archs.end(); ++a)
+ list.append(",").append(*a);
+ map_ptrloc const idxArchitectures = WriteStringInMap(list);
+ if (unlikely(idxArchitectures == 0))
+ return;
+ Cache.HeaderP->Architectures = idxArchitectures;
+ }
+ else
+ Cache.HeaderP->Architectures = idxArchitecture;
+
Cache.ReMap();
}
else
@@ -302,10 +320,9 @@ bool pkgCacheGenerator::MergeListPackage(ListParser &List, pkgCache::PkgIterator
// Find the right version to write the description
MD5SumValue CurMd5 = List.Description_md5();
- if (CurMd5.Value().empty() == true || List.Description().empty() == true)
+ if (CurMd5.Value().empty() == true && List.Description("").empty() == true)
return true;
- std::string CurLang = List.DescriptionLanguage();
-
+ std::vector<std::string> availDesc = List.AvailableDescriptionLanguages();
for (Ver = Pkg.VersionList(); Ver.end() == false; ++Ver)
{
pkgCache::DescIterator VerDesc = Ver.DescriptionList();
@@ -314,31 +331,16 @@ bool pkgCacheGenerator::MergeListPackage(ListParser &List, pkgCache::PkgIterator
if (VerDesc.end() == true || MD5SumValue(VerDesc.md5()) != CurMd5)
continue;
- // don't add a new description if we have one for the given
- // md5 && language
- if (IsDuplicateDescription(VerDesc, CurMd5, CurLang) == true)
- continue;
-
- pkgCache::DescIterator Desc;
- Dynamic<pkgCache::DescIterator> DynDesc(Desc);
-
- map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, VerDesc->md5sum);
- if (unlikely(descindex == 0 && _error->PendingError()))
- return _error->Error(_("Error occurred while processing %s (%s%d)"),
- Pkg.Name(), "NewDescription", 1);
-
- Desc->ParentPkg = Pkg.Index();
-
- // we add at the end, so that the start is constant as we need
- // that to be able to efficiently share these lists
- VerDesc = Ver.DescriptionList(); // old value might be invalid after ReMap
- for (;VerDesc.end() == false && VerDesc->NextDesc != 0; ++VerDesc);
- map_ptrloc * const LastNextDesc = (VerDesc.end() == true) ? &Ver->DescriptionList : &VerDesc->NextDesc;
- *LastNextDesc = descindex;
+ map_ptrloc md5idx = VerDesc->md5sum;
+ for (std::vector<std::string>::const_iterator CurLang = availDesc.begin(); CurLang != availDesc.end(); ++CurLang)
+ {
+ // don't add a new description if we have one for the given
+ // md5 && language
+ if (IsDuplicateDescription(VerDesc, CurMd5, *CurLang) == true)
+ continue;
- if (NewFileDesc(Desc,List) == false)
- return _error->Error(_("Error occurred while processing %s (%s%d)"),
- Pkg.Name(), "NewFileDesc", 1);
+ AddNewDescription(List, Ver, *CurLang, CurMd5, md5idx);
+ }
// we can stop here as all "same" versions will share the description
break;
@@ -486,11 +488,10 @@ bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator
return true;
}
- /* Record the Description (it is not translated) */
+ /* Record the Description(s) based on their master md5sum */
MD5SumValue CurMd5 = List.Description_md5();
- if (CurMd5.Value().empty() == true || List.Description().empty() == true)
+ if (CurMd5.Value().empty() == true && List.Description("").empty() == true)
return true;
- std::string CurLang = List.DescriptionLanguage();
/* Before we add a new description we first search in the group for
a version with a description of the same MD5 - if so we reuse this
@@ -501,28 +502,44 @@ bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator
for (pkgCache::VerIterator V = P.VersionList();
V.end() == false; ++V)
{
- if (IsDuplicateDescription(V.DescriptionList(), CurMd5, "") == false)
+ if (V->DescriptionList == 0 || MD5SumValue(V.DescriptionList().md5()) != CurMd5)
continue;
Ver->DescriptionList = V->DescriptionList;
- return true;
}
}
- // We haven't found reusable descriptions, so add the first description
- pkgCache::DescIterator Desc = Ver.DescriptionList();
+ // We haven't found reusable descriptions, so add the first description(s)
+ map_ptrloc md5idx = Ver->DescriptionList == 0 ? 0 : Ver.DescriptionList()->md5sum;
+ std::vector<std::string> availDesc = List.AvailableDescriptionLanguages();
+ for (std::vector<std::string>::const_iterator CurLang = availDesc.begin(); CurLang != availDesc.end(); ++CurLang)
+ if (AddNewDescription(List, Ver, *CurLang, CurMd5, md5idx) == false)
+ return false;
+ return true;
+}
+ /*}}}*/
+bool pkgCacheGenerator::AddNewDescription(ListParser &List, pkgCache::VerIterator &Ver, std::string const &lang, MD5SumValue const &CurMd5, map_ptrloc &md5idx) /*{{{*/
+{
+ pkgCache::DescIterator Desc;
Dynamic<pkgCache::DescIterator> DynDesc(Desc);
- map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, 0);
+ map_ptrloc const descindex = NewDescription(Desc, lang, CurMd5, md5idx);
if (unlikely(descindex == 0 && _error->PendingError()))
return _error->Error(_("Error occurred while processing %s (%s%d)"),
- Pkg.Name(), "NewDescription", 2);
+ Ver.ParentPkg().Name(), "NewDescription", 1);
+
+ md5idx = Desc->md5sum;
+ Desc->ParentPkg = Ver.ParentPkg().Index();
- Desc->ParentPkg = Pkg.Index();
- Ver->DescriptionList = descindex;
+ // we add at the end, so that the start is constant as we need
+ // that to be able to efficiently share these lists
+ pkgCache::DescIterator VerDesc = Ver.DescriptionList(); // old value might be invalid after ReMap
+ for (;VerDesc.end() == false && VerDesc->NextDesc != 0; ++VerDesc);
+ map_ptrloc * const LastNextDesc = (VerDesc.end() == true) ? &Ver->DescriptionList : &VerDesc->NextDesc;
+ *LastNextDesc = descindex;
if (NewFileDesc(Desc,List) == false)
return _error->Error(_("Error occurred while processing %s (%s%d)"),
- Pkg.Name(), "NewFileDesc", 2);
+ Ver.ParentPkg().Name(), "NewFileDesc", 1);
return true;
}
@@ -639,16 +656,16 @@ bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name
unsigned long const Hash = Cache.Hash(Name);
map_ptrloc *insertAt = &Cache.HeaderP->PkgHashTable[Hash];
while (*insertAt != 0 && strcasecmp(Name.c_str(), Cache.StrP + (Cache.PkgP + *insertAt)->Name) > 0)
- insertAt = &(Cache.PkgP + *insertAt)->NextPackage;
- Pkg->NextPackage = *insertAt;
+ insertAt = &(Cache.PkgP + *insertAt)->Next;
+ Pkg->Next = *insertAt;
*insertAt = Package;
}
else // Group the Packages together
{
// this package is the new last package
pkgCache::PkgIterator LastPkg(Cache, Cache.PkgP + Grp->LastPackage);
- Pkg->NextPackage = LastPkg->NextPackage;
- LastPkg->NextPackage = Package;
+ Pkg->Next = LastPkg->Next;
+ LastPkg->Next = Package;
}
Grp->LastPackage = Package;
diff --git a/apt-pkg/pkgcachegen.h b/apt-pkg/pkgcachegen.h
index 1e1a71026..d275c1e42 100644
--- a/apt-pkg/pkgcachegen.h
+++ b/apt-pkg/pkgcachegen.h
@@ -125,6 +125,9 @@ class pkgCacheGenerator /*{{{*/
APT_HIDDEN bool AddImplicitDepends(pkgCache::GrpIterator &G, pkgCache::PkgIterator &P,
pkgCache::VerIterator &V);
APT_HIDDEN bool AddImplicitDepends(pkgCache::VerIterator &V, pkgCache::PkgIterator &D);
+
+ APT_HIDDEN bool AddNewDescription(ListParser &List, pkgCache::VerIterator &Ver,
+ std::string const &lang, MD5SumValue const &CurMd5, map_ptrloc &md5idx);
};
/*}}}*/
// This is the abstract package list parser class. /*{{{*/
@@ -160,8 +163,8 @@ class pkgCacheGenerator::ListParser
virtual bool ArchitectureAll() = 0;
virtual std::string Version() = 0;
virtual bool NewVersion(pkgCache::VerIterator &Ver) = 0;
- virtual std::string Description() = 0;
- virtual std::string DescriptionLanguage() = 0;
+ virtual std::string Description(std::string const &lang) = 0;
+ virtual std::vector<std::string> AvailableDescriptionLanguages() = 0;
virtual MD5SumValue Description_md5() = 0;
virtual unsigned short VersionHash() = 0;
/** compare currently parsed version with given version
diff --git a/apt-pkg/pkgrecords.cc b/apt-pkg/pkgrecords.cc
index c403e4dc3..859af3a09 100644
--- a/apt-pkg/pkgrecords.cc
+++ b/apt-pkg/pkgrecords.cc
@@ -26,7 +26,7 @@
// Records::pkgRecords - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* This will create the necessary structures to access the status files */
-pkgRecords::pkgRecords(pkgCache &Cache) : d(NULL), Cache(Cache),
+pkgRecords::pkgRecords(pkgCache &aCache) : d(NULL), Cache(aCache),
Files(Cache.HeaderP->PackageFileCount)
{
for (pkgCache::PkgFileIterator I = Cache.FileBegin();
diff --git a/apt-pkg/pkgrecords.h b/apt-pkg/pkgrecords.h
index b5237b3a0..a902da8b8 100644
--- a/apt-pkg/pkgrecords.h
+++ b/apt-pkg/pkgrecords.h
@@ -18,6 +18,8 @@
#define PKGLIB_PKGRECORDS_H
#include <apt-pkg/pkgcache.h>
+#include <apt-pkg/hashes.h>
+#include <apt-pkg/macros.h>
#include <string>
#include <vector>
@@ -56,17 +58,39 @@ class pkgRecords::Parser /*{{{*/
// These refer to the archive file for the Version
virtual std::string FileName() {return std::string();};
- virtual std::string MD5Hash() {return std::string();};
- virtual std::string SHA1Hash() {return std::string();};
- virtual std::string SHA256Hash() {return std::string();};
- virtual std::string SHA512Hash() {return std::string();};
virtual std::string SourcePkg() {return std::string();};
virtual std::string SourceVer() {return std::string();};
+ /** return all known hashes in this record.
+ *
+ * For authentication purposes packages come with hashsums which
+ * this method is supposed to parse and return so that clients can
+ * choose the hash to be used.
+ */
+ virtual HashStringList Hashes() const { return HashStringList(); };
+ APT_DEPRECATED std::string MD5Hash() const { return GetHashFromHashes("MD5Sum"); };
+ APT_DEPRECATED std::string SHA1Hash() const { return GetHashFromHashes("SHA1"); };
+ APT_DEPRECATED std::string SHA256Hash() const { return GetHashFromHashes("SHA256"); };
+ APT_DEPRECATED std::string SHA512Hash() const { return GetHashFromHashes("SHA512"); };
+
// These are some general stats about the package
virtual std::string Maintainer() {return std::string();};
- virtual std::string ShortDesc() {return std::string();};
- virtual std::string LongDesc() {return std::string();};
+ /** return short description in language from record.
+ *
+ * @see #LongDesc
+ */
+ virtual std::string ShortDesc(std::string const &/*lang*/) {return std::string();};
+ /** return long description in language from record.
+ *
+ * If \b lang is empty the "best" available language will be
+ * returned as determined by the APT::Languages configuration.
+ * If a (requested) language can't be found in this record an empty
+ * string will be returned.
+ */
+ virtual std::string LongDesc(std::string const &/*lang*/) {return std::string();};
+ std::string ShortDesc() {return ShortDesc("");};
+ std::string LongDesc() {return LongDesc("");};
+
virtual std::string Name() {return std::string();};
virtual std::string Homepage() {return std::string();}
@@ -77,6 +101,14 @@ class pkgRecords::Parser /*{{{*/
virtual void GetRec(const char *&Start,const char *&Stop) {Start = Stop = 0;};
virtual ~Parser() {};
+
+ private:
+ APT_HIDDEN std::string GetHashFromHashes(char const * const type) const
+ {
+ HashStringList const hashes = Hashes();
+ HashString const * const hs = hashes.find(type);
+ return hs != NULL ? hs->HashValue() : "";
+ };
};
/*}}}*/
#endif
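A short migration sketch (assumption, mirroring the deprecated wrappers above): instead of the per-algorithm accessors, callers ask Hashes() for the full list and pick the entry they trust most; find() returns NULL when the record lists no hash of that type.

#include <apt-pkg/pkgrecords.h>
#include <apt-pkg/hashes.h>
#include <cstddef>
#include <string>

// return the strongest hash present in the record, or an empty string
static std::string PreferredHash(pkgRecords::Parser const &P)
{
   HashStringList const hashes = P.Hashes();
   char const * const prefer[] = { "SHA512", "SHA256", "SHA1", "MD5Sum", NULL };
   for (char const * const *type = prefer; *type != NULL; ++type)
   {
      HashString const * const hs = hashes.find(*type);
      if (hs != NULL)
         return hs->toStr();
   }
   return "";
}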
diff --git a/apt-pkg/pkgsystem.h b/apt-pkg/pkgsystem.h
index 6e33c67ed..f88ffa7c8 100644
--- a/apt-pkg/pkgsystem.h
+++ b/apt-pkg/pkgsystem.h
@@ -85,10 +85,12 @@ class pkgSystem
virtual bool AddStatusFiles(std::vector<pkgIndexFile *> &List) = 0;
virtual bool FindIndex(pkgCache::PkgFileIterator File,
pkgIndexFile *&Found) const = 0;
-
+
/* Evauluate how 'right' we are for this system based on the filesystem
etc.. */
- virtual signed Score(Configuration const &/*Cnf*/) {return 0;};
+ virtual signed Score(Configuration const &/*Cnf*/) {
+ return 0;
+ };
pkgSystem();
virtual ~pkgSystem() {};
diff --git a/apt-pkg/sourcelist.h b/apt-pkg/sourcelist.h
index 9df0c1d74..261dd8161 100644
--- a/apt-pkg/sourcelist.h
+++ b/apt-pkg/sourcelist.h
@@ -52,7 +52,15 @@ class pkgAcquire;
class pkgIndexFile;
class metaIndex;
-class pkgSourceList
+class pkgSource
+{
+ protected:
+
+ std::vector<metaIndex *> SrcList;
+
+};
+
+class pkgSourceList : public pkgSource
{
public:
@@ -86,7 +94,7 @@ class pkgSourceList
typedef std::vector<metaIndex *>::const_iterator const_iterator;
- protected:
+ public:
std::vector<metaIndex *> SrcList;
diff --git a/apt-pkg/srcrecords.h b/apt-pkg/srcrecords.h
index e000e176a..c68d374bb 100644
--- a/apt-pkg/srcrecords.h
+++ b/apt-pkg/srcrecords.h
@@ -14,6 +14,7 @@
#define PKGLIB_SRCRECORDS_H
#include <apt-pkg/macros.h>
+#include <apt-pkg/hashes.h>
#include <string>
#include <vector>
@@ -29,15 +30,24 @@ class pkgSrcRecords
{
public:
+#if __GNUC__ >= 4
+ // ensure that con- & de-structor don't trigger this warning
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
// Describes a single file
struct File
{
- std::string MD5Hash;
- unsigned long Size;
std::string Path;
std::string Type;
+ unsigned long long Size;
+ HashStringList Hashes;
+ APT_DEPRECATED std::string MD5Hash;
};
-
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
+
// Abstract parser for each source record
class Parser
{
diff --git a/apt-pkg/tagfile.cc b/apt-pkg/tagfile.cc
index 91d176e3c..7085e7d69 100644
--- a/apt-pkg/tagfile.cc
+++ b/apt-pkg/tagfile.cc
@@ -47,16 +47,43 @@ public:
unsigned long long Size;
};
+static unsigned long AlphaHash(const char *Text, size_t Length) /*{{{*/
+{
+ /* This very simple hash function for the last 8 letters gives
+ very good performance on the debian package files */
+ if (Length > 8)
+ {
+ Text += (Length - 8);
+ Length = 8;
+ }
+ unsigned long Res = 0;
+ for (size_t i = 0; i < Length; ++i)
+ Res = ((unsigned long)(Text[i]) & 0xDF) ^ (Res << 1);
+ return Res & 0xFF;
+}
+ /*}}}*/
+
// TagFile::pkgTagFile - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
pkgTagFile::pkgTagFile(FileFd *pFd,unsigned long long Size)
+ : d(NULL)
+{
+ Init(pFd, Size);
+}
+
+void pkgTagFile::Init(FileFd *pFd,unsigned long long Size)
{
/* The size is increased by 4 because if we start with the Size of the
filename we need to try to read 1 char more to see an EOF faster, 1
char the end-pointer can be on and maybe 2 newlines need to be added
to the end of the file -> 4 extra chars */
Size += 4;
+ if(d != NULL)
+ {
+ free(d->Buffer);
+ delete d;
+ }
d = new pkgTagFilePrivate(pFd, Size);
if (d->Fd.IsOpen() == false)
@@ -128,18 +155,23 @@ bool pkgTagFile::Resize(unsigned long long const newSize)
*/
bool pkgTagFile::Step(pkgTagSection &Tag)
{
- while (Tag.Scan(d->Start,d->End - d->Start) == false)
+ if(Tag.Scan(d->Start,d->End - d->Start) == false)
{
- if (Fill() == false)
- return false;
-
- if(Tag.Scan(d->Start,d->End - d->Start))
- break;
+ do
+ {
+ if (Fill() == false)
+ return false;
- if (Resize() == false)
- return _error->Error(_("Unable to parse package file %s (1)"),
- d->Fd.Name().c_str());
+ if(Tag.Scan(d->Start,d->End - d->Start, false))
+ break;
+
+ if (Resize() == false)
+ return _error->Error(_("Unable to parse package file %s (1)"),
+ d->Fd.Name().c_str());
+
+ } while (Tag.Scan(d->Start,d->End - d->Start, false) == false);
}
+
d->Start += Tag.size();
d->iOffset += Tag.size();
@@ -233,7 +265,7 @@ bool pkgTagFile::Jump(pkgTagSection &Tag,unsigned long long Offset)
if (Fill() == false)
return false;
- if (Tag.Scan(d->Start, d->End - d->Start) == false)
+ if (Tag.Scan(d->Start, d->End - d->Start, false) == false)
return _error->Error(_("Unable to parse package file %s (2)"),d->Fd.Name().c_str());
return true;
@@ -243,27 +275,46 @@ bool pkgTagFile::Jump(pkgTagSection &Tag,unsigned long long Offset)
// ---------------------------------------------------------------------
/* */
pkgTagSection::pkgTagSection()
- : Section(0), TagCount(0), d(NULL), Stop(0)
+ : Section(0), d(NULL), Stop(0)
{
- memset(&Indexes, 0, sizeof(Indexes));
- memset(&AlphaIndexes, 0, sizeof(AlphaIndexes));
+ memset(&LookupTable, 0, sizeof(LookupTable));
}
/*}}}*/
// TagSection::Scan - Scan for the end of the header information /*{{{*/
-// ---------------------------------------------------------------------
-/* This looks for the first double new line in the data stream.
- It also indexes the tags in the section. */
-bool pkgTagSection::Scan(const char *Start,unsigned long MaxLength)
+bool pkgTagSection::Scan(const char *Start,unsigned long MaxLength, bool const Restart)
{
+ Section = Start;
const char *End = Start + MaxLength;
- Stop = Section = Start;
- memset(AlphaIndexes,0,sizeof(AlphaIndexes));
+
+ if (Restart == false && Tags.empty() == false)
+ {
+ Stop = Section + Tags.back().StartTag;
+ if (End <= Stop)
+ return false;
+ Stop = (const char *)memchr(Stop,'\n',End - Stop);
+ if (Stop == NULL)
+ return false;
+ ++Stop;
+ }
+ else
+ {
+ Stop = Section;
+ if (Tags.empty() == false)
+ {
+ memset(&LookupTable, 0, sizeof(LookupTable));
+ Tags.clear();
+ }
+ Tags.reserve(0x100);
+ }
+ size_t TagCount = Tags.size();
if (Stop == 0)
return false;
- TagCount = 0;
- while (TagCount+1 < sizeof(Indexes)/sizeof(Indexes[0]) && Stop < End)
+ TagData lastTagData(0);
+ lastTagData.EndTag = 0;
+ unsigned long lastTagHash = 0;
+ while (Stop < End)
{
TrimRecord(true,End);
@@ -275,12 +326,39 @@ bool pkgTagSection::Scan(const char *Start,unsigned long MaxLength)
// Start a new index and add it to the hash
if (isspace(Stop[0]) == 0)
{
- Indexes[TagCount++] = Stop - Section;
- AlphaIndexes[AlphaHash(Stop,End)] = TagCount;
+ // store the last found tag
+ if (lastTagData.EndTag != 0)
+ {
+ if (LookupTable[lastTagHash] != 0)
+ lastTagData.NextInBucket = LookupTable[lastTagHash];
+ LookupTable[lastTagHash] = TagCount;
+ Tags.push_back(lastTagData);
+ }
+
+ ++TagCount;
+ lastTagData = TagData(Stop - Section);
+ // find the colon separating tag and value
+ char const * Colon = (char const *) memchr(Stop, ':', End - Stop);
+ if (Colon == NULL)
+ return false;
+ // find the end of the tag (which might or might not be the colon)
+ char const * EndTag = Colon;
+ --EndTag;
+ for (; EndTag > Stop && isspace(*EndTag) != 0; --EndTag)
+ ;
+ ++EndTag;
+ lastTagData.EndTag = EndTag - Section;
+ lastTagHash = AlphaHash(Stop, EndTag - Stop);
+ // find the beginning of the value
+ Stop = Colon + 1;
+ for (; isspace(*Stop) != 0; ++Stop);
+ if (Stop >= End)
+ return false;
+ lastTagData.StartValue = Stop - Section;
}
Stop = (const char *)memchr(Stop,'\n',End - Stop);
-
+
if (Stop == 0)
return false;
@@ -291,7 +369,16 @@ bool pkgTagSection::Scan(const char *Start,unsigned long MaxLength)
// Double newline marks the end of the record
if (Stop+1 < End && Stop[1] == '\n')
{
- Indexes[TagCount] = Stop - Section;
+ if (lastTagData.EndTag != 0)
+ {
+ if (LookupTable[lastTagHash] != 0)
+ lastTagData.NextInBucket = LookupTable[lastTagHash];
+ LookupTable[lastTagHash] = TagCount;
+ Tags.push_back(lastTagData);
+ }
+
+ TagData const td(Stop - Section);
+ Tags.push_back(td);
TrimRecord(false,End);
return true;
}
@@ -320,8 +407,8 @@ void pkgTagSection::Trim()
for (; Stop > Section + 2 && (Stop[-2] == '\n' || Stop[-2] == '\r'); Stop--);
}
/*}}}*/
-// TagSection::Exists - return True if a tag exists /*{{{*/
-bool pkgTagSection::Exists(const char* const Tag)
+// TagSection::Exists - return True if a tag exists /*{{{*/
+bool pkgTagSection::Exists(const char* const Tag) const
{
unsigned int tmp;
return Find(Tag, tmp);
@@ -332,73 +419,43 @@ bool pkgTagSection::Exists(const char* const Tag)
/* This searches the section for a tag that matches the given string. */
bool pkgTagSection::Find(const char *Tag,unsigned int &Pos) const
{
- unsigned int Length = strlen(Tag);
- unsigned int I = AlphaIndexes[AlphaHash(Tag)];
- if (I == 0)
+ size_t const Length = strlen(Tag);
+ unsigned int Bucket = LookupTable[AlphaHash(Tag, Length)];
+ if (Bucket == 0)
return false;
- I--;
-
- for (unsigned int Counter = 0; Counter != TagCount; Counter++,
- I = (I+1)%TagCount)
+
+ for (; Bucket != 0; Bucket = Tags[Bucket - 1].NextInBucket)
{
- const char *St;
- St = Section + Indexes[I];
- if (strncasecmp(Tag,St,Length) != 0)
+ if ((Tags[Bucket - 1].EndTag - Tags[Bucket - 1].StartTag) != Length)
continue;
- // Make sure the colon is in the right place
- const char *C = St + Length;
- for (; isspace(*C) != 0; C++);
- if (*C != ':')
+ char const * const St = Section + Tags[Bucket - 1].StartTag;
+ if (strncasecmp(Tag,St,Length) != 0)
continue;
- Pos = I;
+
+ Pos = Bucket - 1;
return true;
}
Pos = 0;
return false;
}
- /*}}}*/
-// TagSection::Find - Locate a tag /*{{{*/
-// ---------------------------------------------------------------------
-/* This searches the section for a tag that matches the given string. */
bool pkgTagSection::Find(const char *Tag,const char *&Start,
const char *&End) const
{
- unsigned int Length = strlen(Tag);
- unsigned int I = AlphaIndexes[AlphaHash(Tag)];
- if (I == 0)
+ unsigned int Pos;
+ if (Find(Tag, Pos) == false)
return false;
- I--;
-
- for (unsigned int Counter = 0; Counter != TagCount; Counter++,
- I = (I+1)%TagCount)
- {
- const char *St;
- St = Section + Indexes[I];
- if (strncasecmp(Tag,St,Length) != 0)
- continue;
-
- // Make sure the colon is in the right place
- const char *C = St + Length;
- for (; isspace(*C) != 0; C++);
- if (*C != ':')
- continue;
- // Strip off the gunk from the start end
- Start = C;
- End = Section + Indexes[I+1];
- if (Start >= End)
- return _error->Error("Internal parsing error");
-
- for (; (isspace(*Start) != 0 || *Start == ':') && Start < End; Start++);
- for (; isspace(End[-1]) != 0 && End > Start; End--);
-
- return true;
- }
-
- Start = End = 0;
- return false;
+ Start = Section + Tags[Pos].StartValue;
+ // Strip off the gunk from the end
+ End = Section + Tags[Pos + 1].StartTag;
+ if (unlikely(Start > End))
+ return _error->Error("Internal parsing error");
+
+ for (; isspace(End[-1]) != 0 && End > Start; --End);
+
+ return true;
}
/*}}}*/
// TagSection::FindS - Find a string /*{{{*/
@@ -461,6 +518,17 @@ unsigned long long pkgTagSection::FindULL(const char *Tag, unsigned long long co
return Result;
}
/*}}}*/
+// TagSection::FindB - Find boolean value /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool pkgTagSection::FindB(const char *Tag, bool const &Default) const
+{
+ const char *Start, *Stop;
+ if (Find(Tag, Start, Stop) == false)
+ return Default;
+ return StringToBool(string(Start, Stop));
+}
+ /*}}}*/
// TagSection::FindFlag - Locate a yes/no type flag /*{{{*/
// ---------------------------------------------------------------------
/* The bits marked in Flag are masked on/off in Flags */
@@ -493,6 +561,13 @@ bool pkgTagSection::FindFlag(unsigned long &Flags, unsigned long Flag,
return true;
}
/*}}}*/
+APT_PURE unsigned int pkgTagSection::Count() const { /*{{{*/
+ if (Tags.empty() == true)
+ return 0;
+ // the last element is just marking the end and isn't a real one
+ return Tags.size() - 1;
+}
+ /*}}}*/
// TFRewrite - Rewrite a control record /*{{{*/
// ---------------------------------------------------------------------
/* This writes the control record to stdout rewriting it as necessary. The
diff --git a/apt-pkg/tagfile.h b/apt-pkg/tagfile.h
index d5b62e76d..db43bfbf9 100644
--- a/apt-pkg/tagfile.h
+++ b/apt-pkg/tagfile.h
@@ -25,6 +25,8 @@
#include <stdio.h>
#include <string>
+#include <vector>
+#include <list>
#ifndef APT_8_CLEANER_HEADERS
#include <apt-pkg/fileutl.h>
@@ -35,23 +37,20 @@ class FileFd;
class pkgTagSection
{
const char *Section;
- // We have a limit of 256 tags per section.
- unsigned int Indexes[256];
- unsigned int AlphaIndexes[0x100];
- unsigned int TagCount;
+ struct TagData {
+ unsigned int StartTag;
+ unsigned int EndTag;
+ unsigned int StartValue;
+ unsigned int NextInBucket;
+
+ TagData(unsigned int const StartTag) : StartTag(StartTag), NextInBucket(0) {}
+ };
+ std::vector<TagData> Tags;
+ unsigned int LookupTable[0x100];
+
// dpointer placeholder (for later in case we need it)
void *d;
- /* This very simple hash function for the last 8 letters gives
- very good performance on the debian package files */
- inline static unsigned long AlphaHash(const char *Text, const char *End = 0)
- {
- unsigned long Res = 0;
- for (; Text != End && *Text != ':' && *Text != 0; Text++)
- Res = ((unsigned long)(*Text) & 0xDF) ^ (Res << 1);
- return Res & 0xFF;
- }
-
protected:
const char *Stop;
@@ -63,23 +62,46 @@ class pkgTagSection
bool Find(const char *Tag,const char *&Start, const char *&End) const;
bool Find(const char *Tag,unsigned int &Pos) const;
std::string FindS(const char *Tag) const;
- signed int FindI(const char *Tag,signed long Default = 0) const ;
+ signed int FindI(const char *Tag,signed long Default = 0) const;
+ bool FindB(const char *Tag, bool const &Default = false) const;
unsigned long long FindULL(const char *Tag, unsigned long long const &Default = 0) const;
bool FindFlag(const char *Tag,unsigned long &Flags,
unsigned long Flag) const;
bool static FindFlag(unsigned long &Flags, unsigned long Flag,
const char* Start, const char* Stop);
- bool Scan(const char *Start,unsigned long MaxLength);
+
+ /** \brief searches the boundaries of the current section
+ *
+ * While parameter Start marks the beginning of the section, this method
+ * will search for the first double newline in the data stream which marks
+ * the end of the section. It also does a first pass over the content of
+ * the section, parsing each tag it encounters for later processing by Find.
+ *
+ * @param Start is the beginning of the section
+ * @param MaxLength is the size of valid data in the stream pointed to by Start
+ * @param Restart if enabled internal state will be cleared, otherwise it is
+ * assumed that now more data is available in the stream and the parsing will
+ * start were it encountered insufficent data the last time.
+ *
+ * @return \b true if section end was found, \b false otherwise.
+ * Beware that internal state will be inconsistent if \b false is returned!
+ */
+ APT_MUSTCHECK bool Scan(const char *Start, unsigned long MaxLength, bool const Restart = true);
inline unsigned long size() const {return Stop - Section;};
void Trim();
virtual void TrimRecord(bool BeforeRecord, const char* &End);
-
- inline unsigned int Count() const {return TagCount;};
- bool Exists(const char* const Tag);
-
+
+ /** \brief number of Tags in the current section
+ *
+ * Note: if a Tag is mentioned repeatedly it will be counted multiple
+ * times, but only the last occurrence is available via Find methods.
+ */
+ unsigned int Count() const;
+ bool Exists(const char* const Tag) const;
+
inline void Get(const char *&Start,const char *&Stop,unsigned int I) const
- {Start = Section + Indexes[I]; Stop = Section + Indexes[I+1];}
-
+ {Start = Section + Tags[I].StartTag; Stop = Section + Tags[I+1].StartTag;}
+
inline void GetSection(const char *&Start,const char *&Stop) const
{
Start = Section;
@@ -105,6 +127,8 @@ class pkgTagFile
unsigned long Offset();
bool Jump(pkgTagSection &Tag,unsigned long long Offset);
+ void Init(FileFd *F,unsigned long long Size = 32*1024);
+
pkgTagFile(FileFd *F,unsigned long long Size = 32*1024);
virtual ~pkgTagFile();
};
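Finally, a standalone sketch (assumption, not from the commit) exercising the extended pkgTagSection on a plain deb822 file: Step() iterates the sections, Count() reports the number of parsed tags and the new FindB() reads boolean fields such as the Acquire-By-Hash flag handled in indexrecords.cc above.

#include <apt-pkg/fileutl.h>
#include <apt-pkg/tagfile.h>
#include <iostream>

int main(int argc, char **argv)
{
   if (argc < 2)
      return 1;
   FileFd Fd(argv[1], FileFd::ReadOnly);
   pkgTagFile TagFile(&Fd);
   pkgTagSection Section;
   while (TagFile.Step(Section) == true)
   {
      std::cout << "section with " << Section.Count() << " tags" << std::endl;
      // FindB returns the given default if the tag is missing
      if (Section.FindB("Acquire-By-Hash", false) == true)
         std::cout << "  Acquire-By-Hash: yes" << std::endl;
   }
   return 0;
}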