From 448c38bdcd72b52f11ec5f326f822cf57653f81c Mon Sep 17 00:00:00 2001
From: David Kalnischkies
Date: Sat, 6 Jun 2015 12:28:00 +0200
Subject: rework hashsum verification in the acquire system
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Having every item implement its own code to verify the file(s) it
handles is an error-prone process and easy to break, especially if
items move through various stages (download, uncompress, patching, …).

With a giant rework we centralize (most of) the verification to get a
better enforcement rate and (hopefully) less chance for bugs, but it
breaks the ABI big time in exchange – and as we break it anyway, it is
broken even harder. It shouldn't affect most frontends, as they don't
deal with the acquire system at all or implement their own items, but
some do and will need to be patched (which might be an opportunity to
use apt on-board material).

The theory is simple: Items implement methods to decide if hashes need
to be checked (in this stage) and to return the expected hashes for
this item (in this stage). The verification itself is done in the
worker message passing, which has the benefit that a hashsum error is
now a proper error for the acquire system rather than a Done() which is
later revised to a Failed().
---
 apt-pkg/acquire-item.cc                            | 3750 ++++++++++----------
 apt-pkg/acquire-item.h                             |  677 ++--
 apt-pkg/acquire-method.cc                          |    5 +-
 apt-pkg/acquire-worker.cc                          |  189 +-
 apt-pkg/contrib/hashes.cc                          |   10 +
 apt-pkg/contrib/hashes.h                           |    9 +
 apt-pkg/deb/debindexfile.cc                        |    4 +-
 apt-pkg/deb/debmetaindex.cc                        |   78 +-
 apt-pkg/indexrecords.cc                            |    2 +-
 apt-pkg/pkgcache.h                                 |    7 +-
 methods/file.cc                                    |    6 +-
 test/integration/test-apt-get-source-authenticated |    2 +-
 test/integration/test-apt-sources-deb822           |   55 +-
 test/integration/test-apt-update-expected-size     |    2 +-
 test/integration/test-apt-update-file              |    4 +
 test/integration/test-apt-update-nofallback        |   11 +-
 test/integration/test-apt-update-not-modified      |   30 +-
 test/integration/test-apt-update-stale             |    9 +-
 .../test-bug-595691-empty-and-broken-archive-files |    2 +-
 .../test-ubuntu-bug-1098738-apt-get-source-md5sum  |    8 +
 test/libapt/acqprogress_test.cc                    |    6 +-
 test/libapt/hashsums_test.cc                       |    3 +
 22 files changed, 2411 insertions(+), 2458 deletions(-)

diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc
index cf89717c4..ec6ec6e84 100644
--- a/apt-pkg/acquire-item.cc
+++ b/apt-pkg/acquire-item.cc
@@ -107,36 +107,296 @@ static bool AllowInsecureRepositories(indexRecords const * const MetaIndexParser
 }
 									/*}}}*/
+// all ::HashesRequired and ::GetExpectedHashes implementations /*{{{*/
+/* ::GetExpectedHashes is abstract and has to be implemented by all subclasses.
+   It is best to implement it as broadly as possible, while ::HashesRequired defaults
+   to true and should be as restrictive as possible for false cases. Note that if
+   a hash is returned by ::GetExpectedHashes it must match. Only if it doesn't
+   ::HashesRequired is called to evaluate if its okay to have no hashes. */
+APT_CONST bool pkgAcqTransactionItem::HashesRequired() const
+{
+   /* signed repositories obviously have a parser and good hashes.
+      unsigned repositories, too, as even if we can't trust them for security,
+      we can at least trust them for integrity of the download itself.
+ Only repositories without a Release file can (obviously) not have + hashes – and they are very uncommon and strongly discouraged */ + return TransactionManager->MetaIndexParser != NULL; +} +HashStringList pkgAcqTransactionItem::GetExpectedHashes() const +{ + return GetExpectedHashesFor(GetMetaKey()); +} + +APT_CONST bool pkgAcqMetaBase::HashesRequired() const +{ + // Release and co have no hashes 'by design'. + return false; +} +HashStringList pkgAcqMetaBase::GetExpectedHashes() const +{ + return HashStringList(); +} + +APT_CONST bool pkgAcqIndexDiffs::HashesRequired() const +{ + /* FIXME: We have only hashes for uncompressed pdiffs. + rred uncompresses them on the fly while parsing. + In StateFetchDiff state we also uncompress on the fly for hash check. + Hashes are checked while searching for (next) patch to apply. */ + return false; +} +HashStringList pkgAcqIndexDiffs::GetExpectedHashes() const +{ + return HashStringList(); +} + +APT_CONST bool pkgAcqIndexMergeDiffs::HashesRequired() const +{ + /* @see #pkgAcqIndexDiffs::HashesRequired, with the difference that + we can check the rred result after all patches are applied as + we know the expected result rather than potentially apply more patches */ + return State == StateApplyDiff; +} +HashStringList pkgAcqIndexMergeDiffs::GetExpectedHashes() const +{ + if (State == StateApplyDiff) + return GetExpectedHashesFor(Target->MetaKey); + return HashStringList(); +} + +APT_CONST bool pkgAcqArchive::HashesRequired() const +{ + return LocalSource == false; +} +HashStringList pkgAcqArchive::GetExpectedHashes() const +{ + // figured out while parsing the records + return ExpectedHashes; +} + +APT_CONST bool pkgAcqFile::HashesRequired() const +{ + // supplied as parameter at creation time, so the caller decides + return ExpectedHashes.usable(); +} +HashStringList pkgAcqFile::GetExpectedHashes() const +{ + return ExpectedHashes; +} + /*}}}*/ +// Acquire::Item::QueueURI and specialisations from child classes /*{{{*/ +bool pkgAcquire::Item::QueueURI(pkgAcquire::ItemDesc &Item) +{ + Owner->Enqueue(Item); + return true; +} +/* The idea here is that an item isn't queued if it exists on disk and the + transition manager was a hit as this means that the files it contains + the checksums for can't be updated either (or they are and we are asking + for a hashsum mismatch to happen which helps nobody) */ +bool pkgAcqTransactionItem::QueueURI(pkgAcquire::ItemDesc &Item) +{ + std::string const FinalFile = GetFinalFilename(); + if (TransactionManager != NULL && TransactionManager->IMSHit == true && + FileExists(FinalFile) == true) + { + PartialFile = DestFile = FinalFile; + Status = StatDone; + return false; + } + return pkgAcquire::Item::QueueURI(Item); +} +/* The transition manager InRelease itself (or its older sisters-in-law + Release & Release.gpg) is always queued as this allows us to rerun gpgv + on it to verify that we aren't stalled with old files */ +bool pkgAcqMetaBase::QueueURI(pkgAcquire::ItemDesc &Item) +{ + return pkgAcquire::Item::QueueURI(Item); +} +/* the Diff/Index needs to queue also the up-to-date complete index file + to ensure that the list cleaner isn't eating it */ +bool pkgAcqDiffIndex::QueueURI(pkgAcquire::ItemDesc &Item) +{ + if (pkgAcqTransactionItem::QueueURI(Item) == true) + return true; + QueueOnIMSHit(); + return false; +} + /*}}}*/ +// Acquire::Item::GetFinalFilename and specialisations for child classes /*{{{*/ +std::string pkgAcquire::Item::GetFinalFilename() const +{ + return GetFinalFileNameFromURI(Desc.URI); +} 
+std::string pkgAcqDiffIndex::GetFinalFilename() const +{ + // the logic we inherent from pkgAcqBaseIndex isn't what we need here + return pkgAcquire::Item::GetFinalFilename(); +} +std::string pkgAcqIndex::GetFinalFilename() const +{ + std::string const FinalFile = GetFinalFileNameFromURI(Target->URI); + return GetCompressedFileName(Target->URI, FinalFile, CurrentCompressionExtension); +} +std::string pkgAcqMetaSig::GetFinalFilename() const +{ + return GetFinalFileNameFromURI(Target->URI); +} +std::string pkgAcqBaseIndex::GetFinalFilename() const +{ + return GetFinalFileNameFromURI(Target->URI); +} +std::string pkgAcqMetaBase::GetFinalFilename() const +{ + return GetFinalFileNameFromURI(DataTarget.URI); +} +std::string pkgAcqArchive::GetFinalFilename() const +{ + return _config->FindDir("Dir::Cache::Archives") + flNotDir(StoreFilename); +} + /*}}}*/ +// pkgAcqTransactionItem::GetMetaKey and specialisations for child classes /*{{{*/ +std::string pkgAcqTransactionItem::GetMetaKey() const +{ + return Target->MetaKey; +} +std::string pkgAcqIndex::GetMetaKey() const +{ + if (Stage == STAGE_DECOMPRESS_AND_VERIFY || CurrentCompressionExtension == "uncompressed") + return Target->MetaKey; + return Target->MetaKey + "." + CurrentCompressionExtension; +} +std::string pkgAcqDiffIndex::GetMetaKey() const +{ + return Target->MetaKey + ".diff/Index"; +} + /*}}}*/ +//pkgAcqTransactionItem::TransactionState and specialisations for child classes /*{{{*/ +bool pkgAcqTransactionItem::TransactionState(TransactionStates const state) +{ + bool const Debug = _config->FindB("Debug::Acquire::Transaction", false); + switch(state) + { + case TransactionAbort: + if(Debug == true) + std::clog << " Cancel: " << DestFile << std::endl; + if (Status == pkgAcquire::Item::StatIdle) + { + Status = pkgAcquire::Item::StatDone; + Dequeue(); + } + break; + case TransactionCommit: + if(PartialFile != "") + { + if(Debug == true) + std::clog << "mv " << PartialFile << " -> "<< DestFile << " # " << DescURI() << std::endl; + + Rename(PartialFile, DestFile); + } else { + if(Debug == true) + std::clog << "rm " << DestFile << " # " << DescURI() << std::endl; + unlink(DestFile.c_str()); + } + break; + } + return true; +} +bool pkgAcqMetaBase::TransactionState(TransactionStates const state) +{ + // Do not remove InRelease on IMSHit of Release.gpg [yes, this is very edgecasey] + if (TransactionManager->IMSHit == false) + return pkgAcqTransactionItem::TransactionState(state); + return true; +} +bool pkgAcqIndex::TransactionState(TransactionStates const state) +{ + if (pkgAcqTransactionItem::TransactionState(state) == false) + return false; + + switch (state) + { + case TransactionAbort: + if (Stage == STAGE_DECOMPRESS_AND_VERIFY) + { + // keep the compressed file, but drop the decompressed + EraseFileName.clear(); + if (PartialFile.empty() == false && flExtension(PartialFile) == "decomp") + unlink(PartialFile.c_str()); + } + break; + case TransactionCommit: + if (EraseFileName.empty() == false) + unlink(EraseFileName.c_str()); + break; + } + return true; +} +bool pkgAcqDiffIndex::TransactionState(TransactionStates const state) +{ + if (pkgAcqTransactionItem::TransactionState(state) == false) + return false; + + switch (state) + { + case TransactionCommit: + break; + case TransactionAbort: + std::string const Partial = GetPartialFileNameFromURI(Target->URI); + unlink(Partial.c_str()); + break; + } + + return true; +} + /*}}}*/ // Acquire::Item::Item - Constructor /*{{{*/ APT_IGNORE_DEPRECATED_PUSH -pkgAcquire::Item::Item(pkgAcquire *Owner, - 
HashStringList const &ExpectedHashes, - pkgAcqMetaBase *TransactionManager) - : Owner(Owner), FileSize(0), PartialSize(0), Mode(0), ID(0), Complete(false), - Local(false), QueueCounter(0), TransactionManager(TransactionManager), - ExpectedAdditionalItems(0), ExpectedHashes(ExpectedHashes) +pkgAcquire::Item::Item(pkgAcquire * const Owner) : + FileSize(0), PartialSize(0), Mode(0), Complete(false), Local(false), + QueueCounter(0), ExpectedAdditionalItems(0), Owner(Owner) { Owner->Add(this); Status = StatIdle; - if(TransactionManager != NULL) - TransactionManager->Add(this); } APT_IGNORE_DEPRECATED_POP /*}}}*/ // Acquire::Item::~Item - Destructor /*{{{*/ -// --------------------------------------------------------------------- -/* */ pkgAcquire::Item::~Item() { Owner->Remove(this); } /*}}}*/ +std::string pkgAcquire::Item::Custom600Headers() const /*{{{*/ +{ + return std::string(); +} + /*}}}*/ +std::string pkgAcquire::Item::ShortDesc() const /*{{{*/ +{ + return DescURI(); +} + /*}}}*/ +APT_CONST void pkgAcquire::Item::Finished() /*{{{*/ +{ +} + /*}}}*/ +APT_PURE pkgAcquire * pkgAcquire::Item::GetOwner() const /*{{{*/ +{ + return Owner; +} + /*}}}*/ +APT_CONST bool pkgAcquire::Item::IsTrusted() const /*{{{*/ +{ + return false; +} + /*}}}*/ // Acquire::Item::Failed - Item failed to download /*{{{*/ // --------------------------------------------------------------------- /* We return to an idle state if there are still other queues that could fetch this object */ -void pkgAcquire::Item::Failed(string Message,pkgAcquire::MethodConfig *Cnf) +void pkgAcquire::Item::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf) { if(ErrorText.empty()) ErrorText = LookupTag(Message,"Message"); @@ -169,59 +429,28 @@ void pkgAcquire::Item::Failed(string Message,pkgAcquire::MethodConfig *Cnf) Complete = false; Dequeue(); } - else - Status = StatIdle; - // check fail reason string const FailReason = LookupTag(Message, "FailReason"); - if(FailReason == "MaximumSizeExceeded") + if (FailReason == "MaximumSizeExceeded") RenameOnError(MaximumSizeExceeded); + else if (Status == StatAuthError) + RenameOnError(HashSumMismatch); // report mirror failure back to LP if we actually use a mirror - if(FailReason.empty() == false) + if (FailReason.empty() == false) ReportMirrorFailure(FailReason); else ReportMirrorFailure(ErrorText); -} - /*}}}*/ -bool pkgAcquire::Item::TransactionState(TransactionStates const state) /*{{{*/ -{ - bool const Debug = _config->FindB("Debug::Acquire::Transaction", false); - switch(state) - { - case TransactionAbort: - if(Debug == true) - std::clog << " Cancel: " << DestFile << std::endl; - if (Status == pkgAcquire::Item::StatIdle) - { - Status = pkgAcquire::Item::StatDone; - Dequeue(); - } - break; - case TransactionCommit: - if(PartialFile != "") - { - if(Debug == true) - std::clog << "mv " << PartialFile << " -> "<< DestFile << " # " << DescURI() << std::endl; - Rename(PartialFile, DestFile); - } else { - if(Debug == true) - std::clog << "rm " << DestFile << " # " << DescURI() << std::endl; - unlink(DestFile.c_str()); - } - // mark that this transaction is finished - TransactionManager = 0; - break; - } - return true; + if (QueueCounter > 1) + Status = StatIdle; } /*}}}*/ // Acquire::Item::Start - Item has begun to download /*{{{*/ // --------------------------------------------------------------------- -/* Stash status and the file size. Note that setting Complete means +/* Stash status and the file size. 
Note that setting Complete means sub-phases of the acquire process such as decompresion are operating */ -void pkgAcquire::Item::Start(string /*Message*/,unsigned long long Size) +void pkgAcquire::Item::Start(string const &/*Message*/, unsigned long long const Size) { Status = StatFetching; ErrorText.clear(); @@ -230,22 +459,24 @@ void pkgAcquire::Item::Start(string /*Message*/,unsigned long long Size) } /*}}}*/ // Acquire::Item::Done - Item downloaded OK /*{{{*/ -// --------------------------------------------------------------------- -/* */ -void pkgAcquire::Item::Done(string Message,unsigned long long Size,HashStringList const &/*Hash*/, - pkgAcquire::MethodConfig * /*Cnf*/) +void pkgAcquire::Item::Done(string const &Message, HashStringList const &Hashes, + pkgAcquire::MethodConfig const * const /*Cnf*/) { // We just downloaded something.. string FileName = LookupTag(Message,"Filename"); UsedMirror = LookupTag(Message,"UsedMirror"); - if (Complete == false && !Local && FileName == DestFile) + unsigned long long const downloadedSize = Hashes.FileSize(); + if (downloadedSize != 0) { - if (Owner->Log != 0) - Owner->Log->Fetched(Size,atoi(LookupTag(Message,"Resume-Point","0").c_str())); - } + if (Complete == false && !Local && FileName == DestFile) + { + if (Owner->Log != 0) + Owner->Log->Fetched(Hashes.FileSize(),atoi(LookupTag(Message,"Resume-Point","0").c_str())); + } - if (FileSize == 0) - FileSize= Size; + if (FileSize == 0) + FileSize= downloadedSize; + } Status = StatDone; ErrorText = string(); Owner->Dequeue(this); @@ -255,7 +486,7 @@ void pkgAcquire::Item::Done(string Message,unsigned long long Size,HashStringLis // --------------------------------------------------------------------- /* This helper function is used by a lot of item methods as their final step */ -bool pkgAcquire::Item::Rename(string From,string To) +bool pkgAcquire::Item::Rename(string const &From,string const &To) { if (From == To || rename(From.c_str(),To.c_str()) == 0) return true; @@ -271,76 +502,40 @@ bool pkgAcquire::Item::Rename(string From,string To) return false; } /*}}}*/ -// Acquire::Item::QueueURI and specialisations from child classes /*{{{*/ -/* The idea here is that an item isn't queued if it exists on disk and the - transition manager was a hit as this means that the files it contains - the checksums for can't be updated either (or they are and we are asking - for a hashsum mismatch to happen which helps nobody) */ -bool pkgAcquire::Item::QueueURI(ItemDesc &Item) +void pkgAcquire::Item::Dequeue() /*{{{*/ { - std::string const FinalFile = GetFinalFilename(); - if (TransactionManager != NULL && TransactionManager->IMSHit == true && - FileExists(FinalFile) == true) - { - PartialFile = DestFile = FinalFile; - Status = StatDone; - return false; - } - - Owner->Enqueue(Item); - return true; + Owner->Dequeue(this); } -/* The transition manager InRelease itself (or its older sisters-in-law - Release & Release.gpg) is always queued as this allows us to rerun gpgv - on it to verify that we aren't stalled with old files */ -bool pkgAcqMetaBase::QueueURI(pkgAcquire::ItemDesc &Item) -{ - Owner->Enqueue(Item); - return true; -} -/* the Diff/Index needs to queue also the up-to-date complete index file - to ensure that the list cleaner isn't eating it */ -bool pkgAcqDiffIndex::QueueURI(pkgAcquire::ItemDesc &Item) -{ - if (pkgAcquire::Item::QueueURI(Item) == true) - return true; - QueueOnIMSHit(); - return false; -} - /*}}}*/ -void pkgAcquire::Item::Dequeue() /*{{{*/ -{ - Owner->Dequeue(this); -} - /*}}}*/ -bool 
pkgAcquire::Item::RenameOnError(pkgAcquire::Item::RenameOnErrorState const error)/*{{{*/ + /*}}}*/ +bool pkgAcquire::Item::RenameOnError(pkgAcquire::Item::RenameOnErrorState const error)/*{{{*/ { if (RealFileExists(DestFile)) Rename(DestFile, DestFile + ".FAILED"); + std::string errtext; switch (error) { case HashSumMismatch: - ErrorText = _("Hash Sum mismatch"); + errtext = _("Hash Sum mismatch"); Status = StatAuthError; ReportMirrorFailure("HashChecksumFailure"); break; case SizeMismatch: - ErrorText = _("Size mismatch"); + errtext = _("Size mismatch"); Status = StatAuthError; ReportMirrorFailure("SizeFailure"); break; case InvalidFormat: - ErrorText = _("Invalid file format"); + errtext = _("Invalid file format"); Status = StatError; // do not report as usually its not the mirrors fault, but Portal/Proxy break; case SignatureError: - ErrorText = _("Signature error"); + errtext = _("Signature error"); Status = StatError; break; case NotClearsigned: - ErrorText = _("Does not start with a cleartext signature"); + errtext = _("Does not start with a cleartext signature"); Status = StatError; break; case MaximumSizeExceeded: @@ -351,6 +546,8 @@ bool pkgAcquire::Item::RenameOnError(pkgAcquire::Item::RenameOnErrorState const // no handling here, done by callers break; } + if (ErrorText.empty()) + ErrorText = errtext; return false; } /*}}}*/ @@ -360,15 +557,8 @@ void pkgAcquire::Item::SetActiveSubprocess(const std::string &subprocess)/*{{{*/ APT_IGNORE_DEPRECATED(Mode = ActiveSubprocess.c_str();) } /*}}}*/ -// Acquire::Item::GetFinalFilename - Return the full final file path /*{{{*/ -std::string pkgAcquire::Item::GetFinalFilename() const -{ - return GetFinalFileNameFromURI(Desc.URI); -} - /*}}}*/ // Acquire::Item::ReportMirrorFailure /*{{{*/ -// --------------------------------------------------------------------- -void pkgAcquire::Item::ReportMirrorFailure(string FailCode) +void pkgAcquire::Item::ReportMirrorFailure(string const &FailCode) { // we only act if a mirror was used at all if(UsedMirror.empty()) @@ -411,2083 +601,1897 @@ void pkgAcquire::Item::ReportMirrorFailure(string FailCode) } } /*}}}*/ -// AcqDiffIndex::AcqDiffIndex - Constructor /*{{{*/ -// --------------------------------------------------------------------- -/* Get the DiffIndex file first and see if there are patches available - * If so, create a pkgAcqIndexDiffs fetcher that will get and apply the - * patches. If anything goes wrong in that process, it will fall back to - * the original packages file - */ -pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire *Owner, - pkgAcqMetaBase *TransactionManager, - IndexTarget const * const Target, - HashStringList const &ExpectedHashes, - indexRecords *MetaIndexParser) - : pkgAcqBaseIndex(Owner, TransactionManager, Target, ExpectedHashes, - MetaIndexParser) +std::string pkgAcquire::Item::HashSum() const /*{{{*/ { - - Debug = _config->FindB("Debug::pkgAcquire::Diffs",false); - - RealURI = Target->URI; - Desc.Owner = this; - Desc.Description = Target->Description + ".diff/Index"; - Desc.ShortDesc = Target->ShortDesc; - Desc.URI = Target->URI + ".diff/Index"; - - DestFile = GetPartialFileNameFromURI(Desc.URI); + HashStringList const hashes = GetExpectedHashes(); + HashString const * const hs = hashes.find(NULL); + return hs != NULL ? 
hs->toStr() : ""; +} + /*}}}*/ - if(Debug) - std::clog << "pkgAcqDiffIndex: " << Desc.URI << std::endl; +pkgAcqTransactionItem::pkgAcqTransactionItem(pkgAcquire * const Owner, /*{{{*/ + pkgAcqMetaBase * const TransactionManager, IndexTarget const * const Target) : + pkgAcquire::Item(Owner), Target(Target), TransactionManager(TransactionManager) +{ + if (TransactionManager != this) + TransactionManager->Add(this); +} + /*}}}*/ +pkgAcqTransactionItem::~pkgAcqTransactionItem() /*{{{*/ +{ +} + /*}}}*/ +HashStringList pkgAcqTransactionItem::GetExpectedHashesFor(std::string const MetaKey) const /*{{{*/ +{ + if (TransactionManager->MetaIndexParser == NULL) + return HashStringList(); + indexRecords::checkSum * const R = TransactionManager->MetaIndexParser->Lookup(MetaKey); + if (R == NULL) + return HashStringList(); + return R->Hashes; +} + /*}}}*/ - // look for the current package file - CurrentPackagesFile = GetFinalFileNameFromURI(RealURI); +// AcqMetaBase - Constructor /*{{{*/ +pkgAcqMetaBase::pkgAcqMetaBase(pkgAcquire * const Owner, + pkgAcqMetaBase * const TransactionManager, + std::vector const * const IndexTargets, + IndexTarget const &DataTarget, + indexRecords * const MetaIndexParser) +: pkgAcqTransactionItem(Owner, TransactionManager, NULL), DataTarget(DataTarget), + MetaIndexParser(MetaIndexParser), LastMetaIndexParser(NULL), IndexTargets(IndexTargets), + AuthPass(false), IMSHit(false) +{ +} + /*}}}*/ +// AcqMetaBase::Add - Add a item to the current Transaction /*{{{*/ +void pkgAcqMetaBase::Add(pkgAcqTransactionItem * const I) +{ + Transaction.push_back(I); +} + /*}}}*/ +// AcqMetaBase::AbortTransaction - Abort the current Transaction /*{{{*/ +void pkgAcqMetaBase::AbortTransaction() +{ + if(_config->FindB("Debug::Acquire::Transaction", false) == true) + std::clog << "AbortTransaction: " << TransactionManager << std::endl; - // FIXME: this file:/ check is a hack to prevent fetching - // from local sources. 
this is really silly, and - // should be fixed cleanly as soon as possible - if(!FileExists(CurrentPackagesFile) || - Desc.URI.substr(0,strlen("file:/")) == "file:/") + // ensure the toplevel is in error state too + for (std::vector::iterator I = Transaction.begin(); + I != Transaction.end(); ++I) { - // we don't have a pkg file or we don't want to queue - Failed("No index file, local or canceld by user", NULL); - return; + (*I)->TransactionState(TransactionAbort); } + Transaction.clear(); +} + /*}}}*/ +// AcqMetaBase::TransactionHasError - Check for errors in Transaction /*{{{*/ +APT_PURE bool pkgAcqMetaBase::TransactionHasError() const +{ + for (std::vector::const_iterator I = Transaction.begin(); + I != Transaction.end(); ++I) + { + switch((*I)->Status) { + case StatDone: break; + case StatIdle: break; + case StatAuthError: return true; + case StatError: return true; + case StatTransientNetworkError: return true; + case StatFetching: break; + } + } + return false; +} + /*}}}*/ +// AcqMetaBase::CommitTransaction - Commit a transaction /*{{{*/ +void pkgAcqMetaBase::CommitTransaction() +{ + if(_config->FindB("Debug::Acquire::Transaction", false) == true) + std::clog << "CommitTransaction: " << this << std::endl; - if(Debug) - std::clog << "pkgAcqDiffIndex::pkgAcqDiffIndex(): " - << CurrentPackagesFile << std::endl; - - QueueURI(Desc); - + // move new files into place *and* remove files that are not + // part of the transaction but are still on disk + for (std::vector::iterator I = Transaction.begin(); + I != Transaction.end(); ++I) + { + (*I)->TransactionState(TransactionCommit); + } + Transaction.clear(); } /*}}}*/ -// Acquire::Item::GetFinalFilename - Return the full final file path /*{{{*/ -std::string pkgAcqDiffIndex::GetFinalFilename() const +// AcqMetaBase::TransactionStageCopy - Stage a file for copying /*{{{*/ +void pkgAcqMetaBase::TransactionStageCopy(pkgAcqTransactionItem * const I, + const std::string &From, + const std::string &To) { - // the logic we inherent from pkgAcqBaseIndex isn't what we need here - return pkgAcquire::Item::GetFinalFilename(); + I->PartialFile = From; + I->DestFile = To; } /*}}}*/ -// AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/ +// AcqMetaBase::TransactionStageRemoval - Stage a file for removal /*{{{*/ +void pkgAcqMetaBase::TransactionStageRemoval(pkgAcqTransactionItem * const I, + const std::string &FinalFile) +{ + I->PartialFile = ""; + I->DestFile = FinalFile; +} + /*}}}*/ +// AcqMetaBase::GenerateAuthWarning - Check gpg authentication error /*{{{*/ +bool pkgAcqMetaBase::CheckStopAuthentication(pkgAcquire::Item * const I, const std::string &Message) +{ + // FIXME: this entire function can do now that we disallow going to + // a unauthenticated state and can cleanly rollback + + string const Final = I->GetFinalFilename(); + if(FileExists(Final)) + { + I->Status = StatTransientNetworkError; + _error->Warning(_("An error occurred during the signature " + "verification. The repository is not updated " + "and the previous index files will be used. 
" + "GPG error: %s: %s\n"), + Desc.Description.c_str(), + LookupTag(Message,"Message").c_str()); + RunScripts("APT::Update::Auth-Failure"); + return true; + } else if (LookupTag(Message,"Message").find("NODATA") != string::npos) { + /* Invalid signature file, reject (LP: #346386) (Closes: #627642) */ + _error->Error(_("GPG error: %s: %s"), + Desc.Description.c_str(), + LookupTag(Message,"Message").c_str()); + I->Status = StatError; + return true; + } else { + _error->Warning(_("GPG error: %s: %s"), + Desc.Description.c_str(), + LookupTag(Message,"Message").c_str()); + } + // gpgv method failed + ReportMirrorFailure("GPGFailure"); + return false; +} + /*}}}*/ +// AcqMetaBase::Custom600Headers - Get header for AcqMetaBase /*{{{*/ // --------------------------------------------------------------------- -/* The only header we use is the last-modified header. */ -#if APT_PKG_ABI >= 413 -string pkgAcqDiffIndex::Custom600Headers() const -#else -string pkgAcqDiffIndex::Custom600Headers() -#endif +string pkgAcqMetaBase::Custom600Headers() const { - string const Final = GetFinalFilename(); + std::string Header = "\nIndex-File: true"; + std::string MaximumSize; + strprintf(MaximumSize, "\nMaximum-Size: %i", + _config->FindI("Acquire::MaxReleaseFileSize", 10*1000*1000)); + Header += MaximumSize; - if(Debug) - std::clog << "Custom600Header-IMS: " << Final << std::endl; + string const FinalFile = GetFinalFilename(); struct stat Buf; - if (stat(Final.c_str(),&Buf) != 0) - return "\nIndex-File: true"; - - return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); + if (stat(FinalFile.c_str(),&Buf) == 0) + Header += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); + + return Header; } /*}}}*/ -void pkgAcqDiffIndex::QueueOnIMSHit() const /*{{{*/ +// AcqMetaBase::QueueForSignatureVerify /*{{{*/ +void pkgAcqMetaBase::QueueForSignatureVerify(pkgAcqTransactionItem * const I, std::string const &File, std::string const &Signature) { - // list cleanup needs to know that this file as well as the already - // present index is ours, so we create an empty diff to save it for us - new pkgAcqIndexDiffs(Owner, TransactionManager, Target, - ExpectedHashes, MetaIndexParser); + AuthPass = true; + I->Desc.URI = "gpgv:" + Signature; + I->DestFile = File; + QueueURI(I->Desc); + I->SetActiveSubprocess("gpgv"); } /*}}}*/ -bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile) /*{{{*/ +// AcqMetaBase::CheckDownloadDone /*{{{*/ +bool pkgAcqMetaBase::CheckDownloadDone(pkgAcqTransactionItem * const I, const std::string &Message, HashStringList const &Hashes) const { - // failing here is fine: our caller will take care of trying to - // get the complete file if patching fails - if(Debug) - std::clog << "pkgAcqDiffIndex::ParseIndexDiff() " << IndexDiffFile - << std::endl; - - FileFd Fd(IndexDiffFile,FileFd::ReadOnly); - pkgTagFile TF(&Fd); - if (_error->PendingError() == true) - return false; - - pkgTagSection Tags; - if(unlikely(TF.Step(Tags) == false)) - return false; - - HashStringList ServerHashes; - unsigned long long ServerSize = 0; + // We have just finished downloading a Release file (it is not + // verified yet) - for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type) + string const FileName = LookupTag(Message,"Filename"); + if (FileName.empty() == true) { - std::string tagname = *type; - tagname.append("-Current"); - std::string const tmp = Tags.FindS(tagname.c_str()); - if (tmp.empty() == true) - continue; - - string hash; - unsigned long long size; - std::stringstream 
ss(tmp); - ss >> hash >> size; - if (unlikely(hash.empty() == true)) - continue; - if (unlikely(ServerSize != 0 && ServerSize != size)) - continue; - ServerHashes.push_back(HashString(*type, hash)); - ServerSize = size; + I->Status = StatError; + I->ErrorText = "Method gave a blank filename"; + return false; } - if (ServerHashes.usable() == false) + if (FileName != I->DestFile) { - if (Debug == true) - std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": Did not find a good hashsum in the index" << std::endl; + I->Local = true; + I->Desc.URI = "copy:" + FileName; + I->QueueURI(I->Desc); return false; } - if (ServerHashes != HashSums()) + // make sure to verify against the right file on I-M-S hit + bool IMSHit = StringToBool(LookupTag(Message,"IMS-Hit"), false); + if (IMSHit == false && Hashes.usable()) { - if (Debug == true) + // detect IMS-Hits servers haven't detected by Hash comparison + std::string const FinalFile = I->GetFinalFilename(); + if (RealFileExists(FinalFile) && Hashes.VerifyFile(FinalFile) == true) { - std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": Index has different hashes than parser, probably older, so fail pdiffing" << std::endl; - printHashSumComparision(CurrentPackagesFile, ServerHashes, HashSums()); + IMSHit = true; + unlink(I->DestFile.c_str()); } - return false; } - if (ServerHashes.VerifyFile(CurrentPackagesFile) == true) + if(IMSHit == true) { - // we have the same sha1 as the server so we are done here - if(Debug) - std::clog << "pkgAcqDiffIndex: Package file " << CurrentPackagesFile << " is up-to-date" << std::endl; - QueueOnIMSHit(); - return true; + // for simplicity, the transaction manager is always InRelease + // even if it doesn't exist. + if (TransactionManager != NULL) + TransactionManager->IMSHit = true; + I->PartialFile = I->DestFile = I->GetFinalFilename(); } - FileFd fd(CurrentPackagesFile, FileFd::ReadOnly); - Hashes LocalHashesCalc; - LocalHashesCalc.AddFD(fd); - HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList(); + // set Item to complete as the remaining work is all local (verify etc) + I->Complete = true; - if(Debug) - std::clog << "Server-Current: " << ServerHashes.find(NULL)->toStr() << " and we start at " - << fd.Name() << " " << fd.FileSize() << " " << LocalHashes.find(NULL)->toStr() << std::endl; + return true; +} + /*}}}*/ +bool pkgAcqMetaBase::CheckAuthDone(string const &Message) /*{{{*/ +{ + // At this point, the gpgv method has succeeded, so there is a + // valid signature from a key in the trusted keyring. 
We + // perform additional verification of its contents, and use them + // to verify the indexes we are about to download - // parse all of (provided) history - vector available_patches; - bool firstAcceptedHashes = true; - for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type) + if (TransactionManager->IMSHit == false) { - if (LocalHashes.find(*type) == NULL) - continue; - - std::string tagname = *type; - tagname.append("-History"); - std::string const tmp = Tags.FindS(tagname.c_str()); - if (tmp.empty() == true) - continue; - - string hash, filename; - unsigned long long size; - std::stringstream ss(tmp); - - while (ss >> hash >> size >> filename) + // open the last (In)Release if we have it + std::string const FinalFile = GetFinalFilename(); + std::string FinalRelease; + std::string FinalInRelease; + if (APT::String::Endswith(FinalFile, "InRelease")) { - if (unlikely(hash.empty() == true || filename.empty() == true)) - continue; - - // see if we have a record for this file already - std::vector::iterator cur = available_patches.begin(); - for (; cur != available_patches.end(); ++cur) - { - if (cur->file != filename || unlikely(cur->result_size != size)) - continue; - cur->result_hashes.push_back(HashString(*type, hash)); - break; - } - if (cur != available_patches.end()) - continue; - if (firstAcceptedHashes == true) - { - DiffInfo next; - next.file = filename; - next.result_hashes.push_back(HashString(*type, hash)); - next.result_size = size; - next.patch_size = 0; - available_patches.push_back(next); - } + FinalInRelease = FinalFile; + FinalRelease = FinalFile.substr(0, FinalFile.length() - strlen("InRelease")) + "Release"; + } + else + { + FinalInRelease = FinalFile.substr(0, FinalFile.length() - strlen("Release")) + "InRelease"; + FinalRelease = FinalFile; + } + if (RealFileExists(FinalInRelease) || RealFileExists(FinalRelease)) + { + TransactionManager->LastMetaIndexParser = new indexRecords; + _error->PushToStack(); + if (RealFileExists(FinalInRelease)) + TransactionManager->LastMetaIndexParser->Load(FinalInRelease); else + TransactionManager->LastMetaIndexParser->Load(FinalRelease); + // its unlikely to happen, but if what we have is bad ignore it + if (_error->PendingError()) { - if (Debug == true) - std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": File " << filename - << " wasn't in the list for the first parsed hash! (history)" << std::endl; - break; + delete TransactionManager->LastMetaIndexParser; + TransactionManager->LastMetaIndexParser = NULL; } + _error->RevertToStack(); } - firstAcceptedHashes = false; } - if (unlikely(available_patches.empty() == true)) + if (TransactionManager->MetaIndexParser->Load(DestFile) == false) { - if (Debug) - std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": " - << "Couldn't find any patches for the patch series." 
<< std::endl; + Status = StatAuthError; + ErrorText = TransactionManager->MetaIndexParser->ErrorText; return false; } - for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type) + if (!VerifyVendor(Message)) { - if (LocalHashes.find(*type) == NULL) - continue; + Status = StatAuthError; + return false; + } - std::string tagname = *type; - tagname.append("-Patches"); - std::string const tmp = Tags.FindS(tagname.c_str()); - if (tmp.empty() == true) - continue; + if (_config->FindB("Debug::pkgAcquire::Auth", false)) + std::cerr << "Signature verification succeeded: " + << DestFile << std::endl; - string hash, filename; - unsigned long long size; - std::stringstream ss(tmp); + // Download further indexes with verification + QueueIndexes(true); - while (ss >> hash >> size >> filename) + return true; +} + /*}}}*/ +void pkgAcqMetaBase::QueueIndexes(bool const verify) /*{{{*/ +{ + // at this point the real Items are loaded in the fetcher + ExpectedAdditionalItems = 0; + + vector ::const_iterator Target; + for (Target = IndexTargets->begin(); + Target != IndexTargets->end(); + ++Target) + { + if (verify == true && TransactionManager->MetaIndexParser->Exists((*Target)->MetaKey) == false) { - if (unlikely(hash.empty() == true || filename.empty() == true)) + // optional target that we do not have in the Release file are skipped + if ((*Target)->IsOptional()) continue; - // see if we have a record for this file already - std::vector::iterator cur = available_patches.begin(); - for (; cur != available_patches.end(); ++cur) - { - if (cur->file != filename) - continue; - if (unlikely(cur->patch_size != 0 && cur->patch_size != size)) - continue; - cur->patch_hashes.push_back(HashString(*type, hash)); - cur->patch_size = size; - break; - } - if (cur != available_patches.end()) - continue; - if (Debug == true) - std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": File " << filename - << " wasn't in the list for the first parsed hash! (patches)" << std::endl; - break; + Status = StatAuthError; + strprintf(ErrorText, _("Unable to find expected entry '%s' in Release file (Wrong sources.list entry or malformed file)"), (*Target)->MetaKey.c_str()); + return; } + + /* Queue the Index file (Packages, Sources, Translation-$foo + (either diff or full packages files, depending + on the users option) - we also check if the PDiff Index file is listed + in the Meta-Index file. 
Ideal would be if pkgAcqDiffIndex would test this + instead, but passing the required info to it is to much hassle */ + if(_config->FindB("Acquire::PDiffs",true) == true && (verify == false || + TransactionManager->MetaIndexParser->Exists((*Target)->MetaKey + ".diff/Index") == true)) + new pkgAcqDiffIndex(Owner, TransactionManager, *Target); + else + new pkgAcqIndex(Owner, TransactionManager, *Target); } +} + /*}}}*/ +bool pkgAcqMetaBase::VerifyVendor(string const &Message) /*{{{*/ +{ + string::size_type pos; - bool foundStart = false; - for (std::vector::iterator cur = available_patches.begin(); - cur != available_patches.end(); ++cur) + // check for missing sigs (that where not fatal because otherwise we had + // bombed earlier) + string missingkeys; + string msg = _("There is no public key available for the " + "following key IDs:\n"); + pos = Message.find("NO_PUBKEY "); + if (pos != std::string::npos) { - if (LocalHashes != cur->result_hashes) - continue; - - available_patches.erase(available_patches.begin(), cur); - foundStart = true; - break; + string::size_type start = pos+strlen("NO_PUBKEY "); + string Fingerprint = Message.substr(start, Message.find("\n")-start); + missingkeys += (Fingerprint); } + if(!missingkeys.empty()) + _error->Warning("%s", (msg + missingkeys).c_str()); - if (foundStart == false || unlikely(available_patches.empty() == true)) + string Transformed = TransactionManager->MetaIndexParser->GetExpectedDist(); + + if (Transformed == "../project/experimental") { - if (Debug) - std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": " - << "Couldn't find the start of the patch series." << std::endl; - return false; + Transformed = "experimental"; } - // patching with too many files is rather slow compared to a fast download - unsigned long const fileLimit = _config->FindI("Acquire::PDiffs::FileLimit", 0); - if (fileLimit != 0 && fileLimit < available_patches.size()) + pos = Transformed.rfind('/'); + if (pos != string::npos) { - if (Debug) - std::clog << "Need " << available_patches.size() << " diffs (Limit is " << fileLimit - << ") so fallback to complete download" << std::endl; - return false; + Transformed = Transformed.substr(0, pos); } - // calculate the size of all patches we have to get - // note that all sizes are uncompressed, while we download compressed files - unsigned long long patchesSize = 0; - for (std::vector::const_iterator cur = available_patches.begin(); - cur != available_patches.end(); ++cur) - patchesSize += cur->patch_size; - unsigned long long const sizeLimit = ServerSize * _config->FindI("Acquire::PDiffs::SizeLimit", 100); - if (sizeLimit > 0 && (sizeLimit/100) < patchesSize) + if (Transformed == ".") { - if (Debug) - std::clog << "Need " << patchesSize << " bytes (Limit is " << sizeLimit/100 - << ") so fallback to complete download" << std::endl; - return false; + Transformed = ""; } - // we have something, queue the diffs - string::size_type const last_space = Description.rfind(" "); - if(last_space != string::npos) - Description.erase(last_space, Description.size()-last_space); + if (_config->FindB("Acquire::Check-Valid-Until", true) == true && + TransactionManager->MetaIndexParser->GetValidUntil() > 0) { + time_t const invalid_since = time(NULL) - TransactionManager->MetaIndexParser->GetValidUntil(); + if (invalid_since > 0) + { + std::string errmsg; + strprintf(errmsg, + // TRANSLATOR: The first %s is the URL of the bad Release file, the second is + // the time since then the file is invalid - formated in the same way as in + // the 
download progress display (e.g. 7d 3h 42min 1s) + _("Release file for %s is expired (invalid since %s). " + "Updates for this repository will not be applied."), + DataTarget.URI.c_str(), TimeToStr(invalid_since).c_str()); + if (ErrorText.empty()) + ErrorText = errmsg; + return _error->Error("%s", errmsg.c_str()); + } + } - /* decide if we should download patches one by one or in one go: - The first is good if the server merges patches, but many don't so client - based merging can be attempt in which case the second is better. - "bad things" will happen if patches are merged on the server, - but client side merging is attempt as well */ - bool pdiff_merge = _config->FindB("Acquire::PDiffs::Merge", true); - if (pdiff_merge == true) + /* Did we get a file older than what we have? This is a last minute IMS hit and doubles + as a prevention of downgrading us to older (still valid) files */ + if (TransactionManager->IMSHit == false && TransactionManager->LastMetaIndexParser != NULL && + TransactionManager->LastMetaIndexParser->GetDate() > TransactionManager->MetaIndexParser->GetDate()) { - // reprepro adds this flag if it has merged patches on the server - std::string const precedence = Tags.FindS("X-Patch-Precedence"); - pdiff_merge = (precedence != "merged"); + TransactionManager->IMSHit = true; + unlink(DestFile.c_str()); + PartialFile = DestFile = GetFinalFilename(); + delete TransactionManager->MetaIndexParser; + TransactionManager->MetaIndexParser = TransactionManager->LastMetaIndexParser; + TransactionManager->LastMetaIndexParser = NULL; } - if (pdiff_merge == false) + if (_config->FindB("Debug::pkgAcquire::Auth", false)) { - new pkgAcqIndexDiffs(Owner, TransactionManager, Target, ExpectedHashes, - MetaIndexParser, available_patches); + std::cerr << "Got Codename: " << TransactionManager->MetaIndexParser->GetDist() << std::endl; + std::cerr << "Expecting Dist: " << TransactionManager->MetaIndexParser->GetExpectedDist() << std::endl; + std::cerr << "Transformed Dist: " << Transformed << std::endl; } - else + + if (TransactionManager->MetaIndexParser->CheckDist(Transformed) == false) { - std::vector *diffs = new std::vector(available_patches.size()); - for(size_t i = 0; i < available_patches.size(); ++i) - (*diffs)[i] = new pkgAcqIndexMergeDiffs(Owner, TransactionManager, - Target, - ExpectedHashes, - MetaIndexParser, - available_patches[i], - diffs); + // This might become fatal one day +// Status = StatAuthError; +// ErrorText = "Conflicting distribution; expected " +// + MetaIndexParser->GetExpectedDist() + " but got " +// + MetaIndexParser->GetDist(); +// return false; + if (!Transformed.empty()) + { + _error->Warning(_("Conflicting distribution: %s (expected %s but got %s)"), + Desc.Description.c_str(), + Transformed.c_str(), + TransactionManager->MetaIndexParser->GetDist().c_str()); + } } - Complete = false; - Status = StatDone; - Dequeue(); return true; } /*}}}*/ -void pkgAcqDiffIndex::Failed(string Message,pkgAcquire::MethodConfig * Cnf)/*{{{*/ -{ - Item::Failed(Message,Cnf); - Status = StatDone; - - if(Debug) - std::clog << "pkgAcqDiffIndex failed: " << Desc.URI << " with " << Message << std::endl - << "Falling back to normal index file acquire" << std::endl; - new pkgAcqIndex(Owner, TransactionManager, Target, ExpectedHashes, MetaIndexParser); +pkgAcqMetaClearSig::pkgAcqMetaClearSig(pkgAcquire * const Owner, /*{{{*/ + IndexTarget const &ClearsignedTarget, + IndexTarget const &DetachedDataTarget, IndexTarget const &DetachedSigTarget, + const vector* const IndexTargets, + 
indexRecords * const MetaIndexParser) : + pkgAcqMetaIndex(Owner, this, ClearsignedTarget, DetachedSigTarget, IndexTargets, MetaIndexParser), + ClearsignedTarget(ClearsignedTarget), + DetachedDataTarget(DetachedDataTarget), DetachedSigTarget(DetachedSigTarget) +{ + // index targets + (worst case:) Release/Release.gpg + ExpectedAdditionalItems = IndexTargets->size() + 2; + TransactionManager->Add(this); } /*}}}*/ -bool pkgAcqDiffIndex::TransactionState(TransactionStates const state) /*{{{*/ +pkgAcqMetaClearSig::~pkgAcqMetaClearSig() /*{{{*/ { - if (pkgAcquire::Item::TransactionState(state) == false) - return false; - - switch (state) - { - case TransactionCommit: - break; - case TransactionAbort: - std::string const Partial = GetPartialFileNameFromURI(RealURI); - unlink(Partial.c_str()); - break; - } - - return true; } /*}}}*/ -void pkgAcqDiffIndex::Done(string Message,unsigned long long Size,HashStringList const &Hashes, /*{{{*/ - pkgAcquire::MethodConfig *Cnf) +// pkgAcqMetaClearSig::Custom600Headers - Insert custom request headers /*{{{*/ +string pkgAcqMetaClearSig::Custom600Headers() const { - if(Debug) - std::clog << "pkgAcqDiffIndex::Done(): " << Desc.URI << std::endl; - - Item::Done(Message, Size, Hashes, Cnf); + string Header = pkgAcqMetaBase::Custom600Headers(); + Header += "\nFail-Ignore: true"; + return Header; +} + /*}}}*/ +// pkgAcqMetaClearSig::Done - We got a file /*{{{*/ +class APT_HIDDEN DummyItem : public pkgAcquire::Item +{ + IndexTarget const * const Target; + public: + virtual std::string DescURI() const {return Target->URI;}; + virtual HashStringList GetExpectedHashes() const {return HashStringList();}; - // verify the index target - if(Target && Target->MetaKey != "" && MetaIndexParser && Hashes.usable()) + DummyItem(pkgAcquire * const Owner, IndexTarget const * const Target) : + pkgAcquire::Item(Owner), Target(Target) { - std::string IndexMetaKey = Target->MetaKey + ".diff/Index"; - indexRecords::checkSum *Record = MetaIndexParser->Lookup(IndexMetaKey); - if(Record && Record->Hashes.usable() && Hashes != Record->Hashes) - { - RenameOnError(HashSumMismatch); - printHashSumComparision(RealURI, Record->Hashes, Hashes); - Failed(Message, Cnf); - return; - } - + Status = StatDone; + DestFile = GetFinalFileNameFromURI(Target->URI); } +}; +void pkgAcqMetaClearSig::Done(std::string const &Message, + HashStringList const &Hashes, + pkgAcquire::MethodConfig const * const Cnf) +{ + Item::Done(Message, Hashes, Cnf); - string const FinalFile = GetFinalFilename(); - if(StringToBool(LookupTag(Message,"IMS-Hit"),false)) - DestFile = FinalFile; - - if(ParseDiffIndex(DestFile) == false) + // if we expect a ClearTextSignature (InRelease), ensure that + // this is what we get and if not fail to queue a + // Release/Release.gpg, see #346386 + if (FileExists(DestFile) && !StartsWithGPGClearTextSignature(DestFile)) { - Failed("Message: Couldn't parse pdiff index", Cnf); - // queue for final move - this should happen even if we fail - // while parsing (e.g. on sizelimit) and download the complete file. 
- TransactionManager->TransactionStageCopy(this, DestFile, FinalFile); + pkgAcquire::Item::Failed(Message, Cnf); + RenameOnError(NotClearsigned); + TransactionManager->AbortTransaction(); return; } - TransactionManager->TransactionStageCopy(this, DestFile, FinalFile); - - Complete = true; - Status = StatDone; - Dequeue(); - - return; + if(AuthPass == false) + { + if(CheckDownloadDone(this, Message, Hashes) == true) + QueueForSignatureVerify(this, DestFile, DestFile); + return; + } + else if(CheckAuthDone(Message) == true) + { + if (TransactionManager->IMSHit == false) + TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename()); + else if (RealFileExists(GetFinalFilename()) == false) + { + // We got an InRelease file IMSHit, but we haven't one, which means + // we had a valid Release/Release.gpg combo stepping in, which we have + // to 'acquire' now to ensure list cleanup isn't removing them + new DummyItem(Owner, &DetachedDataTarget); + new DummyItem(Owner, &DetachedSigTarget); + } + } } /*}}}*/ -// AcqIndexDiffs::AcqIndexDiffs - Constructor /*{{{*/ -// --------------------------------------------------------------------- -/* The package diff is added to the queue. one object is constructed - * for each diff and the index - */ -pkgAcqIndexDiffs::pkgAcqIndexDiffs(pkgAcquire *Owner, - pkgAcqMetaBase *TransactionManager, - struct IndexTarget const * const Target, - HashStringList const &ExpectedHashes, - indexRecords *MetaIndexParser, - vector diffs) - : pkgAcqBaseIndex(Owner, TransactionManager, Target, ExpectedHashes, MetaIndexParser), - available_patches(diffs) +void pkgAcqMetaClearSig::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf) /*{{{*/ { - DestFile = GetPartialFileNameFromURI(Target->URI); - - Debug = _config->FindB("Debug::pkgAcquire::Diffs",false); + Item::Failed(Message, Cnf); - RealURI = Target->URI; - Desc.Owner = this; - Description = Target->Description; - Desc.ShortDesc = Target->ShortDesc; + // we failed, we will not get additional items from this method + ExpectedAdditionalItems = 0; - if(available_patches.empty() == true) + if (AuthPass == false) { - // we are done (yeah!), check hashes against the final file - DestFile = GetFinalFileNameFromURI(Target->URI); - Finish(true); + // Queue the 'old' InRelease file for removal if we try Release.gpg + // as otherwise the file will stay around and gives a false-auth + // impression (CVE-2012-0214) + TransactionManager->TransactionStageRemoval(this, GetFinalFilename()); + Status = StatDone; + + new pkgAcqMetaIndex(Owner, TransactionManager, DetachedDataTarget, DetachedSigTarget, IndexTargets, TransactionManager->MetaIndexParser); } else { - // patching needs to be bootstrapped with the 'old' version - std::string const PartialFile = GetPartialFileNameFromURI(RealURI); - if (RealFileExists(PartialFile) == false) + if(CheckStopAuthentication(this, Message)) + return; + + _error->Warning(_("The data from '%s' is not signed. 
Packages " + "from that repository can not be authenticated."), + ClearsignedTarget.Description.c_str()); + + // No Release file was present, or verification failed, so fall + // back to queueing Packages files without verification + // only allow going further if the users explicitely wants it + if(AllowInsecureRepositories(TransactionManager->MetaIndexParser, TransactionManager, this) == true) { - if (symlink(GetFinalFilename().c_str(), PartialFile.c_str()) != 0) + Status = StatDone; + + /* InRelease files become Release files, otherwise + * they would be considered as trusted later on */ + string const FinalRelease = GetFinalFileNameFromURI(DetachedDataTarget.URI); + string const PartialRelease = GetPartialFileNameFromURI(DetachedDataTarget.URI); + string const FinalReleasegpg = GetFinalFileNameFromURI(DetachedSigTarget.URI); + string const FinalInRelease = GetFinalFilename(); + Rename(DestFile, PartialRelease); + TransactionManager->TransactionStageCopy(this, PartialRelease, FinalRelease); + + if (RealFileExists(FinalReleasegpg) || RealFileExists(FinalInRelease)) { - Failed("Link creation of " + PartialFile + " to " + GetFinalFilename() + " failed", NULL); - return; + // open the last Release if we have it + if (TransactionManager->IMSHit == false) + { + TransactionManager->LastMetaIndexParser = new indexRecords; + _error->PushToStack(); + if (RealFileExists(FinalInRelease)) + TransactionManager->LastMetaIndexParser->Load(FinalInRelease); + else + TransactionManager->LastMetaIndexParser->Load(FinalRelease); + // its unlikely to happen, but if what we have is bad ignore it + if (_error->PendingError()) + { + delete TransactionManager->LastMetaIndexParser; + TransactionManager->LastMetaIndexParser = NULL; + } + _error->RevertToStack(); + } } - } - // get the next diff - State = StateFetchDiff; - QueueNextDiff(); + // we parse the indexes here because at this point the user wanted + // a repository that may potentially harm him + if (TransactionManager->MetaIndexParser->Load(PartialRelease) == false || VerifyVendor(Message) == false) + /* expired Release files are still a problem you need extra force for */; + else + QueueIndexes(true); + } } } /*}}}*/ -void pkgAcqIndexDiffs::Failed(string Message,pkgAcquire::MethodConfig * Cnf)/*{{{*/ -{ - Item::Failed(Message,Cnf); - Status = StatDone; - if(Debug) - std::clog << "pkgAcqIndexDiffs failed: " << Desc.URI << " with " << Message << std::endl - << "Falling back to normal index file acquire" << std::endl; - DestFile = GetPartialFileNameFromURI(Target->URI); - RenameOnError(PDiffError); - new pkgAcqIndex(Owner, TransactionManager, Target, ExpectedHashes, MetaIndexParser); - Finish(); -} - /*}}}*/ -// Finish - helper that cleans the item out of the fetcher queue /*{{{*/ -void pkgAcqIndexDiffs::Finish(bool allDone) +pkgAcqMetaIndex::pkgAcqMetaIndex(pkgAcquire * const Owner, /*{{{*/ + pkgAcqMetaBase * const TransactionManager, + IndexTarget const &DataTarget, + IndexTarget const &DetachedSigTarget, + vector const * const IndexTargets, + indexRecords * const MetaIndexParser) : + pkgAcqMetaBase(Owner, TransactionManager, IndexTargets, DataTarget, MetaIndexParser), + DetachedSigTarget(DetachedSigTarget) { - if(Debug) - std::clog << "pkgAcqIndexDiffs::Finish(): " - << allDone << " " - << Desc.URI << std::endl; - - // we restore the original name, this is required, otherwise - // the file will be cleaned - if(allDone) - { - if(HashSums().usable() && !HashSums().VerifyFile(DestFile)) - { - RenameOnError(HashSumMismatch); - Dequeue(); - return; - } + 
if(_config->FindB("Debug::Acquire::Transaction", false) == true) + std::clog << "New pkgAcqMetaIndex with TransactionManager " + << this->TransactionManager << std::endl; - TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename()); + DestFile = GetPartialFileNameFromURI(DataTarget.URI); - // this is for the "real" finish - Complete = true; - Status = StatDone; - Dequeue(); - if(Debug) - std::clog << "\n\nallDone: " << DestFile << "\n" << std::endl; - return; - } + // Create the item + Desc.Description = DataTarget.Description; + Desc.Owner = this; + Desc.ShortDesc = DataTarget.ShortDesc; + Desc.URI = DataTarget.URI; - if(Debug) - std::clog << "Finishing: " << Desc.URI << std::endl; - Complete = false; - Status = StatDone; - Dequeue(); - return; + // we expect more item + ExpectedAdditionalItems = IndexTargets->size(); + QueueURI(Desc); } /*}}}*/ -bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/ +void pkgAcqMetaIndex::Done(string const &Message, /*{{{*/ + HashStringList const &Hashes, + pkgAcquire::MethodConfig const * const Cfg) { - // calc sha1 of the just patched file - std::string const FinalFile = GetPartialFileNameFromURI(RealURI); + Item::Done(Message,Hashes,Cfg); - if(!FileExists(FinalFile)) + if(CheckDownloadDone(this, Message, Hashes)) { - Failed("Message: No FinalFile " + FinalFile + " available", NULL); - return false; + // we have a Release file, now download the Signature, all further + // verify/queue for additional downloads will be done in the + // pkgAcqMetaSig::Done() code + new pkgAcqMetaSig(Owner, TransactionManager, &DetachedSigTarget, this); } +} + /*}}}*/ +// pkgAcqMetaIndex::Failed - no Release file present /*{{{*/ +void pkgAcqMetaIndex::Failed(string const &Message, + pkgAcquire::MethodConfig const * const Cnf) +{ + pkgAcquire::Item::Failed(Message, Cnf); + Status = StatDone; - FileFd fd(FinalFile, FileFd::ReadOnly); - Hashes LocalHashesCalc; - LocalHashesCalc.AddFD(fd); - HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList(); - - if(Debug) - std::clog << "QueueNextDiff: " << FinalFile << " (" << LocalHashes.find(NULL)->toStr() << ")" << std::endl; + _error->Warning(_("The repository '%s' does not have a Release file. 
" + "This is deprecated, please contact the owner of the " + "repository."), DataTarget.Description.c_str()); - if (unlikely(LocalHashes.usable() == false || ExpectedHashes.usable() == false)) + // No Release file was present so fall + // back to queueing Packages files without verification + // only allow going further if the users explicitely wants it + if(AllowInsecureRepositories(TransactionManager->MetaIndexParser, TransactionManager, this) == true) { - Failed("Local/Expected hashes are not usable", NULL); - return false; - } - + // ensure old Release files are removed + TransactionManager->TransactionStageRemoval(this, GetFinalFilename()); + delete TransactionManager->MetaIndexParser; + TransactionManager->MetaIndexParser = NULL; - // final file reached before all patches are applied - if(LocalHashes == ExpectedHashes) - { - Finish(true); - return true; - } - - // remove all patches until the next matching patch is found - // this requires the Index file to be ordered - for(vector::iterator I = available_patches.begin(); - available_patches.empty() == false && - I != available_patches.end() && - I->result_hashes != LocalHashes; - ++I) - { - available_patches.erase(I); + // queue without any kind of hashsum support + QueueIndexes(false); } +} + /*}}}*/ +void pkgAcqMetaIndex::Finished() /*{{{*/ +{ + if(_config->FindB("Debug::Acquire::Transaction", false) == true) + std::clog << "Finished: " << DestFile <TransactionHasError() == false) + TransactionManager->CommitTransaction(); +} + /*}}}*/ +std::string pkgAcqMetaIndex::DescURI() const /*{{{*/ +{ + return DataTarget.URI; +} + /*}}}*/ - // error checking and falling back if no patch was found - if(available_patches.empty() == true) - { - Failed("No patches left to reach target", NULL); - return false; - } +// AcqMetaSig::AcqMetaSig - Constructor /*{{{*/ +pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire * const Owner, + pkgAcqMetaBase * const TransactionManager, + IndexTarget const * const Target, + pkgAcqMetaIndex * const MetaIndex) : + pkgAcqTransactionItem(Owner, TransactionManager, Target), MetaIndex(MetaIndex) +{ + DestFile = GetPartialFileNameFromURI(Target->URI); - // queue the right diff - Desc.URI = RealURI + ".diff/" + available_patches[0].file + ".gz"; - Desc.Description = Description + " " + available_patches[0].file + string(".pdiff"); - DestFile = GetPartialFileNameFromURI(RealURI + ".diff/" + available_patches[0].file); + // remove any partial downloaded sig-file in partial/. 
+ // it may confuse proxies and is too small to warrant a + // partial download anyway + unlink(DestFile.c_str()); - if(Debug) - std::clog << "pkgAcqIndexDiffs::QueueNextDiff(): " << Desc.URI << std::endl; + // set the TransactionManager + if(_config->FindB("Debug::Acquire::Transaction", false) == true) + std::clog << "New pkgAcqMetaSig with TransactionManager " + << TransactionManager << std::endl; - QueueURI(Desc); + // Create the item + Desc.Description = Target->Description; + Desc.Owner = this; + Desc.ShortDesc = Target->ShortDesc; + Desc.URI = Target->URI; - return true; + // If we got a hit for Release, we will get one for Release.gpg too (or obscure errors), + // so we skip the download step and go instantly to verification + if (TransactionManager->IMSHit == true && RealFileExists(GetFinalFilename())) + { + Complete = true; + Status = StatDone; + PartialFile = DestFile = GetFinalFilename(); + MetaIndexFileSignature = DestFile; + MetaIndex->QueueForSignatureVerify(this, MetaIndex->DestFile, DestFile); + } + else + QueueURI(Desc); } /*}}}*/ -void pkgAcqIndexDiffs::Done(string Message,unsigned long long Size, HashStringList const &Hashes, /*{{{*/ - pkgAcquire::MethodConfig *Cnf) +pkgAcqMetaSig::~pkgAcqMetaSig() /*{{{*/ { - if(Debug) - std::clog << "pkgAcqIndexDiffs::Done(): " << Desc.URI << std::endl; - - Item::Done(Message, Size, Hashes, Cnf); - - // FIXME: verify this download too before feeding it to rred - std::string const FinalFile = GetPartialFileNameFromURI(RealURI); - - // success in downloading a diff, enter ApplyDiff state - if(State == StateFetchDiff) +} + /*}}}*/ +// AcqMetaSig::Done - The signature was downloaded/verified /*{{{*/ +void pkgAcqMetaSig::Done(string const &Message, HashStringList const &Hashes, + pkgAcquire::MethodConfig const * const Cfg) +{ + if (MetaIndexFileSignature.empty() == false) { - FileFd fd(DestFile, FileFd::ReadOnly, FileFd::Gzip); - class Hashes LocalHashesCalc; - LocalHashesCalc.AddFD(fd); - HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList(); + DestFile = MetaIndexFileSignature; + MetaIndexFileSignature.clear(); + } + Item::Done(Message, Hashes, Cfg); - if (fd.Size() != available_patches[0].patch_size || - available_patches[0].patch_hashes != LocalHashes) + if(MetaIndex->AuthPass == false) + { + if(MetaIndex->CheckDownloadDone(this, Message, Hashes) == true) { - // patchfiles are dated, so bad indicates a bad download, so kill it - unlink(DestFile.c_str()); - Failed("Patch has Size/Hashsum mismatch", NULL); - return; + // destfile will be modified to point to MetaIndexFile for the + // gpgv method, so we need to save it here + MetaIndexFileSignature = DestFile; + MetaIndex->QueueForSignatureVerify(this, MetaIndex->DestFile, DestFile); + } + return; + } + else if(MetaIndex->CheckAuthDone(Message) == true) + { + if (TransactionManager->IMSHit == false) + { + TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename()); + TransactionManager->TransactionStageCopy(MetaIndex, MetaIndex->DestFile, MetaIndex->GetFinalFilename()); } + } +} + /*}}}*/ +void pkgAcqMetaSig::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)/*{{{*/ +{ + Item::Failed(Message,Cnf); - // rred excepts the patch as $FinalFile.ed - Rename(DestFile,FinalFile+".ed"); + // check if we need to fail at this point + if (MetaIndex->AuthPass == true && MetaIndex->CheckStopAuthentication(this, Message)) + return; - if(Debug) - std::clog << "Sending to rred method: " << FinalFile << std::endl; + string const FinalRelease = 
MetaIndex->GetFinalFilename(); + string const FinalReleasegpg = GetFinalFilename(); + string const FinalInRelease = TransactionManager->GetFinalFilename(); - State = StateApplyDiff; - Local = true; - Desc.URI = "rred:" + FinalFile; - QueueURI(Desc); - SetActiveSubprocess("rred"); - return; - } + if (RealFileExists(FinalReleasegpg) || RealFileExists(FinalInRelease)) + { + std::string downgrade_msg; + strprintf(downgrade_msg, _("The repository '%s' is no longer signed."), + MetaIndex->DataTarget.Description.c_str()); + if(_config->FindB("Acquire::AllowDowngradeToInsecureRepositories")) + { + // meh, the users wants to take risks (we still mark the packages + // from this repository as unauthenticated) + _error->Warning("%s", downgrade_msg.c_str()); + _error->Warning(_("This is normally not allowed, but the option " + "Acquire::AllowDowngradeToInsecureRepositories was " + "given to override it.")); + Status = StatDone; + } else { + _error->Error("%s", downgrade_msg.c_str()); + if (TransactionManager->IMSHit == false) + Rename(MetaIndex->DestFile, MetaIndex->DestFile + ".FAILED"); + Item::Failed("Message: " + downgrade_msg, Cnf); + TransactionManager->AbortTransaction(); + return; + } + } + else + _error->Warning(_("The data from '%s' is not signed. Packages " + "from that repository can not be authenticated."), + MetaIndex->DataTarget.Description.c_str()); + // ensures that a Release.gpg file in the lists/ is removed by the transaction + TransactionManager->TransactionStageRemoval(this, DestFile); - // success in download/apply a diff, queue next (if needed) - if(State == StateApplyDiff) + // only allow going further if the users explicitely wants it + if(AllowInsecureRepositories(TransactionManager->MetaIndexParser, TransactionManager, this) == true) { - // remove the just applied patch - available_patches.erase(available_patches.begin()); - unlink((FinalFile + ".ed").c_str()); - - // move into place - if(Debug) + if (RealFileExists(FinalReleasegpg) || RealFileExists(FinalInRelease)) { - std::clog << "Moving patched file in place: " << std::endl - << DestFile << " -> " << FinalFile << std::endl; + // open the last Release if we have it + if (TransactionManager->IMSHit == false) + { + TransactionManager->LastMetaIndexParser = new indexRecords; + _error->PushToStack(); + if (RealFileExists(FinalInRelease)) + TransactionManager->LastMetaIndexParser->Load(FinalInRelease); + else + TransactionManager->LastMetaIndexParser->Load(FinalRelease); + // its unlikely to happen, but if what we have is bad ignore it + if (_error->PendingError()) + { + delete TransactionManager->LastMetaIndexParser; + TransactionManager->LastMetaIndexParser = NULL; + } + _error->RevertToStack(); + } } - Rename(DestFile,FinalFile); - chmod(FinalFile.c_str(),0644); - // see if there is more to download - if(available_patches.empty() == false) { - new pkgAcqIndexDiffs(Owner, TransactionManager, Target, - ExpectedHashes, MetaIndexParser, - available_patches); - return Finish(); - } else - // update - DestFile = FinalFile; - return Finish(true); + // we parse the indexes here because at this point the user wanted + // a repository that may potentially harm him + if (TransactionManager->MetaIndexParser->Load(MetaIndex->DestFile) == false || MetaIndex->VerifyVendor(Message) == false) + /* expired Release files are still a problem you need extra force for */; + else + MetaIndex->QueueIndexes(true); + + TransactionManager->TransactionStageCopy(MetaIndex, MetaIndex->DestFile, MetaIndex->GetFinalFilename()); + } + + // FIXME: this is 
used often (e.g. in pkgAcqIndexTrans) so refactor + if (Cnf->LocalOnly == true || + StringToBool(LookupTag(Message,"Transient-Failure"),false) == false) + { + // Ignore this + Status = StatDone; } } /*}}}*/ -// AcqIndexMergeDiffs::AcqIndexMergeDiffs - Constructor /*{{{*/ -pkgAcqIndexMergeDiffs::pkgAcqIndexMergeDiffs(pkgAcquire *Owner, - pkgAcqMetaBase *TransactionManager, - struct IndexTarget const * const Target, - HashStringList const &ExpectedHashes, - indexRecords *MetaIndexParser, - DiffInfo const &patch, - std::vector const * const allPatches) - : pkgAcqBaseIndex(Owner, TransactionManager, Target, ExpectedHashes, MetaIndexParser), - patch(patch), allPatches(allPatches), State(StateFetchDiff) + + +// AcqBaseIndex - Constructor /*{{{*/ +pkgAcqBaseIndex::pkgAcqBaseIndex(pkgAcquire * const Owner, + pkgAcqMetaBase * const TransactionManager, + IndexTarget const * const Target) +: pkgAcqTransactionItem(Owner, TransactionManager, Target) +{ +} + /*}}}*/ + +// AcqDiffIndex::AcqDiffIndex - Constructor /*{{{*/ +// --------------------------------------------------------------------- +/* Get the DiffIndex file first and see if there are patches available + * If so, create a pkgAcqIndexDiffs fetcher that will get and apply the + * patches. If anything goes wrong in that process, it will fall back to + * the original packages file + */ +pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire * const Owner, + pkgAcqMetaBase * const TransactionManager, + IndexTarget const * const Target) + : pkgAcqBaseIndex(Owner, TransactionManager, Target) { Debug = _config->FindB("Debug::pkgAcquire::Diffs",false); - RealURI = Target->URI; Desc.Owner = this; - Description = Target->Description; + Desc.Description = Target->Description + ".diff/Index"; Desc.ShortDesc = Target->ShortDesc; + Desc.URI = Target->URI + ".diff/Index"; - Desc.URI = RealURI + ".diff/" + patch.file + ".gz"; - Desc.Description = Description + " " + patch.file + string(".pdiff"); + DestFile = GetPartialFileNameFromURI(Desc.URI); + + if(Debug) + std::clog << "pkgAcqDiffIndex: " << Desc.URI << std::endl; - DestFile = GetPartialFileNameFromURI(RealURI + ".diff/" + patch.file); + // look for the current package file + CurrentPackagesFile = GetFinalFileNameFromURI(Target->URI); + + // FIXME: this file:/ check is a hack to prevent fetching + // from local sources. this is really silly, and + // should be fixed cleanly as soon as possible + if(!FileExists(CurrentPackagesFile) || + Desc.URI.substr(0,strlen("file:/")) == "file:/") + { + // we don't have a pkg file or we don't want to queue + Failed("No index file, local or canceld by user", NULL); + return; + } if(Debug) - std::clog << "pkgAcqIndexMergeDiffs: " << Desc.URI << std::endl; + std::clog << "pkgAcqDiffIndex::pkgAcqDiffIndex(): " + << CurrentPackagesFile << std::endl; QueueURI(Desc); + } /*}}}*/ -void pkgAcqIndexMergeDiffs::Failed(string Message,pkgAcquire::MethodConfig * Cnf)/*{{{*/ +// AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/ +// --------------------------------------------------------------------- +/* The only header we use is the last-modified header. 
*/ +string pkgAcqDiffIndex::Custom600Headers() const { - if(Debug) - std::clog << "pkgAcqIndexMergeDiffs failed: " << Desc.URI << " with " << Message << std::endl; - - Item::Failed(Message,Cnf); - Status = StatDone; + string const Final = GetFinalFilename(); - // check if we are the first to fail, otherwise we are done here - State = StateDoneDiff; - for (std::vector::const_iterator I = allPatches->begin(); - I != allPatches->end(); ++I) - if ((*I)->State == StateErrorDiff) - return; + if(Debug) + std::clog << "Custom600Header-IMS: " << Final << std::endl; - // first failure means we should fallback - State = StateErrorDiff; - if (Debug) - std::clog << "Falling back to normal index file acquire" << std::endl; - DestFile = GetPartialFileNameFromURI(Target->URI); - RenameOnError(PDiffError); - new pkgAcqIndex(Owner, TransactionManager, Target, ExpectedHashes, MetaIndexParser); + struct stat Buf; + if (stat(Final.c_str(),&Buf) != 0) + return "\nIndex-File: true"; + + return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); +} + /*}}}*/ +void pkgAcqDiffIndex::QueueOnIMSHit() const /*{{{*/ +{ + // list cleanup needs to know that this file as well as the already + // present index is ours, so we create an empty diff to save it for us + new pkgAcqIndexDiffs(Owner, TransactionManager, Target); } /*}}}*/ -void pkgAcqIndexMergeDiffs::Done(string Message,unsigned long long Size,HashStringList const &Hashes, /*{{{*/ - pkgAcquire::MethodConfig *Cnf) +bool pkgAcqDiffIndex::ParseDiffIndex(string const &IndexDiffFile) /*{{{*/ { + // failing here is fine: our caller will take care of trying to + // get the complete file if patching fails if(Debug) - std::clog << "pkgAcqIndexMergeDiffs::Done(): " << Desc.URI << std::endl; - - Item::Done(Message,Size,Hashes,Cnf); - - // FIXME: verify download before feeding it to rred - string const FinalFile = GetPartialFileNameFromURI(RealURI); - - if (State == StateFetchDiff) - { - FileFd fd(DestFile, FileFd::ReadOnly, FileFd::Gzip); - class Hashes LocalHashesCalc; - LocalHashesCalc.AddFD(fd); - HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList(); - - if (fd.Size() != patch.patch_size || patch.patch_hashes != LocalHashes) - { - // patchfiles are dated, so bad indicates a bad download, so kill it - unlink(DestFile.c_str()); - Failed("Patch has Size/Hashsum mismatch", NULL); - return; - } + std::clog << "pkgAcqDiffIndex::ParseIndexDiff() " << IndexDiffFile + << std::endl; - // rred expects the patch as $FinalFile.ed.$patchname.gz - Rename(DestFile, FinalFile + ".ed." 
+ patch.file + ".gz"); + FileFd Fd(IndexDiffFile,FileFd::ReadOnly); + pkgTagFile TF(&Fd); + if (_error->PendingError() == true) + return false; - // check if this is the last completed diff - State = StateDoneDiff; - for (std::vector::const_iterator I = allPatches->begin(); - I != allPatches->end(); ++I) - if ((*I)->State != StateDoneDiff) - { - if(Debug) - std::clog << "Not the last done diff in the batch: " << Desc.URI << std::endl; - return; - } + pkgTagSection Tags; + if(unlikely(TF.Step(Tags) == false)) + return false; - // this is the last completed diff, so we are ready to apply now - State = StateApplyDiff; + HashStringList ServerHashes; + unsigned long long ServerSize = 0; - // patching needs to be bootstrapped with the 'old' version - if (symlink(GetFinalFilename().c_str(), FinalFile.c_str()) != 0) - { - Failed("Link creation of " + FinalFile + " to " + GetFinalFilename() + " failed", NULL); - return; - } + for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type) + { + std::string tagname = *type; + tagname.append("-Current"); + std::string const tmp = Tags.FindS(tagname.c_str()); + if (tmp.empty() == true) + continue; - if(Debug) - std::clog << "Sending to rred method: " << FinalFile << std::endl; + string hash; + unsigned long long size; + std::stringstream ss(tmp); + ss >> hash >> size; + if (unlikely(hash.empty() == true)) + continue; + if (unlikely(ServerSize != 0 && ServerSize != size)) + continue; + ServerHashes.push_back(HashString(*type, hash)); + ServerSize = size; + } - Local = true; - Desc.URI = "rred:" + FinalFile; - QueueURI(Desc); - SetActiveSubprocess("rred"); - return; + if (ServerHashes.usable() == false) + { + if (Debug == true) + std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": Did not find a good hashsum in the index" << std::endl; + return false; } - // success in download/apply all diffs, clean up - else if (State == StateApplyDiff) + + HashStringList const TargetFileHashes = GetExpectedHashesFor(Target->MetaKey); + if (TargetFileHashes.usable() == false || ServerHashes != TargetFileHashes) { - // see if we really got the expected file - if(ExpectedHashes.usable() && ExpectedHashes != Hashes) + if (Debug == true) { - RenameOnError(HashSumMismatch); - return; + std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": Index has different hashes than parser, probably older, so fail pdiffing" << std::endl; + printHashSumComparision(CurrentPackagesFile, ServerHashes, TargetFileHashes); } + return false; + } - // move the result into place - std::string const Final = GetFinalFilename(); + if (ServerHashes.VerifyFile(CurrentPackagesFile) == true) + { + // we have the same sha1 as the server so we are done here if(Debug) - std::clog << "Queue patched file in place: " << std::endl - << DestFile << " -> " << Final << std::endl; + std::clog << "pkgAcqDiffIndex: Package file " << CurrentPackagesFile << " is up-to-date" << std::endl; + QueueOnIMSHit(); + return true; + } - // queue for copy by the transaction manager - TransactionManager->TransactionStageCopy(this, DestFile, Final); + FileFd fd(CurrentPackagesFile, FileFd::ReadOnly); + Hashes LocalHashesCalc; + LocalHashesCalc.AddFD(fd); + HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList(); - // ensure the ed's are gone regardless of list-cleanup - for (std::vector::const_iterator I = allPatches->begin(); - I != allPatches->end(); ++I) - { - std::string const PartialFile = GetPartialFileNameFromURI(RealURI); - std::string patch = PartialFile + ".ed." 
+ (*I)->patch.file + ".gz"; - unlink(patch.c_str()); - } - unlink(FinalFile.c_str()); + if(Debug) + std::clog << "Server-Current: " << ServerHashes.find(NULL)->toStr() << " and we start at " + << fd.Name() << " " << fd.FileSize() << " " << LocalHashes.find(NULL)->toStr() << std::endl; - // all set and done - Complete = true; - if(Debug) - std::clog << "allDone: " << DestFile << "\n" << std::endl; - } -} - /*}}}*/ -// AcqBaseIndex - Constructor /*{{{*/ -pkgAcqBaseIndex::pkgAcqBaseIndex(pkgAcquire *Owner, - pkgAcqMetaBase *TransactionManager, - struct IndexTarget const * const Target, - HashStringList const &ExpectedHashes, - indexRecords *MetaIndexParser) -: Item(Owner, ExpectedHashes, TransactionManager), Target(Target), - MetaIndexParser(MetaIndexParser) -{ -} - /*}}}*/ -// AcqBaseIndex::VerifyHashByMetaKey - verify hash for the given metakey /*{{{*/ -bool pkgAcqBaseIndex::VerifyHashByMetaKey(HashStringList const &Hashes) -{ - if(MetaKey != "" && Hashes.usable()) + // parse all of (provided) history + vector available_patches; + bool firstAcceptedHashes = true; + for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type) { - indexRecords::checkSum *Record = MetaIndexParser->Lookup(MetaKey); - if(Record && Record->Hashes.usable() && Hashes != Record->Hashes) - { - printHashSumComparision(RealURI, Record->Hashes, Hashes); - return false; - } - } - return true; -} - /*}}}*/ -// AcqBaseIndex::GetFinalFilename - Return the full final file path /*{{{*/ -std::string pkgAcqBaseIndex::GetFinalFilename() const -{ - return GetFinalFileNameFromURI(RealURI); -} - /*}}}*/ -// AcqIndex::AcqIndex - Constructor /*{{{*/ -// --------------------------------------------------------------------- -/* The package file is added to the queue and a second class is - instantiated to fetch the revision file */ -pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner, - string URI,string URIDesc,string ShortDesc, - HashStringList const &ExpectedHash) - : pkgAcqBaseIndex(Owner, 0, NULL, ExpectedHash, NULL) -{ - RealURI = URI; - - AutoSelectCompression(); - Init(URI, URIDesc, ShortDesc); + if (LocalHashes.find(*type) == NULL) + continue; - if(_config->FindB("Debug::Acquire::Transaction", false) == true) - std::clog << "New pkgIndex with TransactionManager " - << TransactionManager << std::endl; -} - /*}}}*/ -// AcqIndex::AcqIndex - Constructor /*{{{*/ -pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner, - pkgAcqMetaBase *TransactionManager, - IndexTarget const *Target, - HashStringList const &ExpectedHash, - indexRecords *MetaIndexParser) - : pkgAcqBaseIndex(Owner, TransactionManager, Target, ExpectedHash, - MetaIndexParser) -{ - RealURI = Target->URI; + std::string tagname = *type; + tagname.append("-History"); + std::string const tmp = Tags.FindS(tagname.c_str()); + if (tmp.empty() == true) + continue; - // autoselect the compression method - AutoSelectCompression(); - Init(Target->URI, Target->Description, Target->ShortDesc); + string hash, filename; + unsigned long long size; + std::stringstream ss(tmp); - if(_config->FindB("Debug::Acquire::Transaction", false) == true) - std::clog << "New pkgIndex with TransactionManager " - << TransactionManager << std::endl; -} - /*}}}*/ -// AcqIndex::AutoSelectCompression - Select compression /*{{{*/ -void pkgAcqIndex::AutoSelectCompression() -{ - std::vector types = APT::Configuration::getCompressionTypes(); - CompressionExtensions = ""; - if (ExpectedHashes.usable()) - { - for (std::vector::const_iterator t = types.begin(); - t != types.end(); ++t) + while (ss >> hash 
>> size >> filename) { - std::string CompressedMetaKey = string(Target->MetaKey).append(".").append(*t); - if (*t == "uncompressed" || - MetaIndexParser->Exists(CompressedMetaKey) == true) - CompressionExtensions.append(*t).append(" "); + if (unlikely(hash.empty() == true || filename.empty() == true)) + continue; + + // see if we have a record for this file already + std::vector::iterator cur = available_patches.begin(); + for (; cur != available_patches.end(); ++cur) + { + if (cur->file != filename || unlikely(cur->result_size != size)) + continue; + cur->result_hashes.push_back(HashString(*type, hash)); + break; + } + if (cur != available_patches.end()) + continue; + if (firstAcceptedHashes == true) + { + DiffInfo next; + next.file = filename; + next.result_hashes.push_back(HashString(*type, hash)); + next.result_size = size; + next.patch_size = 0; + available_patches.push_back(next); + } + else + { + if (Debug == true) + std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": File " << filename + << " wasn't in the list for the first parsed hash! (history)" << std::endl; + break; + } } + firstAcceptedHashes = false; } - else + + if (unlikely(available_patches.empty() == true)) { - for (std::vector::const_iterator t = types.begin(); t != types.end(); ++t) - CompressionExtensions.append(*t).append(" "); + if (Debug) + std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": " + << "Couldn't find any patches for the patch series." << std::endl; + return false; } - if (CompressionExtensions.empty() == false) - CompressionExtensions.erase(CompressionExtensions.end()-1); -} - /*}}}*/ -// AcqIndex::Init - defered Constructor /*{{{*/ -void pkgAcqIndex::Init(string const &URI, string const &URIDesc, - string const &ShortDesc) -{ - Stage = STAGE_DOWNLOAD; - - DestFile = GetPartialFileNameFromURI(URI); - CurrentCompressionExtension = CompressionExtensions.substr(0, CompressionExtensions.find(' ')); - if (CurrentCompressionExtension == "uncompressed") + for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type) { - Desc.URI = URI; - if(Target) - MetaKey = string(Target->MetaKey); - } - else - { - Desc.URI = URI + '.' + CurrentCompressionExtension; - DestFile = DestFile + '.' + CurrentCompressionExtension; - if(Target) - MetaKey = string(Target->MetaKey) + '.' + CurrentCompressionExtension; - } - - // load the filesize - if(MetaIndexParser) - { - indexRecords::checkSum *Record = MetaIndexParser->Lookup(MetaKey); - if(Record) - FileSize = Record->Size; - - InitByHashIfNeeded(MetaKey); - } - - Desc.Description = URIDesc; - Desc.Owner = this; - Desc.ShortDesc = ShortDesc; - - QueueURI(Desc); -} - /*}}}*/ -// AcqIndex::AdjustForByHash - modify URI for by-hash support /*{{{*/ -void pkgAcqIndex::InitByHashIfNeeded(const std::string MetaKey) -{ - // TODO: - // - (maybe?) add support for by-hash into the sources.list as flag - // - make apt-ftparchive generate the hashes (and expire?) - std::string HostKnob = "APT::Acquire::" + ::URI(Desc.URI).Host + "::By-Hash"; - if(_config->FindB("APT::Acquire::By-Hash", false) == true || - _config->FindB(HostKnob, false) == true || - MetaIndexParser->GetSupportsAcquireByHash()) - { - indexRecords::checkSum *Record = MetaIndexParser->Lookup(MetaKey); - if(Record) - { - // FIXME: should we really use the best hash here? or a fixed one? 
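The -History and -Patches loops above scan each field value as whitespace-separated "hash size filename" triplets, one patch per line. Below is a standalone sketch of that triplet scan with a simplified stand-in for DiffInfo; ParseTriplets and PatchEntry are invented names and the values in main() are made-up, truncated examples. The real parser additionally merges entries across hash types and rejects inconsistent sizes.

#include <iostream>
#include <sstream>
#include <string>
#include <vector>

// Simplified stand-in for the DiffInfo records built by ParseDiffIndex.
struct PatchEntry { std::string file; std::string hash; unsigned long long size; };

// Scan one "<Hash>-History" / "<Hash>-Patches" field value: whitespace-separated
// "hash size filename" triplets, skipping malformed lines like the real parser.
static std::vector<PatchEntry> ParseTriplets(std::string const &value)
{
   std::vector<PatchEntry> entries;
   std::stringstream ss(value);
   std::string hash, filename;
   unsigned long long size;
   while (ss >> hash >> size >> filename)
   {
      if (hash.empty() || filename.empty())
         continue;
      entries.push_back({filename, hash, size});
   }
   return entries;
}

int main()
{
   // made-up, truncated example values
   std::string const history =
      "0f4dfa5e1a2b 25253594 2015-06-01-1410.12\n"
      "1c8e3d9bb470 25254023 2015-06-02-0212.45\n";
   for (auto const &e : ParseTriplets(history))
      std::cout << e.file << " (" << e.size << " bytes, hash " << e.hash << ")\n";
   return 0;
}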
- const HashString *TargetHash = Record->Hashes.find(""); - std::string ByHash = "/by-hash/" + TargetHash->HashType() + "/" + TargetHash->HashValue(); - size_t trailing_slash = Desc.URI.find_last_of("/"); - Desc.URI = Desc.URI.replace( - trailing_slash, - Desc.URI.substr(trailing_slash+1).size()+1, - ByHash); - } else { - _error->Warning( - "Fetching ByHash requested but can not find record for %s", - MetaKey.c_str()); - } - } -} - /*}}}*/ -// AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/ -// --------------------------------------------------------------------- -/* The only header we use is the last-modified header. */ -#if APT_PKG_ABI >= 413 -string pkgAcqIndex::Custom600Headers() const -#else -string pkgAcqIndex::Custom600Headers() -#endif -{ - string Final = GetFinalFilename(); - - string msg = "\nIndex-File: true"; - struct stat Buf; - if (stat(Final.c_str(),&Buf) == 0) - msg += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); + if (LocalHashes.find(*type) == NULL) + continue; - if(Target->IsOptional()) - msg += "\nFail-Ignore: true"; + std::string tagname = *type; + tagname.append("-Patches"); + std::string const tmp = Tags.FindS(tagname.c_str()); + if (tmp.empty() == true) + continue; - return msg; -} - /*}}}*/ -// pkgAcqIndex::Failed - getting the indexfile failed /*{{{*/ -void pkgAcqIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) -{ - Item::Failed(Message,Cnf); + string hash, filename; + unsigned long long size; + std::stringstream ss(tmp); - // authorisation matches will not be fixed by other compression types - if (Status != StatAuthError) - { - size_t const nextExt = CompressionExtensions.find(' '); - if (nextExt != std::string::npos) + while (ss >> hash >> size >> filename) { - CompressionExtensions = CompressionExtensions.substr(nextExt+1); - Init(RealURI, Desc.Description, Desc.ShortDesc); - Status = StatIdle; - return; - } - } - - if(Target->IsOptional() && ExpectedHashes.empty() && Stage == STAGE_DOWNLOAD) - Status = StatDone; - else - TransactionManager->AbortTransaction(); -} - /*}}}*/ -bool pkgAcqIndex::TransactionState(TransactionStates const state) /*{{{*/ -{ - if (pkgAcquire::Item::TransactionState(state) == false) - return false; + if (unlikely(hash.empty() == true || filename.empty() == true)) + continue; - switch (state) - { - case TransactionAbort: - if (Stage == STAGE_DECOMPRESS_AND_VERIFY) + // see if we have a record for this file already + std::vector::iterator cur = available_patches.begin(); + for (; cur != available_patches.end(); ++cur) { - // keep the compressed file, but drop the decompressed - EraseFileName.clear(); - if (PartialFile.empty() == false && flExtension(PartialFile) == "decomp") - unlink(PartialFile.c_str()); + if (cur->file != filename) + continue; + if (unlikely(cur->patch_size != 0 && cur->patch_size != size)) + continue; + cur->patch_hashes.push_back(HashString(*type, hash)); + cur->patch_size = size; + break; } + if (cur != available_patches.end()) + continue; + if (Debug == true) + std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": File " << filename + << " wasn't in the list for the first parsed hash! 
(patches)" << std::endl; break; - case TransactionCommit: - if (EraseFileName.empty() == false) - unlink(EraseFileName.c_str()); - break; - } - return true; -} - /*}}}*/ -// pkgAcqIndex::GetFinalFilename - Return the full final file path /*{{{*/ -std::string pkgAcqIndex::GetFinalFilename() const -{ - std::string const FinalFile = GetFinalFileNameFromURI(RealURI); - return GetCompressedFileName(RealURI, FinalFile, CurrentCompressionExtension); -} - /*}}}*/ -// AcqIndex::ReverifyAfterIMS - Reverify index after an ims-hit /*{{{*/ -void pkgAcqIndex::ReverifyAfterIMS() -{ - // update destfile to *not* include the compression extension when doing - // a reverify (as its uncompressed on disk already) - DestFile = GetCompressedFileName(RealURI, GetPartialFileNameFromURI(RealURI), CurrentCompressionExtension); - - // copy FinalFile into partial/ so that we check the hash again - string FinalFile = GetFinalFilename(); - Stage = STAGE_DECOMPRESS_AND_VERIFY; - Desc.URI = "copy:" + FinalFile; - QueueURI(Desc); -} - /*}}}*/ -// AcqIndex::ValidateFile - Validate the content of the downloaded file /*{{{*/ -bool pkgAcqIndex::ValidateFile(const std::string &FileName) -{ - // FIXME: this can go away once we only ever download stuff that - // has a valid hash and we never do GET based probing - // FIXME2: this also leaks debian-isms into the code and should go therefore - - /* Always validate the index file for correctness (all indexes must - * have a Package field) (LP: #346386) (Closes: #627642) - */ - FileFd fd(FileName, FileFd::ReadOnly, FileFd::Extension); - // Only test for correctness if the content of the file is not empty - // (empty is ok) - if (fd.Size() > 0) - { - pkgTagSection sec; - pkgTagFile tag(&fd); - - // all our current indexes have a field 'Package' in each section - if (_error->PendingError() == true || - tag.Step(sec) == false || - sec.Exists("Package") == false) - return false; - } - return true; -} - /*}}}*/ -// AcqIndex::Done - Finished a fetch /*{{{*/ -// --------------------------------------------------------------------- -/* This goes through a number of states.. On the initial fetch the - method could possibly return an alternate filename which points - to the uncompressed version of the file. If this is so the file - is copied into the partial directory. In all other cases the file - is decompressed with a compressed uri. 
*/ -void pkgAcqIndex::Done(string Message, - unsigned long long Size, - HashStringList const &Hashes, - pkgAcquire::MethodConfig *Cfg) -{ - Item::Done(Message,Size,Hashes,Cfg); - - switch(Stage) - { - case STAGE_DOWNLOAD: - StageDownloadDone(Message, Hashes, Cfg); - break; - case STAGE_DECOMPRESS_AND_VERIFY: - StageDecompressDone(Message, Hashes, Cfg); - break; - } -} - /*}}}*/ -// AcqIndex::StageDownloadDone - Queue for decompress and verify /*{{{*/ -void pkgAcqIndex::StageDownloadDone(string Message, - HashStringList const &Hashes, - pkgAcquire::MethodConfig *Cfg) -{ - // First check if the calculcated Hash of the (compressed) downloaded - // file matches the hash we have in the MetaIndexRecords for this file - if(VerifyHashByMetaKey(Hashes) == false) - { - RenameOnError(HashSumMismatch); - Failed(Message, Cfg); - return; - } - - Complete = true; - - // Handle the unzipd case - string FileName = LookupTag(Message,"Alt-Filename"); - if (FileName.empty() == false) - { - Stage = STAGE_DECOMPRESS_AND_VERIFY; - Local = true; - DestFile += ".decomp"; - Desc.URI = "copy:" + FileName; - QueueURI(Desc); - SetActiveSubprocess("copy"); - return; - } - - FileName = LookupTag(Message,"Filename"); - if (FileName.empty() == true) - { - Status = StatError; - ErrorText = "Method gave a blank filename"; - } - - // Methods like e.g. "file:" will give us a (compressed) FileName that is - // not the "DestFile" we set, in this case we uncompress from the local file - if (FileName != DestFile) - Local = true; - else - EraseFileName = FileName; - - // we need to verify the file against the current Release file again - // on if-modfied-since hit to avoid a stale attack against us - if(StringToBool(LookupTag(Message,"IMS-Hit"),false) == true) - { - // The files timestamp matches, reverify by copy into partial/ - EraseFileName = ""; - ReverifyAfterIMS(); - return; - } - - // If we have compressed indexes enabled, queue for hash verification - if (_config->FindB("Acquire::GzipIndexes",false)) - { - DestFile = GetPartialFileNameFromURI(RealURI + '.' 
+ CurrentCompressionExtension); - EraseFileName = ""; - Stage = STAGE_DECOMPRESS_AND_VERIFY; - Desc.URI = "copy:" + FileName; - QueueURI(Desc); - SetActiveSubprocess("copy"); - return; - } - - // get the binary name for your used compression type - string decompProg; - if(CurrentCompressionExtension == "uncompressed") - decompProg = "copy"; - else - decompProg = _config->Find(string("Acquire::CompressionTypes::").append(CurrentCompressionExtension),""); - if(decompProg.empty() == true) - { - _error->Error("Unsupported extension: %s", CurrentCompressionExtension.c_str()); - return; - } - - // queue uri for the next stage - Stage = STAGE_DECOMPRESS_AND_VERIFY; - DestFile += ".decomp"; - Desc.URI = decompProg + ":" + FileName; - QueueURI(Desc); - SetActiveSubprocess(decompProg); -} - /*}}}*/ -// pkgAcqIndex::StageDecompressDone - Final verification /*{{{*/ -void pkgAcqIndex::StageDecompressDone(string Message, - HashStringList const &Hashes, - pkgAcquire::MethodConfig *Cfg) -{ - if (ExpectedHashes.usable() && ExpectedHashes != Hashes) - { - Desc.URI = RealURI; - RenameOnError(HashSumMismatch); - printHashSumComparision(RealURI, ExpectedHashes, Hashes); - Failed(Message, Cfg); - return; - } - - if(!ValidateFile(DestFile)) - { - RenameOnError(InvalidFormat); - Failed(Message, Cfg); - return; - } - - // Done, queue for rename on transaction finished - TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename()); - - return; -} - /*}}}*/ -// AcqMetaBase - Constructor /*{{{*/ -pkgAcqMetaBase::pkgAcqMetaBase(pkgAcquire *Owner, - const std::vector* IndexTargets, - indexRecords* MetaIndexParser, - std::string const &RealURI, - HashStringList const &ExpectedHashes, - pkgAcqMetaBase *TransactionManager) -: Item(Owner, ExpectedHashes, TransactionManager), - MetaIndexParser(MetaIndexParser), LastMetaIndexParser(NULL), IndexTargets(IndexTargets), - AuthPass(false), RealURI(RealURI), IMSHit(false) -{ -} - /*}}}*/ -// AcqMetaBase::Add - Add a item to the current Transaction /*{{{*/ -void pkgAcqMetaBase::Add(Item *I) -{ - Transaction.push_back(I); -} - /*}}}*/ -// AcqMetaBase::AbortTransaction - Abort the current Transaction /*{{{*/ -void pkgAcqMetaBase::AbortTransaction() -{ - if(_config->FindB("Debug::Acquire::Transaction", false) == true) - std::clog << "AbortTransaction: " << TransactionManager << std::endl; - - // ensure the toplevel is in error state too - for (std::vector::iterator I = Transaction.begin(); - I != Transaction.end(); ++I) - { - (*I)->TransactionState(TransactionAbort); - } - Transaction.clear(); -} - /*}}}*/ -// AcqMetaBase::TransactionHasError - Check for errors in Transaction /*{{{*/ -bool pkgAcqMetaBase::TransactionHasError() -{ - for (pkgAcquire::ItemIterator I = Transaction.begin(); - I != Transaction.end(); ++I) - { - switch((*I)->Status) { - case StatDone: break; - case StatIdle: break; - case StatAuthError: return true; - case StatError: return true; - case StatTransientNetworkError: return true; - case StatFetching: break; } } - return false; -} - /*}}}*/ -// AcqMetaBase::CommitTransaction - Commit a transaction /*{{{*/ -void pkgAcqMetaBase::CommitTransaction() -{ - if(_config->FindB("Debug::Acquire::Transaction", false) == true) - std::clog << "CommitTransaction: " << this << std::endl; - - // move new files into place *and* remove files that are not - // part of the transaction but are still on disk - for (std::vector::iterator I = Transaction.begin(); - I != Transaction.end(); ++I) - { - (*I)->TransactionState(TransactionCommit); - } - Transaction.clear(); -} 
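The removed StageDownloadDone above keeps the item alive for a second stage by re-queueing the downloaded file through a decompressor URI such as "copy:" or "gzip:", looked up from the Acquire::CompressionTypes configuration. A small sketch of that extension-to-method lookup, with a hard-coded table standing in for APT's _config tree; DecompressorFor is an invented helper and the table only mirrors common defaults.

#include <iostream>
#include <map>
#include <string>

// Extension -> decompression method lookup as done in StageDownloadDone;
// in APT this comes from Acquire::CompressionTypes::*, here it is a fixed table.
static std::string DecompressorFor(std::string const &ext)
{
   if (ext == "uncompressed")
      return "copy"; // nothing to unpack, just copy into partial/
   static std::map<std::string, std::string> const table = {
      {"gz", "gzip"}, {"bz2", "bzip2"}, {"xz", "xz"}, {"lzma", "lzma"},
   };
   auto const it = table.find(ext);
   return it == table.end() ? std::string() : it->second; // empty == unsupported
}

int main()
{
   for (std::string ext : {"gz", "xz", "uncompressed", "zst"})
   {
      std::string const prog = DecompressorFor(ext);
      std::cout << ext << " -> " << (prog.empty() ? "<unsupported>" : prog) << "\n";
   }
   return 0;
}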
- /*}}}*/ -bool pkgAcqMetaBase::TransactionState(TransactionStates const state) /*{{{*/ -{ - // Do not remove InRelease on IMSHit of Release.gpg [yes, this is very edgecasey] - if (TransactionManager->IMSHit == false) - return pkgAcquire::Item::TransactionState(state); - return true; -} - /*}}}*/ -// AcqMetaBase::TransactionStageCopy - Stage a file for copying /*{{{*/ -void pkgAcqMetaBase::TransactionStageCopy(Item *I, - const std::string &From, - const std::string &To) -{ - I->PartialFile = From; - I->DestFile = To; -} - /*}}}*/ -// AcqMetaBase::TransactionStageRemoval - Stage a file for removal /*{{{*/ -void pkgAcqMetaBase::TransactionStageRemoval(Item *I, - const std::string &FinalFile) -{ - I->PartialFile = ""; - I->DestFile = FinalFile; -} - /*}}}*/ -// AcqMetaBase::GenerateAuthWarning - Check gpg authentication error /*{{{*/ -bool pkgAcqMetaBase::CheckStopAuthentication(pkgAcquire::Item * const I, const std::string &Message) -{ - // FIXME: this entire function can do now that we disallow going to - // a unauthenticated state and can cleanly rollback - string const Final = I->GetFinalFilename(); - if(FileExists(Final)) - { - I->Status = StatTransientNetworkError; - _error->Warning(_("An error occurred during the signature " - "verification. The repository is not updated " - "and the previous index files will be used. " - "GPG error: %s: %s\n"), - Desc.Description.c_str(), - LookupTag(Message,"Message").c_str()); - RunScripts("APT::Update::Auth-Failure"); - return true; - } else if (LookupTag(Message,"Message").find("NODATA") != string::npos) { - /* Invalid signature file, reject (LP: #346386) (Closes: #627642) */ - _error->Error(_("GPG error: %s: %s"), - Desc.Description.c_str(), - LookupTag(Message,"Message").c_str()); - I->Status = StatError; - return true; - } else { - _error->Warning(_("GPG error: %s: %s"), - Desc.Description.c_str(), - LookupTag(Message,"Message").c_str()); + bool foundStart = false; + for (std::vector::iterator cur = available_patches.begin(); + cur != available_patches.end(); ++cur) + { + if (LocalHashes != cur->result_hashes) + continue; + + available_patches.erase(available_patches.begin(), cur); + foundStart = true; + break; } - // gpgv method failed - ReportMirrorFailure("GPGFailure"); - return false; -} - /*}}}*/ -// AcqMetaSig::AcqMetaSig - Constructor /*{{{*/ -pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire *Owner, - pkgAcqMetaBase *TransactionManager, - string const &URI, string const &URIDesc,string const &ShortDesc, - pkgAcqMetaIndex * const MetaIndex) : - pkgAcquire::Item(Owner, HashStringList(), TransactionManager), MetaIndex(MetaIndex), - URIDesc(URIDesc), RealURI(URI) -{ - DestFile = GetPartialFileNameFromURI(URI); - // remove any partial downloaded sig-file in partial/. - // it may confuse proxies and is too small to warrant a - // partial download anyway - unlink(DestFile.c_str()); + if (foundStart == false || unlikely(available_patches.empty() == true)) + { + if (Debug) + std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": " + << "Couldn't find the start of the patch series." 
<< std::endl; + return false; + } - // set the TransactionManager - if(_config->FindB("Debug::Acquire::Transaction", false) == true) - std::clog << "New pkgAcqMetaSig with TransactionManager " - << TransactionManager << std::endl; + // patching with too many files is rather slow compared to a fast download + unsigned long const fileLimit = _config->FindI("Acquire::PDiffs::FileLimit", 0); + if (fileLimit != 0 && fileLimit < available_patches.size()) + { + if (Debug) + std::clog << "Need " << available_patches.size() << " diffs (Limit is " << fileLimit + << ") so fallback to complete download" << std::endl; + return false; + } - // Create the item - Desc.Description = URIDesc; - Desc.Owner = this; - Desc.ShortDesc = ShortDesc; - Desc.URI = URI; + // calculate the size of all patches we have to get + // note that all sizes are uncompressed, while we download compressed files + unsigned long long patchesSize = 0; + for (std::vector::const_iterator cur = available_patches.begin(); + cur != available_patches.end(); ++cur) + patchesSize += cur->patch_size; + unsigned long long const sizeLimit = ServerSize * _config->FindI("Acquire::PDiffs::SizeLimit", 100); + if (sizeLimit > 0 && (sizeLimit/100) < patchesSize) + { + if (Debug) + std::clog << "Need " << patchesSize << " bytes (Limit is " << sizeLimit/100 + << ") so fallback to complete download" << std::endl; + return false; + } - // If we got a hit for Release, we will get one for Release.gpg too (or obscure errors), - // so we skip the download step and go instantly to verification - if (TransactionManager->IMSHit == true && RealFileExists(GetFinalFilename())) + // we have something, queue the diffs + string::size_type const last_space = Description.rfind(" "); + if(last_space != string::npos) + Description.erase(last_space, Description.size()-last_space); + + /* decide if we should download patches one by one or in one go: + The first is good if the server merges patches, but many don't so client + based merging can be attempt in which case the second is better. 
+ "bad things" will happen if patches are merged on the server, + but client side merging is attempt as well */ + bool pdiff_merge = _config->FindB("Acquire::PDiffs::Merge", true); + if (pdiff_merge == true) { - Complete = true; - Status = StatDone; - PartialFile = DestFile = GetFinalFilename(); - MetaIndexFileSignature = DestFile; - MetaIndex->QueueForSignatureVerify(this, MetaIndex->DestFile, DestFile); + // reprepro adds this flag if it has merged patches on the server + std::string const precedence = Tags.FindS("X-Patch-Precedence"); + pdiff_merge = (precedence != "merged"); } + + if (pdiff_merge == false) + new pkgAcqIndexDiffs(Owner, TransactionManager, Target, available_patches); else - QueueURI(Desc); -} - /*}}}*/ -pkgAcqMetaSig::~pkgAcqMetaSig() /*{{{*/ -{ + { + std::vector *diffs = new std::vector(available_patches.size()); + for(size_t i = 0; i < available_patches.size(); ++i) + (*diffs)[i] = new pkgAcqIndexMergeDiffs(Owner, TransactionManager, + Target, + available_patches[i], + diffs); + } + + Complete = false; + Status = StatDone; + Dequeue(); + return true; } /*}}}*/ -// pkgAcqMetaSig::GetFinalFilename - Return the full final file path /*{{{*/ -std::string pkgAcqMetaSig::GetFinalFilename() const +void pkgAcqDiffIndex::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)/*{{{*/ { - return GetFinalFileNameFromURI(RealURI); + Item::Failed(Message,Cnf); + Status = StatDone; + + if(Debug) + std::clog << "pkgAcqDiffIndex failed: " << Desc.URI << " with " << Message << std::endl + << "Falling back to normal index file acquire" << std::endl; + + new pkgAcqIndex(Owner, TransactionManager, Target); } /*}}}*/ -// pkgAcqMetaSig::Done - The signature was downloaded/verified /*{{{*/ -// --------------------------------------------------------------------- -/* The only header we use is the last-modified header. */ -void pkgAcqMetaSig::Done(string Message,unsigned long long Size, - HashStringList const &Hashes, - pkgAcquire::MethodConfig *Cfg) +void pkgAcqDiffIndex::Done(string const &Message,HashStringList const &Hashes, /*{{{*/ + pkgAcquire::MethodConfig const * const Cnf) { - if (MetaIndexFileSignature.empty() == false) - { - DestFile = MetaIndexFileSignature; - MetaIndexFileSignature.clear(); - } - Item::Done(Message, Size, Hashes, Cfg); + if(Debug) + std::clog << "pkgAcqDiffIndex::Done(): " << Desc.URI << std::endl; - if(MetaIndex->AuthPass == false) + Item::Done(Message, Hashes, Cnf); + + string const FinalFile = GetFinalFilename(); + if(StringToBool(LookupTag(Message,"IMS-Hit"),false)) + DestFile = FinalFile; + + if(ParseDiffIndex(DestFile) == false) { - if(MetaIndex->CheckDownloadDone(this, Message, Hashes) == true) - { - // destfile will be modified to point to MetaIndexFile for the - // gpgv method, so we need to save it here - MetaIndexFileSignature = DestFile; - MetaIndex->QueueForSignatureVerify(this, MetaIndex->DestFile, DestFile); - } + Failed("Message: Couldn't parse pdiff index", Cnf); + // queue for final move - this should happen even if we fail + // while parsing (e.g. on sizelimit) and download the complete file. 
+ TransactionManager->TransactionStageCopy(this, DestFile, FinalFile); return; } - else if(MetaIndex->CheckAuthDone(Message) == true) - { - if (TransactionManager->IMSHit == false) - { - TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename()); - TransactionManager->TransactionStageCopy(MetaIndex, MetaIndex->DestFile, MetaIndex->GetFinalFilename()); - } - } + + TransactionManager->TransactionStageCopy(this, DestFile, FinalFile); + + Complete = true; + Status = StatDone; + Dequeue(); + + return; } /*}}}*/ -void pkgAcqMetaSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf)/*{{{*/ + +// AcqIndexDiffs::AcqIndexDiffs - Constructor /*{{{*/ +// --------------------------------------------------------------------- +/* The package diff is added to the queue. one object is constructed + * for each diff and the index + */ +pkgAcqIndexDiffs::pkgAcqIndexDiffs(pkgAcquire * const Owner, + pkgAcqMetaBase * const TransactionManager, + IndexTarget const * const Target, + vector const &diffs) + : pkgAcqBaseIndex(Owner, TransactionManager, Target), + available_patches(diffs) { - Item::Failed(Message,Cnf); + DestFile = GetPartialFileNameFromURI(Target->URI); - // check if we need to fail at this point - if (MetaIndex->AuthPass == true && MetaIndex->CheckStopAuthentication(this, Message)) - return; + Debug = _config->FindB("Debug::pkgAcquire::Diffs",false); - string const FinalRelease = MetaIndex->GetFinalFilename(); - string const FinalReleasegpg = GetFinalFilename(); - string const FinalInRelease = TransactionManager->GetFinalFilename(); + Desc.Owner = this; + Description = Target->Description; + Desc.ShortDesc = Target->ShortDesc; - if (RealFileExists(FinalReleasegpg) || RealFileExists(FinalInRelease)) + if(available_patches.empty() == true) { - std::string downgrade_msg; - strprintf(downgrade_msg, _("The repository '%s' is no longer signed."), - MetaIndex->URIDesc.c_str()); - if(_config->FindB("Acquire::AllowDowngradeToInsecureRepositories")) - { - // meh, the users wants to take risks (we still mark the packages - // from this repository as unauthenticated) - _error->Warning("%s", downgrade_msg.c_str()); - _error->Warning(_("This is normally not allowed, but the option " - "Acquire::AllowDowngradeToInsecureRepositories was " - "given to override it.")); - Status = StatDone; - } else { - _error->Error("%s", downgrade_msg.c_str()); - if (TransactionManager->IMSHit == false) - Rename(MetaIndex->DestFile, MetaIndex->DestFile + ".FAILED"); - Item::Failed("Message: " + downgrade_msg, Cnf); - TransactionManager->AbortTransaction(); - return; - } + // we are done (yeah!), check hashes against the final file + DestFile = GetFinalFileNameFromURI(Target->URI); + Finish(true); } else - _error->Warning(_("The data from '%s' is not signed. 
Packages " - "from that repository can not be authenticated."), - MetaIndex->URIDesc.c_str()); - - // ensures that a Release.gpg file in the lists/ is removed by the transaction - TransactionManager->TransactionStageRemoval(this, DestFile); - - // only allow going further if the users explicitely wants it - if(AllowInsecureRepositories(MetaIndex->MetaIndexParser, TransactionManager, this) == true) { - if (RealFileExists(FinalReleasegpg) || RealFileExists(FinalInRelease)) + // patching needs to be bootstrapped with the 'old' version + std::string const PartialFile = GetPartialFileNameFromURI(Target->URI); + if (RealFileExists(PartialFile) == false) { - // open the last Release if we have it - if (TransactionManager->IMSHit == false) + if (symlink(GetFinalFilename().c_str(), PartialFile.c_str()) != 0) { - MetaIndex->LastMetaIndexParser = new indexRecords; - _error->PushToStack(); - if (RealFileExists(FinalInRelease)) - MetaIndex->LastMetaIndexParser->Load(FinalInRelease); - else - MetaIndex->LastMetaIndexParser->Load(FinalRelease); - // its unlikely to happen, but if what we have is bad ignore it - if (_error->PendingError()) - { - delete MetaIndex->LastMetaIndexParser; - MetaIndex->LastMetaIndexParser = NULL; - } - _error->RevertToStack(); + Failed("Link creation of " + PartialFile + " to " + GetFinalFilename() + " failed", NULL); + return; } } - // we parse the indexes here because at this point the user wanted - // a repository that may potentially harm him - MetaIndex->MetaIndexParser->Load(MetaIndex->DestFile); - if (MetaIndex->VerifyVendor(Message) == false) - /* expired Release files are still a problem you need extra force for */; - else - MetaIndex->QueueIndexes(true); - - TransactionManager->TransactionStageCopy(MetaIndex, MetaIndex->DestFile, MetaIndex->GetFinalFilename()); + // get the next diff + State = StateFetchDiff; + QueueNextDiff(); } +} + /*}}}*/ +void pkgAcqIndexDiffs::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)/*{{{*/ +{ + Item::Failed(Message,Cnf); + Status = StatDone; - // FIXME: this is used often (e.g. 
in pkgAcqIndexTrans) so refactor - if (Cnf->LocalOnly == true || - StringToBool(LookupTag(Message,"Transient-Failure"),false) == false) + if(Debug) + std::clog << "pkgAcqIndexDiffs failed: " << Desc.URI << " with " << Message << std::endl + << "Falling back to normal index file acquire" << std::endl; + DestFile = GetPartialFileNameFromURI(Target->URI); + RenameOnError(PDiffError); + new pkgAcqIndex(Owner, TransactionManager, Target); + Finish(); +} + /*}}}*/ +// Finish - helper that cleans the item out of the fetcher queue /*{{{*/ +void pkgAcqIndexDiffs::Finish(bool allDone) +{ + if(Debug) + std::clog << "pkgAcqIndexDiffs::Finish(): " + << allDone << " " + << Desc.URI << std::endl; + + // we restore the original name, this is required, otherwise + // the file will be cleaned + if(allDone) { - // Ignore this + TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename()); + + // this is for the "real" finish + Complete = true; Status = StatDone; + Dequeue(); + if(Debug) + std::clog << "\n\nallDone: " << DestFile << "\n" << std::endl; + return; } + + if(Debug) + std::clog << "Finishing: " << Desc.URI << std::endl; + Complete = false; + Status = StatDone; + Dequeue(); + return; } /*}}}*/ -pkgAcqMetaIndex::pkgAcqMetaIndex(pkgAcquire *Owner, /*{{{*/ - pkgAcqMetaBase *TransactionManager, - string URI,string URIDesc,string ShortDesc, - string MetaIndexSigURI,string MetaIndexSigURIDesc, string MetaIndexSigShortDesc, - const vector* IndexTargets, - indexRecords* MetaIndexParser) : - pkgAcqMetaBase(Owner, IndexTargets, MetaIndexParser, URI, HashStringList(), - TransactionManager), - URIDesc(URIDesc), ShortDesc(ShortDesc), - MetaIndexSigURI(MetaIndexSigURI), MetaIndexSigURIDesc(MetaIndexSigURIDesc), - MetaIndexSigShortDesc(MetaIndexSigShortDesc) +bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/ { - if(TransactionManager == NULL) + // calc sha1 of the just patched file + std::string const FinalFile = GetPartialFileNameFromURI(Target->URI); + + if(!FileExists(FinalFile)) { - this->TransactionManager = this; - this->TransactionManager->Add(this); + Failed("Message: No FinalFile " + FinalFile + " available", NULL); + return false; } - if(_config->FindB("Debug::Acquire::Transaction", false) == true) - std::clog << "New pkgAcqMetaIndex with TransactionManager " - << this->TransactionManager << std::endl; - - - Init(URIDesc, ShortDesc); -} - /*}}}*/ -// pkgAcqMetaIndex::Init - Delayed constructor /*{{{*/ -void pkgAcqMetaIndex::Init(std::string URIDesc, std::string ShortDesc) -{ - DestFile = GetPartialFileNameFromURI(RealURI); - - // Create the item - Desc.Description = URIDesc; - Desc.Owner = this; - Desc.ShortDesc = ShortDesc; - Desc.URI = RealURI; + FileFd fd(FinalFile, FileFd::ReadOnly); + Hashes LocalHashesCalc; + LocalHashesCalc.AddFD(fd); + HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList(); - // we expect more item - ExpectedAdditionalItems = IndexTargets->size(); - QueueURI(Desc); -} - /*}}}*/ -void pkgAcqMetaIndex::Done(string Message,unsigned long long Size, /*{{{*/ - HashStringList const &Hashes, - pkgAcquire::MethodConfig *Cfg) -{ - Item::Done(Message,Size,Hashes,Cfg); + if(Debug) + std::clog << "QueueNextDiff: " << FinalFile << " (" << LocalHashes.find(NULL)->toStr() << ")" << std::endl; - if(CheckDownloadDone(this, Message, Hashes)) + HashStringList const TargetFileHashes = GetExpectedHashesFor(Target->MetaKey); + if (unlikely(LocalHashes.usable() == false || TargetFileHashes.usable() == false)) { - // we have a Release file, now download the Signature, all 
further - // verify/queue for additional downloads will be done in the - // pkgAcqMetaSig::Done() code - new pkgAcqMetaSig(Owner, TransactionManager, - MetaIndexSigURI, MetaIndexSigURIDesc, - MetaIndexSigShortDesc, this); + Failed("Local/Expected hashes are not usable", NULL); + return false; } -} - /*}}}*/ -bool pkgAcqMetaBase::CheckAuthDone(string Message) /*{{{*/ -{ - // At this point, the gpgv method has succeeded, so there is a - // valid signature from a key in the trusted keyring. We - // perform additional verification of its contents, and use them - // to verify the indexes we are about to download - if (TransactionManager->IMSHit == false) + + // final file reached before all patches are applied + if(LocalHashes == TargetFileHashes) { - // open the last (In)Release if we have it - std::string const FinalFile = GetFinalFilename(); - std::string FinalRelease; - std::string FinalInRelease; - if (APT::String::Endswith(FinalFile, "InRelease")) - { - FinalInRelease = FinalFile; - FinalRelease = FinalFile.substr(0, FinalFile.length() - strlen("InRelease")) + "Release"; - } - else - { - FinalInRelease = FinalFile.substr(0, FinalFile.length() - strlen("Release")) + "InRelease"; - FinalRelease = FinalFile; - } - if (RealFileExists(FinalInRelease) || RealFileExists(FinalRelease)) - { - LastMetaIndexParser = new indexRecords; - _error->PushToStack(); - if (RealFileExists(FinalInRelease)) - LastMetaIndexParser->Load(FinalInRelease); - else - LastMetaIndexParser->Load(FinalRelease); - // its unlikely to happen, but if what we have is bad ignore it - if (_error->PendingError()) - { - delete LastMetaIndexParser; - LastMetaIndexParser = NULL; - } - _error->RevertToStack(); - } + Finish(true); + return true; } - if (!MetaIndexParser->Load(DestFile)) + // remove all patches until the next matching patch is found + // this requires the Index file to be ordered + for(vector::iterator I = available_patches.begin(); + available_patches.empty() == false && + I != available_patches.end() && + I->result_hashes != LocalHashes; + ++I) { - Status = StatAuthError; - ErrorText = MetaIndexParser->ErrorText; - return false; + available_patches.erase(I); } - if (!VerifyVendor(Message)) + // error checking and falling back if no patch was found + if(available_patches.empty() == true) { - Status = StatAuthError; + Failed("No patches left to reach target", NULL); return false; } - if (_config->FindB("Debug::pkgAcquire::Auth", false)) - std::cerr << "Signature verification succeeded: " - << DestFile << std::endl; + // queue the right diff + Desc.URI = Target->URI + ".diff/" + available_patches[0].file + ".gz"; + Desc.Description = Description + " " + available_patches[0].file + string(".pdiff"); + DestFile = GetPartialFileNameFromURI(Target->URI + ".diff/" + available_patches[0].file); - // Download further indexes with verification - QueueIndexes(true); + if(Debug) + std::clog << "pkgAcqIndexDiffs::QueueNextDiff(): " << Desc.URI << std::endl; + + QueueURI(Desc); return true; } /*}}}*/ -// pkgAcqMetaBase::Custom600Headers - Get header for AcqMetaBase /*{{{*/ -// --------------------------------------------------------------------- -#if APT_PKG_ABI >= 413 -string pkgAcqMetaBase::Custom600Headers() const -#else -string pkgAcqMetaBase::Custom600Headers() -#endif +void pkgAcqIndexDiffs::Done(string const &Message, HashStringList const &Hashes, /*{{{*/ + pkgAcquire::MethodConfig const * const Cnf) { - std::string Header = "\nIndex-File: true"; - std::string MaximumSize; - strprintf(MaximumSize, "\nMaximum-Size: %i", - 
_config->FindI("Acquire::MaxReleaseFileSize", 10*1000*1000)); - Header += MaximumSize; - - string const FinalFile = GetFinalFilename(); + if(Debug) + std::clog << "pkgAcqIndexDiffs::Done(): " << Desc.URI << std::endl; - struct stat Buf; - if (stat(FinalFile.c_str(),&Buf) == 0) - Header += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); + Item::Done(Message, Hashes, Cnf); - return Header; -} - /*}}}*/ -// pkgAcqMetaBase::GetFinalFilename - Return the full final file path /*{{{*/ -std::string pkgAcqMetaBase::GetFinalFilename() const -{ - return GetFinalFileNameFromURI(RealURI); -} - /*}}}*/ -// pkgAcqMetaBase::QueueForSignatureVerify /*{{{*/ -void pkgAcqMetaBase::QueueForSignatureVerify(pkgAcquire::Item * const I, std::string const &File, std::string const &Signature) -{ - AuthPass = true; - I->Desc.URI = "gpgv:" + Signature; - I->DestFile = File; - QueueURI(I->Desc); - I->SetActiveSubprocess("gpgv"); -} - /*}}}*/ -// pkgAcqMetaBase::CheckDownloadDone /*{{{*/ -bool pkgAcqMetaBase::CheckDownloadDone(pkgAcquire::Item * const I, const std::string &Message, HashStringList const &Hashes) const -{ - // We have just finished downloading a Release file (it is not - // verified yet) + // FIXME: verify this download too before feeding it to rred + std::string const FinalFile = GetPartialFileNameFromURI(Target->URI); - string const FileName = LookupTag(Message,"Filename"); - if (FileName.empty() == true) + // success in downloading a diff, enter ApplyDiff state + if(State == StateFetchDiff) { - I->Status = StatError; - I->ErrorText = "Method gave a blank filename"; - return false; - } + FileFd fd(DestFile, FileFd::ReadOnly, FileFd::Gzip); + class Hashes LocalHashesCalc; + LocalHashesCalc.AddFD(fd); + HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList(); - if (FileName != I->DestFile) - { - I->Local = true; - I->Desc.URI = "copy:" + FileName; - I->QueueURI(I->Desc); - return false; - } + if (fd.Size() != available_patches[0].patch_size || + available_patches[0].patch_hashes != LocalHashes) + { + // patchfiles are dated, so bad indicates a bad download, so kill it + unlink(DestFile.c_str()); + Failed("Patch has Size/Hashsum mismatch", NULL); + return; + } - // make sure to verify against the right file on I-M-S hit - bool IMSHit = StringToBool(LookupTag(Message,"IMS-Hit"), false); - if (IMSHit == false && Hashes.usable()) + // rred excepts the patch as $FinalFile.ed + Rename(DestFile,FinalFile+".ed"); + + if(Debug) + std::clog << "Sending to rred method: " << FinalFile << std::endl; + + State = StateApplyDiff; + Local = true; + Desc.URI = "rred:" + FinalFile; + QueueURI(Desc); + SetActiveSubprocess("rred"); + return; + } + + + // success in download/apply a diff, queue next (if needed) + if(State == StateApplyDiff) { - // detect IMS-Hits servers haven't detected by Hash comparison - std::string const FinalFile = I->GetFinalFilename(); - if (RealFileExists(FinalFile) && Hashes.VerifyFile(FinalFile) == true) + // remove the just applied patch + available_patches.erase(available_patches.begin()); + unlink((FinalFile + ".ed").c_str()); + + // move into place + if(Debug) { - IMSHit = true; - unlink(I->DestFile.c_str()); + std::clog << "Moving patched file in place: " << std::endl + << DestFile << " -> " << FinalFile << std::endl; } - } + Rename(DestFile,FinalFile); + chmod(FinalFile.c_str(),0644); - if(IMSHit == true) - { - // for simplicity, the transaction manager is always InRelease - // even if it doesn't exist. 
- if (TransactionManager != NULL) - TransactionManager->IMSHit = true; - I->PartialFile = I->DestFile = I->GetFinalFilename(); + // see if there is more to download + if(available_patches.empty() == false) { + new pkgAcqIndexDiffs(Owner, TransactionManager, Target, + available_patches); + return Finish(); + } else + // update + DestFile = FinalFile; + return Finish(true); } +} + /*}}}*/ + +// AcqIndexMergeDiffs::AcqIndexMergeDiffs - Constructor /*{{{*/ +pkgAcqIndexMergeDiffs::pkgAcqIndexMergeDiffs(pkgAcquire * const Owner, + pkgAcqMetaBase * const TransactionManager, + IndexTarget const * const Target, + DiffInfo const &patch, + std::vector const * const allPatches) + : pkgAcqBaseIndex(Owner, TransactionManager, Target), + patch(patch), allPatches(allPatches), State(StateFetchDiff) +{ + Debug = _config->FindB("Debug::pkgAcquire::Diffs",false); + + Desc.Owner = this; + Description = Target->Description; + Desc.ShortDesc = Target->ShortDesc; + + Desc.URI = Target->URI + ".diff/" + patch.file + ".gz"; + Desc.Description = Description + " " + patch.file + string(".pdiff"); + + DestFile = GetPartialFileNameFromURI(Target->URI + ".diff/" + patch.file); + + if(Debug) + std::clog << "pkgAcqIndexMergeDiffs: " << Desc.URI << std::endl; + + QueueURI(Desc); +} + /*}}}*/ +void pkgAcqIndexMergeDiffs::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)/*{{{*/ +{ + if(Debug) + std::clog << "pkgAcqIndexMergeDiffs failed: " << Desc.URI << " with " << Message << std::endl; - // set Item to complete as the remaining work is all local (verify etc) - I->Complete = true; + Item::Failed(Message,Cnf); + Status = StatDone; - return true; + // check if we are the first to fail, otherwise we are done here + State = StateDoneDiff; + for (std::vector::const_iterator I = allPatches->begin(); + I != allPatches->end(); ++I) + if ((*I)->State == StateErrorDiff) + return; + + // first failure means we should fallback + State = StateErrorDiff; + if (Debug) + std::clog << "Falling back to normal index file acquire" << std::endl; + DestFile = GetPartialFileNameFromURI(Target->URI); + RenameOnError(PDiffError); + new pkgAcqIndex(Owner, TransactionManager, Target); } /*}}}*/ -void pkgAcqMetaBase::QueueIndexes(bool verify) /*{{{*/ +void pkgAcqIndexMergeDiffs::Done(string const &Message, HashStringList const &Hashes, /*{{{*/ + pkgAcquire::MethodConfig const * const Cnf) { - // at this point the real Items are loaded in the fetcher - ExpectedAdditionalItems = 0; + if(Debug) + std::clog << "pkgAcqIndexMergeDiffs::Done(): " << Desc.URI << std::endl; - vector ::const_iterator Target; - for (Target = IndexTargets->begin(); - Target != IndexTargets->end(); - ++Target) - { - HashStringList ExpectedIndexHashes; - const indexRecords::checkSum *Record = MetaIndexParser->Lookup((*Target)->MetaKey); + Item::Done(Message, Hashes, Cnf); - // optional target that we do not have in the Release file are - // skipped - if (verify == true && Record == NULL && (*Target)->IsOptional()) - continue; + // FIXME: verify download before feeding it to rred + string const FinalFile = GetPartialFileNameFromURI(Target->URI); + + if (State == StateFetchDiff) + { + FileFd fd(DestFile, FileFd::ReadOnly, FileFd::Gzip); + class Hashes LocalHashesCalc; + LocalHashesCalc.AddFD(fd); + HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList(); - // targets without a hash record are a error when verify is required - if (verify == true && Record == NULL) + if (fd.Size() != patch.patch_size || patch.patch_hashes != LocalHashes) { - Status 
= StatAuthError; - strprintf(ErrorText, _("Unable to find expected entry '%s' in Release file (Wrong sources.list entry or malformed file)"), (*Target)->MetaKey.c_str()); - return; + // patchfiles are dated, so bad indicates a bad download, so kill it + unlink(DestFile.c_str()); + Failed("Patch has Size/Hashsum mismatch", NULL); + return; } - if (Record) - ExpectedIndexHashes = Record->Hashes; - - if (_config->FindB("Debug::pkgAcquire::Auth", false)) - { - std::cerr << "Queueing: " << (*Target)->URI << std::endl - << "Expected Hash:" << std::endl; - for (HashStringList::const_iterator hs = ExpectedIndexHashes.begin(); hs != ExpectedIndexHashes.end(); ++hs) - std::cerr << "\t- " << hs->toStr() << std::endl; - std::cerr << "For: " << ((Record == NULL) ? "" : Record->MetaKeyFilename) << std::endl; + // rred expects the patch as $FinalFile.ed.$patchname.gz + Rename(DestFile, FinalFile + ".ed." + patch.file + ".gz"); + + // check if this is the last completed diff + State = StateDoneDiff; + for (std::vector::const_iterator I = allPatches->begin(); + I != allPatches->end(); ++I) + if ((*I)->State != StateDoneDiff) + { + if(Debug) + std::clog << "Not the last done diff in the batch: " << Desc.URI << std::endl; + return; + } + + // this is the last completed diff, so we are ready to apply now + State = StateApplyDiff; + // patching needs to be bootstrapped with the 'old' version + if (symlink(GetFinalFilename().c_str(), FinalFile.c_str()) != 0) + { + Failed("Link creation of " + FinalFile + " to " + GetFinalFilename() + " failed", NULL); + return; } - if (verify == true && ExpectedIndexHashes.empty() == true) + + if(Debug) + std::clog << "Sending to rred method: " << FinalFile << std::endl; + + Local = true; + Desc.URI = "rred:" + FinalFile; + QueueURI(Desc); + SetActiveSubprocess("rred"); + return; + } + // success in download/apply all diffs, clean up + else if (State == StateApplyDiff) + { + // move the result into place + std::string const Final = GetFinalFilename(); + if(Debug) + std::clog << "Queue patched file in place: " << std::endl + << DestFile << " -> " << Final << std::endl; + + // queue for copy by the transaction manager + TransactionManager->TransactionStageCopy(this, DestFile, Final); + + // ensure the ed's are gone regardless of list-cleanup + for (std::vector::const_iterator I = allPatches->begin(); + I != allPatches->end(); ++I) { - Status = StatAuthError; - strprintf(ErrorText, _("Unable to find hash sum for '%s' in Release file"), (*Target)->MetaKey.c_str()); - return; + std::string const PartialFile = GetPartialFileNameFromURI(Target->URI); + std::string patch = PartialFile + ".ed." + (*I)->patch.file + ".gz"; + unlink(patch.c_str()); } + unlink(FinalFile.c_str()); - /* Queue the Index file (Packages, Sources, Translation-$foo - (either diff or full packages files, depending - on the users option) - we also check if the PDiff Index file is listed - in the Meta-Index file. 
Ideal would be if pkgAcqDiffIndex would test this - instead, but passing the required info to it is to much hassle */ - if(_config->FindB("Acquire::PDiffs",true) == true && (verify == false || - MetaIndexParser->Exists((*Target)->MetaKey + ".diff/Index") == true)) - new pkgAcqDiffIndex(Owner, TransactionManager, *Target, ExpectedIndexHashes, MetaIndexParser); - else - new pkgAcqIndex(Owner, TransactionManager, *Target, ExpectedIndexHashes, MetaIndexParser); + // all set and done + Complete = true; + if(Debug) + std::clog << "allDone: " << DestFile << "\n" << std::endl; } } /*}}}*/ -bool pkgAcqMetaBase::VerifyVendor(string Message) /*{{{*/ + +// AcqIndex::AcqIndex - Constructor /*{{{*/ +pkgAcqIndex::pkgAcqIndex(pkgAcquire * const Owner, + pkgAcqMetaBase * const TransactionManager, + IndexTarget const * const Target) + : pkgAcqBaseIndex(Owner, TransactionManager, Target) { - string::size_type pos; + // autoselect the compression method + AutoSelectCompression(); + Init(Target->URI, Target->Description, Target->ShortDesc); - // check for missing sigs (that where not fatal because otherwise we had - // bombed earlier) - string missingkeys; - string msg = _("There is no public key available for the " - "following key IDs:\n"); - pos = Message.find("NO_PUBKEY "); - if (pos != std::string::npos) + if(_config->FindB("Debug::Acquire::Transaction", false) == true) + std::clog << "New pkgIndex with TransactionManager " + << TransactionManager << std::endl; +} + /*}}}*/ +// AcqIndex::AutoSelectCompression - Select compression /*{{{*/ +void pkgAcqIndex::AutoSelectCompression() +{ + std::vector types = APT::Configuration::getCompressionTypes(); + CompressionExtensions = ""; + if (TransactionManager->MetaIndexParser != NULL && TransactionManager->MetaIndexParser->Exists(Target->MetaKey)) { - string::size_type start = pos+strlen("NO_PUBKEY "); - string Fingerprint = Message.substr(start, Message.find("\n")-start); - missingkeys += (Fingerprint); + for (std::vector::const_iterator t = types.begin(); + t != types.end(); ++t) + { + std::string CompressedMetaKey = string(Target->MetaKey).append(".").append(*t); + if (*t == "uncompressed" || + TransactionManager->MetaIndexParser->Exists(CompressedMetaKey) == true) + CompressionExtensions.append(*t).append(" "); + } } - if(!missingkeys.empty()) - _error->Warning("%s", (msg + missingkeys).c_str()); - - string Transformed = MetaIndexParser->GetExpectedDist(); - - if (Transformed == "../project/experimental") + else { - Transformed = "experimental"; + for (std::vector::const_iterator t = types.begin(); t != types.end(); ++t) + CompressionExtensions.append(*t).append(" "); } + if (CompressionExtensions.empty() == false) + CompressionExtensions.erase(CompressionExtensions.end()-1); +} + /*}}}*/ +// AcqIndex::Init - defered Constructor /*{{{*/ +void pkgAcqIndex::Init(string const &URI, string const &URIDesc, + string const &ShortDesc) +{ + Stage = STAGE_DOWNLOAD; - pos = Transformed.rfind('/'); - if (pos != string::npos) - { - Transformed = Transformed.substr(0, pos); - } + DestFile = GetPartialFileNameFromURI(URI); - if (Transformed == ".") + size_t const nextExt = CompressionExtensions.find(' '); + if (nextExt == std::string::npos) { - Transformed = ""; + CurrentCompressionExtension = CompressionExtensions; + CompressionExtensions.clear(); } - - if (_config->FindB("Acquire::Check-Valid-Until", true) == true && - MetaIndexParser->GetValidUntil() > 0) { - time_t const invalid_since = time(NULL) - MetaIndexParser->GetValidUntil(); - if (invalid_since > 0) - { - 
std::string errmsg; - strprintf(errmsg, - // TRANSLATOR: The first %s is the URL of the bad Release file, the second is - // the time since then the file is invalid - formated in the same way as in - // the download progress display (e.g. 7d 3h 42min 1s) - _("Release file for %s is expired (invalid since %s). " - "Updates for this repository will not be applied."), - RealURI.c_str(), TimeToStr(invalid_since).c_str()); - if (ErrorText.empty()) - ErrorText = errmsg; - return _error->Error("%s", errmsg.c_str()); - } + else + { + CurrentCompressionExtension = CompressionExtensions.substr(0, nextExt); + CompressionExtensions = CompressionExtensions.substr(nextExt+1); } - /* Did we get a file older than what we have? This is a last minute IMS hit and doubles - as a prevention of downgrading us to older (still valid) files */ - if (TransactionManager->IMSHit == false && LastMetaIndexParser != NULL && - LastMetaIndexParser->GetDate() > MetaIndexParser->GetDate()) + if (CurrentCompressionExtension == "uncompressed") { - TransactionManager->IMSHit = true; - unlink(DestFile.c_str()); - PartialFile = DestFile = GetFinalFilename(); - delete MetaIndexParser; - MetaIndexParser = LastMetaIndexParser; - LastMetaIndexParser = NULL; + Desc.URI = URI; } - - if (_config->FindB("Debug::pkgAcquire::Auth", false)) + else if (unlikely(CurrentCompressionExtension.empty())) + return; + else { - std::cerr << "Got Codename: " << MetaIndexParser->GetDist() << std::endl; - std::cerr << "Expecting Dist: " << MetaIndexParser->GetExpectedDist() << std::endl; - std::cerr << "Transformed Dist: " << Transformed << std::endl; + Desc.URI = URI + '.' + CurrentCompressionExtension; + DestFile = DestFile + '.' + CurrentCompressionExtension; } - if (MetaIndexParser->CheckDist(Transformed) == false) + if(TransactionManager->MetaIndexParser != NULL) + InitByHashIfNeeded(); + + Desc.Description = URIDesc; + Desc.Owner = this; + Desc.ShortDesc = ShortDesc; + + QueueURI(Desc); +} + /*}}}*/ +// AcqIndex::AdjustForByHash - modify URI for by-hash support /*{{{*/ +void pkgAcqIndex::InitByHashIfNeeded() +{ + // TODO: + // - (maybe?) add support for by-hash into the sources.list as flag + // - make apt-ftparchive generate the hashes (and expire?) + std::string HostKnob = "APT::Acquire::" + ::URI(Desc.URI).Host + "::By-Hash"; + if(_config->FindB("APT::Acquire::By-Hash", false) == true || + _config->FindB(HostKnob, false) == true || + TransactionManager->MetaIndexParser->GetSupportsAcquireByHash()) { - // This might become fatal one day -// Status = StatAuthError; -// ErrorText = "Conflicting distribution; expected " -// + MetaIndexParser->GetExpectedDist() + " but got " -// + MetaIndexParser->GetDist(); -// return false; - if (!Transformed.empty()) + HashStringList const Hashes = GetExpectedHashes(); + if(Hashes.usable()) { - _error->Warning(_("Conflicting distribution: %s (expected %s but got %s)"), - Desc.Description.c_str(), - Transformed.c_str(), - MetaIndexParser->GetDist().c_str()); + // FIXME: should we really use the best hash here? or a fixed one? 
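
The code just below selects a preferred hash with Hashes.find("") and rewrites the
request URI into its by-hash form. A standalone equivalent of that string surgery,
with an invented helper name and made-up example paths:

    #include <apt-pkg/hashes.h>
    #include <string>

    // e.g. ".../binary-amd64/Packages.xz" becomes
    //      ".../binary-amd64/by-hash/SHA256/<hashvalue>"
    static std::string ToByHashURI(std::string const &URI, HashString const &Hash)
    {
       std::string const ByHash = "/by-hash/" + Hash.HashType() + "/" + Hash.HashValue();
       size_t const trailing_slash = URI.find_last_of("/");
       return URI.substr(0, trailing_slash) + ByHash;
    }

The by-hash location is expected to serve the same bytes as the canonical filename,
so the normal hash verification still applies unchanged afterwards.
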
+ HashString const * const TargetHash = Hashes.find(""); + std::string const ByHash = "/by-hash/" + TargetHash->HashType() + "/" + TargetHash->HashValue(); + size_t const trailing_slash = Desc.URI.find_last_of("/"); + Desc.URI = Desc.URI.replace( + trailing_slash, + Desc.URI.substr(trailing_slash+1).size()+1, + ByHash); + } else { + _error->Warning( + "Fetching ByHash requested but can not find record for %s", + GetMetaKey().c_str()); } } - - return true; } /*}}}*/ -// pkgAcqMetaIndex::Failed - no Release file present /*{{{*/ -void pkgAcqMetaIndex::Failed(string Message, - pkgAcquire::MethodConfig * Cnf) +// AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/ +// --------------------------------------------------------------------- +/* The only header we use is the last-modified header. */ +string pkgAcqIndex::Custom600Headers() const { - pkgAcquire::Item::Failed(Message, Cnf); - Status = StatDone; - - _error->Warning(_("The repository '%s' does not have a Release file. " - "This is deprecated, please contact the owner of the " - "repository."), URIDesc.c_str()); + string Final = GetFinalFilename(); - // No Release file was present so fall - // back to queueing Packages files without verification - // only allow going further if the users explicitely wants it - if(AllowInsecureRepositories(MetaIndexParser, TransactionManager, this) == true) - { - // Done, queue for rename on transaction finished - if (FileExists(DestFile)) - TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename()); + string msg = "\nIndex-File: true"; + struct stat Buf; + if (stat(Final.c_str(),&Buf) == 0) + msg += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); - // queue without any kind of hashsum support - QueueIndexes(false); - } + if(Target->IsOptional()) + msg += "\nFail-Ignore: true"; + + return msg; } /*}}}*/ -void pkgAcqMetaIndex::Finished() /*{{{*/ +// AcqIndex::Failed - getting the indexfile failed /*{{{*/ +void pkgAcqIndex::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf) { - if(_config->FindB("Debug::Acquire::Transaction", false) == true) - std::clog << "Finished: " << DestFile <TransactionHasError() == false) - TransactionManager->CommitTransaction(); + Item::Failed(Message,Cnf); + + // authorisation matches will not be fixed by other compression types + if (Status != StatAuthError) + { + if (CompressionExtensions.empty() == false) + { + Init(Target->URI, Desc.Description, Desc.ShortDesc); + Status = StatIdle; + return; + } + } + + if(Target->IsOptional() && GetExpectedHashes().empty() && Stage == STAGE_DOWNLOAD) + Status = StatDone; + else + TransactionManager->AbortTransaction(); } /*}}}*/ -pkgAcqMetaClearSig::pkgAcqMetaClearSig(pkgAcquire *Owner, /*{{{*/ - string const &URI, string const &URIDesc, string const &ShortDesc, - string const &MetaIndexURI, string const &MetaIndexURIDesc, string const &MetaIndexShortDesc, - string const &MetaSigURI, string const &MetaSigURIDesc, string const &MetaSigShortDesc, - const vector* IndexTargets, - indexRecords* MetaIndexParser) : - pkgAcqMetaIndex(Owner, NULL, URI, URIDesc, ShortDesc, MetaSigURI, MetaSigURIDesc,MetaSigShortDesc, IndexTargets, MetaIndexParser), - MetaIndexURI(MetaIndexURI), MetaIndexURIDesc(MetaIndexURIDesc), MetaIndexShortDesc(MetaIndexShortDesc), - MetaSigURI(MetaSigURI), MetaSigURIDesc(MetaSigURIDesc), MetaSigShortDesc(MetaSigShortDesc) +// AcqIndex::ReverifyAfterIMS - Reverify index after an ims-hit /*{{{*/ +void pkgAcqIndex::ReverifyAfterIMS() { - // index targets + (worst case:) 
Release/Release.gpg - ExpectedAdditionalItems = IndexTargets->size() + 2; + // update destfile to *not* include the compression extension when doing + // a reverify (as its uncompressed on disk already) + DestFile = GetCompressedFileName(Target->URI, GetPartialFileNameFromURI(Target->URI), CurrentCompressionExtension); + + // copy FinalFile into partial/ so that we check the hash again + string FinalFile = GetFinalFilename(); + Stage = STAGE_DECOMPRESS_AND_VERIFY; + Desc.URI = "copy:" + FinalFile; + QueueURI(Desc); } /*}}}*/ -pkgAcqMetaClearSig::~pkgAcqMetaClearSig() /*{{{*/ +// AcqIndex::ValidateFile - Validate the content of the downloaded file /*{{{*/ +bool pkgAcqIndex::ValidateFile(const std::string &FileName) { + // FIXME: this can go away once we only ever download stuff that + // has a valid hash and we never do GET based probing + // FIXME2: this also leaks debian-isms into the code and should go therefore + + /* Always validate the index file for correctness (all indexes must + * have a Package field) (LP: #346386) (Closes: #627642) + */ + FileFd fd(FileName, FileFd::ReadOnly, FileFd::Extension); + // Only test for correctness if the content of the file is not empty + // (empty is ok) + if (fd.Size() > 0) + { + pkgTagSection sec; + pkgTagFile tag(&fd); + + // all our current indexes have a field 'Package' in each section + if (_error->PendingError() == true || + tag.Step(sec) == false || + sec.Exists("Package") == false) + return false; + } + return true; } /*}}}*/ -// pkgAcqMetaClearSig::Custom600Headers - Insert custom request headers /*{{{*/ -#if APT_PKG_ABI >= 413 -string pkgAcqMetaClearSig::Custom600Headers() const -#else -string pkgAcqMetaClearSig::Custom600Headers() -#endif +// AcqIndex::Done - Finished a fetch /*{{{*/ +// --------------------------------------------------------------------- +/* This goes through a number of states.. On the initial fetch the + method could possibly return an alternate filename which points + to the uncompressed version of the file. If this is so the file + is copied into the partial directory. In all other cases the file + is decompressed with a compressed uri. 
*/ +void pkgAcqIndex::Done(string const &Message, + HashStringList const &Hashes, + pkgAcquire::MethodConfig const * const Cfg) { - string Header = pkgAcqMetaBase::Custom600Headers(); - Header += "\nFail-Ignore: true"; - return Header; + Item::Done(Message,Hashes,Cfg); + + switch(Stage) + { + case STAGE_DOWNLOAD: + StageDownloadDone(Message, Hashes, Cfg); + break; + case STAGE_DECOMPRESS_AND_VERIFY: + StageDecompressDone(Message, Hashes, Cfg); + break; + } } /*}}}*/ -// pkgAcqMetaClearSig::Done - We got a file /*{{{*/ -class APT_HIDDEN DummyItem : public pkgAcquire::Item +// AcqIndex::StageDownloadDone - Queue for decompress and verify /*{{{*/ +void pkgAcqIndex::StageDownloadDone(string const &Message, HashStringList const &, + pkgAcquire::MethodConfig const * const) { - std::string URI; - public: - virtual std::string DescURI() {return URI;}; + Complete = true; - DummyItem(pkgAcquire *Owner, std::string const &URI) : pkgAcquire::Item(Owner), URI(URI) + // Handle the unzipd case + string FileName = LookupTag(Message,"Alt-Filename"); + if (FileName.empty() == false) { - Status = StatDone; - DestFile = GetFinalFileNameFromURI(URI); + Stage = STAGE_DECOMPRESS_AND_VERIFY; + Local = true; + DestFile += ".decomp"; + Desc.URI = "copy:" + FileName; + QueueURI(Desc); + SetActiveSubprocess("copy"); + return; } -}; -void pkgAcqMetaClearSig::Done(std::string Message,unsigned long long Size, - HashStringList const &Hashes, - pkgAcquire::MethodConfig *Cnf) -{ - Item::Done(Message, Size, Hashes, Cnf); - // if we expect a ClearTextSignature (InRelease), ensure that - // this is what we get and if not fail to queue a - // Release/Release.gpg, see #346386 - if (FileExists(DestFile) && !StartsWithGPGClearTextSignature(DestFile)) + FileName = LookupTag(Message,"Filename"); + if (FileName.empty() == true) { - pkgAcquire::Item::Failed(Message, Cnf); - RenameOnError(NotClearsigned); - TransactionManager->AbortTransaction(); - return; + Status = StatError; + ErrorText = "Method gave a blank filename"; } - if(AuthPass == false) + // Methods like e.g. "file:" will give us a (compressed) FileName that is + // not the "DestFile" we set, in this case we uncompress from the local file + if (FileName != DestFile) + Local = true; + else + EraseFileName = FileName; + + // we need to verify the file against the current Release file again + // on if-modfied-since hit to avoid a stale attack against us + if(StringToBool(LookupTag(Message,"IMS-Hit"),false) == true) { - if(CheckDownloadDone(this, Message, Hashes) == true) - QueueForSignatureVerify(this, DestFile, DestFile); + // The files timestamp matches, reverify by copy into partial/ + EraseFileName = ""; + ReverifyAfterIMS(); return; } - else if(CheckAuthDone(Message) == true) + + // If we have compressed indexes enabled, queue for hash verification + if (_config->FindB("Acquire::GzipIndexes",false)) { - if (TransactionManager->IMSHit == false) - TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename()); - else if (RealFileExists(GetFinalFilename()) == false) - { - // We got an InRelease file IMSHit, but we haven't one, which means - // we had a valid Release/Release.gpg combo stepping in, which we have - // to 'acquire' now to ensure list cleanup isn't removing them - new DummyItem(Owner, MetaIndexURI); - new DummyItem(Owner, MetaSigURI); - } + DestFile = GetPartialFileNameFromURI(Target->URI + '.' 
+ CurrentCompressionExtension); + EraseFileName = ""; + Stage = STAGE_DECOMPRESS_AND_VERIFY; + Desc.URI = "copy:" + FileName; + QueueURI(Desc); + SetActiveSubprocess("copy"); + return; + } + + // get the binary name for your used compression type + string decompProg; + if(CurrentCompressionExtension == "uncompressed") + decompProg = "copy"; + else + decompProg = _config->Find(string("Acquire::CompressionTypes::").append(CurrentCompressionExtension),""); + if(decompProg.empty() == true) + { + _error->Error("Unsupported extension: %s", CurrentCompressionExtension.c_str()); + return; } + + // queue uri for the next stage + Stage = STAGE_DECOMPRESS_AND_VERIFY; + DestFile += ".decomp"; + Desc.URI = decompProg + ":" + FileName; + QueueURI(Desc); + SetActiveSubprocess(decompProg); } /*}}}*/ -void pkgAcqMetaClearSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/ +// AcqIndex::StageDecompressDone - Final verification /*{{{*/ +void pkgAcqIndex::StageDecompressDone(string const &Message, + HashStringList const &, + pkgAcquire::MethodConfig const * const Cfg) { - Item::Failed(Message, Cnf); - - // we failed, we will not get additional items from this method - ExpectedAdditionalItems = 0; - - if (AuthPass == false) + if(!ValidateFile(DestFile)) { - // Queue the 'old' InRelease file for removal if we try Release.gpg - // as otherwise the file will stay around and gives a false-auth - // impression (CVE-2012-0214) - TransactionManager->TransactionStageRemoval(this, GetFinalFilename()); - Status = StatDone; - - new pkgAcqMetaIndex(Owner, TransactionManager, - MetaIndexURI, MetaIndexURIDesc, MetaIndexShortDesc, - MetaSigURI, MetaSigURIDesc, MetaSigShortDesc, - IndexTargets, MetaIndexParser); + RenameOnError(InvalidFormat); + Failed(Message, Cfg); + return; } - else - { - if(CheckStopAuthentication(this, Message)) - return; - _error->Warning(_("The data from '%s' is not signed. Packages " - "from that repository can not be authenticated."), - URIDesc.c_str()); - - // No Release file was present, or verification failed, so fall - // back to queueing Packages files without verification - // only allow going further if the users explicitely wants it - if(AllowInsecureRepositories(MetaIndexParser, TransactionManager, this) == true) - { - Status = StatDone; + // Done, queue for rename on transaction finished + TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename()); - /* Always move the meta index, even if gpgv failed. 
This ensures - * that PackageFile objects are correctly filled in */ - if (FileExists(DestFile)) - { - string FinalFile = GetFinalFilename(); - /* InRelease files become Release files, otherwise - * they would be considered as trusted later on */ - RealURI = RealURI.replace(RealURI.rfind("InRelease"), 9, - "Release"); - FinalFile = FinalFile.replace(FinalFile.rfind("InRelease"), 9, - "Release"); - - // Done, queue for rename on transaction finished - TransactionManager->TransactionStageCopy(this, DestFile, FinalFile); - } - QueueIndexes(false); - } - } + return; } /*}}}*/ + + // AcqArchive::AcqArchive - Constructor /*{{{*/ // --------------------------------------------------------------------- /* This just sets up the initial fetch environment and queues the first possibilitiy */ -pkgAcqArchive::pkgAcqArchive(pkgAcquire *Owner,pkgSourceList *Sources, - pkgRecords *Recs,pkgCache::VerIterator const &Version, +pkgAcqArchive::pkgAcqArchive(pkgAcquire * const Owner,pkgSourceList * const Sources, + pkgRecords * const Recs,pkgCache::VerIterator const &Version, string &StoreFilename) : - Item(Owner, HashStringList()), Version(Version), Sources(Sources), Recs(Recs), - StoreFilename(StoreFilename), Vf(Version.FileList()), + Item(Owner), LocalSource(false), Version(Version), Sources(Sources), Recs(Recs), + StoreFilename(StoreFilename), Vf(Version.FileList()), Trusted(false) { Retries = _config->FindI("Acquire::Retries",0); @@ -2573,15 +2577,17 @@ bool pkgAcqArchive::QueueNext() { for (; Vf.end() == false; ++Vf) { + pkgCache::PkgFileIterator const PkgF = Vf.File(); // Ignore not source sources - if ((Vf.File()->Flags & pkgCache::Flag::NotSource) != 0) + if ((PkgF->Flags & pkgCache::Flag::NotSource) != 0) continue; // Try to cross match against the source list pkgIndexFile *Index; - if (Sources->FindIndex(Vf.File(),Index) == false) + if (Sources->FindIndex(PkgF, Index) == false) continue; - + LocalSource = (PkgF->Flags & pkgCache::Flag::LocalSource) == pkgCache::Flag::LocalSource; + // only try to get a trusted package from another source if that source // is also trusted if(Trusted && !Index->IsTrusted()) @@ -2681,25 +2687,10 @@ bool pkgAcqArchive::QueueNext() // AcqArchive::Done - Finished fetching /*{{{*/ // --------------------------------------------------------------------- /* */ -void pkgAcqArchive::Done(string Message,unsigned long long Size, HashStringList const &CalcHashes, - pkgAcquire::MethodConfig *Cfg) +void pkgAcqArchive::Done(string const &Message, HashStringList const &Hashes, + pkgAcquire::MethodConfig const * const Cfg) { - Item::Done(Message, Size, CalcHashes, Cfg); - - // Check the size - if (Size != Version->Size) - { - RenameOnError(SizeMismatch); - return; - } - - // FIXME: could this empty() check impose *any* sort of security issue? 
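
The inline size and hash comparisons removed from pkgAcqArchive::Done() around this
point (and from pkgAcqFile::Done() further down) are not lost; the decision is now
driven by the GetExpectedHashes()/HashesRequired() hooks each item implements,
before Done() is ever reached. Purely as an illustration of that decision (invented
function name, not the actual worker code):

    #include <apt-pkg/acquire-item.h>
    #include <apt-pkg/hashes.h>

    static bool HashCheckPasses(pkgAcquire::Item const &Itm,
                                HashStringList const &Calculated)
    {
       HashStringList const Expected = Itm.GetExpectedHashes();
       if (Expected.usable() == true)
          return Expected == Calculated;       // a known hash must match
       return Itm.HashesRequired() == false;   // none known: ok only if optional
    }
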
- if(ExpectedHashes.usable() && ExpectedHashes != CalcHashes) - { - RenameOnError(HashSumMismatch); - printHashSumComparision(DestFile, ExpectedHashes, CalcHashes); - return; - } + Item::Done(Message, Hashes, Cfg); // Grab the output filename string FileName = LookupTag(Message,"Filename"); @@ -2726,23 +2717,17 @@ void pkgAcqArchive::Done(string Message,unsigned long long Size, HashStringList Complete = true; } /*}}}*/ -// Acquire::Item::GetFinalFilename - Return the full final file path /*{{{*/ -std::string pkgAcqArchive::GetFinalFilename() const -{ - return _config->FindDir("Dir::Cache::Archives") + flNotDir(StoreFilename); -} - /*}}}*/ // AcqArchive::Failed - Failure handler /*{{{*/ // --------------------------------------------------------------------- /* Here we try other sources */ -void pkgAcqArchive::Failed(string Message,pkgAcquire::MethodConfig *Cnf) +void pkgAcqArchive::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf) { Item::Failed(Message,Cnf); - /* We don't really want to retry on failed media swaps, this prevents + /* We don't really want to retry on failed media swaps, this prevents that. An interesting observation is that permanent failures are not recorded. */ - if (Cnf->Removable == true && + if (Cnf->Removable == true && StringToBool(LookupTag(Message,"Transient-Failure"),false) == true) { // Vf = Version.FileList(); @@ -2770,21 +2755,12 @@ void pkgAcqArchive::Failed(string Message,pkgAcquire::MethodConfig *Cnf) } } /*}}}*/ -// AcqArchive::IsTrusted - Determine whether this archive comes from a trusted source /*{{{*/ -// --------------------------------------------------------------------- -#if APT_PKG_ABI >= 413 -APT_PURE bool pkgAcqArchive::IsTrusted() const -#else -APT_PURE bool pkgAcqArchive::IsTrusted() -#endif +APT_PURE bool pkgAcqArchive::IsTrusted() const /*{{{*/ { return Trusted; } /*}}}*/ -// AcqArchive::Finished - Fetching has finished, tidy up /*{{{*/ -// --------------------------------------------------------------------- -/* */ -void pkgAcqArchive::Finished() +void pkgAcqArchive::Finished() /*{{{*/ { if (Status == pkgAcquire::Item::StatDone && Complete == true) @@ -2792,17 +2768,26 @@ void pkgAcqArchive::Finished() StoreFilename = string(); } /*}}}*/ +std::string pkgAcqArchive::DescURI() const /*{{{*/ +{ + return Desc.URI; +} + /*}}}*/ +std::string pkgAcqArchive::ShortDesc() const /*{{{*/ +{ + return Desc.ShortDesc; +} + /*}}}*/ + // AcqFile::pkgAcqFile - Constructor /*{{{*/ -// --------------------------------------------------------------------- -/* The file is added to the queue */ -pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI, HashStringList const &Hashes, - unsigned long long Size,string Dsc,string ShortDesc, +pkgAcqFile::pkgAcqFile(pkgAcquire * const Owner,string const &URI, HashStringList const &Hashes, + unsigned long long const Size,string const &Dsc,string const &ShortDesc, const string &DestDir, const string &DestFilename, - bool IsIndexFile) : - Item(Owner, Hashes), IsIndexFile(IsIndexFile) + bool const IsIndexFile) : + Item(Owner), IsIndexFile(IsIndexFile), ExpectedHashes(Hashes) { Retries = _config->FindI("Acquire::Retries",0); - + if(!DestFilename.empty()) DestFile = DestFilename; else if(!DestDir.empty()) @@ -2817,7 +2802,7 @@ pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI, HashStringList const &Hashe // Set the short description to the archive component Desc.ShortDesc = ShortDesc; - + // Get the transfer sizes FileSize = Size; struct stat Buf; @@ -2834,21 +2819,11 @@ pkgAcqFile::pkgAcqFile(pkgAcquire 
*Owner,string URI, HashStringList const &Hashe } /*}}}*/ // AcqFile::Done - Item downloaded OK /*{{{*/ -// --------------------------------------------------------------------- -/* */ -void pkgAcqFile::Done(string Message,unsigned long long Size,HashStringList const &CalcHashes, - pkgAcquire::MethodConfig *Cnf) +void pkgAcqFile::Done(string const &Message,HashStringList const &CalcHashes, + pkgAcquire::MethodConfig const * const Cnf) { - Item::Done(Message,Size,CalcHashes,Cnf); + Item::Done(Message,CalcHashes,Cnf); - // Check the hash - if(ExpectedHashes.usable() && ExpectedHashes != CalcHashes) - { - RenameOnError(HashSumMismatch); - printHashSumComparision(DestFile, ExpectedHashes, CalcHashes); - return; - } - string FileName = LookupTag(Message,"Filename"); if (FileName.empty() == true) { @@ -2858,11 +2833,11 @@ void pkgAcqFile::Done(string Message,unsigned long long Size,HashStringList cons } Complete = true; - + // The files timestamp matches if (StringToBool(LookupTag(Message,"IMS-Hit"),false) == true) return; - + // We have to copy it into place if (FileName != DestFile) { @@ -2874,7 +2849,7 @@ void pkgAcqFile::Done(string Message,unsigned long long Size,HashStringList cons QueueURI(Desc); return; } - + // Erase the file if it is a symlink so we can overwrite it struct stat St; if (lstat(DestFile.c_str(),&St) == 0) @@ -2882,7 +2857,7 @@ void pkgAcqFile::Done(string Message,unsigned long long Size,HashStringList cons if (S_ISLNK(St.st_mode) != 0) unlink(DestFile.c_str()); } - + // Symlink the file if (symlink(FileName.c_str(),DestFile.c_str()) != 0) { @@ -2894,14 +2869,14 @@ void pkgAcqFile::Done(string Message,unsigned long long Size,HashStringList cons ErrorText = msg.str(); Status = StatError; Complete = false; - } + } } } /*}}}*/ // AcqFile::Failed - Failure handler /*{{{*/ // --------------------------------------------------------------------- /* Here we try other sources */ -void pkgAcqFile::Failed(string Message,pkgAcquire::MethodConfig *Cnf) +void pkgAcqFile::Failed(string const &Message, pkgAcquire::MethodConfig const * const Cnf) { Item::Failed(Message,Cnf); @@ -2918,14 +2893,7 @@ void pkgAcqFile::Failed(string Message,pkgAcquire::MethodConfig *Cnf) } /*}}}*/ -// AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/ -// --------------------------------------------------------------------- -/* The only header we use is the last-modified header. */ -#if APT_PKG_ABI >= 413 -string pkgAcqFile::Custom600Headers() const -#else -string pkgAcqFile::Custom600Headers() -#endif +string pkgAcqFile::Custom600Headers() const /*{{{*/ { if (IsIndexFile) return "\nIndex-File: true"; diff --git a/apt-pkg/acquire-item.h b/apt-pkg/acquire-item.h index 07c86f31b..97d5ea1dd 100644 --- a/apt-pkg/acquire-item.h +++ b/apt-pkg/acquire-item.h @@ -6,15 +6,15 @@ Acquire Item - Item to acquire When an item is instantiated it will add it self to the local list in - the Owner Acquire class. Derived classes will then call QueueURI to - register all the URI's they wish to fetch at the initial moment. - + the Owner Acquire class. Derived classes will then call QueueURI to + register all the URI's they wish to fetch at the initial moment. + Three item classes are provided to provide functionality for downloading of Index, Translation and Packages files. - + A Archive class is provided for downloading .deb files. It does Hash checking and source location as well as a retry algorithm. 
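
A practical consequence of the pkgAcqFile changes a few hunks up: the caller still
hands the expected hashes to the constructor, but the comparison no longer lives in
Done(). A rough usage sketch with made-up URI, hash value and descriptions,
assuming libapt-pkg is linked in:

    #include <apt-pkg/acquire.h>
    #include <apt-pkg/acquire-item.h>
    #include <apt-pkg/hashes.h>
    #include <string>

    void FetchOneFile(pkgAcquire &Fetcher)
    {
       HashStringList Hashes;
       Hashes.push_back(HashString("SHA256", std::string(64, '0')));  // dummy value
       // items register themselves with their owner, so the bare 'new' is the idiom
       new pkgAcqFile(&Fetcher, "http://example.org/pool/f/foo/foo_1.0.tar.gz",
                      Hashes, 0 /* size unknown */, "foo 1.0 tarball",
                      "foo_1.0.tar.gz", "", "", false);
       Fetcher.Run();
    }

Supplying an empty HashStringList leaves the item with nothing to verify against.
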
- + ##################################################################### */ /*}}}*/ #ifndef PKGLIB_ACQUIRE_ITEM_H @@ -49,7 +49,48 @@ class pkgSourceList; class IndexTarget; class pkgAcqMetaBase; -/** \brief Represents the process by which a pkgAcquire object should {{{ +class APT_HIDDEN IndexTarget /*{{{*/ +/** \brief Information about an index file. */ +{ + public: + /** \brief A URI from which the index file can be downloaded. */ + std::string const URI; + + /** \brief A description of the index file. */ + std::string const Description; + + /** \brief A shorter description of the index file. */ + std::string const ShortDesc; + + /** \brief The key by which this index file should be + * looked up within the meta signature file. + */ + std::string const MetaKey; + + virtual bool IsOptional() const { + return false; + } + + IndexTarget(std::string const &MetaKey, std::string const &ShortDesc, + std::string const &LongDesc, std::string const &URI) : + URI(URI), Description(LongDesc), ShortDesc(ShortDesc), MetaKey(MetaKey) {} +}; + /*}}}*/ +class APT_HIDDEN OptionalIndexTarget : public IndexTarget /*{{{*/ +/** \brief Information about an optional index file. */ +{ + public: + virtual bool IsOptional() const { + return true; + } + + OptionalIndexTarget(std::string const &MetaKey, std::string const &ShortDesc, + std::string const &LongDesc, std::string const &URI) : + IndexTarget(MetaKey, ShortDesc, LongDesc, URI) {} +}; + /*}}}*/ +class pkgAcquire::Item : public WeakPointable /*{{{*/ +/** \brief Represents the process by which a pkgAcquire object should * retrieve a file or a collection of files. * * By convention, Item subclasses should insert themselves into the @@ -61,46 +102,7 @@ class pkgAcqMetaBase; * * \see pkgAcquire */ -class pkgAcquire::Item : public WeakPointable -{ - friend class pkgAcqMetaBase; - - void *d; - - protected: - - /** \brief The acquire object with which this item is associated. */ - pkgAcquire *Owner; - - /** \brief Insert this item into its owner's queue. - * - * The method is designed to check if the request would end - * in an IMSHit and if it determines that it would, it isn't - * queueing the Item and instead sets it to completion instantly. - * - * \param Item Metadata about this item (its URI and - * description). - * \return true if the item was inserted, false if IMSHit was detected - */ - virtual bool QueueURI(ItemDesc &Item); - - /** \brief Remove this item from its owner's queue. */ - void Dequeue(); - - /** \brief Rename a file without modifying its timestamp. - * - * Many item methods call this as their final action. - * - * \param From The file to be renamed. - * - * \param To The new name of \a From. If \a To exists it will be - * overwritten. - */ - bool Rename(std::string From,std::string To); - - /** \brief Get the full pathname of the final file for the current URI */ - virtual std::string GetFinalFilename() const; - +{ public: /** \brief The current status of this item. */ @@ -125,7 +127,7 @@ class pkgAcquire::Item : public WeakPointable */ StatAuthError, - /** \brief The item was could not be downloaded because of + /** \brief The item was could not be downloaded because of * a transient network error (e.g. network down) */ StatTransientNetworkError, @@ -153,11 +155,11 @@ class pkgAcquire::Item : public WeakPointable std::string ActiveSubprocess; /** \brief A client-supplied unique identifier. - * + * * This field is initalized to 0; it is meant to be filled in by * clients that wish to use it to uniquely identify items. 
* - * \todo it's unused in apt itself + * APT progress reporting will store an ID there as shown in "Get:42 …" */ unsigned long ID; @@ -173,6 +175,7 @@ class pkgAcquire::Item : public WeakPointable * download progress indicator's overall statistics. */ bool Local; + std::string UsedMirror; /** \brief The number of fetch queues into which this item has been @@ -185,9 +188,6 @@ class pkgAcquire::Item : public WeakPointable */ unsigned int QueueCounter; - /** \brief TransactionManager */ - pkgAcqMetaBase *TransactionManager; - /** \brief The number of additional fetch items that are expected * once this item is done. * @@ -197,15 +197,12 @@ class pkgAcquire::Item : public WeakPointable * progress. */ unsigned int ExpectedAdditionalItems; - + /** \brief The name of the file into which the retrieved object * will be written. */ std::string DestFile; - /** \brief storge name until a transaction is finished */ - std::string PartialFile; - /** \brief Invoked by the acquire worker when the object couldn't * be fetched. * @@ -219,7 +216,7 @@ class pkgAcquire::Item : public WeakPointable * * \sa pkgAcqMethod */ - virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf); + virtual void Failed(std::string const &Message,pkgAcquire::MethodConfig const * const Cnf); /** \brief Invoked by the acquire worker when the object was * fetched successfully. @@ -234,25 +231,24 @@ class pkgAcquire::Item : public WeakPointable * * \param Message Data from the acquire method. Use LookupTag() * to parse it. - * \param Size The size of the object that was fetched. * \param Hashes The HashSums of the object that was fetched. * \param Cnf The method via which the object was fetched. * * \sa pkgAcqMethod */ - virtual void Done(std::string Message, unsigned long long Size, HashStringList const &Hashes, - pkgAcquire::MethodConfig *Cnf); + virtual void Done(std::string const &Message, HashStringList const &Hashes, + pkgAcquire::MethodConfig const * const Cnf); /** \brief Invoked when the worker starts to fetch this object. * * \param Message RFC822-formatted data from the worker process. * Use LookupTag() to parse it. * - * \param Size The size of the object being fetched. + * \param Hashes The expected hashes of the object being fetched. * * \sa pkgAcqMethod */ - virtual void Start(std::string Message,unsigned long long Size); + virtual void Start(std::string const &Message, unsigned long long const Size); /** \brief Custom headers to be sent to the fetch process. * @@ -262,61 +258,55 @@ class pkgAcquire::Item : public WeakPointable * line, so they should (if nonempty) have a leading newline and * no trailing newline. */ -#if APT_PKG_ABI >= 413 - virtual std::string Custom600Headers() const {return std::string();}; -#else - virtual std::string Custom600Headers() {return std::string();}; -#endif + virtual std::string Custom600Headers() const; /** \brief A "descriptive" URI-like string. * * \return a URI that should be used to describe what is being fetched. */ - virtual std::string DescURI() = 0; + virtual std::string DescURI() const = 0; /** \brief Short item description. * * \return a brief description of the object being fetched. */ - virtual std::string ShortDesc() {return DescURI();} + virtual std::string ShortDesc() const; /** \brief Invoked by the worker when the download is completely done. 
*/ - virtual void Finished() {}; - - /** \brief HashSums + virtual void Finished(); + + /** \return HashSums the DestFile is supposed to have in this stage */ + virtual HashStringList GetExpectedHashes() const = 0; + /** \return the 'best' hash for display proposes like --print-uris */ + std::string HashSum() const; + + /** \return if having no hashes is a hard failure or not * - * \return the HashSums of this object, if applicable; otherwise, an - * empty list. + * Idealy this is always \b true for every subclass, but thanks to + * historical grow we don't have hashes for all files in all cases + * in all steps, so it is slightly more complicated than it should be. */ - HashStringList HashSums() const {return ExpectedHashes;}; - std::string HashSum() const {HashStringList const hashes = HashSums(); HashString const * const hs = hashes.find(NULL); return hs != NULL ? hs->toStr() : ""; }; + virtual bool HashesRequired() const { return true; } /** \return the acquire process with which this item is associated. */ - pkgAcquire *GetOwner() const {return Owner;}; -#if APT_PKG_ABI < 413 - pkgAcquire *GetOwner() {return Owner;}; -#endif + pkgAcquire *GetOwner() const; /** \return \b true if this object is being fetched from a trusted source. */ -#if APT_PKG_ABI >= 413 - virtual bool IsTrusted() const {return false;}; -#else - virtual bool IsTrusted() {return false;}; -#endif - + virtual bool IsTrusted() const; + /** \brief Report mirror problem - * + * * This allows reporting mirror failures back to a centralized * server. The apt-report-mirror-failure script is called for this - * + * * \param FailCode A short failure string that is send */ - void ReportMirrorFailure(std::string FailCode); + void ReportMirrorFailure(std::string const &FailCode); /** \brief Set the name of the current active subprocess * * See also #ActiveSubprocess */ - void SetActiveSubprocess(const std::string &subprocess); + void SetActiveSubprocess(std::string const &subprocess); /** \brief Initialize an item. * @@ -325,11 +315,8 @@ class pkgAcquire::Item : public WeakPointable * manually invoke QueueURI() to do so). * * \param Owner The new owner of this item. - * \param ExpectedHashes of the file represented by this item */ - Item(pkgAcquire *Owner, - HashStringList const &ExpectedHashes=HashStringList(), - pkgAcqMetaBase *TransactionManager=NULL); + Item(pkgAcquire * const Owner); /** \brief Remove this item from its owner's queue by invoking * pkgAcquire::Remove. @@ -337,6 +324,11 @@ class pkgAcquire::Item : public WeakPointable virtual ~Item(); protected: + /** \brief The acquire object with which this item is associated. */ + pkgAcquire * const Owner; + + /** \brief The item that is currently being downloaded. */ + pkgAcquire::ItemDesc Desc; enum RenameOnErrorState { HashSumMismatch, @@ -354,63 +346,99 @@ class pkgAcquire::Item : public WeakPointable */ bool RenameOnError(RenameOnErrorState const state); + /** \brief Insert this item into its owner's queue. + * + * The method is designed to check if the request would end + * in an IMSHit and if it determines that it would, it isn't + * queueing the Item and instead sets it to completion instantly. + * + * \param Item Metadata about this item (its URI and + * description). + * \return true if the item was inserted, false if IMSHit was detected + */ + virtual bool QueueURI(ItemDesc &Item); + + /** \brief Remove this item from its owner's queue. */ + void Dequeue(); + + /** \brief Rename a file without modifying its timestamp. 
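
Out-of-tree code that derives its own items now has to provide the pure virtuals
declared around here, DescURI() and GetExpectedHashes(), and should consider
overriding HashesRequired(). A bare-bones sketch; the class name, the DestFile
choice and the hash policy are all invented for illustration:

    #include <apt-pkg/acquire.h>
    #include <apt-pkg/acquire-item.h>
    #include <apt-pkg/fileutl.h>
    #include <apt-pkg/hashes.h>

    class MyFileItem : public pkgAcquire::Item
    {
       std::string const TheURI;
       HashStringList const Expected;              // may be empty

       public:
       virtual std::string DescURI() const { return TheURI; }
       virtual HashStringList GetExpectedHashes() const { return Expected; }
       virtual bool HashesRequired() const { return Expected.usable(); }

       MyFileItem(pkgAcquire * const Owner, std::string const &URI,
                  HashStringList const &ExpectedHashes) :
          pkgAcquire::Item(Owner), TheURI(URI), Expected(ExpectedHashes)
       {
          DestFile = flNotDir(URI);                // arbitrary choice for the sketch
          Desc.URI = URI;
          Desc.Description = URI;
          Desc.ShortDesc = flNotDir(URI);
          Desc.Owner = this;
          QueueURI(Desc);
       }
    };

A real item would typically still override Done() to move its result into place;
the hash comparison itself no longer needs any code in the item.
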
+ * + * Many item methods call this as their final action. + * + * \param From The file to be renamed. + * + * \param To The new name of \a From. If \a To exists it will be + * overwritten. If \a From and \a To are equal nothing happens. + */ + bool Rename(std::string const &From, std::string const &To); + + /** \brief Get the full pathname of the final file for the current URI */ + virtual std::string GetFinalFilename() const; + + private: + void *d; + + friend class pkgAcqMetaBase; +}; + /*}}}*/ +class APT_HIDDEN pkgAcqTransactionItem: public pkgAcquire::Item /*{{{*/ +/** \brief baseclass for the indexes files to manage them all together */ +{ + protected: + IndexTarget const * const Target; + HashStringList GetExpectedHashesFor(std::string const MetaKey) const; + + bool QueueURI(pkgAcquire::ItemDesc &Item); + + public: + /** \brief storge name until a transaction is finished */ + std::string PartialFile; + + /** \brief TransactionManager */ + pkgAcqMetaBase * const TransactionManager; + enum TransactionStates { TransactionCommit, TransactionAbort, }; virtual bool TransactionState(TransactionStates const state); - /** \brief The HashSums of the item is supposed to have than done */ - HashStringList ExpectedHashes; + virtual std::string DescURI() const { return Target->URI; } + virtual HashStringList GetExpectedHashes() const; + virtual std::string GetMetaKey() const; + virtual bool HashesRequired() const; - /** \brief The item that is currently being downloaded. */ - pkgAcquire::ItemDesc Desc; -}; - /*}}}*/ -/** \brief Information about an index patch (aka diff). */ /*{{{*/ -struct APT_HIDDEN DiffInfo { - /** The filename of the diff. */ - std::string file; - /** The hashes of the diff */ - HashStringList result_hashes; - - /** The hashes of the file after the diff is applied */ - HashStringList patch_hashes; + pkgAcqTransactionItem(pkgAcquire * const Owner, pkgAcqMetaBase * const TransactionManager, IndexTarget const * const Target); + virtual ~pkgAcqTransactionItem(); - /** The size of the file after the diff is applied */ - unsigned long long result_size; - - /** The size of the diff itself */ - unsigned long long patch_size; + friend class pkgAcqMetaBase; }; /*}}}*/ -class pkgAcqMetaBase : public pkgAcquire::Item /*{{{*/ +class APT_HIDDEN pkgAcqMetaBase : public pkgAcqTransactionItem /*{{{*/ +/** \brief the manager of a transaction */ { void *d; protected: - std::vector Transaction; + std::vector Transaction; + IndexTarget const DataTarget; + public: /** \brief A package-system-specific parser for the meta-index file. */ indexRecords *MetaIndexParser; indexRecords *LastMetaIndexParser; + protected: /** \brief The index files which should be looked up in the meta-index * and then downloaded. */ - const std::vector* IndexTargets; + const std::vector* const IndexTargets; /** \brief If \b true, the index's signature is currently being verified. */ bool AuthPass; - /** \brief The URI of the signature file. Unlike Desc.URI, this is - * never modified; it is used to determine the file that is being - * downloaded. - */ - std::string RealURI; - /** \brief Starts downloading the individual index files. * * \param verify If \b true, only indices whose expected hashsum @@ -419,7 +447,7 @@ class pkgAcqMetaBase : public pkgAcquire::Item /*{{{*/ * #StatAuthError if there is a mismatch). If verify is \b false, * no hashsum checking will be performed. */ - void QueueIndexes(bool verify); + void QueueIndexes(bool const verify); /** \brief Called when a file is finished being retrieved. 
* @@ -430,16 +458,12 @@ class pkgAcqMetaBase : public pkgAcquire::Item /*{{{*/ * \param Message The message block received from the fetch * subprocess. */ - bool CheckDownloadDone(pkgAcquire::Item * const I, const std::string &Message, HashStringList const &Hashes) const; + bool CheckDownloadDone(pkgAcqTransactionItem * const I, const std::string &Message, HashStringList const &Hashes) const; /** \brief Queue the downloaded Signature for verification */ - void QueueForSignatureVerify(pkgAcquire::Item * const I, std::string const &File, std::string const &Signature); + void QueueForSignatureVerify(pkgAcqTransactionItem * const I, std::string const &File, std::string const &Signature); -#if APT_PKG_ABI >= 413 virtual std::string Custom600Headers() const; -#else - virtual std::string Custom600Headers(); -#endif /** \brief Called when authentication succeeded. * @@ -450,7 +474,7 @@ class pkgAcqMetaBase : public pkgAcquire::Item /*{{{*/ * \param Message The message block received from the fetch * subprocess. */ - bool CheckAuthDone(std::string Message); + bool CheckAuthDone(std::string const &Message); /** Check if the current item should fail at this point */ bool CheckStopAuthentication(pkgAcquire::Item * const I, const std::string &Message); @@ -460,7 +484,7 @@ class pkgAcqMetaBase : public pkgAcquire::Item /*{{{*/ * * \return \b true if no fatal errors were encountered. */ - bool VerifyVendor(std::string Message); + bool VerifyVendor(std::string const &Message); virtual bool TransactionState(TransactionStates const state); @@ -468,33 +492,32 @@ class pkgAcqMetaBase : public pkgAcquire::Item /*{{{*/ // This refers more to the Transaction-Manager than the actual file bool IMSHit; - virtual std::string DescURI() {return RealURI; }; virtual bool QueueURI(pkgAcquire::ItemDesc &Item); + virtual HashStringList GetExpectedHashes() const; + virtual bool HashesRequired() const; // transaction code - void Add(Item *I); + void Add(pkgAcqTransactionItem * const I); void AbortTransaction(); - bool TransactionHasError() APT_PURE; + bool TransactionHasError() const; void CommitTransaction(); /** \brief Stage (queue) a copy action when the transaction is committed */ - void TransactionStageCopy(Item *I, - const std::string &From, + void TransactionStageCopy(pkgAcqTransactionItem * const I, + const std::string &From, const std::string &To); /** \brief Stage (queue) a removal action when the transaction is committed */ - void TransactionStageRemoval(Item *I, const std::string &FinalFile); + void TransactionStageRemoval(pkgAcqTransactionItem * const I, const std::string &FinalFile); /** \brief Get the full pathname of the final file for the current URI */ virtual std::string GetFinalFilename() const; - pkgAcqMetaBase(pkgAcquire *Owner, - const std::vector* IndexTargets, - indexRecords* MetaIndexParser, - std::string const &RealURI, - HashStringList const &ExpectedHashes=HashStringList(), - pkgAcqMetaBase *TransactionManager=NULL); + pkgAcqMetaBase(pkgAcquire * const Owner, pkgAcqMetaBase * const TransactionManager, + std::vector const * const IndexTargets, + IndexTarget const &DataTarget, + indexRecords* const MetaIndexParser); }; /*}}}*/ /** \brief An item that is responsible for downloading the meta-index {{{ @@ -512,36 +535,24 @@ class APT_HIDDEN pkgAcqMetaIndex : public pkgAcqMetaBase void *d; protected: - std::string URIDesc; - std::string ShortDesc; - - /** \brief The URI of the meta-index file for the detached signature */ - std::string MetaIndexSigURI; - - /** \brief A "URI-style" description of the 
meta-index file */ - std::string MetaIndexSigURIDesc; - - /** \brief A brief description of the meta-index file */ - std::string MetaIndexSigShortDesc; + IndexTarget const DetachedSigTarget; /** \brief delayed constructor */ - void Init(std::string URIDesc, std::string ShortDesc); - + void Init(std::string const &URIDesc, std::string const &ShortDesc); + public: + virtual std::string DescURI() const; // Specialized action members - virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf); - virtual void Done(std::string Message,unsigned long long Size, HashStringList const &Hashes, - pkgAcquire::MethodConfig *Cnf); + virtual void Failed(std::string const &Message,pkgAcquire::MethodConfig const * const Cnf); + virtual void Done(std::string const &Message, HashStringList const &Hashes, + pkgAcquire::MethodConfig const * const Cnf); virtual void Finished(); /** \brief Create a new pkgAcqMetaIndex. */ - pkgAcqMetaIndex(pkgAcquire *Owner, - pkgAcqMetaBase *TransactionManager, - std::string URI,std::string URIDesc, std::string ShortDesc, - std::string MetaIndexSigURI, std::string MetaIndexSigURIDesc, std::string MetaIndexSigShortDesc, - const std::vector* IndexTargets, - indexRecords* MetaIndexParser); + pkgAcqMetaIndex(pkgAcquire * const Owner, pkgAcqMetaBase * const TransactionManager, + IndexTarget const &DataTarget, IndexTarget const &DetachedSigTarget, + const std::vector* const IndexTargets, indexRecords * const MetaIndexParser); friend class pkgAcqMetaSig; }; @@ -554,7 +565,7 @@ class APT_HIDDEN pkgAcqMetaIndex : public pkgAcqMetaBase * * \sa pkgAcqMetaIndex */ -class APT_HIDDEN pkgAcqMetaSig : public pkgAcquire::Item +class APT_HIDDEN pkgAcqMetaSig : public pkgAcqTransactionItem { void *d; @@ -565,29 +576,20 @@ class APT_HIDDEN pkgAcqMetaSig : public pkgAcquire::Item protected: - /** \brief Long URI description used in the acquire system */ - std::string URIDesc; - - /** \brief URI used to get the file */ - std::string RealURI; - /** \brief Get the full pathname of the final file for the current URI */ virtual std::string GetFinalFilename() const; public: - virtual std::string DescURI() {return RealURI;}; + virtual bool HashesRequired() const { return false; } // Specialized action members - virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf); - virtual void Done(std::string Message,unsigned long long Size, - HashStringList const &Hashes, - pkgAcquire::MethodConfig *Cnf); + virtual void Failed(std::string const &Message,pkgAcquire::MethodConfig const * const Cnf); + virtual void Done(std::string const &Message, HashStringList const &Hashes, + pkgAcquire::MethodConfig const * const Cnf); /** \brief Create a new pkgAcqMetaSig. 
*/ - pkgAcqMetaSig(pkgAcquire *Owner, - pkgAcqMetaBase *TransactionManager, - std::string const &URI,std::string const &URIDesc, - std::string const &ShortDesc, pkgAcqMetaIndex * const MetaIndex); + pkgAcqMetaSig(pkgAcquire * const Owner, pkgAcqMetaBase * const TransactionManager, IndexTarget const * const Target, + pkgAcqMetaIndex * const MetaIndex); virtual ~pkgAcqMetaSig(); }; /*}}}*/ @@ -596,78 +598,37 @@ class APT_HIDDEN pkgAcqMetaClearSig : public pkgAcqMetaIndex { void *d; - /** \brief The URI of the meta-index file for the detached signature */ - std::string MetaIndexURI; - - /** \brief A "URI-style" description of the meta-index file */ - std::string MetaIndexURIDesc; - - /** \brief A brief description of the meta-index file */ - std::string MetaIndexShortDesc; - - /** \brief The URI of the detached meta-signature file if the clearsigned one failed. */ - std::string MetaSigURI; - - /** \brief A "URI-style" description of the meta-signature file */ - std::string MetaSigURIDesc; - - /** \brief A brief description of the meta-signature file */ - std::string MetaSigShortDesc; + IndexTarget const ClearsignedTarget; + IndexTarget const DetachedDataTarget; + IndexTarget const DetachedSigTarget; public: - virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf); -#if APT_PKG_ABI >= 413 + virtual void Failed(std::string const &Message,pkgAcquire::MethodConfig const * const Cnf); virtual std::string Custom600Headers() const; -#else - virtual std::string Custom600Headers(); -#endif - virtual void Done(std::string Message,unsigned long long Size, - HashStringList const &Hashes, - pkgAcquire::MethodConfig *Cnf); + virtual void Done(std::string const &Message, HashStringList const &Hashes, + pkgAcquire::MethodConfig const * const Cnf); /** \brief Create a new pkgAcqMetaClearSig. */ - pkgAcqMetaClearSig(pkgAcquire *Owner, - std::string const &URI, std::string const &URIDesc, std::string const &ShortDesc, - std::string const &MetaIndexURI, std::string const &MetaIndexURIDesc, std::string const &MetaIndexShortDesc, - std::string const &MetaSigURI, std::string const &MetaSigURIDesc, std::string const &MetaSigShortDesc, - const std::vector* IndexTargets, - indexRecords* MetaIndexParser); + pkgAcqMetaClearSig(pkgAcquire * const Owner, + IndexTarget const &ClearsignedTarget, + IndexTarget const &DetachedDataTarget, + IndexTarget const &DetachedSigTarget, + std::vector const * const IndexTargets, + indexRecords * const MetaIndexParser); virtual ~pkgAcqMetaClearSig(); }; /*}}}*/ -/** \brief Common base class for all classes that deal with fetching {{{ - indexes - */ -class pkgAcqBaseIndex : public pkgAcquire::Item +/** \brief Common base class for all classes that deal with fetching indexes {{{*/ +class APT_HIDDEN pkgAcqBaseIndex : public pkgAcqTransactionItem { void *d; - protected: - /** \brief Pointer to the IndexTarget data - */ - const struct IndexTarget * Target; - - /** \brief Pointer to the indexRecords parser */ - indexRecords *MetaIndexParser; - - /** \brief The MetaIndex Key */ - std::string MetaKey; - - /** \brief The URI of the index file to recreate at our end (either - * by downloading it or by applying partial patches). 
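
The three IndexTargets taken by the new pkgAcqMetaClearSig constructor encode the
fallback chain: the clearsigned InRelease is tried first, with the detached
Release/Release.gpg pair as the alternative. These objects are built inside libapt
itself (IndexTarget is APT_HIDDEN), so the following is an illustration with
made-up URIs and meta keys, not a supported external API:

    #include <apt-pkg/acquire.h>
    #include <apt-pkg/acquire-item.h>
    #include <apt-pkg/indexrecords.h>

    void QueueReleaseFiles(pkgAcquire &Fetcher,
                           std::vector<IndexTarget*> const * const IndexTargets,
                           indexRecords * const MetaIndexParser)
    {
       std::string const Base = "http://deb.example.org/debian/dists/unstable/";
       IndexTarget const Clearsigned("InRelease", "InRelease",
                                     Base + "InRelease", Base + "InRelease");
       IndexTarget const DetachedData("Release", "Release",
                                      Base + "Release", Base + "Release");
       IndexTarget const DetachedSig("Release.gpg", "Release.gpg",
                                     Base + "Release.gpg", Base + "Release.gpg");
       new pkgAcqMetaClearSig(&Fetcher, Clearsigned, DetachedData, DetachedSig,
                              IndexTargets, MetaIndexParser);
    }
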
- */ - std::string RealURI; - - bool VerifyHashByMetaKey(HashStringList const &Hashes); - + public: /** \brief Get the full pathname of the final file for the current URI */ virtual std::string GetFinalFilename() const; - pkgAcqBaseIndex(pkgAcquire *Owner, - pkgAcqMetaBase *TransactionManager, - struct IndexTarget const * const Target, - HashStringList const &ExpectedHashes, - indexRecords *MetaIndexParser); + pkgAcqBaseIndex(pkgAcquire * const Owner, pkgAcqMetaBase * const TransactionManager, + IndexTarget const * const Target); }; /*}}}*/ /** \brief An item that is responsible for fetching an index file of {{{ @@ -705,15 +666,12 @@ class APT_HIDDEN pkgAcqDiffIndex : public pkgAcqBaseIndex virtual bool TransactionState(TransactionStates const state); public: // Specialized action members - virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf); - virtual void Done(std::string Message,unsigned long long Size, HashStringList const &Hashes, - pkgAcquire::MethodConfig *Cnf); - virtual std::string DescURI() {return RealURI + "Index";}; -#if APT_PKG_ABI >= 413 + virtual void Failed(std::string const &Message, pkgAcquire::MethodConfig const * const Cnf); + virtual void Done(std::string const &Message, HashStringList const &Hashes, + pkgAcquire::MethodConfig const * const Cnf); + virtual std::string DescURI() const {return Target->URI + "Index";}; virtual std::string Custom600Headers() const; -#else - virtual std::string Custom600Headers(); -#endif + virtual std::string GetMetaKey() const; /** \brief Parse the Index file for a set of Packages diffs. * @@ -725,7 +683,7 @@ class APT_HIDDEN pkgAcqDiffIndex : public pkgAcqBaseIndex * \return \b true if the Index file was successfully parsed, \b * false otherwise. */ - bool ParseDiffIndex(std::string IndexDiffFile); + bool ParseDiffIndex(std::string const &IndexDiffFile); /** \brief Create a new pkgAcqDiffIndex. * @@ -736,18 +694,30 @@ class APT_HIDDEN pkgAcqDiffIndex : public pkgAcqBaseIndex * \param URIDesc A long description of the list file to download. * * \param ShortDesc A short description of the list file to download. - * - * \param ExpectedHashes The list file's hashsums which are expected. */ - pkgAcqDiffIndex(pkgAcquire *Owner, - pkgAcqMetaBase *TransactionManager, - struct IndexTarget const * const Target, - HashStringList const &ExpectedHashes, - indexRecords *MetaIndexParser); + pkgAcqDiffIndex(pkgAcquire * const Owner, pkgAcqMetaBase * const TransactionManager, + IndexTarget const * const Target); private: APT_HIDDEN void QueueOnIMSHit() const; }; /*}}}*/ +struct APT_HIDDEN DiffInfo { /*{{{*/ + /** The filename of the diff. */ + std::string file; + + /** The hashes of the diff */ + HashStringList result_hashes; + + /** The hashes of the file after the diff is applied */ + HashStringList patch_hashes; + + /** The size of the file after the diff is applied */ + unsigned long long result_size; + + /** The size of the diff itself */ + unsigned long long patch_size; +}; + /*}}}*/ /** \brief An item that is responsible for fetching client-merge patches {{{ * that need to be applied to a given package index file. * @@ -801,10 +771,12 @@ class APT_HIDDEN pkgAcqIndexMergeDiffs : public pkgAcqBaseIndex * This method will fall back to downloading the whole index file * outright; its arguments are ignored. 
*/ - virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf); - virtual void Done(std::string Message,unsigned long long Size, HashStringList const &Hashes, - pkgAcquire::MethodConfig *Cnf); - virtual std::string DescURI() {return RealURI + "Index";}; + virtual void Failed(std::string const &Message,pkgAcquire::MethodConfig const * const Cnf); + virtual void Done(std::string const &Message, HashStringList const &Hashes, + pkgAcquire::MethodConfig const * const Cnf); + virtual std::string DescURI() const {return Target->URI + "Index";}; + virtual HashStringList GetExpectedHashes() const; + virtual bool HashesRequired() const; /** \brief Create an index merge-diff item. * @@ -817,22 +789,15 @@ class APT_HIDDEN pkgAcqIndexMergeDiffs : public pkgAcqBaseIndex * * \param ShortDesc A brief description of this item. * - * \param ExpectedHashes The expected md5sum of the completely - * reconstructed package index file; the index file will be tested - * against this value when it is entirely reconstructed. - * * \param patch contains infos about the patch this item is supposed * to download which were read from the index * * \param allPatches contains all related items so that each item can * check if it was the last one to complete the download step */ - pkgAcqIndexMergeDiffs(pkgAcquire *Owner, - pkgAcqMetaBase *TransactionManager, - struct IndexTarget const * const Target, - HashStringList const &ExpectedHash, - indexRecords *MetaIndexParser, - DiffInfo const &patch, + pkgAcqIndexMergeDiffs(pkgAcquire * const Owner, pkgAcqMetaBase * const TransactionManager, + IndexTarget const * const Target, + DiffInfo const &patch, std::vector const * const allPatches); }; /*}}}*/ @@ -875,7 +840,7 @@ class APT_HIDDEN pkgAcqIndexDiffs : public pkgAcqBaseIndex * \param allDone If \b true, the file was entirely reconstructed, * and its md5sum is verified. */ - APT_HIDDEN void Finish(bool allDone=false); + APT_HIDDEN void Finish(bool const allDone=false); protected: @@ -905,26 +870,25 @@ class APT_HIDDEN pkgAcqIndexDiffs : public pkgAcqBaseIndex /** \brief The diff is currently being fetched. */ StateFetchDiff, - - /** \brief The diff is currently being uncompressed. */ - StateUnzipDiff, // FIXME: No longer used /** \brief The diff is currently being applied. */ StateApplyDiff } State; public: - + /** \brief Called when the patch file failed to be downloaded. * * This method will fall back to downloading the whole index file * outright; its arguments are ignored. */ - virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf); + virtual void Failed(std::string const &Message,pkgAcquire::MethodConfig const * const Cnf); - virtual void Done(std::string Message,unsigned long long Size, HashStringList const &Hashes, - pkgAcquire::MethodConfig *Cnf); - virtual std::string DescURI() {return RealURI + "IndexDiffs";}; + virtual void Done(std::string const &Message, HashStringList const &Hashes, + pkgAcquire::MethodConfig const * const Cnf); + virtual std::string DescURI() const {return Target->URI + "IndexDiffs";}; + virtual HashStringList GetExpectedHashes() const; + virtual bool HashesRequired() const; /** \brief Create an index diff item. * @@ -940,20 +904,13 @@ class APT_HIDDEN pkgAcqIndexDiffs : public pkgAcqBaseIndex * * \param ShortDesc A brief description of this item. * - * \param ExpectedHashes The expected hashsums of the completely - * reconstructed package index file; the index file will be tested - * against this value when it is entirely reconstructed. 
- * * \param diffs The remaining diffs from the index of diffs. They * should be ordered so that each diff appears before any diff * that depends on it. */ - pkgAcqIndexDiffs(pkgAcquire *Owner, - pkgAcqMetaBase *TransactionManager, - struct IndexTarget const * const Target, - HashStringList const &ExpectedHash, - indexRecords *MetaIndexParser, - std::vector diffs=std::vector()); + pkgAcqIndexDiffs(pkgAcquire * const Owner, pkgAcqMetaBase * const TransactionManager, + IndexTarget const * const Target, + std::vector const &diffs=std::vector()); }; /*}}}*/ /** \brief An acquire item that is responsible for fetching an index {{{ @@ -981,16 +938,16 @@ class APT_HIDDEN pkgAcqIndex : public pkgAcqBaseIndex AllStages Stage; /** \brief Handle what needs to be done when the download is done */ - void StageDownloadDone(std::string Message, + void StageDownloadDone(std::string const &Message, HashStringList const &Hashes, - pkgAcquire::MethodConfig *Cfg); + pkgAcquire::MethodConfig const * const Cfg); /** \brief Handle what needs to be done when the decompression/copy is * done */ - void StageDecompressDone(std::string Message, + void StageDecompressDone(std::string const &Message, HashStringList const &Hashes, - pkgAcquire::MethodConfig *Cfg); + pkgAcquire::MethodConfig const * const Cfg); /** \brief If \b set, this partially downloaded file will be * removed when the download completes. @@ -1006,7 +963,7 @@ class APT_HIDDEN pkgAcqIndex : public pkgAcqBaseIndex std::string CurrentCompressionExtension; /** \brief Do the changes needed to fetch via AptByHash (if needed) */ - void InitByHashIfNeeded(const std::string MetaKey); + void InitByHashIfNeeded(); /** \brief Auto select the right compression to use */ void AutoSelectCompression(); @@ -1015,7 +972,7 @@ class APT_HIDDEN pkgAcqIndex : public pkgAcqBaseIndex void ReverifyAfterIMS(); /** \brief Validate the downloaded index file */ - bool ValidateFile(const std::string &FileName); + bool ValidateFile(std::string const &FileName); /** \brief Get the full pathname of the final file for the current URI */ virtual std::string GetFinalFilename() const; @@ -1024,82 +981,20 @@ class APT_HIDDEN pkgAcqIndex : public pkgAcqBaseIndex public: // Specialized action members - virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf); - virtual void Done(std::string Message,unsigned long long Size, - HashStringList const &Hashes, - pkgAcquire::MethodConfig *Cnf); -#if APT_PKG_ABI >= 413 + virtual void Failed(std::string const &Message,pkgAcquire::MethodConfig const * const Cnf); + virtual void Done(std::string const &Message, HashStringList const &Hashes, + pkgAcquire::MethodConfig const * const Cnf); virtual std::string Custom600Headers() const; -#else - virtual std::string Custom600Headers(); -#endif - virtual std::string DescURI() {return Desc.URI;}; + virtual std::string DescURI() const {return Desc.URI;}; + virtual std::string GetMetaKey() const; + + pkgAcqIndex(pkgAcquire * const Owner, pkgAcqMetaBase * const TransactionManager, + IndexTarget const * const Target); - /** \brief Create a pkgAcqIndex. - * - * \param Owner The pkgAcquire object with which this item is - * associated. - * - * \param URI The URI of the index file that is to be downloaded. - * - * \param URIDesc A "URI-style" description of this index file. - * - * \param ShortDesc A brief description of this index file. - * - * \param ExpectedHashes The expected hashsum of this index file. 
- * - * \param compressExt The compression-related extension with which - * this index file should be downloaded, or "" to autodetect - * Compression types can be set with config Acquire::CompressionTypes, - * default is ".lzma" or ".bz2" (if the needed binaries are present) - * fallback is ".gz" or none. - */ - pkgAcqIndex(pkgAcquire *Owner,std::string URI,std::string URIDesc, - std::string ShortDesc, HashStringList const &ExpectedHashes); - pkgAcqIndex(pkgAcquire *Owner, pkgAcqMetaBase *TransactionManager, - IndexTarget const * const Target, - HashStringList const &ExpectedHash, - indexRecords *MetaIndexParser); - void Init(std::string const &URI, std::string const &URIDesc, std::string const &ShortDesc); }; /*}}}*/ -/** \brief Information about an index file. */ /*{{{*/ -class APT_HIDDEN IndexTarget -{ - void *d; - - public: - /** \brief A URI from which the index file can be downloaded. */ - std::string URI; - - /** \brief A description of the index file. */ - std::string Description; - - /** \brief A shorter description of the index file. */ - std::string ShortDesc; - - /** \brief The key by which this index file should be - * looked up within the meta signature file. - */ - std::string MetaKey; - - virtual bool IsOptional() const { - return false; - } -}; - /*}}}*/ -/** \brief Information about an optional index file. */ /*{{{*/ -class APT_HIDDEN OptionalIndexTarget : public IndexTarget -{ - void *d; - - virtual bool IsOptional() const { - return true; - } -}; - /*}}}*/ /** \brief An item that is responsible for fetching a package file. {{{ * * If the package file already exists in the cache, nothing will be @@ -1109,6 +1004,9 @@ class pkgAcqArchive : public pkgAcquire::Item { void *d; + bool LocalSource; + HashStringList ExpectedHashes; + protected: /** \brief The package version being fetched. */ pkgCache::VerIterator Version; @@ -1141,7 +1039,7 @@ class pkgAcqArchive : public pkgAcquire::Item /** \brief \b true if this version file is being downloaded from a * trusted source. */ - bool Trusted; + bool Trusted; /** \brief Queue up the next available file for this version. */ bool QueueNext(); @@ -1151,17 +1049,15 @@ class pkgAcqArchive : public pkgAcquire::Item public: - virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf); - virtual void Done(std::string Message,unsigned long long Size, HashStringList const &Hashes, - pkgAcquire::MethodConfig *Cnf); - virtual std::string DescURI() {return Desc.URI;}; - virtual std::string ShortDesc() {return Desc.ShortDesc;}; + virtual void Failed(std::string const &Message,pkgAcquire::MethodConfig const * const Cnf); + virtual void Done(std::string const &Message, HashStringList const &Hashes, + pkgAcquire::MethodConfig const * const Cnf); + virtual std::string DescURI() const; + virtual std::string ShortDesc() const; virtual void Finished(); -#if APT_PKG_ABI >= 413 virtual bool IsTrusted() const; -#else - virtual bool IsTrusted(); -#endif + virtual HashStringList GetExpectedHashes() const; + virtual bool HashesRequired() const; /** \brief Create a new pkgAcqArchive. * @@ -1181,8 +1077,8 @@ class pkgAcqArchive : public pkgAcquire::Item * basename in the constructor, and filled in with a fully * qualified filename once the download finishes. 
*/ - pkgAcqArchive(pkgAcquire *Owner,pkgSourceList *Sources, - pkgRecords *Recs,pkgCache::VerIterator const &Version, + pkgAcqArchive(pkgAcquire * const Owner,pkgSourceList * const Sources, + pkgRecords * const Recs,pkgCache::VerIterator const &Version, std::string &StoreFilename); }; /*}}}*/ @@ -1200,22 +1096,21 @@ class pkgAcqFile : public pkgAcquire::Item * Acquire::Retries. */ unsigned int Retries; - + /** \brief Should this file be considered a index file */ bool IsIndexFile; + HashStringList const ExpectedHashes; public: - + virtual HashStringList GetExpectedHashes() const; + virtual bool HashesRequired() const; + // Specialized action members - virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf); - virtual void Done(std::string Message,unsigned long long Size, HashStringList const &CalcHashes, - pkgAcquire::MethodConfig *Cnf); - virtual std::string DescURI() {return Desc.URI;}; -#if APT_PKG_ABI >= 413 + virtual void Failed(std::string const &Message,pkgAcquire::MethodConfig const * const Cnf); + virtual void Done(std::string const &Message, HashStringList const &CalcHashes, + pkgAcquire::MethodConfig const * const Cnf); + virtual std::string DescURI() const {return Desc.URI;}; virtual std::string Custom600Headers() const; -#else - virtual std::string Custom600Headers(); -#endif /** \brief Create a new pkgAcqFile object. * @@ -1248,10 +1143,10 @@ class pkgAcqFile : public pkgAcquire::Item * is the absolute name to which the file should be downloaded. */ - pkgAcqFile(pkgAcquire *Owner, std::string URI, HashStringList const &Hashes, unsigned long long Size, - std::string Desc, std::string ShortDesc, - const std::string &DestDir="", const std::string &DestFilename="", - bool IsIndexFile=false); + pkgAcqFile(pkgAcquire * const Owner, std::string const &URI, HashStringList const &Hashes, unsigned long long const Size, + std::string const &Desc, std::string const &ShortDesc, + std::string const &DestDir="", std::string const &DestFilename="", + bool const IsIndexFile=false); }; /*}}}*/ /** @} */ diff --git a/apt-pkg/acquire-method.cc b/apt-pkg/acquire-method.cc index c29ef469e..b77096efd 100644 --- a/apt-pkg/acquire-method.cc +++ b/apt-pkg/acquire-method.cc @@ -376,7 +376,10 @@ int pkgAcqMethod::Run(bool Single) Tmp->ExpectedHashes.push_back(HashString(*t, hash)); } char *End; - Tmp->MaximumSize = strtoll(LookupTag(Message, "Maximum-Size", "0").c_str(), &End, 10); + if (Tmp->ExpectedHashes.FileSize() > 0) + Tmp->MaximumSize = Tmp->ExpectedHashes.FileSize(); + else + Tmp->MaximumSize = strtoll(LookupTag(Message, "Maximum-Size", "0").c_str(), &End, 10); Tmp->Next = 0; // Append it to the list diff --git a/apt-pkg/acquire-worker.cc b/apt-pkg/acquire-worker.cc index 9254e20a3..099a1f87d 100644 --- a/apt-pkg/acquire-worker.cc +++ b/apt-pkg/acquire-worker.cc @@ -55,8 +55,8 @@ pkgAcquire::Worker::Worker(Queue *Q,MethodConfig *Cnf, CurrentItem = 0; TotalSize = 0; CurrentSize = 0; - - Construct(); + + Construct(); } /*}}}*/ // Worker::Worker - Constructor for method config startup /*{{{*/ @@ -70,8 +70,8 @@ pkgAcquire::Worker::Worker(MethodConfig *Cnf) CurrentItem = 0; TotalSize = 0; CurrentSize = 0; - - Construct(); + + Construct(); } /*}}}*/ // Worker::Construct - Constructor helper /*{{{*/ @@ -136,7 +136,7 @@ bool pkgAcquire::Worker::Start() } for (int I = 0; I != 4; I++) SetCloseExec(Pipes[I],true); - + // Fork off the process Process = ExecFork(); if (Process == 0) @@ -145,9 +145,9 @@ bool pkgAcquire::Worker::Start() dup2(Pipes[1],STDOUT_FILENO); dup2(Pipes[2],STDIN_FILENO); 
SetCloseExec(STDOUT_FILENO,false); - SetCloseExec(STDIN_FILENO,false); + SetCloseExec(STDIN_FILENO,false); SetCloseExec(STDERR_FILENO,false); - + const char *Args[2]; Args[0] = Method.c_str(); Args[1] = 0; @@ -165,7 +165,7 @@ bool pkgAcquire::Worker::Start() close(Pipes[2]); OutReady = false; InReady = true; - + // Read the configuration data if (WaitFd(InFd) == false || ReadMessages() == false) @@ -174,7 +174,7 @@ bool pkgAcquire::Worker::Start() RunMessages(); if (OwnerQ != 0) SendConfiguration(); - + return true; } /*}}}*/ @@ -201,7 +201,7 @@ bool pkgAcquire::Worker::RunMessages() if (Debug == true) clog << " <- " << Access << ':' << QuoteString(Message,"\n") << endl; - + // Fetch the message number char *End; int Number = strtol(Message.c_str(),&End,10); @@ -215,15 +215,15 @@ bool pkgAcquire::Worker::RunMessages() // update used mirror string UsedMirror = LookupTag(Message,"UsedMirror", ""); - if (!UsedMirror.empty() && + if (!UsedMirror.empty() && Itm && - Itm->Description.find(" ") != string::npos) + Itm->Description.find(" ") != string::npos) { Itm->Description.replace(0, Itm->Description.find(" "), UsedMirror); // FIXME: will we need this as well? //Itm->ShortDesc = UsedMirror; } - + // Determine the message number and dispatch switch (Number) { @@ -232,18 +232,18 @@ bool pkgAcquire::Worker::RunMessages() if (Capabilities(Message) == false) return _error->Error("Unable to process Capabilities message from %s",Access.c_str()); break; - + // 101 Log case 101: if (Debug == true) clog << " <- (log) " << LookupTag(Message,"Message") << endl; break; - + // 102 Status case 102: Status = LookupTag(Message,"Message"); break; - + // 103 Redirect case 103: { @@ -252,7 +252,7 @@ bool pkgAcquire::Worker::RunMessages() _error->Error("Method gave invalid 103 Redirect message"); break; } - + string NewURI = LookupTag(Message,"New-URI",URI.c_str()); Itm->URI = NewURI; @@ -272,7 +272,7 @@ bool pkgAcquire::Worker::RunMessages() Log->Done(Desc); break; } - + // 200 URI Start case 200: { @@ -281,23 +281,23 @@ bool pkgAcquire::Worker::RunMessages() _error->Error("Method gave invalid 200 URI Start message"); break; } - + CurrentItem = Itm; CurrentSize = 0; TotalSize = strtoull(LookupTag(Message,"Size","0").c_str(), NULL, 10); ResumePoint = strtoull(LookupTag(Message,"Resume-Point","0").c_str(), NULL, 10); - Itm->Owner->Start(Message,strtoull(LookupTag(Message,"Size","0").c_str(), NULL, 10)); + Itm->Owner->Start(Message, TotalSize); // Display update before completion if (Log != 0 && Log->MorePulses == true) Log->Pulse(Itm->Owner->GetOwner()); - + if (Log != 0) Log->Fetch(*Itm); break; } - + // 201 URI Done case 201: { @@ -306,7 +306,7 @@ bool pkgAcquire::Worker::RunMessages() _error->Error("Method gave invalid 201 URI Done message"); break; } - + pkgAcquire::Item *Owner = Itm->Owner; pkgAcquire::ItemDesc Desc = *Itm; @@ -316,22 +316,11 @@ bool pkgAcquire::Worker::RunMessages() // Display update before completion if (Log != 0 && Log->MorePulses == true) Log->Pulse(Owner->GetOwner()); - + OwnerQ->ItemDone(Itm); - unsigned long long const ServerSize = strtoull(LookupTag(Message,"Size","0").c_str(), NULL, 10); - bool isHit = StringToBool(LookupTag(Message,"IMS-Hit"),false) || - StringToBool(LookupTag(Message,"Alt-IMS-Hit"),false); - // Using the https method the server might return 200, but the - // If-Modified-Since condition is not satsified, libcurl will - // discard the download. In this case, however, TotalSize will be - // set to the actual size of the file, while ServerSize will be set - // to 0. 
Therefore, if the item is marked as a hit and the - // downloaded size (ServerSize) is 0, we ignore TotalSize. - if (TotalSize != 0 && (!isHit || ServerSize != 0) && ServerSize != TotalSize) - _error->Warning("Size of file %s is not what the server reported %s %llu", - Owner->DestFile.c_str(), LookupTag(Message,"Size","0").c_str(),TotalSize); - - // see if there is a hash to verify + + HashStringList const ExpectedHashes = Owner->GetExpectedHashes(); + // see if we got hashes to verify HashStringList ReceivedHashes; for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type) { @@ -340,6 +329,18 @@ bool pkgAcquire::Worker::RunMessages() if (hashsum.empty() == false) ReceivedHashes.push_back(HashString(*type, hashsum)); } + // not all methods always sent Hashes our way + if (ExpectedHashes.usable() == true && ReceivedHashes.usable() == false) + { + std::string const filename = LookupTag(Message, "Filename", Owner->DestFile.c_str()); + if (filename.empty() == false && RealFileExists(filename)) + { + Hashes calc(ExpectedHashes); + FileFd file(filename, FileFd::ReadOnly, FileFd::None); + calc.AddFD(file); + ReceivedHashes = calc.GetHashStringList(); + } + } if(_config->FindB("Debug::pkgAcquire::Auth", false) == true) { @@ -348,30 +349,66 @@ bool pkgAcquire::Worker::RunMessages() for (HashStringList::const_iterator hs = ReceivedHashes.begin(); hs != ReceivedHashes.end(); ++hs) std::clog << "\t- " << hs->toStr() << std::endl; std::clog << "ExpectedHash:" << endl; - HashStringList expectedHashes = Owner->HashSums(); - for (HashStringList::const_iterator hs = expectedHashes.begin(); hs != expectedHashes.end(); ++hs) + for (HashStringList::const_iterator hs = ExpectedHashes.begin(); hs != ExpectedHashes.end(); ++hs) std::clog << "\t- " << hs->toStr() << std::endl; std::clog << endl; } - Owner->Done(Message, ServerSize, ReceivedHashes, Config); - ItemDone(); - // Log that we are done - if (Log != 0) + // decide if what we got is what we expected + bool consideredOkay = false; + bool const isIMSHit = StringToBool(LookupTag(Message,"IMS-Hit"),false) || + StringToBool(LookupTag(Message,"Alt-IMS-Hit"),false); + if (ExpectedHashes.usable()) { - if (isHit) + if (ReceivedHashes.usable() == false) { - /* Hide 'hits' for local only sources - we also manage to - hide gets */ - if (Config->LocalOnly == false) - Log->IMSHit(Desc); - } + /* IMS-Hits can't be checked here as we will have uncompressed file, + but the hashes for the compressed file. What we have was good through + so all we have to ensure later is that we are not stalled. 
*/ + consideredOkay = isIMSHit; + } + else if (ReceivedHashes == ExpectedHashes) + consideredOkay = true; else - Log->Done(Desc); + consideredOkay = false; + + } + else if (Owner->HashesRequired() == true) + consideredOkay = false; + else + consideredOkay = true; + + if (consideredOkay == true) + { + Owner->Done(Message, ReceivedHashes, Config); + ItemDone(); + + // Log that we are done + if (Log != 0) + { + if (isIMSHit) + { + /* Hide 'hits' for local only sources - we also manage to + hide gets */ + if (Config->LocalOnly == false) + Log->IMSHit(Desc); + } + else + Log->Done(Desc); + } + } + else + { + Owner->Status = pkgAcquire::Item::StatAuthError; + Owner->Failed(Message,Config); + ItemDone(); + + if (Log != 0) + Log->Fail(Desc); } break; - } - + } + // 400 URI Failure case 400: { @@ -408,18 +445,18 @@ bool pkgAcquire::Worker::RunMessages() Log->Fail(Desc); break; - } - + } + // 401 General Failure case 401: _error->Error("Method %s General failure: %s",Access.c_str(),LookupTag(Message,"Message").c_str()); break; - + // 403 Media Change case 403: - MediaChange(Message); + MediaChange(Message); break; - } + } } return true; } @@ -432,7 +469,7 @@ bool pkgAcquire::Worker::Capabilities(string Message) { if (Config == 0) return true; - + Config->Version = LookupTag(Message,"Version"); Config->SingleInstance = StringToBool(LookupTag(Message,"Single-Instance"),false); Config->Pipeline = StringToBool(LookupTag(Message,"Pipeline"),false); @@ -447,13 +484,13 @@ bool pkgAcquire::Worker::Capabilities(string Message) clog << "Configured access method " << Config->Access << endl; clog << "Version:" << Config->Version << " SingleInstance:" << Config->SingleInstance << - " Pipeline:" << Config->Pipeline << - " SendConfig:" << Config->SendConfig << - " LocalOnly: " << Config->LocalOnly << - " NeedsCleanup: " << Config->NeedsCleanup << + " Pipeline:" << Config->Pipeline << + " SendConfig:" << Config->SendConfig << + " LocalOnly: " << Config->LocalOnly << + " NeedsCleanup: " << Config->NeedsCleanup << " Removable: " << Config->Removable << endl; } - + return true; } /*}}}*/ @@ -463,10 +500,10 @@ bool pkgAcquire::Worker::Capabilities(string Message) bool pkgAcquire::Worker::MediaChange(string Message) { int status_fd = _config->FindI("APT::Status-Fd",-1); - if(status_fd > 0) + if(status_fd > 0) { string Media = LookupTag(Message,"Media"); - string Drive = LookupTag(Message,"Drive"); + string Drive = LookupTag(Message,"Drive"); ostringstream msg,status; ioprintf(msg,_("Please insert the disc labeled: " "'%s' " @@ -536,12 +573,12 @@ bool pkgAcquire::Worker::QueueItem(pkgAcquire::Queue::QItem *Item) { if (OutFd == -1) return false; - + string Message = "600 URI Acquire\n"; Message.reserve(300); Message += "URI: " + Item->URI; Message += "\nFilename: " + Item->Owner->DestFile; - HashStringList const hsl = Item->Owner->HashSums(); + HashStringList const hsl = Item->Owner->GetExpectedHashes(); for (HashStringList::const_iterator hs = hsl.begin(); hs != hsl.end(); ++hs) Message += "\nExpected-" + hs->HashType() + ": " + hs->HashValue(); if(Item->Owner->FileSize > 0) @@ -564,7 +601,7 @@ bool pkgAcquire::Worker::QueueItem(pkgAcquire::Queue::QItem *Item) clog << " -> " << Access << ':' << QuoteString(Message,"\n") << endl; OutQueue += Message; OutReady = true; - + return true; } /*}}}*/ @@ -586,7 +623,7 @@ bool pkgAcquire::Worker::OutFdReady() OutQueue.erase(0,Res); if (OutQueue.empty() == true) OutReady = false; - + return true; } /*}}}*/ @@ -608,7 +645,7 @@ bool pkgAcquire::Worker::InFdReady() bool 
pkgAcquire::Worker::MethodFailure() { _error->Error("Method %s has died unexpectedly!",Access.c_str()); - + // do not reap the child here to show meaningfull error to the user ExecWait(Process,Access.c_str(),false); Process = -1; @@ -620,26 +657,22 @@ bool pkgAcquire::Worker::MethodFailure() InReady = false; OutQueue = string(); MessageQueue.erase(MessageQueue.begin(),MessageQueue.end()); - + return false; } /*}}}*/ -// Worker::Pulse - Called periodically /*{{{*/ +// Worker::Pulse - Called periodically /*{{{*/ // --------------------------------------------------------------------- /* */ void pkgAcquire::Worker::Pulse() { if (CurrentItem == 0) return; - + struct stat Buf; if (stat(CurrentItem->Owner->DestFile.c_str(),&Buf) != 0) return; CurrentSize = Buf.st_size; - - // Hmm? Should not happen... - if (CurrentSize > TotalSize && TotalSize != 0) - TotalSize = CurrentSize; } /*}}}*/ // Worker::ItemDone - Called when the current item is finished /*{{{*/ diff --git a/apt-pkg/contrib/hashes.cc b/apt-pkg/contrib/hashes.cc index 0fa443b4a..11a7e479b 100644 --- a/apt-pkg/contrib/hashes.cc +++ b/apt-pkg/contrib/hashes.cc @@ -23,6 +23,7 @@ #include #include #include +#include #include #include /*}}}*/ @@ -178,6 +179,15 @@ HashString const * HashStringList::find(char const * const type) const /*{{{*/ return NULL; } /*}}}*/ +unsigned long long HashStringList::FileSize() const /*{{{*/ +{ + HashString const * const hsf = find("Checksum-FileSize"); + if (hsf == NULL) + return 0; + std::string const hv = hsf->HashValue(); + return strtoull(hv.c_str(), NULL, 10); +} + /*}}}*/ bool HashStringList::supported(char const * const type) /*{{{*/ { for (char const * const * t = HashString::SupportedHashes(); *t != NULL; ++t) diff --git a/apt-pkg/contrib/hashes.h b/apt-pkg/contrib/hashes.h index ac13c8ace..176ce4faa 100644 --- a/apt-pkg/contrib/hashes.h +++ b/apt-pkg/contrib/hashes.h @@ -87,6 +87,15 @@ class HashStringList */ HashString const * find(char const * const type) const; HashString const * find(std::string const &type) const { return find(type.c_str()); } + + /** finds the filesize hash and returns it as number + * + * @return beware: if the size isn't known we return \b 0 here, + * just like we would do for an empty file. If that is a problem + * for you have to get the size manually out of the list. 
+ */ + unsigned long long FileSize() const; + /** check if the given hash type is supported * * @param type to check diff --git a/apt-pkg/deb/debindexfile.cc b/apt-pkg/deb/debindexfile.cc index d672b4fd8..185248619 100644 --- a/apt-pkg/deb/debindexfile.cc +++ b/apt-pkg/deb/debindexfile.cc @@ -742,13 +742,13 @@ bool debDebPkgFileIndex::Merge(pkgCacheGenerator& Gen, OpProgress* Prog) const // and give it to the list parser debDebFileParser Parser(DebControl, DebFile); - if(Gen.SelectFile(DebFile, "local", *this) == false) + if(Gen.SelectFile(DebFile, "local", *this, pkgCache::Flag::LocalSource) == false) return _error->Error("Problem with SelectFile %s", DebFile.c_str()); pkgCache::PkgFileIterator File = Gen.GetCurFile(); File->Size = DebControl->Size(); File->mtime = DebControl->ModificationTime(); - + if (Gen.MergeList(Parser) == false) return _error->Error("Problem with MergeLister for %s", DebFile.c_str()); diff --git a/apt-pkg/deb/debmetaindex.cc b/apt-pkg/deb/debmetaindex.cc index aa2db8149..eb5e78e3b 100644 --- a/apt-pkg/deb/debmetaindex.cc +++ b/apt-pkg/deb/debmetaindex.cc @@ -192,11 +192,13 @@ vector * debReleaseIndex::ComputeIndexTargets() const { vector const SectionEntries = src->second; for (vector::const_iterator I = SectionEntries.begin(); I != SectionEntries.end(); ++I) { - IndexTarget * Target = new IndexTarget(); - Target->ShortDesc = "Sources"; - Target->MetaKey = SourceIndexURISuffix(Target->ShortDesc.c_str(), (*I)->Section); - Target->URI = SourceIndexURI(Target->ShortDesc.c_str(), (*I)->Section); - Target->Description = Info (Target->ShortDesc.c_str(), (*I)->Section); + char const * const ShortDesc = "Sources"; + IndexTarget * const Target = new IndexTarget( + SourceIndexURISuffix(ShortDesc, (*I)->Section), + ShortDesc, + Info(ShortDesc, (*I)->Section), + SourceIndexURI(ShortDesc, (*I)->Section) + ); IndexTargets->push_back (Target); } } @@ -212,11 +214,13 @@ vector * debReleaseIndex::ComputeIndexTargets() const { continue; for (vector ::const_iterator I = a->second.begin(); I != a->second.end(); ++I) { - IndexTarget * Target = new IndexTarget(); - Target->ShortDesc = "Packages"; - Target->MetaKey = IndexURISuffix(Target->ShortDesc.c_str(), (*I)->Section, a->first); - Target->URI = IndexURI(Target->ShortDesc.c_str(), (*I)->Section, a->first); - Target->Description = Info (Target->ShortDesc.c_str(), (*I)->Section, a->first); + char const * const ShortDesc = "Packages"; + IndexTarget * const Target = new IndexTarget( + IndexURISuffix(ShortDesc, (*I)->Section, a->first), + ShortDesc, + Info (ShortDesc, (*I)->Section, a->first), + IndexURI(ShortDesc, (*I)->Section, a->first) + ); IndexTargets->push_back (Target); sections.insert((*I)->Section); } @@ -235,11 +239,13 @@ vector * debReleaseIndex::ComputeIndexTargets() const { s != sections.end(); ++s) { for (std::vector::const_iterator l = lang.begin(); l != lang.end(); ++l) { - IndexTarget * Target = new OptionalIndexTarget(); - Target->ShortDesc = "Translation-" + *l; - Target->MetaKey = TranslationIndexURISuffix(l->c_str(), *s); - Target->URI = TranslationIndexURI(l->c_str(), *s); - Target->Description = Info (Target->ShortDesc.c_str(), *s); + std::string const ShortDesc = "Translation-" + *l; + IndexTarget * const Target = new OptionalIndexTarget( + TranslationIndexURISuffix(l->c_str(), *s), + ShortDesc, + Info (ShortDesc.c_str(), *s), + TranslationIndexURI(l->c_str(), *s) + ); IndexTargets->push_back(Target); } } @@ -249,8 +255,6 @@ vector * debReleaseIndex::ComputeIndexTargets() const { /*}}}*/ bool 
debReleaseIndex::GetIndexes(pkgAcquire *Owner, bool const &GetAll) const { - bool const tryInRelease = _config->FindB("Acquire::TryInRelease", true); - indexRecords * const iR = new indexRecords(Dist); if (Trusted == ALWAYS_TRUSTED) iR->SetTrusted(true); @@ -258,37 +262,17 @@ bool debReleaseIndex::GetIndexes(pkgAcquire *Owner, bool const &GetAll) const iR->SetTrusted(false); // special case for --print-uris - if (GetAll) { - vector *targets = ComputeIndexTargets(); - for (vector ::const_iterator Target = targets->begin(); Target != targets->end(); ++Target) { - new pkgAcqIndex(Owner, (*Target)->URI, (*Target)->Description, - (*Target)->ShortDesc, HashStringList()); - } - delete targets; - - // this is normally created in pkgAcqMetaSig, but if we run - // in --print-uris mode, we add it here - if (tryInRelease == false) - new pkgAcqMetaIndex(Owner, NULL, - MetaIndexURI("Release"), - MetaIndexInfo("Release"), "Release", - MetaIndexURI("Release.gpg"), MetaIndexInfo("Release.gpg"), "Release.gpg", - ComputeIndexTargets(), - iR); + vector const * const targets = ComputeIndexTargets(); +#define APT_TARGET(X) IndexTarget("", X, MetaIndexInfo(X), MetaIndexURI(X)) + pkgAcqMetaBase * const TransactionManager = new pkgAcqMetaClearSig(Owner, + APT_TARGET("InRelease"), APT_TARGET("Release"), APT_TARGET("Release.gpg"), + targets, iR); +#undef APT_TARGET + if (GetAll) + { + for (vector ::const_iterator Target = targets->begin(); Target != targets->end(); ++Target) + new pkgAcqIndex(Owner, TransactionManager, *Target); } - if (tryInRelease == true) - new pkgAcqMetaClearSig(Owner, - MetaIndexURI("InRelease"), MetaIndexInfo("InRelease"), "InRelease", - MetaIndexURI("Release"), MetaIndexInfo("Release"), "Release", - MetaIndexURI("Release.gpg"), MetaIndexInfo("Release.gpg"), "Release.gpg", - ComputeIndexTargets(), - iR); - else - new pkgAcqMetaIndex(Owner, NULL, - MetaIndexURI("Release"), MetaIndexInfo("Release"), "Release", - MetaIndexURI("Release.gpg"), MetaIndexInfo("Release.gpg"), "Release.gpg", - ComputeIndexTargets(), - iR); return true; } diff --git a/apt-pkg/indexrecords.cc b/apt-pkg/indexrecords.cc index de2617833..c26868cac 100644 --- a/apt-pkg/indexrecords.cc +++ b/apt-pkg/indexrecords.cc @@ -73,7 +73,7 @@ APT_PURE indexRecords::checkSum *indexRecords::Lookup(const string MetaKey) APT_PURE bool indexRecords::Exists(string const &MetaKey) const { - return Entries.count(MetaKey) == 1; + return Entries.find(MetaKey) != Entries.end(); } bool indexRecords::Load(const string Filename) /*{{{*/ diff --git a/apt-pkg/pkgcache.h b/apt-pkg/pkgcache.h index 2ba23c5c0..b4d56611a 100644 --- a/apt-pkg/pkgcache.h +++ b/apt-pkg/pkgcache.h @@ -190,7 +190,12 @@ class pkgCache /*{{{*/ struct Flag { enum PkgFlags {Auto=(1<<0),Essential=(1<<3),Important=(1<<4)}; - enum PkgFFlags {NotSource=(1<<0),NotAutomatic=(1<<1),ButAutomaticUpgrades=(1<<2)}; + enum PkgFFlags { + NotSource=(1<<0), /*!< packages can't be fetched from here, e.g. 
dpkg/status file */ + NotAutomatic=(1<<1), /*!< archive has a default pin of 1 */ + ButAutomaticUpgrades=(1<<2), /*!< (together with the previous) archive has a default pin of 100 */ + LocalSource=(1<<3), /*!< local sources can't and will not be verified by hashes */ + }; }; protected: diff --git a/methods/file.cc b/methods/file.cc index 043ab04b8..353e54bd5 100644 --- a/methods/file.cc +++ b/methods/file.cc @@ -57,7 +57,11 @@ bool FileMethod::Fetch(FetchItem *Itm) Res.LastModified = Buf.st_mtime; Res.IMSHit = false; if (Itm->LastModified == Buf.st_mtime && Itm->LastModified != 0) - Res.IMSHit = true; + { + unsigned long long const filesize = Itm->ExpectedHashes.FileSize(); + if (filesize != 0 && filesize == Res.Size) + Res.IMSHit = true; + } } // See if the uncompressed file exists and reuse it diff --git a/test/integration/test-apt-get-source-authenticated b/test/integration/test-apt-get-source-authenticated index 685bc566b..da63f7cb3 100755 --- a/test/integration/test-apt-get-source-authenticated +++ b/test/integration/test-apt-get-source-authenticated @@ -1,7 +1,7 @@ #!/bin/sh # # Regression test for debian bug #749795. Ensure that we fail with -# a error if apt-get source foo will download a source that comes +# an error if apt-get source foo will download a source that comes # from a unauthenticated repository # set -e diff --git a/test/integration/test-apt-sources-deb822 b/test/integration/test-apt-sources-deb822 index d8b2334ad..51fe7bcfe 100755 --- a/test/integration/test-apt-sources-deb822 +++ b/test/integration/test-apt-sources-deb822 @@ -23,46 +23,45 @@ Description: summay msgtest 'Test sources.list' 'old style' echo "deb http://ftp.debian.org/debian stable main" > $SOURCES -testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-i386_Packages 0 -'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0 -'http://ftp.debian.org/debian/dists/stable/InRelease' ftp.debian.org_debian_dists_stable_InRelease 0 " aptget update --print-uris +testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/InRelease' ftp.debian.org_debian_dists_stable_InRelease 0 +'http://ftp.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-i386_Packages 0 +'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0 " aptget update --print-uris msgtest 'Test sources.list' 'simple deb822' echo "$BASE" > $SOURCES -testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-i386_Packages 0 -'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0 -'http://ftp.debian.org/debian/dists/stable/InRelease' ftp.debian.org_debian_dists_stable_InRelease 0 " aptget update --print-uris - +testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/InRelease' ftp.debian.org_debian_dists_stable_InRelease 0 +'http://ftp.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-i386_Packages 0 +'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0 " aptget update --print-uris msgtest 'Test deb822 with' 'two entries' # Two entries echo "$BASE" > 
$SOURCES echo "" >> $SOURCES echo "$BASE" | sed s/stable/unstable/ >> $SOURCES -testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-i386_Packages 0 +testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/InRelease' ftp.debian.org_debian_dists_stable_InRelease 0 +'http://ftp.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-i386_Packages 0 'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0 -'http://ftp.debian.org/debian/dists/stable/InRelease' ftp.debian.org_debian_dists_stable_InRelease 0 +'http://ftp.debian.org/debian/dists/unstable/InRelease' ftp.debian.org_debian_dists_unstable_InRelease 0 'http://ftp.debian.org/debian/dists/unstable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_unstable_main_binary-i386_Packages 0 -'http://ftp.debian.org/debian/dists/unstable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_unstable_main_i18n_Translation-en 0 -'http://ftp.debian.org/debian/dists/unstable/InRelease' ftp.debian.org_debian_dists_unstable_InRelease 0 " aptget update --print-uris +'http://ftp.debian.org/debian/dists/unstable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_unstable_main_i18n_Translation-en 0 " aptget update --print-uris # two suite entries msgtest 'Test deb822 with' 'two Suite entries' echo "$BASE" | sed -e "s/stable/stable unstable/" > $SOURCES -testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-i386_Packages 0 +testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/InRelease' ftp.debian.org_debian_dists_stable_InRelease 0 +'http://ftp.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-i386_Packages 0 'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0 -'http://ftp.debian.org/debian/dists/stable/InRelease' ftp.debian.org_debian_dists_stable_InRelease 0 +'http://ftp.debian.org/debian/dists/unstable/InRelease' ftp.debian.org_debian_dists_unstable_InRelease 0 'http://ftp.debian.org/debian/dists/unstable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_unstable_main_binary-i386_Packages 0 -'http://ftp.debian.org/debian/dists/unstable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_unstable_main_i18n_Translation-en 0 -'http://ftp.debian.org/debian/dists/unstable/InRelease' ftp.debian.org_debian_dists_unstable_InRelease 0 " aptget update --print-uris +'http://ftp.debian.org/debian/dists/unstable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_unstable_main_i18n_Translation-en 0 " aptget update --print-uris msgtest 'Test deb822' 'architecture option' echo "$BASE" > $SOURCES echo "Architectures: amd64 armel" >> $SOURCES -testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/main/binary-amd64/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-amd64_Packages 0 +testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/InRelease' ftp.debian.org_debian_dists_stable_InRelease 0 +'http://ftp.debian.org/debian/dists/stable/main/binary-amd64/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-amd64_Packages 0 'http://ftp.debian.org/debian/dists/stable/main/binary-armel/Packages.bz2' 
ftp.debian.org_debian_dists_stable_main_binary-armel_Packages 0 -'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0 -'http://ftp.debian.org/debian/dists/stable/InRelease' ftp.debian.org_debian_dists_stable_InRelease 0 " aptget update --print-uris +'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0 " aptget update --print-uris msgtest 'Test old-style sources.list file which has' 'malformed dist' @@ -85,20 +84,20 @@ testempty aptget update --print-uris # multiple URIs msgtest 'Test deb822 sources.list file which has' 'Multiple URIs work' echo "$BASE" | sed -e 's#http://ftp.debian.org/debian#http://ftp.debian.org/debian http://ftp.de.debian.org/debian#' > $SOURCES -testequal --nomsg "'http://ftp.de.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.de.debian.org_debian_dists_stable_main_binary-i386_Packages 0 +testequal --nomsg "'http://ftp.de.debian.org/debian/dists/stable/InRelease' ftp.de.debian.org_debian_dists_stable_InRelease 0 +'http://ftp.de.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.de.debian.org_debian_dists_stable_main_binary-i386_Packages 0 'http://ftp.de.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.de.debian.org_debian_dists_stable_main_i18n_Translation-en 0 -'http://ftp.de.debian.org/debian/dists/stable/InRelease' ftp.de.debian.org_debian_dists_stable_InRelease 0 +'http://ftp.debian.org/debian/dists/stable/InRelease' ftp.debian.org_debian_dists_stable_InRelease 0 'http://ftp.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-i386_Packages 0 -'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0 -'http://ftp.debian.org/debian/dists/stable/InRelease' ftp.debian.org_debian_dists_stable_InRelease 0 " aptget update --print-uris +'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0 " aptget update --print-uris # multiple Type in one field msgtest 'Test deb822 sources.list file which has' 'Multiple Types work' echo "$BASE" | sed -e 's#Types: deb#Types: deb deb-src#' > $SOURCES -testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/main/source/Sources.bz2' ftp.debian.org_debian_dists_stable_main_source_Sources 0 +testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/InRelease' ftp.debian.org_debian_dists_stable_InRelease 0 +'http://ftp.debian.org/debian/dists/stable/main/source/Sources.bz2' ftp.debian.org_debian_dists_stable_main_source_Sources 0 'http://ftp.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-i386_Packages 0 -'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0 -'http://ftp.debian.org/debian/dists/stable/InRelease' ftp.debian.org_debian_dists_stable_InRelease 0 " aptget update --print-uris +'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0 " aptget update --print-uris # a Suite msgtest 'Test deb822 sources.list file which has' 'a exact path and no sections' @@ -107,6 +106,6 @@ Types: deb URIs: http://emacs.naquadah.org Suites: stable/ EOF -testequal --nomsg 
"'http://emacs.naquadah.org/stable/Packages.bz2' emacs.naquadah.org_stable_Packages 0 -'http://emacs.naquadah.org/stable/en.bz2' emacs.naquadah.org_stable_en 0 -'http://emacs.naquadah.org/stable/InRelease' emacs.naquadah.org_stable_InRelease 0 " aptget update --print-uris +testequal --nomsg "'http://emacs.naquadah.org/stable/InRelease' emacs.naquadah.org_stable_InRelease 0 +'http://emacs.naquadah.org/stable/Packages.bz2' emacs.naquadah.org_stable_Packages 0 +'http://emacs.naquadah.org/stable/en.bz2' emacs.naquadah.org_stable_en 0 " aptget update --print-uris diff --git a/test/integration/test-apt-update-expected-size b/test/integration/test-apt-update-expected-size index 55a5da848..55bba8188 100755 --- a/test/integration/test-apt-update-expected-size +++ b/test/integration/test-apt-update-expected-size @@ -35,7 +35,7 @@ test_packagestoobig() { done NEW_SIZE="$(stat --printf=%s aptarchive/dists/unstable/main/binary-i386/Packages)" testfailuremsg "W: Failed to fetch ${1}/dists/unstable/main/binary-i386/Packages Writing more data than expected ($NEW_SIZE > $SIZE) -E: Some index files failed to download. They have been ignored, or old ones used instead." aptget update -o Debug::pkgAcquire::Worker=0 -o Debug::Acquire::Transaction=0 +E: Some index files failed to download. They have been ignored, or old ones used instead." aptget update -o Debug::pkgAcquire::Worker=1 -o Debug::Acquire::Transaction=0 } methodtest() { diff --git a/test/integration/test-apt-update-file b/test/integration/test-apt-update-file index 1ecf9a38a..665f94fa5 100755 --- a/test/integration/test-apt-update-file +++ b/test/integration/test-apt-update-file @@ -22,6 +22,10 @@ addtrap 'prefix' 'chmod 750 aptarchive/dists/unstable/main/binary-amd64;' chmod 550 aptarchive/dists/unstable/main/binary-amd64 testsuccess aptget update + +# the release files aren't an IMS-hit, but the indexes are +redatereleasefiles '+1 hour' + testsuccess aptget update -o Debug::pkgAcquire::Auth=1 cp -a rootdir/tmp/testsuccess.output rootdir/tmp/update.output diff --git a/test/integration/test-apt-update-nofallback b/test/integration/test-apt-update-nofallback index db4430ea3..f132bcf8e 100755 --- a/test/integration/test-apt-update-nofallback +++ b/test/integration/test-apt-update-nofallback @@ -28,6 +28,7 @@ Description: an autogenerated evil package EOF # avoid ims hit touch -d '+1hour' aptarchive/dists/unstable/main/binary-i386/Packages + compressfile aptarchive/dists/unstable/main/binary-i386/Packages } assert_update_is_refused_and_last_good_state_used() @@ -87,16 +88,16 @@ test_from_inrelease_to_unsigned_with_override() { # setup archive with InRelease file setupaptarchive_with_lists_clean - # FIXME: is not what the server reported 4104 4106 - testsuccess aptget update #-o Debug::pkgAcquire::Worker=1 + testsuccess aptget update # simulate moving to a unsigned but otherwise valid repo simulate_mitm_and_inject_evil_package - generatereleasefiles + generatereleasefiles '+2 hours' + find $APTARCHIVE -name '*Packages*' -exec touch -d '+2 hours' {} \; # and ensure we can update to it (with enough force) testwarning aptget update --allow-insecure-repositories \ - -o Acquire::AllowDowngradeToInsecureRepositories=1 + -o Acquire::AllowDowngradeToInsecureRepositories=1 -o Debug::pkgAcquire::Worker=1 -o Debug::pkgAcquire::Auth=1 # but that the individual packages are still considered untrusted testfailureequal "WARNING: The following packages cannot be authenticated! 
evil @@ -167,7 +168,7 @@ test_inrelease_to_invalid_inrelease() listcurrentlistsdirectory > lists.before # now remove InRelease and subvert Release do no longer verify - sed -i 's/Codename.*/Codename: evil!'/ $APTARCHIVE/dists/unstable/InRelease + sed -i 's/^Codename:.*/Codename: evil!/' $APTARCHIVE/dists/unstable/InRelease inject_evil_package testwarningequal "W: An error occurred during the signature verification. The repository is not updated and the previous index files will be used. GPG error: file: unstable InRelease: The following signatures were invalid: BADSIG 5A90D141DBAC8DAE Joe Sixpack (APT Testcases Dummy) diff --git a/test/integration/test-apt-update-not-modified b/test/integration/test-apt-update-not-modified index bac33d531..a490f00de 100755 --- a/test/integration/test-apt-update-not-modified +++ b/test/integration/test-apt-update-not-modified @@ -43,7 +43,9 @@ Version: 1 EOF compressfile aptarchive/dists/unstable/main/binary-amd64/Packages testfailureequal "Hit $1 unstable InRelease -Get:1 $1 unstable/main amd64 Packages [$(stat -c '%s' 'aptarchive/dists.good/unstable/main/binary-amd64/Packages.gz') B] +Get:1 $1 unstable/main amd64 Packages [$(stat -c '%s' 'aptarchive/dists/unstable/main/binary-amd64/Packages.gz') B] +Err $1 unstable/main amd64 Packages + Hash Sum mismatch W: Failed to fetch $1/dists/unstable/main/binary-amd64/Packages.gz Hash Sum mismatch E: Some index files failed to download. They have been ignored, or old ones used instead." aptget update @@ -87,8 +89,32 @@ Hit $1 unstable Release Reading package lists..." aptget update testfileequal 'listsdir-without-amd64.lst' "$(listcurrentlistsdirectory)" - # readd arch so its downloaded again + # readd arch so its downloaded again… configarchitecture 'amd64' 'i386' + # … but oh noes, hashsum mismatch! + find aptarchive/dists/unstable/main/binary-amd64/ -type f -delete + cat >> aptarchive/dists/unstable/main/binary-amd64/Packages < rootdir/etc/apt/apt.conf.d/00nolanguages -testsuccess aptget update +testsuccess aptget update -o Debug::pkgAcquire::Worker=1 -o Debug::Acquire::http=1 listcurrentlistsdirectory > lists.before # insert new version @@ -26,7 +26,7 @@ mkdir aptarchive/dists/unstable/main/binary-i386/saved cp -p aptarchive/dists/unstable/main/binary-i386/Packages* \ aptarchive/dists/unstable/main/binary-i386/saved insertpackage 'unstable' 'foo' 'all' '2.0' - +touch -d '+1 hour' aptarchive/dists/unstable/main/binary-i386/Packages compressfile aptarchive/dists/unstable/main/binary-i386/Packages # ensure that we do not get a I-M-S hit for the Release file @@ -39,7 +39,6 @@ cp -p aptarchive/dists/unstable/main/binary-i386/saved/Packages* \ aptarchive/dists/unstable/main/binary-i386/ # ensure this raises an error -testfailureequal "W: Failed to fetch http://localhost:8080/dists/unstable/main/binary-i386/Packages Hash Sum mismatch - -E: Some index files failed to download. They have been ignored, or old ones used instead." aptget update -qq +testfailuremsg "W: Failed to fetch copy:$(readlink -f ./rootdir)/var/lib/apt/lists/localhost:8080_dists_unstable_main_binary-i386_Packages Hash Sum mismatch +E: Some index files failed to download. They have been ignored, or old ones used instead." 
aptget update -o Debug::pkgAcquire::Worker=1 -o Debug::Acquire::http=1 testfileequal lists.before "$(listcurrentlistsdirectory)" diff --git a/test/integration/test-bug-595691-empty-and-broken-archive-files b/test/integration/test-bug-595691-empty-and-broken-archive-files index bca07268c..486b8ba02 100755 --- a/test/integration/test-bug-595691-empty-and-broken-archive-files +++ b/test/integration/test-bug-595691-empty-and-broken-archive-files @@ -12,7 +12,7 @@ setupflataptarchive testaptgetupdate() { rm -rf rootdir/var/lib/apt - aptget update 2>> testaptgetupdate.diff >> testaptgetupdate.diff || true + aptget update >testaptgetupdate.diff 2>&1 || true sed -i -e '/Ign /,+1d' -e '/Release/ d' -e 's#Get:[0-9]\+ #Get: #' -e 's#\[[0-9]* [kMGTPY]*B\]#\[\]#' testaptgetupdate.diff GIVEN="$1" shift diff --git a/test/integration/test-ubuntu-bug-1098738-apt-get-source-md5sum b/test/integration/test-ubuntu-bug-1098738-apt-get-source-md5sum index ec74a750b..555d8fcaa 100755 --- a/test/integration/test-ubuntu-bug-1098738-apt-get-source-md5sum +++ b/test/integration/test-ubuntu-bug-1098738-apt-get-source-md5sum @@ -176,7 +176,11 @@ testmismatch() { Building dependency tree... Need to get 6 B of source archives. Get:1 http://localhost:8080/ $1 1.0 (dsc) [3 B] +Err http://localhost:8080/ $1 1.0 (dsc) + Hash Sum mismatch Get:2 http://localhost:8080/ $1 1.0 (tar) [3 B] +Err http://localhost:8080/ $1 1.0 (tar) + Hash Sum mismatch E: Failed to fetch http://localhost:8080/${1}_1.0.dsc Hash Sum mismatch E: Failed to fetch http://localhost:8080/${1}_1.0.tar.gz Hash Sum mismatch @@ -238,6 +242,8 @@ Building dependency tree... Need to get 6 B of source archives. Get:1 http://localhost:8080/ pkg-mixed-sha1-bad 1.0 (tar) [3 B] Get:2 http://localhost:8080/ pkg-mixed-sha1-bad 1.0 (dsc) [3 B] +Err http://localhost:8080/ pkg-mixed-sha1-bad 1.0 (dsc) + Hash Sum mismatch E: Failed to fetch http://localhost:8080/pkg-mixed-sha1-bad_1.0.dsc Hash Sum mismatch E: Failed to fetch some archives.' aptget source -d pkg-mixed-sha1-bad @@ -247,6 +253,8 @@ testfailureequal 'Reading package lists... Building dependency tree... Need to get 6 B of source archives. 
Get:1 http://localhost:8080/ pkg-mixed-sha2-bad 1.0 (tar) [3 B] +Err http://localhost:8080/ pkg-mixed-sha2-bad 1.0 (tar) + Hash Sum mismatch Get:2 http://localhost:8080/ pkg-mixed-sha2-bad 1.0 (dsc) [3 B] E: Failed to fetch http://localhost:8080/pkg-mixed-sha2-bad_1.0.tar.gz Hash Sum mismatch diff --git a/test/libapt/acqprogress_test.cc b/test/libapt/acqprogress_test.cc index 288e05aca..c634733d4 100644 --- a/test/libapt/acqprogress_test.cc +++ b/test/libapt/acqprogress_test.cc @@ -1,4 +1,5 @@ #include +#include #include #include #include @@ -10,9 +11,10 @@ class TestItem: public pkgAcquire::Item { public: - TestItem(pkgAcquire * const Acq) : pkgAcquire::Item(Acq, "", NULL) {} + TestItem(pkgAcquire * const Acq) : pkgAcquire::Item(Acq) {} - virtual std::string DescURI() { return ""; } + virtual std::string DescURI() const { return ""; } + virtual HashStringList GetExpectedHashes() const { return HashStringList(); } }; diff --git a/test/libapt/hashsums_test.cc b/test/libapt/hashsums_test.cc index edcd8a11a..63c63ecd3 100644 --- a/test/libapt/hashsums_test.cc +++ b/test/libapt/hashsums_test.cc @@ -306,6 +306,7 @@ TEST(HashSumsTest, HashStringList) EXPECT_EQ(NULL, list.find(NULL)); EXPECT_EQ(NULL, list.find("")); EXPECT_EQ(NULL, list.find("MD5Sum")); + EXPECT_EQ(0, list.FileSize()); // empty lists aren't equal HashStringList list2; @@ -316,6 +317,8 @@ TEST(HashSumsTest, HashStringList) list.push_back(HashString("Checksum-FileSize", "29")); EXPECT_FALSE(list.empty()); EXPECT_FALSE(list.usable()); + EXPECT_EQ(1, list.size()); + EXPECT_EQ(29, list.FileSize()); Hashes hashes; hashes.Add("The quick brown fox jumps over the lazy dog"); -- cgit v1.2.3