-rw-r--r-- | apt-pkg/acquire-item.cc | 263
-rw-r--r-- | apt-pkg/acquire-item.h | 39
-rw-r--r-- | apt-pkg/acquire-worker.cc | 84
-rw-r--r-- | apt-pkg/cacheiterators.h | 2
-rw-r--r-- | apt-pkg/deb/debindexfile.cc | 26
-rw-r--r-- | apt-pkg/pkgcache.cc | 14
-rw-r--r-- | apt-private/private-install.cc | 107
-rw-r--r-- | apt-private/private-install.h | 18
-rw-r--r-- | apt-private/private-search.cc | 9
-rw-r--r-- | apt-private/private-show.cc | 250
-rw-r--r-- | apt-private/private-show.h | 7
-rw-r--r-- | apt-private/private-source.cc | 90
-rw-r--r-- | apt-private/private-upgrade.cc | 3
-rw-r--r-- | cmdline/apt-cache.cc | 7
-rwxr-xr-x | test/integration/test-apt-get-install-deb | 57
-rwxr-xr-x | test/integration/test-apt-install-file-reltag | 94
-rwxr-xr-x | test/integration/test-apt-source-and-build-dep | 59
-rwxr-xr-x | test/integration/test-partial-file-support | 2
-rwxr-xr-x | test/integration/test-pdiff-usage | 5
19 files changed, 751 insertions(+), 385 deletions(-)
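
For orientation before the diff: the patch stops modelling by-hash as a pseudo compression type and instead lets pkgAcqTransactionItem::QueueURI push by-hash variants as alternative URIs. The standalone sketch below — hypothetical names, not APT's actual API — illustrates only the URI rewrite performed by the patch's PushByHashURI lambda: everything after the final '/' of an index URI is replaced with "by-hash/<HashType>/<HashValue>".

// Standalone sketch (hypothetical Hash/ByHashURI names, example URL) of the
// by-hash rewrite done by the PushByHashURI lambda in this patch.
#include <iostream>
#include <optional>
#include <string>

// Stand-in for APT's HashString (HashType()/HashValue() accessors).
struct Hash
{
   std::string type;
   std::string value;
};

static std::optional<std::string> ByHashURI(std::string uri, Hash const &h)
{
   auto const trailing_slash = uri.find_last_of('/');
   if (trailing_slash == std::string::npos)
      return std::nullopt; // malformed URI: only the plain URI remains usable
   // replace the filename component with the content-addressed path
   uri.replace(trailing_slash, uri.length() - trailing_slash,
               "/by-hash/" + h.type + "/" + h.value);
   return uri;
}

int main()
{
   Hash const sha256{"SHA256", "0123456789abcdef"}; // value shortened for the example
   if (auto const alt = ByHashURI("http://deb.example.org/dists/unstable/main/binary-amd64/Packages.xz", sha256))
      std::cout << *alt << '\n';
   // prints: http://deb.example.org/dists/unstable/main/binary-amd64/by-hash/SHA256/0123456789abcdef
}

In the patch the plain URI is pushed first and the by-hash URI afterwards; since the last alternative added is the first one tried (see the comment in QueueURI below), by-hash is attempted before the plain file, which remains as a fallback.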
diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc index f5986a260..a366b8981 100644 --- a/apt-pkg/acquire-item.cc +++ b/apt-pkg/acquire-item.cc @@ -398,21 +398,66 @@ bool pkgAcqTransactionItem::QueueURI(pkgAcquire::ItemDesc &Item) Status = StatDone; return false; } + // this ensures we rewrite only once and only the first step + auto const OldBaseURI = Target.Option(IndexTarget::BASE_URI); + if (OldBaseURI.empty() || APT::String::Startswith(Item.URI, OldBaseURI) == false) + return pkgAcquire::Item::QueueURI(Item); + // the given URI is our last resort + PushAlternativeURI(std::string(Item.URI), {}, false); // If we got the InRelease file via a mirror, pick all indexes directly from this mirror, too - if (TransactionManager->BaseURI.empty() == false && UsedMirror.empty() && - URI::SiteOnly(Item.URI) != URI::SiteOnly(TransactionManager->BaseURI)) + std::string SameMirrorURI; + if (TransactionManager->BaseURI.empty() == false && TransactionManager->UsedMirror.empty() == false && + URI::SiteOnly(Item.URI) != URI::SiteOnly(TransactionManager->BaseURI)) { - // this ensures we rewrite only once and only the first step - auto const OldBaseURI = Target.Option(IndexTarget::BASE_URI); - if (OldBaseURI.empty() == false && APT::String::Startswith(Item.URI, OldBaseURI)) + auto ExtraPath = Item.URI.substr(OldBaseURI.length()); + auto newURI = flCombine(TransactionManager->BaseURI, std::move(ExtraPath)); + if (IsGoodAlternativeURI(newURI)) { - auto const ExtraPath = Item.URI.substr(OldBaseURI.length()); - Item.URI = flCombine(TransactionManager->BaseURI, ExtraPath); - UsedMirror = TransactionManager->UsedMirror; - if (Item.Description.find(" ") != string::npos) - Item.Description.replace(0, Item.Description.find(" "), UsedMirror); + SameMirrorURI = std::move(newURI); + PushAlternativeURI(std::string(SameMirrorURI), {}, false); } } + // add URI and by-hash based on it + if (AcquireByHash()) + { + // if we use the mirror transport, ask it for by-hash uris + // we need to stick to the same mirror only for non-unique filenames + auto const sameMirrorException = [&]() { + if (Item.URI.find("mirror") == std::string::npos) + return false; + ::URI uri(Item.URI); + return uri.Access == "mirror" || APT::String::Startswith(uri.Access, "mirror+") || + APT::String::Endswith(uri.Access, "+mirror") || uri.Access.find("+mirror+") != std::string::npos; + }(); + if (sameMirrorException) + SameMirrorURI.clear(); + // now add the actual by-hash uris + auto const Expected = GetExpectedHashes(); + auto const TargetHash = Expected.find(nullptr); + auto const PushByHashURI = [&](std::string U) { + if (unlikely(TargetHash == nullptr)) + return false; + auto const trailing_slash = U.find_last_of("/"); + if (unlikely(trailing_slash == std::string::npos)) + return false; + auto byhashSuffix = "/by-hash/" + TargetHash->HashType() + "/" + TargetHash->HashValue(); + U.replace(trailing_slash, U.length() - trailing_slash, std::move(byhashSuffix)); + PushAlternativeURI(std::move(U), {}, false); + return true; + }; + PushByHashURI(Item.URI); + if (SameMirrorURI.empty() == false && PushByHashURI(SameMirrorURI) == false) + SameMirrorURI.clear(); + } + // the last URI added is the first one tried + if (unlikely(PopAlternativeURI(Item.URI) == false)) + return false; + if (SameMirrorURI.empty() == false) + { + UsedMirror = TransactionManager->UsedMirror; + if (Item.Description.find(" ") != string::npos) + Item.Description.replace(0, Item.Description.find(" "), UsedMirror); + } return pkgAcquire::Item::QueueURI(Item); } /* The 
transition manager InRelease itself (or its older sisters-in-law @@ -619,6 +664,26 @@ bool pkgAcqDiffIndex::TransactionState(TransactionStates const state) return true; } /*}}}*/ +// pkgAcqTransactionItem::AcquireByHash and specialisations for child classes /*{{{*/ +bool pkgAcqTransactionItem::AcquireByHash() const +{ + if (TransactionManager->MetaIndexParser == nullptr) + return false; + auto const useByHashConf = Target.Option(IndexTarget::BY_HASH); + if (useByHashConf == "force") + return true; + return StringToBool(useByHashConf) == true && TransactionManager->MetaIndexParser->GetSupportsAcquireByHash(); +} +// pdiff patches have a unique name already, no need for by-hash +bool pkgAcqIndexMergeDiffs::AcquireByHash() const +{ + return false; +} +bool pkgAcqIndexDiffs::AcquireByHash() const +{ + return false; +} + /*}}}*/ class APT_HIDDEN NoActionItem : public pkgAcquire::Item /*{{{*/ /* The sole purpose of this class is having an item which does nothing to @@ -686,11 +751,12 @@ class pkgAcquire::Item::Private public: struct AlternateURI { - std::string const URI; + std::string URI; std::unordered_map<std::string, std::string> changefields; AlternateURI(std::string &&u, decltype(changefields) &&cf) : URI(u), changefields(cf) {} }; std::list<AlternateURI> AlternativeURIs; + std::vector<std::string> BadAlternativeSites; std::vector<std::string> PastRedirections; std::unordered_map<std::string, std::string> CustomFields; unsigned int Retries; @@ -749,14 +815,32 @@ bool pkgAcquire::Item::PopAlternativeURI(std::string &NewURI) /*{{{*/ return true; } /*}}}*/ +bool pkgAcquire::Item::IsGoodAlternativeURI(std::string const &AltUri) const/*{{{*/ +{ + return std::find(d->PastRedirections.cbegin(), d->PastRedirections.cend(), AltUri) == d->PastRedirections.cend() && + std::find(d->BadAlternativeSites.cbegin(), d->BadAlternativeSites.cend(), URI::SiteOnly(AltUri)) == d->BadAlternativeSites.cend(); +} + /*}}}*/ void pkgAcquire::Item::PushAlternativeURI(std::string &&NewURI, std::unordered_map<std::string, std::string> &&fields, bool const at_the_back) /*{{{*/ { + if (IsGoodAlternativeURI(NewURI) == false) + return; if (at_the_back) d->AlternativeURIs.emplace_back(std::move(NewURI), std::move(fields)); else d->AlternativeURIs.emplace_front(std::move(NewURI), std::move(fields)); } /*}}}*/ +void pkgAcquire::Item::RemoveAlternativeSite(std::string &&OldSite) /*{{{*/ +{ + d->AlternativeURIs.erase(std::remove_if(d->AlternativeURIs.begin(), d->AlternativeURIs.end(), + [&](decltype(*d->AlternativeURIs.cbegin()) AltUri) { + return URI::SiteOnly(AltUri.URI) == OldSite; + }), + d->AlternativeURIs.end()); + d->BadAlternativeSites.push_back(std::move(OldSite)); +} + /*}}}*/ unsigned int &pkgAcquire::Item::ModifyRetries() /*{{{*/ { return d->Retries; @@ -1429,7 +1513,6 @@ void pkgAcqMetaClearSig::QueueIndexes(bool const verify) /*{{{*/ std::set<std::string> targetsSeen; bool const hasReleaseFile = TransactionManager->MetaIndexParser != NULL; - bool const metaBaseSupportsByHash = hasReleaseFile && TransactionManager->MetaIndexParser->GetSupportsAcquireByHash(); bool hasHashes = true; auto IndexTargets = TransactionManager->MetaIndexParser->GetIndexTargets(); if (hasReleaseFile && verify == false) @@ -1565,15 +1648,6 @@ void pkgAcqMetaClearSig::QueueIndexes(bool const verify) /*{{{*/ if (types.empty() == false) { std::ostringstream os; - // add the special compressiontype byhash first if supported - std::string const useByHashConf = Target.Option(IndexTarget::BY_HASH); - bool useByHash = false; - if(useByHashConf == 
"force") - useByHash = true; - else - useByHash = StringToBool(useByHashConf) == true && metaBaseSupportsByHash; - if (useByHash == true) - os << "by-hash "; std::copy(types.begin(), types.end()-1, std::ostream_iterator<std::string>(os, " ")); os << *types.rbegin(); Target.Options["COMPRESSIONTYPES"] = os.str(); @@ -2219,8 +2293,6 @@ pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire * const Owner, CompressionExtensions = os.str(); } } - if (Target.Option(IndexTarget::COMPRESSIONTYPES).find("by-hash") != std::string::npos) - CompressionExtensions = "by-hash " + CompressionExtensions; Init(GetDiffIndexURI(Target), GetDiffIndexFileName(Target.Description), Target.ShortDesc); if(Debug) @@ -2231,7 +2303,7 @@ void pkgAcqDiffIndex::QueueOnIMSHit() const /*{{{*/ { // list cleanup needs to know that this file as well as the already // present index is ours, so we create an empty diff to save it for us - new pkgAcqIndexDiffs(Owner, TransactionManager, Target, UsedMirror, Target.URI); + new pkgAcqIndexDiffs(Owner, TransactionManager, Target); } /*}}}*/ static bool RemoveFileForBootstrapLinking(std::string &ErrorText, std::string const &For, std::string const &Boot)/*{{{*/ @@ -2585,7 +2657,7 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string const &IndexDiffFile) /*{{{*/ /*}}}*/ void pkgAcqDiffIndex::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)/*{{{*/ { - if (CommonFailed(GetDiffIndexURI(Target), GetDiffIndexFileName(Target.Description), Message, Cnf)) + if (CommonFailed(GetDiffIndexURI(Target), Message, Cnf)) return; RenameOnError(PDiffError); @@ -2631,33 +2703,16 @@ void pkgAcqDiffIndex::Done(string const &Message,HashStringList const &Hashes, / } else { - // we have something, queue the diffs - string::size_type const last_space = Description.rfind(" "); - if(last_space != string::npos) - Description.erase(last_space, Description.size()-last_space); - - std::string indexURI = Desc.URI; - auto const byhashidx = indexURI.find("/by-hash/"); - if (byhashidx != std::string::npos) - indexURI = indexURI.substr(0, byhashidx - strlen(".diff")); - else - { - auto end = indexURI.length() - strlen(".diff/Index"); - if (CurrentCompressionExtension != "uncompressed") - end -= (1 + CurrentCompressionExtension.length()); - indexURI = indexURI.substr(0, end); - } - if (pdiff_merge == false) - new pkgAcqIndexDiffs(Owner, TransactionManager, Target, UsedMirror, indexURI, available_patches); + new pkgAcqIndexDiffs(Owner, TransactionManager, Target, available_patches); else { diffs = new std::vector<pkgAcqIndexMergeDiffs*>(available_patches.size()); for(size_t i = 0; i < available_patches.size(); ++i) (*diffs)[i] = new pkgAcqIndexMergeDiffs(Owner, TransactionManager, - Target, UsedMirror, indexURI, - available_patches[i], - diffs); + Target, + available_patches[i], + diffs); } } @@ -2681,28 +2736,20 @@ pkgAcqDiffIndex::~pkgAcqDiffIndex() /* The package diff is added to the queue. 
one object is constructed * for each diff and the index */ -pkgAcqIndexDiffs::pkgAcqIndexDiffs(pkgAcquire * const Owner, - pkgAcqMetaClearSig * const TransactionManager, - IndexTarget const &Target, - std::string const &indexUsedMirror, std::string const &indexURI, +pkgAcqIndexDiffs::pkgAcqIndexDiffs(pkgAcquire *const Owner, + pkgAcqMetaClearSig *const TransactionManager, + IndexTarget const &Target, vector<DiffInfo> const &diffs) - : pkgAcqBaseIndex(Owner, TransactionManager, Target), indexURI(indexURI), - available_patches(diffs) + : pkgAcqBaseIndex(Owner, TransactionManager, Target), + available_patches(diffs) { DestFile = GetKeepCompressedFileName(GetPartialFileNameFromURI(Target.URI), Target); Debug = _config->FindB("Debug::pkgAcquire::Diffs",false); Desc.Owner = this; - Description = Target.Description; Desc.ShortDesc = Target.ShortDesc; - UsedMirror = indexUsedMirror; - if (UsedMirror == "DIRECT") - UsedMirror.clear(); - else if (UsedMirror.empty() == false && Description.find(" ") != string::npos) - Description.replace(0, Description.find(" "), UsedMirror); - if(available_patches.empty() == true) { // we are done (yeah!), check hashes against the final file @@ -2817,8 +2864,8 @@ bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/ } // queue the right diff - Desc.URI = indexURI + ".diff/" + available_patches[0].file + ".gz"; - Desc.Description = Description + " " + available_patches[0].file + string(".pdiff"); + Desc.URI = Target.URI + ".diff/" + available_patches[0].file + ".gz"; + Desc.Description = Target.Description + " " + available_patches[0].file + string(".pdiff"); DestFile = GetKeepCompressedFileName(GetPartialFileNameFromURI(Target.URI + ".diff/" + available_patches[0].file), Target); if(Debug) @@ -2870,7 +2917,7 @@ void pkgAcqIndexDiffs::Done(string const &Message, HashStringList const &Hashes, // see if there is more to download if(available_patches.empty() == false) { - new pkgAcqIndexDiffs(Owner, TransactionManager, Target, UsedMirror, indexURI, available_patches); + new pkgAcqIndexDiffs(Owner, TransactionManager, Target, available_patches); Finish(); } else { DestFile = PatchedFile; @@ -2896,28 +2943,20 @@ std::string pkgAcqIndexDiffs::Custom600Headers() const /*{{{*/ pkgAcqIndexDiffs::~pkgAcqIndexDiffs() {} // AcqIndexMergeDiffs::AcqIndexMergeDiffs - Constructor /*{{{*/ -pkgAcqIndexMergeDiffs::pkgAcqIndexMergeDiffs(pkgAcquire * const Owner, - pkgAcqMetaClearSig * const TransactionManager, - IndexTarget const &Target, - std::string const &indexUsedMirror, std::string const &indexURI, - DiffInfo const &patch, - std::vector<pkgAcqIndexMergeDiffs*> const * const allPatches) - : pkgAcqBaseIndex(Owner, TransactionManager, Target), indexURI(indexURI), - patch(patch), allPatches(allPatches), State(StateFetchDiff) +pkgAcqIndexMergeDiffs::pkgAcqIndexMergeDiffs(pkgAcquire *const Owner, + pkgAcqMetaClearSig *const TransactionManager, + IndexTarget const &Target, + DiffInfo const &patch, + std::vector<pkgAcqIndexMergeDiffs *> const *const allPatches) + : pkgAcqBaseIndex(Owner, TransactionManager, Target), + patch(patch), allPatches(allPatches), State(StateFetchDiff) { Debug = _config->FindB("Debug::pkgAcquire::Diffs",false); - Description = Target.Description; - UsedMirror = indexUsedMirror; - if (UsedMirror == "DIRECT") - UsedMirror.clear(); - else if (UsedMirror.empty() == false && Description.find(" ") != string::npos) - Description.replace(0, Description.find(" "), UsedMirror); - Desc.Owner = this; Desc.ShortDesc = Target.ShortDesc; - Desc.URI = indexURI + ".diff/" + patch.file + 
".gz"; - Desc.Description = Description + " " + patch.file + ".pdiff"; + Desc.URI = Target.URI + ".diff/" + patch.file + ".gz"; + Desc.Description = Target.Description + " " + patch.file + ".pdiff"; DestFile = GetPartialFileNameFromURI(Target.URI + ".diff/" + patch.file + ".gz"); if(Debug) @@ -3073,60 +3112,28 @@ pkgAcqIndex::pkgAcqIndex(pkgAcquire * const Owner, } /*}}}*/ // AcqIndex::Init - deferred Constructor /*{{{*/ -static void NextCompressionExtension(std::string &CurrentCompressionExtension, std::string &CompressionExtensions, bool const preview) +void pkgAcqIndex::Init(string const &URI, string const &URIDesc, + string const &ShortDesc) { + Stage = STAGE_DOWNLOAD; + + DestFile = GetPartialFileNameFromURI(URI); size_t const nextExt = CompressionExtensions.find(' '); if (nextExt == std::string::npos) { CurrentCompressionExtension = CompressionExtensions; - if (preview == false) - CompressionExtensions.clear(); + CompressionExtensions.clear(); } else { CurrentCompressionExtension = CompressionExtensions.substr(0, nextExt); - if (preview == false) - CompressionExtensions = CompressionExtensions.substr(nextExt+1); + CompressionExtensions = CompressionExtensions.substr(nextExt+1); } -} -void pkgAcqIndex::Init(string const &URI, string const &URIDesc, - string const &ShortDesc) -{ - Stage = STAGE_DOWNLOAD; - - DestFile = GetPartialFileNameFromURI(URI); - NextCompressionExtension(CurrentCompressionExtension, CompressionExtensions, false); if (CurrentCompressionExtension == "uncompressed") { Desc.URI = URI; } - else if (CurrentCompressionExtension == "by-hash") - { - NextCompressionExtension(CurrentCompressionExtension, CompressionExtensions, true); - if(unlikely(CurrentCompressionExtension.empty())) - return; - if (CurrentCompressionExtension != "uncompressed") - { - Desc.URI = URI + '.' + CurrentCompressionExtension; - DestFile = DestFile + '.' 
+ CurrentCompressionExtension; - } - else - Desc.URI = URI; - - HashStringList const Hashes = GetExpectedHashes(); - HashString const * const TargetHash = Hashes.find(NULL); - if (unlikely(TargetHash == nullptr)) - return; - std::string const ByHash = "/by-hash/" + TargetHash->HashType() + "/" + TargetHash->HashValue(); - size_t const trailing_slash = Desc.URI.find_last_of("/"); - if (unlikely(trailing_slash == std::string::npos)) - return; - Desc.URI = Desc.URI.replace( - trailing_slash, - Desc.URI.substr(trailing_slash+1).size()+1, - ByHash); - } else if (unlikely(CurrentCompressionExtension.empty())) return; else @@ -3170,24 +3177,10 @@ string pkgAcqIndex::Custom600Headers() const } /*}}}*/ // AcqIndex::Failed - getting the indexfile failed /*{{{*/ -bool pkgAcqIndex::CommonFailed(std::string const &TargetURI, std::string const TargetDesc, - std::string const &Message, pkgAcquire::MethodConfig const * const Cnf) +bool pkgAcqIndex::CommonFailed(std::string const &TargetURI, + std::string const &Message, pkgAcquire::MethodConfig const *const Cnf) { pkgAcqBaseIndex::Failed(Message,Cnf); - - if (UsedMirror.empty() == false && UsedMirror != "DIRECT" && - LookupTag(Message, "FailReason") == "HttpError404") - { - UsedMirror = "DIRECT"; - if (Desc.URI.find("/by-hash/") != std::string::npos) - CompressionExtensions = "by-hash " + CompressionExtensions; - else - CompressionExtensions = CurrentCompressionExtension + ' ' + CompressionExtensions; - Init(TargetURI, TargetDesc, Desc.ShortDesc); - Status = StatIdle; - return true; - } - // authorisation matches will not be fixed by other compression types if (Status != StatAuthError) { @@ -3202,7 +3195,7 @@ bool pkgAcqIndex::CommonFailed(std::string const &TargetURI, std::string const T } void pkgAcqIndex::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf) { - if (CommonFailed(Target.URI, Target.Description, Message, Cnf)) + if (CommonFailed(Target.URI, Message, Cnf)) return; if(Target.IsOptional && GetExpectedHashes().empty() && Stage == STAGE_DOWNLOAD) diff --git a/apt-pkg/acquire-item.h b/apt-pkg/acquire-item.h index 46d79df92..3a5a518c2 100644 --- a/apt-pkg/acquire-item.h +++ b/apt-pkg/acquire-item.h @@ -246,7 +246,9 @@ class pkgAcquire::Item : public WeakPointable /*{{{*/ APT_HIDDEN std::unordered_map<std::string, std::string> &ModifyCustomFields(); // this isn't the super nicest interface either… APT_HIDDEN bool PopAlternativeURI(std::string &NewURI); + APT_HIDDEN bool IsGoodAlternativeURI(std::string const &AltUri) const; APT_HIDDEN void PushAlternativeURI(std::string &&NewURI, std::unordered_map<std::string, std::string> &&fields, bool const at_the_back); + APT_HIDDEN void RemoveAlternativeSite(std::string &&OldSite); /** \brief A "descriptive" URI-like string. 
* @@ -405,7 +407,7 @@ class APT_HIDDEN pkgAcqTransactionItem: public pkgAcquire::Item /*{{{*/ virtual HashStringList GetExpectedHashes() const APT_OVERRIDE; virtual std::string GetMetaKey() const; virtual bool HashesRequired() const APT_OVERRIDE; - + virtual bool AcquireByHash() const; pkgAcqTransactionItem(pkgAcquire * const Owner, pkgAcqMetaClearSig * const TransactionManager, IndexTarget const &Target) APT_NONNULL(2, 3); virtual ~pkgAcqTransactionItem(); @@ -690,8 +692,8 @@ class APT_HIDDEN pkgAcqIndex : public pkgAcqBaseIndex protected: APT_HIDDEN void Init(std::string const &URI, std::string const &URIDesc, std::string const &ShortDesc); - APT_HIDDEN bool CommonFailed(std::string const &TargetURI, std::string const TargetDesc, - std::string const &Message, pkgAcquire::MethodConfig const * const Cnf); + APT_HIDDEN bool CommonFailed(std::string const &TargetURI, + std::string const &Message, pkgAcquire::MethodConfig const *const Cnf); }; /*}}}*/ struct APT_HIDDEN DiffInfo { /*{{{*/ @@ -728,11 +730,6 @@ class APT_HIDDEN pkgAcqDiffIndex : public pkgAcqIndex /** \brief If \b true, debugging information will be written to std::clog. */ bool Debug; - /** \brief A description of the Packages file (stored in - * pkgAcquire::ItemDesc::Description). - */ - std::string Description; - /** \brief Get the full pathname of the final file for the current URI */ virtual std::string GetFinalFilename() const APT_OVERRIDE; @@ -790,8 +787,6 @@ class APT_HIDDEN pkgAcqDiffIndex : public pkgAcqIndex */ class APT_HIDDEN pkgAcqIndexMergeDiffs : public pkgAcqBaseIndex { - std::string const indexURI; - protected: /** \brief If \b true, debugging output will be written to @@ -799,9 +794,6 @@ class APT_HIDDEN pkgAcqIndexMergeDiffs : public pkgAcqBaseIndex */ bool Debug; - /** \brief description of the file being downloaded. */ - std::string Description; - /** \brief information about the current patch */ struct DiffInfo const patch; @@ -837,6 +829,7 @@ class APT_HIDDEN pkgAcqIndexMergeDiffs : public pkgAcqBaseIndex virtual std::string DescURI() const APT_OVERRIDE {return Target.URI + "Index";}; virtual HashStringList GetExpectedHashes() const APT_OVERRIDE; virtual bool HashesRequired() const APT_OVERRIDE; + virtual bool AcquireByHash() const APT_OVERRIDE; /** \brief Create an index merge-diff item. * @@ -850,10 +843,9 @@ class APT_HIDDEN pkgAcqIndexMergeDiffs : public pkgAcqBaseIndex * \param allPatches contains all related items so that each item can * check if it was the last one to complete the download step */ - pkgAcqIndexMergeDiffs(pkgAcquire * const Owner, pkgAcqMetaClearSig * const TransactionManager, - IndexTarget const &Target, std::string const &indexUsedMirror, - std::string const &indexURI, DiffInfo const &patch, - std::vector<pkgAcqIndexMergeDiffs*> const * const allPatches) APT_NONNULL(2, 3, 8); + pkgAcqIndexMergeDiffs(pkgAcquire *const Owner, pkgAcqMetaClearSig *const TransactionManager, + IndexTarget const &Target, DiffInfo const &patch, + std::vector<pkgAcqIndexMergeDiffs *> const *const allPatches) APT_NONNULL(2, 3, 6); virtual ~pkgAcqIndexMergeDiffs(); }; /*}}}*/ @@ -870,8 +862,6 @@ class APT_HIDDEN pkgAcqIndexMergeDiffs : public pkgAcqBaseIndex */ class APT_HIDDEN pkgAcqIndexDiffs : public pkgAcqBaseIndex { - std::string const indexURI; - private: /** \brief Queue up the next diff download. @@ -905,9 +895,6 @@ class APT_HIDDEN pkgAcqIndexDiffs : public pkgAcqBaseIndex */ bool Debug; - /** A description of the file being downloaded. 
*/ - std::string Description; - /** The patches that remain to be downloaded, including the patch * being downloaded right now. This list should be ordered so * that each diff appears before any diff that depends on it. @@ -943,6 +930,7 @@ class APT_HIDDEN pkgAcqIndexDiffs : public pkgAcqBaseIndex virtual std::string DescURI() const APT_OVERRIDE {return Target.URI + "IndexDiffs";}; virtual HashStringList GetExpectedHashes() const APT_OVERRIDE; virtual bool HashesRequired() const APT_OVERRIDE; + virtual bool AcquireByHash() const APT_OVERRIDE; /** \brief Create an index diff item. * @@ -957,10 +945,9 @@ class APT_HIDDEN pkgAcqIndexDiffs : public pkgAcqBaseIndex * should be ordered so that each diff appears before any diff * that depends on it. */ - pkgAcqIndexDiffs(pkgAcquire * const Owner, pkgAcqMetaClearSig * const TransactionManager, - IndexTarget const &Target, - std::string const &indexUsedMirror, std::string const &indexURI, - std::vector<DiffInfo> const &diffs=std::vector<DiffInfo>()) APT_NONNULL(2, 3); + pkgAcqIndexDiffs(pkgAcquire *const Owner, pkgAcqMetaClearSig *const TransactionManager, + IndexTarget const &Target, + std::vector<DiffInfo> const &diffs = std::vector<DiffInfo>()) APT_NONNULL(2, 3); virtual ~pkgAcqIndexDiffs(); }; /*}}}*/ diff --git a/apt-pkg/acquire-worker.cc b/apt-pkg/acquire-worker.cc index d159ef84f..c2bbf8bed 100644 --- a/apt-pkg/acquire-worker.cc +++ b/apt-pkg/acquire-worker.cc @@ -321,28 +321,35 @@ bool pkgAcquire::Worker::RunMessages() Itm = nullptr; for (auto const &Owner: ItmOwners) { + for (auto alt = AltUris.crbegin(); alt != AltUris.crend(); ++alt) + Owner->PushAlternativeURI(std::string(*alt), {}, false); + pkgAcquire::ItemDesc &desc = Owner->GetItemDesc(); - if (Owner->IsRedirectionLoop(NewURI)) + // for a simplified retry a method might redirect without URI change + // see also IsRedirectionLoop implementation + if (desc.URI != NewURI) { - std::string msg = Message; - msg.append("\nFailReason: RedirectionLoop"); - Owner->Failed(msg, Config); - if (Log != nullptr) - Log->Fail(Owner->GetItemDesc()); - continue; - } + auto newuri = NewURI; + if (Owner->IsGoodAlternativeURI(newuri) == false && Owner->PopAlternativeURI(newuri) == false) + newuri.clear(); + if (newuri.empty() || Owner->IsRedirectionLoop(newuri)) + { + std::string msg = Message; + msg.append("\nFailReason: RedirectionLoop"); + Owner->Failed(msg, Config); + if (Log != nullptr) + Log->Fail(Owner->GetItemDesc()); + continue; + } - if (Log != nullptr) - Log->Done(desc); + if (Log != nullptr) + Log->Done(desc); - ChangeSiteIsMirrorChange(NewURI, desc, Owner); - desc.URI = NewURI; + ChangeSiteIsMirrorChange(NewURI, desc, Owner); + desc.URI = NewURI; + } if (isDoomedItem(Owner) == false) - { - for (auto alt = AltUris.crbegin(); alt != AltUris.crend(); ++alt) - Owner->PushAlternativeURI(std::string(*alt), {}, false); OwnerQ->Owner->Enqueue(desc); - } } break; } @@ -608,28 +615,33 @@ void pkgAcquire::Worker::HandleFailure(std::vector<pkgAcquire::Item *> const &It if (isDoomedItem(Owner) == false) OwnerQ->Owner->Enqueue(SavedDesc); } - else if (Owner->PopAlternativeURI(NewURI)) - { - Owner->FailMessage(Message); - auto &desc = Owner->GetItemDesc(); - if (Log != nullptr) - Log->Fail(desc); - ChangeSiteIsMirrorChange(NewURI, desc, Owner); - desc.URI = NewURI; - if (isDoomedItem(Owner) == false) - OwnerQ->Owner->Enqueue(desc); - } else { - if (errAuthErr && Owner->GetExpectedHashes().empty() == false) - Owner->Status = pkgAcquire::Item::StatAuthError; - else if (errTransient) - Owner->Status = 
pkgAcquire::Item::StatTransientNetworkError; - auto SavedDesc = Owner->GetItemDesc(); - if (isDoomedItem(Owner) == false) - Owner->Failed(Message, Config); - if (Log != nullptr) - Log->Fail(SavedDesc); + if (errAuthErr) + Owner->RemoveAlternativeSite(URI::SiteOnly(Owner->GetItemDesc().URI)); + if (Owner->PopAlternativeURI(NewURI)) + { + Owner->FailMessage(Message); + auto &desc = Owner->GetItemDesc(); + if (Log != nullptr) + Log->Fail(desc); + ChangeSiteIsMirrorChange(NewURI, desc, Owner); + desc.URI = NewURI; + if (isDoomedItem(Owner) == false) + OwnerQ->Owner->Enqueue(desc); + } + else + { + if (errAuthErr && Owner->GetExpectedHashes().empty() == false) + Owner->Status = pkgAcquire::Item::StatAuthError; + else if (errTransient) + Owner->Status = pkgAcquire::Item::StatTransientNetworkError; + auto SavedDesc = Owner->GetItemDesc(); + if (isDoomedItem(Owner) == false) + Owner->Failed(Message, Config); + if (Log != nullptr) + Log->Fail(SavedDesc); + } } } } diff --git a/apt-pkg/cacheiterators.h b/apt-pkg/cacheiterators.h index 52915aae4..20853061e 100644 --- a/apt-pkg/cacheiterators.h +++ b/apt-pkg/cacheiterators.h @@ -422,6 +422,7 @@ class pkgCache::RlsFileIterator : public Iterator<ReleaseFile, RlsFileIterator> inline const char *Site() const {return S->Site == 0?0:Owner->StrP + S->Site;} inline bool Flagged(pkgCache::Flag::ReleaseFileFlags const flag) const {return (S->Flags & flag) == flag; } + APT_DEPRECATED_MSG("Can be remove without replacement; it is a no-op") bool IsOk(); std::string RelStr(); @@ -457,6 +458,7 @@ class pkgCache::PkgFileIterator : public Iterator<PackageFile, PkgFileIterator> inline const char *Architecture() const {return S->Architecture == 0?0:Owner->StrP + S->Architecture;} inline const char *IndexType() const {return S->IndexType == 0?0:Owner->StrP + S->IndexType;} + APT_DEPRECATED_MSG("Can be remove without replacement; it is a no-op") bool IsOk(); std::string RelStr(); diff --git a/apt-pkg/deb/debindexfile.cc b/apt-pkg/deb/debindexfile.cc index 10ebd1d3e..f7e3c7a5c 100644 --- a/apt-pkg/deb/debindexfile.cc +++ b/apt-pkg/deb/debindexfile.cc @@ -185,28 +185,22 @@ bool debDebPkgFileIndex::GetContent(std::ostream &content, std::string const &de if(Popen((const char**)&Args[0], PipeFd, Child, FileFd::ReadOnly) == false) return _error->Error("Popen failed"); - content << "Filename: " << debfile << "\n"; - content << "Size: " << std::to_string(Buf.st_size) << "\n"; + std::string line; bool first_line_seen = false; - char buffer[1024]; - do { - unsigned long long actual = 0; - if (PipeFd.Read(buffer, sizeof(buffer)-1, &actual) == false) - return _error->Errno("read", "Failed to read dpkg pipe"); - if (actual == 0) - break; - buffer[actual] = '\0'; - char const * b = buffer; + while (PipeFd.ReadLine(line)) + { if (first_line_seen == false) { - for (; *b != '\0' && (*b == '\n' || *b == '\r'); ++b) - /* skip over leading newlines */; - if (*b == '\0') + if (line.empty()) continue; first_line_seen = true; } - content << b; - } while(true); + else if (line.empty()) + break; + content << line << "\n"; + } + content << "Filename: " << debfile << "\n"; + content << "Size: " << std::to_string(Buf.st_size) << "\n"; ExecWait(Child, "Popen"); return true; diff --git a/apt-pkg/pkgcache.cc b/apt-pkg/pkgcache.cc index 6b9c645a4..92f0cb777 100644 --- a/apt-pkg/pkgcache.cc +++ b/apt-pkg/pkgcache.cc @@ -968,13 +968,6 @@ const char * pkgCache::VerIterator::MultiArchType() const included here. 
*/ bool pkgCache::RlsFileIterator::IsOk() { - struct stat Buf; - if (stat(FileName(),&Buf) != 0) - return false; - - if (Buf.st_size != (signed)S->Size || Buf.st_mtime != S->mtime) - return false; - return true; } /*}}}*/ @@ -1002,13 +995,6 @@ string pkgCache::RlsFileIterator::RelStr() included here. */ bool pkgCache::PkgFileIterator::IsOk() { - struct stat Buf; - if (stat(FileName(),&Buf) != 0) - return false; - - if (Buf.st_size != (signed)S->Size || Buf.st_mtime != S->mtime) - return false; - return true; } /*}}}*/ diff --git a/apt-private/private-install.cc b/apt-private/private-install.cc index e1beb21c6..f90e7097f 100644 --- a/apt-private/private-install.cc +++ b/apt-private/private-install.cc @@ -564,16 +564,16 @@ static const unsigned short MOD_INSTALL = 2; bool DoCacheManipulationFromCommandLine(CommandLine &CmdL, CacheFile &Cache, int UpgradeMode) { - std::vector<std::string> VolatileCmdL; + std::vector<PseudoPkg> VolatileCmdL; return DoCacheManipulationFromCommandLine(CmdL, VolatileCmdL, Cache, UpgradeMode); } -bool DoCacheManipulationFromCommandLine(CommandLine &CmdL, std::vector<std::string> &VolatileCmdL, CacheFile &Cache, int UpgradeMode) +bool DoCacheManipulationFromCommandLine(CommandLine &CmdL, std::vector<PseudoPkg> &VolatileCmdL, CacheFile &Cache, int UpgradeMode) { std::map<unsigned short, APT::VersionSet> verset; std::set<std::string> UnknownPackages; return DoCacheManipulationFromCommandLine(CmdL, VolatileCmdL, Cache, verset, UpgradeMode, UnknownPackages); } -bool DoCacheManipulationFromCommandLine(CommandLine &CmdL, std::vector<std::string> &VolatileCmdL, CacheFile &Cache, +bool DoCacheManipulationFromCommandLine(CommandLine &CmdL, std::vector<PseudoPkg> &VolatileCmdL, CacheFile &Cache, std::map<unsigned short, APT::VersionSet> &verset, int UpgradeMode, std::set<std::string> &UnknownPackages) { // Enter the special broken fixing mode if the user specified arguments @@ -611,13 +611,18 @@ bool DoCacheManipulationFromCommandLine(CommandLine &CmdL, std::vector<std::stri for (auto const &I: VolatileCmdL) { - pkgCache::PkgIterator const P = Cache->FindPkg(I); + pkgCache::PkgIterator const P = Cache->FindPkg(I.name); if (P.end()) continue; // Set any version providing the .deb as the candidate. 
for (auto Prv = P.ProvidesList(); Prv.end() == false; Prv++) - Cache.GetDepCache()->SetCandidateVersion(Prv.OwnerVer()); + { + if (I.release.empty()) + Cache.GetDepCache()->SetCandidateVersion(Prv.OwnerVer()); + else + Cache.GetDepCache()->SetCandidateRelease(Prv.OwnerVer(), I.release); + } // via cacheset to have our usual virtual handling APT::VersionContainerInterface::FromPackage(&(verset[MOD_INSTALL]), Cache, P, APT::CacheSetHelper::CANDIDATE, helper); @@ -703,6 +708,83 @@ bool DoCacheManipulationFromCommandLine(CommandLine &CmdL, std::vector<std::stri return true; } /*}}}*/ +bool AddVolatileSourceFile(pkgSourceList *const SL, PseudoPkg &&pkg, std::vector<PseudoPkg> &VolatileCmdL)/*{{{*/ +{ + auto const ext = flExtension(pkg.name); + if (ext != "dsc" && FileExists(pkg.name + "/debian/control") == false) + return false; + std::vector<std::string> files; + SL->AddVolatileFile(pkg.name, &files); + for (auto &&f: files) + VolatileCmdL.emplace_back(std::move(f), pkg.arch, pkg.release, pkg.index); + return true; + +} + /*}}}*/ +bool AddVolatileBinaryFile(pkgSourceList *const SL, PseudoPkg &&pkg, std::vector<PseudoPkg> &VolatileCmdL)/*{{{*/ +{ + auto const ext = flExtension(pkg.name); + if (ext != "deb" && ext != "ddeb" && ext != "changes") + return false; + std::vector<std::string> files; + SL->AddVolatileFile(pkg.name, &files); + for (auto &&f: files) + VolatileCmdL.emplace_back(std::move(f), pkg.arch, pkg.release, pkg.index); + return true; +} + /*}}}*/ +static bool AddIfVolatile(pkgSourceList *const SL, std::vector<PseudoPkg> &VolatileCmdL, bool (*Add)(pkgSourceList *const, PseudoPkg &&, std::vector<PseudoPkg> &), char const * const I, std::string const &pseudoArch)/*{{{*/ +{ + if (I != nullptr && (I[0] == '/' || (I[0] == '.' && (I[1] == '\0' || (I[1] == '.' 
&& (I[2] == '\0' || I[2] == '/')) || I[1] == '/')))) + { + PseudoPkg pkg(I, pseudoArch, "", SL->GetVolatileFiles().size()); + if (FileExists(I)) // this accepts directories and symlinks, too + { + if (Add(SL, std::move(pkg), VolatileCmdL)) + ; + else + _error->Error(_("Unsupported file %s given on commandline"), I); + return true; + } + else + { + auto const found = pkg.name.rfind("/"); + if (found == pkg.name.find("/")) + _error->Error(_("Unsupported file %s given on commandline"), I); + else + { + pkg.release = pkg.name.substr(found + 1); + pkg.name.erase(found); + if (Add(SL, std::move(pkg), VolatileCmdL)) + ; + else + _error->Error(_("Unsupported file %s given on commandline"), I); + } + return true; + } + } + return false; +} + /*}}}*/ +std::vector<PseudoPkg> GetAllPackagesAsPseudo(pkgSourceList *const SL, CommandLine &CmdL, bool (*Add)(pkgSourceList *const, PseudoPkg &&, std::vector<PseudoPkg> &), std::string const &pseudoArch)/*{{{*/ +{ + std::vector<PseudoPkg> PkgCmdL; + std::for_each(CmdL.FileList + 1, CmdL.FileList + CmdL.FileSize(), [&](char const *const I) { + if (AddIfVolatile(SL, PkgCmdL, Add, I, pseudoArch) == false) + PkgCmdL.emplace_back(I, pseudoArch, "", -1); + }); + return PkgCmdL; +} + /*}}}*/ +std::vector<PseudoPkg> GetPseudoPackages(pkgSourceList *const SL, CommandLine &CmdL, bool (*Add)(pkgSourceList *const, PseudoPkg &&, std::vector<PseudoPkg> &), std::string const &pseudoArch)/*{{{*/ +{ + std::vector<PseudoPkg> VolatileCmdL; + std::remove_if(CmdL.FileList + 1, CmdL.FileList + 1 + CmdL.FileSize(), [&](char const *const I) { + return AddIfVolatile(SL, VolatileCmdL, Add, I, pseudoArch); + }); + return VolatileCmdL; +} + /*}}}*/ // DoInstall - Install packages from the command line /*{{{*/ // --------------------------------------------------------------------- /* Install named packages */ @@ -721,8 +803,7 @@ struct PkgIsExtraInstalled { bool DoInstall(CommandLine &CmdL) { CacheFile Cache; - std::vector<std::string> VolatileCmdL; - Cache.GetSourceList()->AddVolatileFiles(CmdL, &VolatileCmdL); + auto VolatileCmdL = GetPseudoPackages(Cache.GetSourceList(), CmdL, AddVolatileBinaryFile, ""); // then open the cache if (Cache.OpenForInstall() == false || @@ -945,13 +1026,21 @@ bool TryToInstall::propergateReleaseCandiateSwitching(std::list<std::pair<pkgCac c != Changed.end(); ++c) { if (c->second.end() == true) + { + auto const pkgname = c->first.ParentPkg().FullName(true); + if (APT::String::Startswith(pkgname, "builddeps:")) + continue; ioprintf(out, _("Selected version '%s' (%s) for '%s'\n"), - c->first.VerStr(), c->first.RelStr().c_str(), c->first.ParentPkg().FullName(true).c_str()); + c->first.VerStr(), c->first.RelStr().c_str(), pkgname.c_str()); + } else if (c->first.ParentPkg()->Group != c->second.ParentPkg()->Group) { + auto pkgname = c->second.ParentPkg().FullName(true); + if (APT::String::Startswith(pkgname, "builddeps:")) + pkgname.replace(0, strlen("builddeps"), "src"); pkgCache::VerIterator V = (*Cache)[c->first.ParentPkg()].CandidateVerIter(*Cache); ioprintf(out, _("Selected version '%s' (%s) for '%s' because of '%s'\n"), V.VerStr(), - V.RelStr().c_str(), V.ParentPkg().FullName(true).c_str(), c->second.ParentPkg().FullName(true).c_str()); + V.RelStr().c_str(), V.ParentPkg().FullName(true).c_str(), pkgname.c_str()); } } return Success; diff --git a/apt-private/private-install.h b/apt-private/private-install.h index 2d27756c9..39a040e7d 100644 --- a/apt-private/private-install.h +++ b/apt-private/private-install.h @@ -17,9 +17,23 @@ class pkgProblemResolver; 
APT_PUBLIC bool DoInstall(CommandLine &Cmd); -bool DoCacheManipulationFromCommandLine(CommandLine &CmdL, std::vector<std::string> &VolatileCmdL, CacheFile &Cache, +struct PseudoPkg +{ + std::string name; + std::string arch; + std::string release; + ssize_t index; + PseudoPkg(std::string const &n, std::string const &a, std::string const &r) : name(n), arch(a), release(r), index(-1) {} + PseudoPkg(std::string const &n, std::string const &a, std::string const &r, ssize_t i) : name(n), arch(a), release(r), index(i) {} +}; +std::vector<PseudoPkg> GetAllPackagesAsPseudo(pkgSourceList *const SL, CommandLine &CmdL, bool (*Add)(pkgSourceList *const, PseudoPkg &&, std::vector<PseudoPkg> &), std::string const &pseudoArch); +std::vector<PseudoPkg> GetPseudoPackages(pkgSourceList *const SL, CommandLine &CmdL, bool (*Add)(pkgSourceList *const, PseudoPkg &&, std::vector<PseudoPkg> &), std::string const &pseudoArch); +bool AddVolatileBinaryFile(pkgSourceList *const SL, PseudoPkg &&pkg, std::vector<PseudoPkg> &VolatileCmdL); +bool AddVolatileSourceFile(pkgSourceList *const SL, PseudoPkg &&pkg, std::vector<PseudoPkg> &VolatileCmdL); + +bool DoCacheManipulationFromCommandLine(CommandLine &CmdL, std::vector<PseudoPkg> &VolatileCmdL, CacheFile &Cache, std::map<unsigned short, APT::VersionSet> &verset, int UpgradeMode, std::set<std::string> &UnknownPackages); -bool DoCacheManipulationFromCommandLine(CommandLine &CmdL, std::vector<std::string> &VolatileCmdL, CacheFile &Cache, int UpgradeMode); +bool DoCacheManipulationFromCommandLine(CommandLine &CmdL, std::vector<PseudoPkg> &VolatileCmdL, CacheFile &Cache, int UpgradeMode); bool DoCacheManipulationFromCommandLine(CommandLine &CmdL, CacheFile &Cache, int UpgradeMode); APT_PUBLIC bool InstallPackages(CacheFile &Cache,bool ShwKept,bool Ask = true, diff --git a/apt-private/private-search.cc b/apt-private/private-search.cc index 52a52e522..de1b19758 100644 --- a/apt-private/private-search.cc +++ b/apt-private/private-search.cc @@ -316,7 +316,14 @@ static bool Search(CommandLine &CmdL) if (matchedAll == true) { if (ShowFull == true) - DisplayRecordV1(CacheFile, J->V, std::cout); + { + pkgCache::VerFileIterator Vf; + auto &Parser = LookupParser(Recs, J->V, Vf); + char const *Start, *Stop; + Parser.GetRec(Start, Stop); + size_t const Length = Stop - Start; + DisplayRecordV1(CacheFile, Recs, J->V, Vf, Start, Length, std::cout); + } else printf("%s - %s\n",P.Name().c_str(),P.ShortDesc().c_str()); } diff --git a/apt-private/private-show.cc b/apt-private/private-show.cc index afe448a33..15c05d420 100644 --- a/apt-private/private-show.cc +++ b/apt-private/private-show.cc @@ -20,6 +20,7 @@ #include <apt-private/private-cacheset.h> #include <apt-private/private-output.h> +#include <apt-private/private-install.h> #include <apt-private/private-show.h> #include <ostream> @@ -30,35 +31,23 @@ #include <apti18n.h> /*}}}*/ -static bool OpenPackagesFile(pkgCacheFile &CacheFile, pkgCache::VerIterator const &V,/*{{{*/ - FileFd &PkgF, pkgCache::VerFileIterator &Vf) +pkgRecords::Parser &LookupParser(pkgRecords &Recs, pkgCache::VerIterator const &V, pkgCache::VerFileIterator &Vf) /*{{{*/ { - pkgCache const * const Cache = CacheFile.GetPkgCache(); - if (unlikely(Cache == NULL)) - return false; - - // Find an appropriate file Vf = V.FileList(); for (; Vf.end() == false; ++Vf) if ((Vf.File()->Flags & pkgCache::Flag::NotSource) == 0) break; if (Vf.end() == true) Vf = V.FileList(); - - // Check and load the package list file - pkgCache::PkgFileIterator I = Vf.File(); - if (I.IsOk() == false) - 
return _error->Error(_("Package file %s is out of sync."),I.FileName()); - - // Read the record - return PkgF.Open(I.FileName(), FileFd::ReadOnly, FileFd::Extension); + return Recs.Lookup(Vf); } /*}}}*/ -static APT_PURE unsigned char const* skipDescriptionFields(unsigned char const * DescP)/*{{{*/ +static APT_PURE char const *skipDescriptionFields(char const *DescP, size_t const Length) /*{{{*/ { + auto const backup = DescP; char const * const TagName = "\nDescription"; size_t const TagLen = strlen(TagName); - while ((DescP = (unsigned char*)strchr((char*)DescP, '\n')) != NULL) + while ((DescP = static_cast<char const *>(memchr(DescP, '\n', Length - (DescP - backup)))) != nullptr) { if (DescP[1] == ' ') DescP += 2; @@ -72,114 +61,126 @@ static APT_PURE unsigned char const* skipDescriptionFields(unsigned char const * return DescP; } /*}}}*/ -bool DisplayRecordV1(pkgCacheFile &CacheFile, pkgCache::VerIterator const &V,/*{{{*/ - std::ostream &out) +static APT_PURE char const *findDescriptionField(char const *DescP, size_t const Length) /*{{{*/ { - FileFd PkgF; - pkgCache::VerFileIterator Vf; - if (OpenPackagesFile(CacheFile, V, PkgF, Vf) == false) - return false; + auto const backup = DescP; + char const * const TagName = "\nDescription"; + size_t const TagLen = strlen(TagName); + while ((DescP = static_cast<char const *>(memchr(DescP, '\n', Length - (DescP - backup)))) != nullptr) + { + if (strncmp(DescP, TagName, TagLen) == 0) + break; + else + ++DescP; + } + if (DescP != nullptr) + ++DescP; + return DescP; +} + /*}}}*/ - pkgCache * const Cache = CacheFile.GetPkgCache(); - if (unlikely(Cache == NULL)) +bool DisplayRecordV1(pkgCacheFile &, pkgRecords &Recs, /*{{{*/ + pkgCache::VerIterator const &V, pkgCache::VerFileIterator const &, + char const *Buffer, size_t Length, std::ostream &out) +{ + if (unlikely(Length == 0)) return false; - // Read the record (and ensure that it ends with a newline and NUL) - unsigned char *Buffer = new unsigned char[Cache->HeaderP->MaxVerFileSize+2]; - Buffer[Vf->Size] = '\n'; - Buffer[Vf->Size+1] = '\0'; - if (PkgF.Seek(Vf->Offset) == false || - PkgF.Read(Buffer,Vf->Size) == false) + auto const Desc = V.TranslatedDescription(); + if (Desc.end()) { - delete [] Buffer; - return false; + // we have no translation output whatever we have got + return FileFd::Write(STDOUT_FILENO, Buffer, Length); } // Get a pointer to start of Description field - const unsigned char *DescP = (unsigned char*)strstr((char*)Buffer, "\nDescription"); - if (DescP != NULL) - ++DescP; - else - DescP = Buffer + Vf->Size; + char const *DescP = findDescriptionField(Buffer, Length); + if (DescP == nullptr) + DescP = Buffer + Length; // Write all but Description - size_t const length = DescP - Buffer; - if (length != 0 && FileFd::Write(STDOUT_FILENO, Buffer, length) == false) - { - delete [] Buffer; + size_t const untilDesc = DescP - Buffer; + if (untilDesc != 0 && FileFd::Write(STDOUT_FILENO, Buffer, untilDesc) == false) return false; - } // Show the right description - pkgRecords Recs(*Cache); - pkgCache::DescIterator Desc = V.TranslatedDescription(); - if (Desc.end() == false) - { - pkgRecords::Parser &P = Recs.Lookup(Desc.FileList()); - out << "Description" << ( (strcmp(Desc.LanguageCode(),"") != 0) ? 
"-" : "" ) << Desc.LanguageCode() << ": " << P.LongDesc(); - out << std::endl << "Description-md5: " << Desc.md5() << std::endl; + char desctag[50]; + auto const langcode = Desc.LanguageCode(); + if (strcmp(langcode, "") == 0) + strcpy(desctag, "\nDescription"); + else + snprintf(desctag, sizeof(desctag), "\nDescription-%s", langcode); - // Find the first field after the description (if there is any) - DescP = skipDescriptionFields(DescP); + out << desctag + 1 << ": "; + auto const Df = Desc.FileList(); + if (Df.end() == false) + { + pkgRecords::Parser &P = Recs.Lookup(Df); + out << P.LongDesc(); } - // else we have no translation, so we found a lonely Description-md5 -> don't skip it + + out << std::endl << "Description-md5: " << Desc.md5() << std::endl; + + // Find the first field after the description (if there is any) + DescP = skipDescriptionFields(DescP, Length - (DescP - Buffer)); // write the rest of the buffer, but skip mixed in Descriptions* fields - while (DescP != NULL) + while (DescP != nullptr) { - const unsigned char * const Start = DescP; - const unsigned char *End = (unsigned char*)strstr((char*)DescP, "\nDescription"); - if (End == NULL) + char const *const Start = DescP; + char const *End = findDescriptionField(DescP, Length - (DescP - Buffer)); + if (End == nullptr) { - End = &Buffer[Vf->Size]; - DescP = NULL; + DescP = nullptr; + End = Buffer + Length - 1; + size_t endings = 0; + while (*End == '\n') + { + --End; + if (*End == '\r') + --End; + ++endings; + } + if (endings >= 1) + { + ++End; + if (*End == '\r') + ++End; + } + ++End; } else - { - ++End; // get the newline into the output - DescP = skipDescriptionFields(End + strlen("Description")); - } + DescP = skipDescriptionFields(End + strlen("Description"), Length - (End - Buffer)); + size_t const length = End - Start; if (length != 0 && FileFd::Write(STDOUT_FILENO, Start, length) == false) - { - delete [] Buffer; return false; - } } // write a final newline after the last field out << std::endl; - delete [] Buffer; return true; } /*}}}*/ -static bool DisplayRecordV2(pkgCacheFile &CacheFile, pkgCache::VerIterator const &V,/*{{{*/ - std::ostream &out) +static bool DisplayRecordV2(pkgCacheFile &CacheFile, pkgRecords &Recs, /*{{{*/ + pkgCache::VerIterator const &V, pkgCache::VerFileIterator const &Vf, + char const *Buffer, size_t const Length, std::ostream &out) { - FileFd PkgF; - pkgCache::VerFileIterator Vf; - if (OpenPackagesFile(CacheFile, V, PkgF, Vf) == false) - return false; - // Check and load the package list file pkgCache::PkgFileIterator I = Vf.File(); - if (I.IsOk() == false) - return _error->Error(_("Package file %s is out of sync."),I.FileName()); // find matching sources.list metaindex pkgSourceList *SrcList = CacheFile.GetSourceList(); pkgIndexFile *Index; if (SrcList->FindIndex(I, Index) == false && _system->FindIndex(I, Index) == false) - return _error->Error("Can not find indexfile for Package %s (%s)", - V.ParentPkg().Name(), V.VerStr()); + return _error->Error("Can not find indexfile for Package %s (%s)", + V.ParentPkg().Name(), V.VerStr()); std::string source_index_file = Index->Describe(true); // Read the record pkgTagSection Tags; - pkgTagFile TagF(&PkgF); - - if (TagF.Jump(Tags, V.FileList()->Offset) == false) + if (Tags.Scan(Buffer, Length, true) == false) return _error->Error("Internal Error, Unable to parse a package record"); // make size nice @@ -234,10 +235,6 @@ static bool DisplayRecordV2(pkgCacheFile &CacheFile, pkgCache::VerIterator const return _error->Error("Internal Error, Unable to 
parse a package record"); // write the description - pkgCache * const Cache = CacheFile.GetPkgCache(); - if (unlikely(Cache == NULL)) - return false; - pkgRecords Recs(*Cache); // FIXME: show (optionally) all available translations(?) pkgCache::DescIterator Desc = V.TranslatedDescription(); if (Desc.end() == false) @@ -245,7 +242,7 @@ static bool DisplayRecordV2(pkgCacheFile &CacheFile, pkgCache::VerIterator const pkgRecords::Parser &P = Recs.Lookup(Desc.FileList()); out << "Description: " << P.LongDesc(); } - + // write a final newline (after the description) out << std::endl << std::endl; @@ -255,26 +252,93 @@ static bool DisplayRecordV2(pkgCacheFile &CacheFile, pkgCache::VerIterator const bool ShowPackage(CommandLine &CmdL) /*{{{*/ { pkgCacheFile CacheFile; + auto VolatileCmdL = GetAllPackagesAsPseudo(CacheFile.GetSourceList(), CmdL, AddVolatileBinaryFile, ""); + + if (unlikely(CacheFile.GetPkgCache() == nullptr)) + return false; CacheSetHelperVirtuals helper(true, GlobalError::NOTICE); APT::CacheSetHelper::VerSelector const select = _config->FindB("APT::Cache::AllVersions", true) ? APT::CacheSetHelper::ALL : APT::CacheSetHelper::CANDIDATE; if (select == APT::CacheSetHelper::CANDIDATE && CacheFile.GetDepCache() == nullptr) return false; - APT::VersionList const verset = APT::VersionList::FromCommandLine(CacheFile, CmdL.FileList + 1, select, helper); + + APT::VersionList verset; + size_t normalPackages = 0; + for (auto const &I: VolatileCmdL) + { + if (I.index == -1) + { + APT::VersionContainerInterface::FromString(&verset, CacheFile, I.name, select, helper); + ++normalPackages; + } + else + { + if (select != APT::CacheSetHelper::CANDIDATE && unlikely(CacheFile.GetDepCache() == nullptr)) + return false; + pkgCache::PkgIterator const P = CacheFile->FindPkg(I.name); + if (unlikely(P.end())) + continue; + + // Set any version providing the .deb as the candidate. 
+ for (auto Prv = P.ProvidesList(); Prv.end() == false; ++Prv) + { + if (I.release.empty()) + CacheFile->SetCandidateVersion(Prv.OwnerVer()); + else + CacheFile->SetCandidateRelease(Prv.OwnerVer(), I.release); + + // via cacheset to have our usual handling + APT::VersionContainerInterface::FromPackage(&verset, CacheFile, Prv.OwnerPkg(), APT::CacheSetHelper::CANDIDATE, helper); + } + } + } + int const ShowVersion = _config->FindI("APT::Cache::Show::Version", 1); + pkgRecords Recs(CacheFile); for (APT::VersionList::const_iterator Ver = verset.begin(); Ver != verset.end(); ++Ver) + { + pkgCache::VerFileIterator Vf; + auto &Parser = LookupParser(Recs, Ver, Vf); + char const *Start, *Stop; + Parser.GetRec(Start, Stop); + size_t const Length = Stop - Start; + if (ShowVersion <= 1) { - if (DisplayRecordV1(CacheFile, Ver, std::cout) == false) + if (DisplayRecordV1(CacheFile, Recs, Ver, Vf, Start, Length, std::cout) == false) return false; } - else - if (DisplayRecordV2(CacheFile, Ver, c1out) == false) - return false; + else if (DisplayRecordV2(CacheFile, Recs, Ver, Vf, Start, Length + 1, c1out) == false) + return false; + } - if (select == APT::CacheSetHelper::CANDIDATE) + if (select == APT::CacheSetHelper::CANDIDATE && normalPackages != 0) { - APT::VersionList const verset_all = APT::VersionList::FromCommandLine(CacheFile, CmdL.FileList + 1, APT::CacheSetHelper::ALL, helper); + APT::VersionList verset_all; + for (auto const &I: VolatileCmdL) + { + if (I.index == -1) + APT::VersionContainerInterface::FromString(&verset_all, CacheFile, I.name, APT::CacheSetHelper::ALL, helper); + else + { + pkgCache::PkgIterator const P = CacheFile->FindPkg(I.name); + if (unlikely(P.end())) + continue; + + // Set any version providing the .deb as the candidate. + for (auto Prv = P.ProvidesList(); Prv.end() == false; ++Prv) + { + if (I.release.empty()) + CacheFile->SetCandidateVersion(Prv.OwnerVer()); + else + CacheFile->SetCandidateRelease(Prv.OwnerVer(), I.release); + + // via cacheset to have our usual virtual handling + APT::VersionContainerInterface::FromPackage(&verset_all, CacheFile, Prv.OwnerPkg(), APT::CacheSetHelper::CANDIDATE, helper); + } + } + } + int const records = verset_all.size() - verset.size(); if (records > 0) _error->Notice(P_("There is %i additional record. Please use the '-a' switch to see it", "There are %i additional records. 
Please use the '-a' switch to see them.", records), records); diff --git a/apt-private/private-show.h b/apt-private/private-show.h index e48979c2c..9e5fa995f 100644 --- a/apt-private/private-show.h +++ b/apt-private/private-show.h @@ -3,6 +3,7 @@ #include <apt-pkg/macros.h> #include <apt-pkg/pkgcache.h> +#include <apt-pkg/pkgrecords.h> #include <iostream> @@ -10,8 +11,12 @@ class CommandLine; class pkgCacheFile; APT_PUBLIC bool ShowPackage(CommandLine &CmdL); -APT_PUBLIC bool DisplayRecordV1(pkgCacheFile &CacheFile, pkgCache::VerIterator const &V, std::ostream &out); APT_PUBLIC bool ShowSrcPackage(CommandLine &CmdL); APT_PUBLIC bool Policy(CommandLine &CmdL); +pkgRecords::Parser &LookupParser(pkgRecords &Recs, pkgCache::VerIterator const &V, pkgCache::VerFileIterator &Vf); +bool DisplayRecordV1(pkgCacheFile &CacheFile, pkgRecords &Recs, + pkgCache::VerIterator const &V, pkgCache::VerFileIterator const &Vf, + char const *Buffer, size_t const Length, std::ostream &out); + #endif diff --git a/apt-private/private-source.cc b/apt-private/private-source.cc index 32651cfdb..c8a48a74a 100644 --- a/apt-private/private-source.cc +++ b/apt-private/private-source.cc @@ -636,15 +636,6 @@ static void WriteBuildDependencyPackage(std::ostringstream &buildDepsPkgFile, } bool DoBuildDep(CommandLine &CmdL) { - CacheFile Cache; - std::vector<std::string> VolatileCmdL; - Cache.GetSourceList()->AddVolatileFiles(CmdL, &VolatileCmdL); - - _config->Set("APT::Install-Recommends", false); - - if (CmdL.FileSize() <= 1 && VolatileCmdL.empty()) - return _error->Error(_("Must specify at least one package to check builddeps for")); - bool StripMultiArch; std::string hostArch = _config->Find("APT::Get::Host-Architecture"); if (hostArch.empty() == false) @@ -656,9 +647,19 @@ bool DoBuildDep(CommandLine &CmdL) } else StripMultiArch = true; + auto const nativeArch = _config->Find("APT::Architecture"); + std::string const pseudoArch = hostArch.empty() ? nativeArch : hostArch; + + CacheFile Cache; + auto VolatileCmdL = GetPseudoPackages(Cache.GetSourceList(), CmdL, AddVolatileSourceFile, pseudoArch); + + _config->Set("APT::Install-Recommends", false); + + if (CmdL.FileSize() <= 1 && VolatileCmdL.empty()) + return _error->Error(_("Must specify at least one package to check builddeps for")); std::ostringstream buildDepsPkgFile; - std::vector<std::pair<std::string,std::string>> pseudoPkgs; + std::vector<PseudoPkg> pseudoPkgs; // deal with the build essentials first { std::vector<pkgSrcRecords::Parser::BuildDepRec> BuildDeps; @@ -673,43 +674,42 @@ bool DoBuildDep(CommandLine &CmdL) BuildDeps.push_back(rec); } std::string const pseudo = "builddeps:essentials"; - std::string const nativeArch = _config->Find("APT::Architecture"); WriteBuildDependencyPackage(buildDepsPkgFile, pseudo, nativeArch, BuildDeps); - pseudoPkgs.emplace_back(pseudo, nativeArch); + pseudoPkgs.emplace_back(pseudo, nativeArch, ""); } // Read the source list if (Cache.BuildSourceList() == false) return false; pkgSourceList *List = Cache.GetSourceList(); - std::string const pseudoArch = hostArch.empty() ? 
_config->Find("APT::Architecture") : hostArch; - // FIXME: Avoid volatile sources == cmdline assumption { auto const VolatileSources = List->GetVolatileFiles(); - if (VolatileSources.size() == VolatileCmdL.size()) + for (auto &&pkg : VolatileCmdL) { - for (size_t i = 0; i < VolatileSources.size(); ++i) + if (unlikely(pkg.index == -1)) { - auto const Src = VolatileCmdL[i]; - if (DirectoryExists(Src)) - ioprintf(c1out, _("Note, using directory '%s' to get the build dependencies\n"), Src.c_str()); - else - ioprintf(c1out, _("Note, using file '%s' to get the build dependencies\n"), Src.c_str()); - std::unique_ptr<pkgSrcRecords::Parser> Last(VolatileSources[i]->CreateSrcParser()); - if (Last == nullptr) - return _error->Error(_("Unable to find a source package for %s"), Src.c_str()); - - std::string const pseudo = std::string("builddeps:") + Src; - WriteBuildDependencyPackage(buildDepsPkgFile, pseudo, pseudoArch, - GetBuildDeps(Last.get(), Src.c_str(), StripMultiArch, hostArch)); - pseudoPkgs.emplace_back(pseudo, pseudoArch); + _error->Error(_("Unable to find a source package for %s"), pkg.name.c_str()); + continue; } + if (DirectoryExists(pkg.name)) + ioprintf(c1out, _("Note, using directory '%s' to get the build dependencies\n"), pkg.name.c_str()); + else + ioprintf(c1out, _("Note, using file '%s' to get the build dependencies\n"), pkg.name.c_str()); + std::unique_ptr<pkgSrcRecords::Parser> Last(VolatileSources[pkg.index]->CreateSrcParser()); + if (Last == nullptr) + { + _error->Error(_("Unable to find a source package for %s"), pkg.name.c_str()); + continue; + } + + auto pseudo = std::string("builddeps:") + pkg.name; + WriteBuildDependencyPackage(buildDepsPkgFile, pseudo, pseudoArch, + GetBuildDeps(Last.get(), pkg.name.c_str(), StripMultiArch, hostArch)); + pkg.name = std::move(pseudo); + pseudoPkgs.push_back(std::move(pkg)); } - else - return _error->Error("Implementation error: Volatile sources (%lu) and" - "commandline elements (%lu) do not match!", VolatileSources.size(), - VolatileCmdL.size()); + VolatileCmdL.clear(); } bool const WantLock = _config->FindB("APT::Get::Print-URIs", false) == false; @@ -731,7 +731,13 @@ bool DoBuildDep(CommandLine &CmdL) std::string const pseudo = std::string("builddeps:") + Src; WriteBuildDependencyPackage(buildDepsPkgFile, pseudo, pseudoArch, GetBuildDeps(Last, Src.c_str(), StripMultiArch, hostArch)); - pseudoPkgs.emplace_back(pseudo, pseudoArch); + std::string reltag = *I; + size_t found = reltag.find_last_of("/"); + if (found == std::string::npos) + reltag.clear(); + else + reltag.erase(0, found + 1); + pseudoPkgs.emplace_back(pseudo, pseudoArch, std::move(reltag)); } } @@ -745,12 +751,24 @@ bool DoBuildDep(CommandLine &CmdL) { pkgDepCache::ActionGroup group(Cache); TryToInstall InstallAction(Cache, &Fix, false); + std::list<std::pair<pkgCache::VerIterator, std::string>> candSwitch; + for (auto const &pkg: pseudoPkgs) + { + pkgCache::PkgIterator const Pkg = Cache->FindPkg(pkg.name, pkg.arch); + if (Pkg.end()) + continue; + if (pkg.release.empty()) + Cache->SetCandidateVersion(Pkg.VersionList()); + else + candSwitch.emplace_back(Pkg.VersionList(), pkg.release); + } + if (candSwitch.empty() == false) + InstallAction.propergateReleaseCandiateSwitching(candSwitch, c0out); for (auto const &pkg: pseudoPkgs) { - pkgCache::PkgIterator const Pkg = Cache->FindPkg(pkg.first, pkg.second); + pkgCache::PkgIterator const Pkg = Cache->FindPkg(pkg.name, pkg.arch); if (Pkg.end()) continue; - Cache->SetCandidateVersion(Pkg.VersionList()); 
        InstallAction(Cache[Pkg].CandidateVerIter(Cache));
        removeAgain.push_back(Pkg);
      }
diff --git a/apt-private/private-upgrade.cc b/apt-private/private-upgrade.cc
index 989f6b0c1..aeaf5066b 100644
--- a/apt-private/private-upgrade.cc
+++ b/apt-private/private-upgrade.cc
@@ -19,8 +19,7 @@ static bool UpgradeHelper(CommandLine &CmdL, int UpgradeFlags)
 {
    CacheFile Cache;
-   std::vector<std::string> VolatileCmdL;
-   Cache.GetSourceList()->AddVolatileFiles(CmdL, &VolatileCmdL);
+   auto VolatileCmdL = GetPseudoPackages(Cache.GetSourceList(), CmdL, AddVolatileBinaryFile, "");
 
    if (Cache.OpenForInstall() == false || Cache.CheckDeps() == false)
       return false;
 
diff --git a/cmdline/apt-cache.cc b/cmdline/apt-cache.cc
index 085962699..f1b1e144d 100644
--- a/cmdline/apt-cache.cc
+++ b/cmdline/apt-cache.cc
@@ -470,12 +470,7 @@ static bool DumpAvail(CommandLine &)
    for (pkgCache::VerFile **J = VFList; *J != 0;)
    {
       pkgCache::PkgFileIterator File(*Cache,(*J)->File + Cache->PkgFileP);
-      if (File.IsOk() == false)
-      {
-        _error->Error(_("Package file %s is out of sync."),File.FileName());
-        break;
-      }
-
+      // FIXME: Add support for volatile/with-source files
       FileFd PkgF(File.FileName(),FileFd::ReadOnly, FileFd::Extension);
       if (_error->PendingError() == true)
        break;
diff --git a/test/integration/test-apt-get-install-deb b/test/integration/test-apt-get-install-deb
index 36e94a2dc..844f1d7c5 100755
--- a/test/integration/test-apt-get-install-deb
+++ b/test/integration/test-apt-get-install-deb
@@ -8,20 +8,26 @@ setupenvironment
 configarchitecture 'amd64' 'i386'
 
 # regression test for #754904
-testfailureequal 'E: Unsupported file /dev/null given on commandline' aptget install -qq /dev/null
-
-# only consider .deb files
 cat > foo.rpm <<EOF
 I'm not a deb, I'm a teapot.
 EOF
-testfailureequal 'E: Unsupported file ./foo.rpm given on commandline' aptget install -qq ./foo.rpm
+for exe in apt aptget; do
+    for cmd in install remove purge upgrade full-upgrade; do
+        testfailureequal 'E: Unsupported file /dev/null given on commandline' $exe $cmd -qq /dev/null
+        testfailureequal 'E: Unsupported file ./foo.rpm given on commandline' $exe $cmd -qq ./foo.rpm
+    done
+done
 
 # and ensure we fail for invalid debs
 mv foo.rpm foo.deb
-testfailuremsg "E: Sub-process Popen returned an error code (2)
+for exe in apt aptget; do
+    for cmd in install remove purge upgrade full-upgrade; do
+        testfailuremsg "E: Sub-process Popen returned an error code (2)
 E: Encountered a section with no Package: header
 E: Problem with MergeList ${TMPWORKINGDIRECTORY}/foo.deb
-E: The package lists or status file could not be parsed or opened." aptget install ./foo.deb
+E: The package lists or status file could not be parsed or opened." $exe $cmd ./foo.deb
+    done
+done
 
 buildsimplenativepackage 'foo' 'i386,amd64' '1.0'
 
@@ -40,6 +46,41 @@ The following packages have unmet dependencies:
  foo : Conflicts: foo:i386 but 1.0 is to be installed
 E: Unable to correct problems, you have held broken packages." aptget install ./incoming/foo_1.0_i386.deb ./incoming/foo_1.0_amd64.deb -s
 
+testsuccess apt show foo --with-source ./incoming/foo_1.0_amd64.deb
+testequal 'Package: foo
+Version: 1.0' grep -e '^Package:' -e '^Version:' -e '^Architecture:' rootdir/tmp/testsuccess.output
+testsuccess apt show ./incoming/foo_1.0_amd64.deb
+testequal 'Package: foo
+Version: 1.0' grep -e '^Package:' -e '^Version:' -e '^Architecture:' rootdir/tmp/testsuccess.output
+testsuccess apt show foo:i386 ./incoming/foo_1.0_amd64.deb --with-source ./incoming/foo_1.0_i386.deb
+testequal 'Package: foo:i386
+Version: 1.0
+Package: foo
+Version: 1.0' grep -e '^Package:' -e '^Version:' -e '^Architecture:' rootdir/tmp/testsuccess.output
+
+testsuccess aptcache show foo --with-source ./incoming/foo_1.0_amd64.deb
+testequal 'Package: foo
+Version: 1.0
+Architecture: amd64' grep -e '^Package:' -e '^Version:' -e '^Architecture:' rootdir/tmp/testsuccess.output
+testsuccess aptcache show ./incoming/foo_1.0_amd64.deb
+testequal 'Package: foo
+Version: 1.0
+Architecture: amd64' grep -e '^Package:' -e '^Version:' -e '^Architecture:' rootdir/tmp/testsuccess.output
+testsuccess aptcache show foo:i386 ./incoming/foo_1.0_amd64.deb --with-source ./incoming/foo_1.0_i386.deb
+testequal 'Package: foo
+Version: 1.0
+Architecture: i386
+Package: foo
+Version: 1.0
+Architecture: amd64' grep -e '^Package:' -e '^Version:' -e '^Architecture:' rootdir/tmp/testsuccess.output
+
+testsuccessequal 'Sorting...
+Full Text Search...
+foo/local-deb 1.0 amd64
+  an autogenerated dummy foo=1.0/unstable
+' apt search foo --with-source ./incoming/foo_1.0_amd64.deb
+testsuccessequal 'foo - an autogenerated dummy foo=1.0/unstable' aptcache search foo --with-source ./incoming/foo_1.0_amd64.deb
+
 testdpkgnotinstalled 'foo' 'foo:i386'
 testsuccess aptget install ./incoming/foo_1.0_i386.deb -o Debug::pkgCacheGen=1
 testdpkginstalled 'foo:i386'
@@ -124,6 +165,9 @@ createpkg 'leading-newline' '
 '
 createpkg 'trailing-newline' '' '
 '
+createpkg 'double-trailing-newline' '' '
+
+'
 
 echo 'Package: /pkg-/
 Pin: release a=experimental
@@ -132,6 +176,7 @@ Pin-Priority: 501' > rootdir/etc/apt/preferences.d/pinit
 testsuccess aptget install ./incoming/pkg-as-it-should-be_0_all.deb
 testsuccess aptget install "$(readlink -f ./incoming/pkg-leading-newline_0_all.deb)"
 testsuccess aptget install ./incoming/pkg-trailing-newline_0_all.deb
+testsuccess aptget install ./incoming/pkg-double-trailing-newline_0_all.deb
 
 testempty apt clean
 if [ "$(id -u)" = '0' ]; then
diff --git a/test/integration/test-apt-install-file-reltag b/test/integration/test-apt-install-file-reltag
new file mode 100755
index 000000000..afbf9bef9
--- /dev/null
+++ b/test/integration/test-apt-install-file-reltag
@@ -0,0 +1,94 @@
+#!/bin/sh
+set -e
+
+TESTDIR="$(readlink -f "$(dirname "$0")")"
+. "$TESTDIR/framework"
+
+setupenvironment
+configarchitecture 'i386'
+
+insertpackage 'unstable' 'foo' 'all' '2' 'Depends: foo-common (= 2)'
+insertpackage 'unstable' 'foo-common' 'all' '2'
+insertpackage 'unstable' 'baz' 'all' '1'
+insertpackage 'experimental' 'foo' 'all' '5' 'Depends: foo-common (= 5)'
+insertpackage 'experimental' 'foo-common' 'all' '5' 'Source: foo (5)'
+insertpackage 'experimental' 'baz' 'all' '2'
+setupaptarchive
+
+insertinstalledpackage 'build-essential' 'all' '1'
+
+cat > foobar.dsc <<EOF
+Format: 3.0 (native)
+Source: foobar
+Binary: foobar
+Architecture: all
+Version: 1
+Maintainer: Joe Sixpack <joe@example.org>
+Build-Depends: foo (= 5), baz
+Standards-Version: 4.1.3
+EOF
+buildsimplenativepackage 'foobar2' 'all' '1' 'unstable' 'Depends: foo (= 5), baz'
+
+ln -s "$(readlink -f ./incoming/foobar2_1_all.deb)" foobar.deb
+mkdir -p foobar
+testfailuremsg 'E: Unable to correct problems, you have held broken packages.' apt build-dep "$(readlink -f ./foobar.dsc)" -s
+testfailuremsg 'E: Unable to correct problems, you have held broken packages.' apt install "$(readlink -f ./foobar.deb)" -s
+testfailuremsg 'E: Unable to correct problems, you have held broken packages.' apt build-dep ./foobar.dsc -s
+testfailuremsg 'E: Unable to correct problems, you have held broken packages.' apt install ./foobar.deb -s
+cd foobar
+testfailuremsg 'E: Unable to correct problems, you have held broken packages.' apt build-dep ../foobar.dsc -s
+testfailuremsg 'E: Unable to correct problems, you have held broken packages.' apt install ../foobar.deb -s
+cd ..
+
+SUCCESSDSC='The following NEW packages will be installed:
+  baz foo foo-common
+0 upgraded, 3 newly installed, 0 to remove and 0 not upgraded.
+Inst baz (1 unstable [all])
+Inst foo-common (5 experimental [all])
+Inst foo (5 experimental [all])
+Conf baz (1 unstable [all])
+Conf foo-common (5 experimental [all])
+Conf foo (5 experimental [all])'
+SUCCESSDEB='The following additional packages will be installed:
+  baz foo foo-common
+The following NEW packages will be installed:
+  baz foo foo-common foobar2
+0 upgraded, 4 newly installed, 0 to remove and 0 not upgraded.
+Inst baz (1 unstable [all])
+Inst foo-common (5 experimental [all])
+Inst foo (5 experimental [all])
+Inst foobar2 (1 local-deb [all])
+Conf baz (1 unstable [all])
+Conf foo-common (5 experimental [all])
+Conf foo (5 experimental [all])
+Conf foobar2 (1 local-deb [all])'
+testsuccessequal "Note, using file '$(readlink -f ./foobar.dsc)' to get the build dependencies
+$SUCCESSDSC" apt build-dep "$(readlink -f ./foobar.dsc)/experimental" -s -q=2
+testsuccessequal "Reading package lists...
+Building dependency tree...
+Note, selecting 'foobar2' instead of '$(readlink -f ./foobar.deb)'
+$SUCCESSDEB" apt install "$(readlink -f ./foobar.deb)/experimental" -s
+testsuccessequal "Note, using file './foobar.dsc' to get the build dependencies
+$SUCCESSDSC" apt build-dep ./foobar.dsc/experimental -sq=2
+testsuccessequal "Reading package lists...
+Building dependency tree...
+Note, selecting 'foobar2' instead of './foobar.deb'
+$SUCCESSDEB" apt install "./foobar.deb/experimental" -s
+cd foobar
+testsuccessequal "Note, using file '../foobar.dsc' to get the build dependencies
+$SUCCESSDSC" apt build-dep ../foobar.dsc/experimental -sqq
+testsuccessequal "Reading package lists...
+Building dependency tree...
+Note, selecting 'foobar2' instead of '../foobar.deb'
+$SUCCESSDEB" apt install "../foobar.deb/experimental" -s
+cd ..
+
+msgmsg 'fail with' 'incorrect release'
+testfailuremsg 'E: Unable to correct problems, you have held broken packages.' apt build-dep "$(readlink -f ./foobar.dsc)/stable" -s
+testfailuremsg 'E: Unable to correct problems, you have held broken packages.' apt install "$(readlink -f ./foobar.deb)/stable" -s
+testfailuremsg 'E: Unable to correct problems, you have held broken packages.' apt build-dep ./foobar.dsc/stable -s
+testfailuremsg 'E: Unable to correct problems, you have held broken packages.' apt install ./foobar.deb/stable -s
+cd foobar
+testfailuremsg 'E: Unable to correct problems, you have held broken packages.' apt build-dep ../foobar.dsc/stable -s
+testfailuremsg 'E: Unable to correct problems, you have held broken packages.' apt install ../foobar.deb/stable -s
+cd ..
diff --git a/test/integration/test-apt-source-and-build-dep b/test/integration/test-apt-source-and-build-dep
index 7f7457217..24790a578 100755
--- a/test/integration/test-apt-source-and-build-dep
+++ b/test/integration/test-apt-source-and-build-dep
@@ -14,6 +14,10 @@ insertpackage 'wheezy' 'build-essential' 'all' '1.0'
 # a "normal" package with source and binary
 insertpackage 'unstable' 'foo' 'all' '2.0'
 insertsource 'unstable' 'foo' 'all' '2.0'
+insertpackage 'unstable' 'foo-common' 'all' '2.0' 'Source: foo (2.0)'
+insertpackage 'experimental' 'foo' 'all' '5' 'Depends: foo-common (= 5)'
+insertpackage 'experimental' 'foo-common' 'all' '5' 'Source: foo (5)'
+insertsource 'experimental' 'foo-source' 'all' '42' 'Build-Depends: foo (= 5), baz'
 
 # binary packages with Source-field
 insertpackage 'unstable,testing' 'bin' 'i386' '3-2+b1' 'Source: bin (3-2)'
@@ -40,6 +44,8 @@ insertsource 'wheezy' 'foo' 'all' '0.1'
 insertsource 'stable' 'baz' 'all' '1.0'
 insertsource 'unstable' 'baz' 'all' '2.0'
 insertsource 'unstable' 'baz' 'all' '1.5'
+insertpackage 'unstable' 'baz' 'all' '2.0'
+insertpackage 'experimental' 'baz' 'all' '5.0'
 
 # ensure we really have the situation we wanted (first 2.0 is foo above)
 testequal 'Version: 2.0
@@ -189,3 +195,56 @@ Need to get 0 B/43 B of source archives.
 Fetch source bin-backport" apt source bin/stable -s -q
 testsuccessequal "$(getbuilddep 'bin-backport' "Picking 'bin-backport' as source package instead of 'bin'
 Selected version '2-2' (stable) for bin-backport")" apt build-dep bin/stable -s
+
+testsuccess apt install foo/experimental -s
+testfailure apt build-dep foo-source -s
+testsuccessequal "Reading package lists...
+Selected version '42' (experimental) for foo-source
+Reading package lists...
+Building dependency tree...
+Selected version '5' (experimental [all]) for 'foo' because of 'src:foo-source'
+Selected version '5' (experimental [all]) for 'foo-common' because of 'foo'
+The following NEW packages will be installed:
+  baz build-essential foo foo-common
+0 upgraded, 4 newly installed, 0 to remove and 0 not upgraded.
+Inst baz (2.0 unstable [all])
+Inst build-essential (1.0 wheezy [all])
+Inst foo-common (5 experimental [all])
+Inst foo (5 experimental [all])
+Conf baz (2.0 unstable [all])
+Conf build-essential (1.0 wheezy [all])
+Conf foo-common (5 experimental [all])
+Conf foo (5 experimental [all])" apt build-dep foo-source/experimental -s
+testsuccessequal "Reading package lists...
+Selected version '42' (experimental) for foo-source
+Reading package lists...
+Building dependency tree...
+The following NEW packages will be installed:
+  baz build-essential foo foo-common
+0 upgraded, 4 newly installed, 0 to remove and 0 not upgraded.
+Inst baz (5.0 experimental [all])
+Inst build-essential (1.0 wheezy [all])
+Inst foo-common (5 experimental [all])
+Inst foo (5 experimental [all])
+Conf baz (5.0 experimental [all])
+Conf build-essential (1.0 wheezy [all])
+Conf foo-common (5 experimental [all])
+Conf foo (5 experimental [all])" apt build-dep foo-source -t experimental -s
+# this checks that mentioning the source pkg baz has no influence on the binary package baz
+testsuccessequal "Reading package lists...
+Selected version '42' (experimental) for foo-source
+Selected version '2.0' (unstable) for baz
+baz has no build depends.
+Reading package lists...
+Building dependency tree...
+The following NEW packages will be installed:
+  baz build-essential foo foo-common
+0 upgraded, 4 newly installed, 0 to remove and 0 not upgraded.
+Inst baz (5.0 experimental [all])
+Inst build-essential (1.0 wheezy [all])
+Inst foo-common (5 experimental [all])
+Inst foo (5 experimental [all])
+Conf baz (5.0 experimental [all])
+Conf build-essential (1.0 wheezy [all])
+Conf foo-common (5 experimental [all])
+Conf foo (5 experimental [all])" apt build-dep foo-source baz/unstable -t experimental -s
diff --git a/test/integration/test-partial-file-support b/test/integration/test-partial-file-support
index 9b5eed1e5..88fa91324 100755
--- a/test/integration/test-partial-file-support
+++ b/test/integration/test-partial-file-support
@@ -24,7 +24,7 @@ testdownloadfile() {
 	else
 		msgpass
 	fi
-	sed -e '/^ <- / s#%20# #g' -e '/^ <- / s#%0a#\n#g' "$DOWNLOADLOG" | grep '^.*-Hash: ' > receivedhashes.log
+	sed -e '/^ <- / s#%20# #g' -e '/^ <- / s#%0a#\n#g' "$DOWNLOADLOG" | grep '^.*-Hash: ' > receivedhashes.log || true
 	testsuccess test -s receivedhashes.log
 	local HASHES_OK=0
 	local HASHES_BAD=0
diff --git a/test/integration/test-pdiff-usage b/test/integration/test-pdiff-usage
index 5a650ad83..7cda2ee45 100755
--- a/test/integration/test-pdiff-usage
+++ b/test/integration/test-pdiff-usage
@@ -138,6 +138,8 @@ SHA256-Download:
 	mkdir -p "${BYHASH}"
 	find "${NORMAL}/" -maxdepth 1 -name "Index*" -exec mv '{}' "$BYHASH" \;
 	ln -s "${BYHASH}/Index.gz" "${BYHASH}/$(sha256sum "${BYHASH}/Index.gz" | cut -f1 -d' ')"
+	echo 'foobar' > "${BYHASH}/$(sha256sum "$PATCHFILE" | cut -f1 -d' ')"
+	echo 'foobar' > "${BYHASH}/$(sha256sum "${PATCHFILE}.gz" | cut -f1 -d' ')"
 	rm -rf rootdir/var/lib/apt/lists
 	cp -a rootdir/var/lib/apt/lists-bak rootdir/var/lib/apt/lists
 	wasmergeused "$@" -o Acquire::By-Hash=force
@@ -398,7 +400,8 @@ testcase -o Acquire::IndexTargets::deb::Packages::KeepCompressed=true
 
 partialleftovers() { generatepartialleftovers "redirectme_Packages.${LOWCOSTEXT}" "redirectme_Packages-patched.${LOWCOSTEXT}"; }
 
-webserverconfig 'aptwebserver::redirect::replace::/redirectme/' "http://0.0.0.0:${APTHTTPPORT}/"
+# redirect the InRelease file only – the other files are auto-redirected by apt
+webserverconfig 'aptwebserver::redirect::replace::/redirectme/I' "http://0.0.0.0:${APTHTTPPORT}/I"
 rewritesourceslist "http://localhost:${APTHTTPPORT}/redirectme"
 
 aptautotest_apt_update() { aptautotest_aptget_update "$@" |