From 5d88572318ed7e271101b1ae8f2cc139a1a3f705 Mon Sep 17 00:00:00 2001
From: David Kalnischkies
Date: Wed, 26 Jan 2011 16:19:30 +0100
Subject: - change the internal handling of Extensions in pkgAcqIndex
 - add a special uncompressed compression type to prefer those files
 * methods/{gzip,bzip}.cc:
 - print a good error message if FileSize() is zero

---
 apt-pkg/acquire-item.cc                            | 114 ++++++++++++---
 apt-pkg/acquire-item.h                             |   9 +-
 apt-pkg/aptconfiguration.cc                        |   8 ++
 debian/changelog                                   |   6 +-
 doc/apt.conf.5.xml                                 |   9 +-
 doc/examples/configure-index                       |   2 +-
 methods/bzip2.cc                                   |   3 +-
 methods/gzip.cc                                    |   3 +-
 .../test-bug-595691-empty-and-broken-archive-files |   9 +-
 9 files changed, 96 insertions(+), 67 deletions(-)

diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc
index 2cd6ab359..a603a3d70 100644
--- a/apt-pkg/acquire-item.cc
+++ b/apt-pkg/acquire-item.cc
@@ -622,29 +622,61 @@ pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner,
 			 HashString ExpectedHash, string comprExt)
    : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash)
 {
+   if(comprExt.empty() == true)
+   {
+      // autoselect the compression method
+      std::vector<std::string> types = APT::Configuration::getCompressionTypes();
+      for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t)
+	 comprExt.append(*t).append(" ");
+      if (comprExt.empty() == false)
+	 comprExt.erase(comprExt.end()-1);
+   }
+   CompressionExtension = comprExt;
+
+   Init(URI, URIDesc, ShortDesc);
+}
+pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner, IndexTarget const *Target,
+			 HashString const &ExpectedHash, indexRecords const *MetaIndexParser)
+   : Item(Owner), RealURI(Target->URI), ExpectedHash(ExpectedHash)
+{
+   // autoselect the compression method
+   std::vector<std::string> types = APT::Configuration::getCompressionTypes();
+   CompressionExtension = "";
+   if (ExpectedHash.empty() == false)
+   {
+      for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t)
+	 if (*t == "uncompressed" || MetaIndexParser->Exists(string(Target->MetaKey).append(".").append(*t)) == true)
+	    CompressionExtension.append(*t).append(" ");
+   }
+   else
+   {
+      for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t)
+	 CompressionExtension.append(*t).append(" ");
+   }
+   if (CompressionExtension.empty() == false)
+      CompressionExtension.erase(CompressionExtension.end()-1);
+
+   Init(Target->URI, Target->Description, Target->ShortDesc);
+}
+									/*}}}*/
+// AcqIndex::Init - deferred Constructor				/*{{{*/
+void pkgAcqIndex::Init(string const &URI, string const &URIDesc, string const &ShortDesc) {
    Decompression = false;
    Erase = false;
 
    DestFile = _config->FindDir("Dir::State::lists") + "partial/";
    DestFile += URItoFileName(URI);
 
-   if(comprExt.empty())
-   {
-      // autoselect the compression method
-      std::vector<std::string> types = APT::Configuration::getCompressionTypes();
-      if (types.empty() == true)
-	 comprExt = "plain";
-      else
-	 comprExt = "." + types[0];
-   }
-   CompressionExtension = ((comprExt == "plain" || comprExt == ".") ? "" : comprExt);
-
-   Desc.URI = URI + CompressionExtension;
+   std::string const comprExt = CompressionExtension.substr(0, CompressionExtension.find(' '));
+   if (comprExt == "uncompressed")
+      Desc.URI = URI;
+   else
+      Desc.URI = URI + '.' + comprExt;
 
    Desc.Description = URIDesc;
    Desc.Owner = this;
    Desc.ShortDesc = ShortDesc;
-      
+
    QueueURI(Desc);
 }
 									/*}}}*/
@@ -666,37 +698,18 @@ string pkgAcqIndex::Custom600Headers()
 									/*}}}*/
 void pkgAcqIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf)	/*{{{*/
 {
-   std::vector<std::string> types = APT::Configuration::getCompressionTypes();
-
-   for (std::vector<std::string>::const_iterator t = types.begin();
-	t != types.end(); t++)
+   size_t const nextExt = CompressionExtension.find(' ');
+   if (nextExt != std::string::npos)
    {
-      // jump over all already tried compression types
-      const unsigned int nameLen = Desc.URI.size() - (*t).size();
-      if(Desc.URI.substr(nameLen) != *t)
-	 continue;
-
-      // we want to try it with the next extension (and make sure to
-      // not skip over the end)
-      t++;
-      if (t == types.end())
-	 break;
-
-      // queue new download
-      Desc.URI = Desc.URI.substr(0, nameLen) + *t;
-      new pkgAcqIndex(Owner, RealURI, Desc.Description, Desc.ShortDesc,
-		      ExpectedHash, string(".").append(*t));
-
-      Status = StatDone;
-      Complete = false;
-      Dequeue();
+      CompressionExtension = CompressionExtension.substr(nextExt+1);
+      Init(RealURI, Desc.Description, Desc.ShortDesc);
       return;
    }
 
    // on decompression failure, remove bad versions in partial/
-   if(Decompression && Erase) {
+   if (Decompression && Erase) {
      string s = _config->FindDir("Dir::State::lists") + "partial/";
-     s += URItoFileName(RealURI);
+     s.append(URItoFileName(RealURI));
      unlink(s.c_str());
   }
 
@@ -773,8 +786,8 @@ void pkgAcqIndex::Done(string Message,unsigned long Size,string Hash,
       Status = StatError;
       ErrorText = "Method gave a blank filename";
    }
-   
-   string compExt = flExtension(flNotDir(URI(Desc.URI).Path));
+
+   std::string const compExt = CompressionExtension.substr(0, CompressionExtension.find(' '));
 
    // The files timestamp matches
    if (StringToBool(LookupTag(Message,"IMS-Hit"),false) == true)
    {
@@ -807,12 +820,7 @@ void pkgAcqIndex::Done(string Message,unsigned long Size,string Hash,
    // get the binary name for your used compression type
    decompProg = _config->Find(string("Acquire::CompressionTypes::").append(compExt),"");
    if(decompProg.empty() == false);
-   // flExtensions returns the full name if no extension is found
-   // this is why we have this complicated compare operation here
-   // FIMXE: add a new flJustExtension() that return "" if no
-   //        extension is found and use that above so that it can
-   //        be tested against ""
-   else if(compExt == flNotDir(URI(Desc.URI).Path))
+   else if(compExt == "uncompressed")
       decompProg = "copy";
    else {
       _error->Error("Unsupported extension: %s", compExt.c_str());
@@ -853,6 +861,15 @@ string pkgAcqIndexTrans::Custom600Headers()
 /* */
 void pkgAcqIndexTrans::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
 {
+   size_t const nextExt = CompressionExtension.find(' ');
+   if (nextExt != std::string::npos)
+   {
+      CompressionExtension = CompressionExtension.substr(nextExt+1);
+      Init(RealURI, Desc.Description, Desc.ShortDesc);
+      Status = StatIdle;
+      return;
+   }
+
    if (Cnf->LocalOnly == true ||
       StringToBool(LookupTag(Message,"Transient-Failure"),false) == false)
    {
@@ -862,7 +879,7 @@ void pkgAcqIndexTrans::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
       Dequeue();
       return;
    }
-   
+
    Item::Failed(Message,Cnf);
 }
 									/*}}}*/
@@ -1197,8 +1214,7 @@ void pkgAcqMetaIndex::QueueIndexes(bool verify)			/*{{{*/
 	 new pkgAcqDiffIndex(Owner, (*Target)->URI, (*Target)->Description,
 			     (*Target)->ShortDesc, ExpectedIndexHash);
       else
-	 new pkgAcqIndex(Owner, (*Target)->URI, (*Target)->Description,
-		       (*Target)->ShortDesc, ExpectedIndexHash);
+	 new pkgAcqIndex(Owner, *Target, ExpectedIndexHash, MetaIndexParser);
    }
 }
 									/*}}}*/
diff --git a/apt-pkg/acquire-item.h b/apt-pkg/acquire-item.h
index 581761e32..92098e3d4 100644
--- a/apt-pkg/acquire-item.h
+++ b/apt-pkg/acquire-item.h
@@ -528,8 +528,8 @@ class pkgAcqIndex : public pkgAcquire::Item
    /** \brief The expected hashsum of the decompressed index file. */
    HashString ExpectedHash;
 
-   /** \brief The compression-related file extension that is being
-    *  added to the downloaded file (e.g., ".gz" or ".bz2").
+   /** \brief The compression-related file extensions that are being
+    *  added to the downloaded file one by one if the first fails (e.g., "gz bz2").
     */
    string CompressionExtension;
 
@@ -540,7 +540,7 @@ class pkgAcqIndex : public pkgAcquire::Item
    virtual void Done(string Message,unsigned long Size,string Md5Hash,
 		     pkgAcquire::MethodConfig *Cnf);
    virtual string Custom600Headers();
-   virtual string DescURI() {return RealURI + CompressionExtension;};
+   virtual string DescURI() {return Desc.URI;};
    virtual string HashSum() {return ExpectedHash.toStr(); };
 
   /** \brief Create a pkgAcqIndex.
@@ -565,6 +565,9 @@ class pkgAcqIndex : public pkgAcquire::Item
    pkgAcqIndex(pkgAcquire *Owner,string URI,string URIDesc,
 	       string ShortDesc, HashString ExpectedHash,
 	       string compressExt="");
+   pkgAcqIndex(pkgAcquire *Owner, struct IndexTarget const * const Target,
+	       HashString const &ExpectedHash, indexRecords const *MetaIndexParser);
+   void Init(string const &URI, string const &URIDesc, string const &ShortDesc);
 };
 									/*}}}*/
 /** \brief An acquire item that is responsible for fetching a		{{{
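The heart of the change above: CompressionExtension no longer holds a single suffix but a space-separated candidate list (e.g. "bz2 gz uncompressed"), and every Failed() drops the first entry and re-runs Init() with the remainder. The following is a minimal standalone C++ sketch of that pop-front-and-retry scheme; firstExtension() and popExtension() are helpers invented for the sketch, not APT functions.

#include <iostream>
#include <string>

// Illustrative helpers (invented for this sketch, not APT code): they mirror
// the substr(0, find(' ')) and substr(find(' ') + 1) steps used by pkgAcqIndex.
static std::string firstExtension(std::string const &list)
{
   return list.substr(0, list.find(' '));
}

static bool popExtension(std::string &list)
{
   std::string::size_type const nextExt = list.find(' ');
   if (nextExt == std::string::npos)
      return false;                      // nothing left to fall back to
   list = list.substr(nextExt + 1);      // drop the extension that just failed
   return true;
}

int main()
{
   std::string extensions = "bz2 gz uncompressed";
   do
   {
      std::string const ext = firstExtension(extensions);
      // "uncompressed" means: fetch the plain file, no suffix is appended
      std::cout << "trying " << (ext == "uncompressed" ? std::string("<no suffix>") : "." + ext) << '\n';
   } while (popExtension(extensions));   // pretend every attempt failed
   return 0;
}

The special "uncompressed" entry is handled in Init() by not appending any suffix to the URI, and in Done() by handing the file to the copy method instead of a decompressor.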
diff --git a/apt-pkg/aptconfiguration.cc b/apt-pkg/aptconfiguration.cc
index 52f54073c..e97ebfed7 100644
--- a/apt-pkg/aptconfiguration.cc
+++ b/apt-pkg/aptconfiguration.cc
@@ -90,6 +90,14 @@ std::vector<std::string> const Configuration::getCompressionTypes(bool const &Cached) {
 		types.push_back(Types->Tag);
 	}
 
+	// add the special "uncompressed" type
+	if (std::find(types.begin(), types.end(), "uncompressed") == types.end())
+	{
+		string const uncompr = _config->FindFile("Dir::Bin::uncompressed", "");
+		if (uncompr.empty() == true || FileExists(uncompr) == true)
+			types.push_back("uncompressed");
+	}
+
 	return types;
 }
 									/*}}}*/
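To make the interaction of the two pieces visible, here is a small self-contained sketch (plain stand-ins, not APT's real Configuration class; the type list is made up) of how the special "uncompressed" entry joins the detected types and how the new pkgAcqIndex constructors then flatten that list into the space-separated CompressionExtension string:

#include <algorithm>
#include <iostream>
#include <string>
#include <vector>

int main()
{
   // made-up list of types gathered from Acquire::CompressionTypes
   std::vector<std::string> types;
   types.push_back("bz2");
   types.push_back("gz");
   types.push_back("lzma");

   // the new block in getCompressionTypes(): append the special type
   // only if it is not already somewhere in the list
   if (std::find(types.begin(), types.end(), "uncompressed") == types.end())
      types.push_back("uncompressed");   // appended last, so tried last

   // the constructor-side aggregation into the candidate string
   std::string CompressionExtension;
   for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t)
      CompressionExtension.append(*t).append(" ");
   if (CompressionExtension.empty() == false)
      CompressionExtension.erase(CompressionExtension.end() - 1);

   std::cout << CompressionExtension << std::endl;   // "bz2 gz lzma uncompressed"
   return 0;
}

Because this guard only appends "uncompressed" as the last entry, actually preferring plain files is done via Acquire::CompressionTypes::Order, as the apt.conf(5) and configure-index changes further down describe.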
*Target, ExpectedIndexHash, MetaIndexParser); } } /*}}}*/ diff --git a/apt-pkg/acquire-item.h b/apt-pkg/acquire-item.h index 581761e32..92098e3d4 100644 --- a/apt-pkg/acquire-item.h +++ b/apt-pkg/acquire-item.h @@ -528,8 +528,8 @@ class pkgAcqIndex : public pkgAcquire::Item /** \brief The expected hashsum of the decompressed index file. */ HashString ExpectedHash; - /** \brief The compression-related file extension that is being - * added to the downloaded file (e.g., ".gz" or ".bz2"). + /** \brief The compression-related file extensions that are being + * added to the downloaded file one by one if first fails (e.g., "gz bz2"). */ string CompressionExtension; @@ -540,7 +540,7 @@ class pkgAcqIndex : public pkgAcquire::Item virtual void Done(string Message,unsigned long Size,string Md5Hash, pkgAcquire::MethodConfig *Cnf); virtual string Custom600Headers(); - virtual string DescURI() {return RealURI + CompressionExtension;}; + virtual string DescURI() {return Desc.URI;}; virtual string HashSum() {return ExpectedHash.toStr(); }; /** \brief Create a pkgAcqIndex. @@ -565,6 +565,9 @@ class pkgAcqIndex : public pkgAcquire::Item pkgAcqIndex(pkgAcquire *Owner,string URI,string URIDesc, string ShortDesc, HashString ExpectedHash, string compressExt=""); + pkgAcqIndex(pkgAcquire *Owner, struct IndexTarget const * const Target, + HashString const &ExpectedHash, indexRecords const *MetaIndexParser); + void Init(string const &URI, string const &URIDesc, string const &ShortDesc); }; /*}}}*/ /** \brief An acquire item that is responsible for fetching a {{{ diff --git a/apt-pkg/aptconfiguration.cc b/apt-pkg/aptconfiguration.cc index 52f54073c..e97ebfed7 100644 --- a/apt-pkg/aptconfiguration.cc +++ b/apt-pkg/aptconfiguration.cc @@ -90,6 +90,14 @@ const Configuration::getCompressionTypes(bool const &Cached) { types.push_back(Types->Tag); } + // add the special "uncompressed" type + if (std::find(types.begin(), types.end(), "uncompressed") == types.end()) + { + string const uncompr = _config->FindFile("Dir::Bin::uncompressed", ""); + if (uncompr.empty() == true || FileExists(uncompr) == true) + types.push_back("uncompressed"); + } + return types; } /*}}}*/ diff --git a/debian/changelog b/debian/changelog index 2413b9c5b..7b125fc43 100644 --- a/debian/changelog +++ b/debian/changelog @@ -54,6 +54,8 @@ apt (0.8.11+wheezy) unstable; urgency=low * apt-pkg/acquire-item.cc: - don't uncompress downloaded pdiff files before feeding it to rred - try downloading clearsigned InRelease before trying Release.gpg + - change the internal handling of Extensions in pkgAcqIndex + - add a special uncompressed compression type to prefer those files * cmdline/apt-key: - don't set trustdb-name as non-root so 'list' and 'finger' can be used without being root (Closes: #393005, #592107) @@ -62,8 +64,10 @@ apt (0.8.11+wheezy) unstable; urgency=low * ftparchive/writer.cc: - add config option to search for more patterns in release command - include Index files by default in the Release file + * methods/{gzip,bzip}.cc: + - print a good error message if FileSize() is zero - -- David Kalnischkies Mon, 24 Jan 2011 15:36:50 +0100 + -- David Kalnischkies Wed, 26 Jan 2011 16:06:10 +0100 apt (0.8.10.3) unstable; urgency=low diff --git a/doc/apt.conf.5.xml b/doc/apt.conf.5.xml index a19d85dbc..a423dac24 100644 --- a/doc/apt.conf.5.xml +++ b/doc/apt.conf.5.xml @@ -442,12 +442,11 @@ DPkg::Pre-Install-Pkgs {"/usr/sbin/dpkg-preconfigure --apt";}; the bzip2 method (the inbuilt) setting is Dir::Bin::bzip2 "/bin/bzip2"; Note also that list entries 
diff --git a/doc/examples/configure-index b/doc/examples/configure-index
index 6c078d75f..fd14d4dd7 100644
--- a/doc/examples/configure-index
+++ b/doc/examples/configure-index
@@ -281,7 +281,7 @@ Acquire
     lzma "lzma";
     gz "gzip";
 
-    Order { "gz"; "lzma"; "bz2"; };
+    Order { "uncompressed"; "gz"; "lzma"; "bz2"; };
   };
 
   Languages
diff --git a/methods/bzip2.cc b/methods/bzip2.cc
index ccc3669a2..42932dded 100644
--- a/methods/bzip2.cc
+++ b/methods/bzip2.cc
@@ -56,9 +56,8 @@ bool Bzip2Method::Fetch(FetchItem *Itm)
    // Open the source and destination files
    FileFd From(Path,FileFd::ReadOnly);
 
-   // FIXME add an error message saying that empty files can't be valid archives
    if(From.FileSize() == 0)
-      return false;
+      return _error->Error(_("Empty files can't be valid archives"));
 
    int GzOut[2];
    if (pipe(GzOut) < 0)
diff --git a/methods/gzip.cc b/methods/gzip.cc
index f1c76066e..fc4e1ecfd 100644
--- a/methods/gzip.cc
+++ b/methods/gzip.cc
@@ -48,9 +48,8 @@ bool GzipMethod::Fetch(FetchItem *Itm)
    // Open the source and destination files
    FileFd From(Path,FileFd::ReadOnlyGzip);
 
-   // FIXME add an error message saying that empty files can't be valid archives
    if(From.FileSize() == 0)
-      return false;
+      return _error->Error(_("Empty files can't be valid archives"));
 
    FileFd To(Itm->DestFile,FileFd::WriteAtomic);
    To.EraseOnFailure();
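A note on the return _error->Error(...) idiom used in both methods above: the Error() call records the message on APT's global error stack and returns false, so the old "return false;" behaviour is kept while the user finally gets told why the fetch failed. Below is a rough standalone imitation of the pattern (ErrorStack is a made-up stand-in, not APT's GlobalError, and it takes a plain string instead of a printf-style format):

#include <cstdio>
#include <string>
#include <vector>

// Made-up stand-in for an error stack: Error() records a message and always
// returns false, which is what lets callers write "return Err.Error(...)".
struct ErrorStack
{
   std::vector<std::string> Messages;
   bool Error(std::string const &Msg)
   {
      Messages.push_back(Msg);
      return false;
   }
} Err;

// Mirrors the pattern in the two methods above: reject an empty input file
// with a queued message instead of a bare "return false".
bool Fetch(unsigned long long const FileSize)
{
   if (FileSize == 0)
      return Err.Error("Empty files can't be valid archives");
   return true;
}

int main()
{
   if (Fetch(0) == false)
      std::printf("E: %s\n", Err.Messages.back().c_str());
   return 0;
}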
"empty archive Packages.$COMPRESS over file" testaptgetupdate "Ign file:$(readlink -f aptarchive)/ Translation-en Get:1 file: InRelease [] Err file: Packages - Undetermined Error -W: Failed to fetch file:$(readlink -f aptarchive/Packages.$COMPRESS) Undetermined Error + Empty files can't be valid archives +W: Failed to fetch ${COMPRESSOR}:$(readlink -f aptarchive/Packages.$COMPRESS) Empty files can't be valid archives E: Some index files failed to download, they have been ignored, or old ones used instead." "empty file Packages.$COMPRESS over file" } @@ -127,8 +128,8 @@ Reading package lists..." "empty archive Packages.$COMPRESS over http" Ign http://localhost/ Translation-en Get:2 http://localhost Packages Err http://localhost Packages - Undetermined Error -W: Failed to fetch http://localhost:8080/Packages.$COMPRESS Undetermined Error + Empty files can't be valid archives +W: Failed to fetch ${COMPRESSOR}:$(readlink -f rootdir/var/lib/apt/lists/partial/localhost:8080_Packages) Empty files can't be valid archives E: Some index files failed to download, they have been ignored, or old ones used instead." "empty file Packages.$COMPRESS over http" } -- cgit v1.2.3