Diffstat:
-rw-r--r--  apt-inst/contrib/arfile.h | 2
-rw-r--r--  apt-inst/contrib/extracttar.cc | 13
-rw-r--r--  apt-inst/contrib/extracttar.h | 4
-rw-r--r--  apt-inst/deb/debfile.cc | 4
-rw-r--r--  apt-inst/deb/debfile.h | 4
-rw-r--r--  apt-inst/dirstream.cc | 1
-rw-r--r--  apt-inst/dirstream.h | 4
-rw-r--r--  apt-inst/extract.cc | 2
-rw-r--r--  apt-inst/makefile | 2
-rw-r--r--  apt-pkg/acquire-item.cc | 2224
-rw-r--r--  apt-pkg/acquire-item.h | 855
-rw-r--r--  apt-pkg/acquire-method.cc | 69
-rw-r--r--  apt-pkg/acquire-method.h | 18
-rw-r--r--  apt-pkg/acquire-worker.cc | 49
-rw-r--r--  apt-pkg/acquire.cc | 137
-rw-r--r--  apt-pkg/acquire.h | 29
-rw-r--r--  apt-pkg/algorithms.cc | 16
-rw-r--r--  apt-pkg/algorithms.h | 28
-rw-r--r--  apt-pkg/aptconfiguration.cc | 60
-rw-r--r--  apt-pkg/aptconfiguration.h | 3
-rw-r--r--  apt-pkg/cachefilter.cc | 144
-rw-r--r--  apt-pkg/cachefilter.h | 158
-rw-r--r--  apt-pkg/cacheiterators.h | 16
-rw-r--r--  apt-pkg/cacheset.cc | 374
-rw-r--r--  apt-pkg/cacheset.h | 582
-rw-r--r--  apt-pkg/cdrom.cc | 10
-rw-r--r--  apt-pkg/clean.cc | 2
-rw-r--r--  apt-pkg/clean.h | 8
-rw-r--r--  apt-pkg/contrib/cmndline.cc | 19
-rw-r--r--  apt-pkg/contrib/configuration.cc | 3
-rw-r--r--  apt-pkg/contrib/configuration.h | 16
-rw-r--r--  apt-pkg/contrib/fileutl.cc | 235
-rw-r--r--  apt-pkg/contrib/fileutl.h | 35
-rw-r--r--  apt-pkg/contrib/gpgv.cc | 63
-rw-r--r--  apt-pkg/contrib/hashes.cc | 213
-rw-r--r--  apt-pkg/contrib/hashes.h | 165
-rw-r--r--  apt-pkg/contrib/macros.h | 2
-rw-r--r--  apt-pkg/contrib/netrc.cc | 12
-rw-r--r--  apt-pkg/contrib/netrc.h | 4
-rw-r--r--  apt-pkg/contrib/strutl.cc | 26
-rw-r--r--  apt-pkg/contrib/strutl.h | 6
-rw-r--r--  apt-pkg/deb/debindexfile.cc | 191
-rw-r--r--  apt-pkg/deb/debindexfile.h | 86
-rw-r--r--  apt-pkg/deb/deblistparser.cc | 161
-rw-r--r--  apt-pkg/deb/deblistparser.h | 28
-rw-r--r--  apt-pkg/deb/debmetaindex.cc | 73
-rw-r--r--  apt-pkg/deb/debmetaindex.h | 29
-rw-r--r--  apt-pkg/deb/debrecords.cc | 103
-rw-r--r--  apt-pkg/deb/debrecords.h | 32
-rw-r--r--  apt-pkg/deb/debsrcrecords.cc | 161
-rw-r--r--  apt-pkg/deb/debsrcrecords.h | 7
-rw-r--r--  apt-pkg/deb/debsystem.h | 2
-rw-r--r--  apt-pkg/deb/dpkgpm.cc | 17
-rw-r--r--  apt-pkg/deb/dpkgpm.h | 15
-rw-r--r--  apt-pkg/depcache.cc | 16
-rw-r--r--  apt-pkg/depcache.h | 25
-rw-r--r--  apt-pkg/edsp.cc | 6
-rw-r--r--  apt-pkg/edsp/edspindexfile.cc | 2
-rw-r--r--  apt-pkg/indexcopy.cc | 10
-rw-r--r--  apt-pkg/indexcopy.h | 6
-rw-r--r--  apt-pkg/indexfile.h | 1
-rw-r--r--  apt-pkg/indexrecords.cc | 65
-rw-r--r--  apt-pkg/indexrecords.h | 47
-rw-r--r--  apt-pkg/init.cc | 10
-rw-r--r--  apt-pkg/install-progress.cc | 2
-rw-r--r--  apt-pkg/install-progress.h | 7
-rw-r--r--  apt-pkg/metaindex.h | 2
-rw-r--r--  apt-pkg/packagemanager.cc | 3
-rw-r--r--  apt-pkg/packagemanager.h | 6
-rw-r--r--  apt-pkg/pkgcache.cc | 95
-rw-r--r--  apt-pkg/pkgcache.h | 238
-rw-r--r--  apt-pkg/pkgcachegen.cc | 342
-rw-r--r--  apt-pkg/pkgcachegen.h | 59
-rw-r--r--  apt-pkg/pkgrecords.cc | 2
-rw-r--r--  apt-pkg/pkgrecords.h | 44
-rw-r--r--  apt-pkg/pkgsystem.h | 6
-rw-r--r--  apt-pkg/sourcelist.cc | 8
-rw-r--r--  apt-pkg/sourcelist.h | 16
-rw-r--r--  apt-pkg/srcrecords.h | 16
-rw-r--r--  apt-pkg/tagfile.cc | 233
-rw-r--r--  apt-pkg/tagfile.h | 70
-rw-r--r--  apt-pkg/update.cc | 4
-rw-r--r--  apt-pkg/update.h | 3
-rw-r--r--  apt-pkg/upgrade.cc | 113
-rw-r--r--  apt-pkg/upgrade.h | 14
-rw-r--r--  apt-private/acqprogress.cc | 14
-rw-r--r--  apt-private/private-cachefile.cc | 4
-rw-r--r--  apt-private/private-cachefile.h | 23
-rw-r--r--  apt-private/private-cacheset.cc | 8
-rw-r--r--  apt-private/private-cacheset.h | 25
-rw-r--r--  apt-private/private-cmndline.cc | 1
-rw-r--r--  apt-private/private-download.cc | 89
-rw-r--r--  apt-private/private-download.h | 8
-rw-r--r--  apt-private/private-install.cc | 86
-rw-r--r--  apt-private/private-install.h | 2
-rw-r--r--  apt-private/private-list.cc | 12
-rw-r--r--  apt-private/private-show.cc | 8
-rw-r--r--  apt-private/private-update.cc | 4
-rw-r--r--  apt-private/private-upgrade.cc | 4
-rw-r--r--  buildlib/config.h.in | 4
-rw-r--r--  cmdline/apt-cache.cc | 149
-rw-r--r--  cmdline/apt-dump-solver.cc | 2
-rw-r--r--  cmdline/apt-extracttemplates.cc | 17
-rw-r--r--  cmdline/apt-extracttemplates.h | 3
-rw-r--r--  cmdline/apt-get.cc | 212
-rw-r--r--  cmdline/apt-helper.cc | 10
-rw-r--r--  cmdline/apt-internal-solver.cc | 7
-rw-r--r--  cmdline/apt-key.in | 403
-rw-r--r--  configure.ac | 8
-rwxr-xr-x [-rw-r--r--]  debian/apt.postinst | 23
-rwxr-xr-x  debian/apt.postrm (renamed from debian/postrm) | 0
-rw-r--r--  debian/changelog | 207
-rw-r--r--  debian/control | 8
-rw-r--r--  debian/gbp.conf | 2
-rw-r--r--  debian/libapt-inst1.6.install.in (renamed from debian/libapt-inst1.5.install.in) | 0
-rw-r--r--  debian/libapt-inst1.6.symbols (renamed from debian/libapt-inst1.5.symbols) | 10
-rw-r--r--  debian/libapt-pkg4.14.install.in (renamed from debian/libapt-pkg4.12.install.in) | 0
-rw-r--r--  debian/libapt-pkg4.14.symbols (renamed from debian/libapt-pkg4.12.symbols) | 474
-rwxr-xr-x  debian/postinst | 39
-rw-r--r--  debian/tests/control | 4
-rw-r--r--  doc/apt-get.8.xml | 8
-rw-r--r--  doc/apt.conf.5.xml | 38
-rw-r--r--  doc/examples/configure-index | 2
-rw-r--r--  doc/po/pl.po | 8
-rw-r--r--  doc/po/pt_BR.po | 8
-rw-r--r--  ftparchive/cachedb.cc | 200
-rw-r--r--  ftparchive/cachedb.h | 35
-rw-r--r--  ftparchive/contents.cc | 10
-rw-r--r--  ftparchive/sources.h | 2
-rw-r--r--  ftparchive/writer.cc | 324
-rw-r--r--  ftparchive/writer.h | 14
-rw-r--r--  methods/copy.cc | 9
-rw-r--r--  methods/ftp.cc | 25
-rw-r--r--  methods/ftp.h | 3
-rw-r--r--  methods/gpgv.cc | 27
-rw-r--r--  methods/gzip.cc | 11
-rw-r--r--  methods/http.cc | 15
-rw-r--r--  methods/http.h | 4
-rw-r--r--  methods/http_main.cc | 4
-rw-r--r--  methods/https.cc | 16
-rw-r--r--  methods/https.h | 6
-rw-r--r--  methods/server.cc | 72
-rw-r--r--  methods/server.h | 10
-rw-r--r--  po/ChangeLog | 1077
-rw-r--r--  po/apt-all.pot | 4
-rw-r--r--  po/ar.po | 8
-rw-r--r--  po/ast.po | 12
-rw-r--r--  po/bg.po | 14
-rw-r--r--  po/bs.po | 8
-rw-r--r--  po/ca.po | 14
-rw-r--r--  po/cs.po | 20
-rw-r--r--  po/cy.po | 8
-rw-r--r--  po/da.po | 12
-rw-r--r--  po/de.po | 12
-rw-r--r--  po/dz.po | 10
-rw-r--r--  po/el.po | 12
-rw-r--r--  po/es.po | 12
-rw-r--r--  po/eu.po | 13
-rw-r--r--  po/fi.po | 13
-rw-r--r--  po/fr.po | 14
-rw-r--r--  po/gl.po | 14
-rw-r--r--  po/he.po | 6
-rw-r--r--  po/hu.po | 12
-rw-r--r--  po/it.po | 12
-rw-r--r--  po/ja.po | 58
-rw-r--r--  po/km.po | 12
-rw-r--r--  po/ko.po | 12
-rw-r--r--  po/ku.po | 10
-rw-r--r--  po/lt.po | 10
-rw-r--r--  po/mr.po | 12
-rw-r--r--  po/nb.po | 12
-rw-r--r--  po/ne.po | 13
-rw-r--r--  po/nl.po | 12
-rw-r--r--  po/nn.po | 8
-rw-r--r--  po/pl.po | 12
-rw-r--r--  po/pt.po | 12
-rw-r--r--  po/pt_BR.po | 13
-rw-r--r--  po/ro.po | 13
-rw-r--r--  po/ru.po | 12
-rw-r--r--  po/sk.po | 12
-rw-r--r--  po/sl.po | 12
-rw-r--r--  po/sv.po | 12
-rw-r--r--  po/th.po | 12
-rw-r--r--  po/tl.po | 13
-rw-r--r--  po/tr.po | 187
-rw-r--r--  po/uk.po | 12
-rw-r--r--  po/vi.po | 12
-rw-r--r--  po/zh_CN.po | 12
-rw-r--r--  po/zh_TW.po | 13
-rw-r--r--  test/integration/framework | 147
-rwxr-xr-x  test/integration/skip-aptwebserver | 25
-rwxr-xr-x  test/integration/test-allow-scores-for-all-dependency-types | 6
-rwxr-xr-x  test/integration/test-apt-by-hash-update | 49
-rwxr-xr-x  test/integration/test-apt-cdrom | 2
-rwxr-xr-x  test/integration/test-apt-ftparchive-src-cachedb | 5
-rwxr-xr-x  test/integration/test-apt-get-build-dep | 129
-rwxr-xr-x  test/integration/test-apt-get-changelog | 15
-rwxr-xr-x  test/integration/test-apt-get-clean | 35
-rwxr-xr-x  test/integration/test-apt-get-download | 54
-rwxr-xr-x  test/integration/test-apt-get-install-deb | 30
-rwxr-xr-x  test/integration/test-apt-get-source-authenticated | 2
-rwxr-xr-x  test/integration/test-apt-get-update-unauth-warning | 43
-rwxr-xr-x  test/integration/test-apt-get-upgrade | 3
-rwxr-xr-x  test/integration/test-apt-helper | 14
-rwxr-xr-x  test/integration/test-apt-key | 236
-rwxr-xr-x  test/integration/test-apt-key-net-update | 9
-rwxr-xr-x  test/integration/test-apt-progress-fd | 8
-rwxr-xr-x  test/integration/test-apt-sources-deb822 | 48
-rwxr-xr-x  test/integration/test-apt-update-expected-size | 44
-rwxr-xr-x  test/integration/test-apt-update-file | 5
-rwxr-xr-x  test/integration/test-apt-update-ims | 88
-rwxr-xr-x  test/integration/test-apt-update-nofallback | 234
-rwxr-xr-x  test/integration/test-apt-update-rollback | 195
-rwxr-xr-x  test/integration/test-apt-update-transactions | 24
-rwxr-xr-x  test/integration/test-apt-update-unauth | 47
-rwxr-xr-x  test/integration/test-bug-254770-segfault-if-cache-not-buildable | 14
-rwxr-xr-x  test/integration/test-bug-507998-dist-upgrade-recommends | 1
-rwxr-xr-x  test/integration/test-bug-591882-conkeror | 2
-rwxr-xr-x  test/integration/test-bug-595691-empty-and-broken-archive-files | 2
-rwxr-xr-x  test/integration/test-bug-596498-trusted-unsigned-repo | 2
-rwxr-xr-x  test/integration/test-bug-605394-versioned-or-groups | 1
-rwxr-xr-x  test/integration/test-bug-612099-multiarch-conflicts | 2
-rwxr-xr-x  test/integration/test-bug-617690-allow-unauthenticated-makes-all-untrusted | 5
-rwxr-xr-x  test/integration/test-bug-64141-install-dependencies-for-on-hold | 2
-rwxr-xr-x  test/integration/test-bug-657695-resolver-breaks-on-virtuals | 1
-rwxr-xr-x  test/integration/test-bug-675449-essential-are-protected | 1
-rwxr-xr-x  test/integration/test-bug-680041-apt-mark-holds-correctly | 4
-rwxr-xr-x  test/integration/test-bug-683786-build-dep-on-virtual-packages | 4
-rwxr-xr-x  test/integration/test-bug-686346-package-missing-architecture | 1
-rwxr-xr-x  test/integration/test-bug-712435-missing-descriptions | 9
-rwxr-xr-x  test/integration/test-bug-717891-abolute-uris-for-proxies | 2
-rwxr-xr-x  test/integration/test-bug-722207-print-uris-even-if-very-quiet | 8
-rwxr-xr-x  test/integration/test-bug-728500-tempdir | 3
-rwxr-xr-x  test/integration/test-bug-733028-gpg-resource-limit | 27
-rwxr-xr-x  test/integration/test-bug-735967-lib32-to-i386-unavailable | 2
-rwxr-xr-x  test/integration/test-bug-738785-switch-protocol | 10
-rwxr-xr-x  test/integration/test-bug-740843-versioned-up-down-breaks | 2
-rwxr-xr-x  test/integration/test-bug-745036-new-foreign-invalidates-cache | 29
-rwxr-xr-x  test/integration/test-bug-758153-versioned-provides-support | 1
-rwxr-xr-x  test/integration/test-compressed-indexes | 11
-rwxr-xr-x  test/integration/test-conflicts-loop | 1
-rwxr-xr-x  test/integration/test-cve-2013-1051-InRelease-parsing | 2
-rwxr-xr-x  test/integration/test-hashsum-verification | 10
-rwxr-xr-x  test/integration/test-http-pipeline-messup | 47
-rwxr-xr-x  test/integration/test-kernel-helper-autoremove | 2
-rwxr-xr-x  test/integration/test-pdiff-usage | 57
-rwxr-xr-x  test/integration/test-pin-non-existent-package | 3
-rwxr-xr-x  test/integration/test-policy-pinning | 3
-rwxr-xr-x  test/integration/test-prevent-markinstall-multiarch-same-versionscrew | 1
-rwxr-xr-x  test/integration/test-provides-gone-with-upgrade | 1
-rwxr-xr-x  test/integration/test-releasefile-verification | 13
-rwxr-xr-x  test/integration/test-resolve-by-keep-new-recommends | 1
-rwxr-xr-x  test/integration/test-sourceslist-trusted-options | 195
-rwxr-xr-x  test/integration/test-ubuntu-bug-1098738-apt-get-source-md5sum | 262
-rwxr-xr-x  test/integration/test-ubuntu-bug-1304403-obsolete-priority-standard | 2
-rwxr-xr-x  test/integration/test-ubuntu-bug-346386-apt-get-update-paywall | 11
-rwxr-xr-x  test/integration/test-ubuntu-bug-784473-InRelease-one-message-only | 4
-rwxr-xr-x  test/integration/test-ubuntu-bug-985852-pre-depends-or-group-ordering | 1
-rwxr-xr-x  test/integration/test-very-tight-loop-configure-with-unpacking-new-packages | 2
-rwxr-xr-x  test/integration/test-xorg-break-providers | 2
-rw-r--r--  test/interactive-helper/makefile | 2
-rw-r--r--  test/libapt/commandline_test.cc | 68
-rw-r--r--  test/libapt/fileutl_test.cc | 58
-rw-r--r--  test/libapt/hashsums_test.cc | 149
-rw-r--r--  test/libapt/makefile | 4
-rw-r--r--  test/libapt/strutil_test.cc | 9
-rw-r--r--  test/libapt/tagfile_test.cc | 179
267 files changed, 9777 insertions, 5494 deletions
diff --git a/apt-inst/contrib/arfile.h b/apt-inst/contrib/arfile.h
index 0f62a34a0..5aa38aedc 100644
--- a/apt-inst/contrib/arfile.h
+++ b/apt-inst/contrib/arfile.h
@@ -61,7 +61,7 @@ struct ARArchive::Member
unsigned long long Size;
// Location of the data.
- unsigned long Start;
+ unsigned long long Start;
Member *Next;
Member() : Start(0), Next(0) {};
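
Start is widened for the same reason as the other size fields in this commit: on a 32-bit build unsigned long is only 32 bits wide, so the data offset of a member in an archive larger than 4 GiB would wrap, while unsigned long long is guaranteed to be at least 64 bits everywhere. A quick standalone check of the widths on a given build (illustrative only, not apt code):

  #include <climits>
  #include <cstdio>

  int main()
  {
     // print how wide the two unsigned integer types actually are on this platform
     std::printf("unsigned long: %zu bits\n", sizeof(unsigned long) * CHAR_BIT);
     std::printf("unsigned long long: %zu bits\n", sizeof(unsigned long long) * CHAR_BIT);
     return 0;
  }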
diff --git a/apt-inst/contrib/extracttar.cc b/apt-inst/contrib/extracttar.cc
index 0ba3f0521..2c86d0d01 100644
--- a/apt-inst/contrib/extracttar.cc
+++ b/apt-inst/contrib/extracttar.cc
@@ -60,9 +60,8 @@ struct ExtractTar::TarHeader
// ExtractTar::ExtractTar - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
-ExtractTar::ExtractTar(FileFd &Fd,unsigned long Max,string DecompressionProgram) : File(Fd),
- MaxInSize(Max), DecompressProg(DecompressionProgram)
-
+ExtractTar::ExtractTar(FileFd &Fd,unsigned long long Max,string DecompressionProgram)
+ : File(Fd), MaxInSize(Max), DecompressProg(DecompressionProgram)
{
GZPid = -1;
Eof = false;
@@ -267,7 +266,7 @@ bool ExtractTar::Go(pkgDirStream &Stream)
case GNU_LongLink:
{
- unsigned long Length = Itm.Size;
+ unsigned long long Length = Itm.Size;
unsigned char Block[512];
while (Length > 0)
{
@@ -286,7 +285,7 @@ bool ExtractTar::Go(pkgDirStream &Stream)
case GNU_LongName:
{
- unsigned long Length = Itm.Size;
+ unsigned long long Length = Itm.Size;
unsigned char Block[512];
while (Length > 0)
{
@@ -315,11 +314,11 @@ bool ExtractTar::Go(pkgDirStream &Stream)
return false;
// Copy the file over the FD
- unsigned long Size = Itm.Size;
+ unsigned long long Size = Itm.Size;
while (Size != 0)
{
unsigned char Junk[32*1024];
- unsigned long Read = min(Size,(unsigned long)sizeof(Junk));
+ unsigned long Read = min(Size, (unsigned long long)sizeof(Junk));
if (InFd.Read(Junk,((Read+511)/512)*512) == false)
return false;
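
The tar extractor's Size, Length and MaxInSize are widened in the same way, which is also why the cast inside the copy loop above changes: std::min deduces a single template parameter, so both of its arguments must share a type once Size is an unsigned long long. A minimal sketch of that pattern (standalone, not apt code):

  #include <algorithm>

  int main()
  {
     unsigned long long Size = 5ULL * 1024 * 1024 * 1024; // an entry larger than 4 GiB
     unsigned char Junk[32*1024];
     // both std::min arguments now need the wider type, hence the cast on sizeof()
     unsigned long long Read = std::min(Size, (unsigned long long)sizeof(Junk));
     return Read == sizeof(Junk) ? 0 : 1;
  }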
diff --git a/apt-inst/contrib/extracttar.h b/apt-inst/contrib/extracttar.h
index 4b29df314..472e018f4 100644
--- a/apt-inst/contrib/extracttar.h
+++ b/apt-inst/contrib/extracttar.h
@@ -39,7 +39,7 @@ class ExtractTar
GNU_LongLink = 'K',GNU_LongName = 'L'};
FileFd &File;
- unsigned long MaxInSize;
+ unsigned long long MaxInSize;
int GZPid;
FileFd InFd;
bool Eof;
@@ -53,7 +53,7 @@ class ExtractTar
bool Go(pkgDirStream &Stream);
- ExtractTar(FileFd &Fd,unsigned long Max,std::string DecompressionProgram);
+ ExtractTar(FileFd &Fd,unsigned long long Max,std::string DecompressionProgram);
virtual ~ExtractTar();
};
diff --git a/apt-inst/deb/debfile.cc b/apt-inst/deb/debfile.cc
index a63cb6716..4853a13c7 100644
--- a/apt-inst/deb/debfile.cc
+++ b/apt-inst/deb/debfile.cc
@@ -203,7 +203,7 @@ bool debDebFile::MemControlExtract::DoItem(Item &Itm,int &Fd)
/* Just memcopy the block from the tar extractor and put it in the right
place in the pre-allocated memory block. */
bool debDebFile::MemControlExtract::Process(Item &/*Itm*/,const unsigned char *Data,
- unsigned long Size,unsigned long Pos)
+ unsigned long long Size,unsigned long long Pos)
{
memcpy(Control + Pos, Data,Size);
return true;
@@ -232,7 +232,7 @@ bool debDebFile::MemControlExtract::Read(debDebFile &Deb)
// ---------------------------------------------------------------------
/* The given memory block is loaded into the parser and parsed as a control
record. */
-bool debDebFile::MemControlExtract::TakeControl(const void *Data,unsigned long Size)
+bool debDebFile::MemControlExtract::TakeControl(const void *Data,unsigned long long Size)
{
delete [] Control;
Control = new char[Size+2];
diff --git a/apt-inst/deb/debfile.h b/apt-inst/deb/debfile.h
index 880bcf6c5..b068efcec 100644
--- a/apt-inst/deb/debfile.h
+++ b/apt-inst/deb/debfile.h
@@ -81,12 +81,12 @@ class debDebFile::MemControlExtract : public pkgDirStream
// Members from DirStream
virtual bool DoItem(Item &Itm,int &Fd);
virtual bool Process(Item &Itm,const unsigned char *Data,
- unsigned long Size,unsigned long Pos);
+ unsigned long long Size,unsigned long long Pos);
// Helpers
bool Read(debDebFile &Deb);
- bool TakeControl(const void *Data,unsigned long Size);
+ bool TakeControl(const void *Data,unsigned long long Size);
MemControlExtract() : IsControl(false), Control(0), Length(0), Member("control") {};
MemControlExtract(std::string Member) : IsControl(false), Control(0), Length(0), Member(Member) {};
diff --git a/apt-inst/dirstream.cc b/apt-inst/dirstream.cc
index 39ebb3bb4..888020bfb 100644
--- a/apt-inst/dirstream.cc
+++ b/apt-inst/dirstream.cc
@@ -76,7 +76,6 @@ bool pkgDirStream::DoItem(Item &Itm,int &Fd)
if(mkdir(Itm.Name,Itm.Mode) < 0)
return false;
return true;
- break;
}
case Item::FIFO:
break;
diff --git a/apt-inst/dirstream.h b/apt-inst/dirstream.h
index 1be2688a1..571fe86a5 100644
--- a/apt-inst/dirstream.h
+++ b/apt-inst/dirstream.h
@@ -37,10 +37,10 @@ class pkgDirStream
Directory, FIFO} Type;
char *Name;
char *LinkTarget;
+ unsigned long long Size;
unsigned long Mode;
unsigned long UID;
unsigned long GID;
- unsigned long Size;
unsigned long MTime;
unsigned long Major;
unsigned long Minor;
@@ -50,7 +50,7 @@ class pkgDirStream
virtual bool Fail(Item &Itm,int Fd);
virtual bool FinishedFile(Item &Itm,int Fd);
virtual bool Process(Item &/*Itm*/,const unsigned char * /*Data*/,
- unsigned long /*Size*/,unsigned long /*Pos*/) {return true;};
+ unsigned long long /*Size*/,unsigned long long /*Pos*/) {return true;};
virtual ~pkgDirStream() {};
};
diff --git a/apt-inst/extract.cc b/apt-inst/extract.cc
index b60784450..026182c18 100644
--- a/apt-inst/extract.cc
+++ b/apt-inst/extract.cc
@@ -404,7 +404,7 @@ bool pkgExtract::HandleOverwrites(pkgFLCache::NodeIterator Nde,
// Now see if this package matches one in a replace depends
pkgCache::DepIterator Dep = Ver.DependsList();
bool Ok = false;
- for (; Dep.end() == false; Dep++)
+ for (; Dep.end() == false; ++Dep)
{
if (Dep->Type != pkgCache::Dep::Replaces)
continue;
diff --git a/apt-inst/makefile b/apt-inst/makefile
index af887bba8..e4a3ae702 100644
--- a/apt-inst/makefile
+++ b/apt-inst/makefile
@@ -14,7 +14,7 @@ include ../buildlib/libversion.mak
# The library name
LIBRARY=apt-inst
-MAJOR=1.5
+MAJOR=1.6
MINOR=0
SLIBS=$(PTHREADLIB) -lapt-pkg
APT_DOMAIN:=libapt-inst$(MAJOR)
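
The acquire-item rework that follows (together with the large hashes.cc/hashes.h diff listed in the diffstat) replaces the single expected HashString per download item with a HashStringList holding one entry per supported hash type. A minimal usage sketch of that interface as it appears in the hunks below, assuming the installed libapt-pkg development headers; the file name and digest are placeholders:

  #include <apt-pkg/fileutl.h>
  #include <apt-pkg/hashes.h>
  #include <iostream>

  int main()
  {
     // hash a local file with all supported algorithms at once, the way
     // pkgAcqDiffIndex::ParseDiffIndex handles the current Packages file
     FileFd fd("Packages", FileFd::ReadOnly);
     Hashes calc;
     calc.AddFD(fd);
     HashStringList const local = calc.GetHashStringList();

     // an expected list, e.g. parsed from a Release file (placeholder digest)
     HashStringList expected;
     expected.push_back(HashString("SHA256", "0123456789abcdef"));

     // the comparisons the new acquire code relies on
     if (expected.usable() == false || expected.VerifyFile("Packages") == false)
        std::cout << "hash mismatch, fall back to a full download" << std::endl;

     for (HashStringList::const_iterator hs = local.begin(); hs != local.end(); ++hs)
        std::cout << "\t- " << hs->toStr() << std::endl;
     return 0;
  }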
diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc
index 253cbdaf7..5187738e9 100644
--- a/apt-pkg/acquire-item.cc
+++ b/apt-pkg/acquire-item.cc
@@ -44,22 +44,72 @@
#include <sstream>
#include <stdio.h>
#include <ctime>
+#include <sys/types.h>
+#include <pwd.h>
+#include <grp.h>
#include <apti18n.h>
/*}}}*/
using namespace std;
+static void printHashSumComparision(std::string const &URI, HashStringList const &Expected, HashStringList const &Actual) /*{{{*/
+{
+ if (_config->FindB("Debug::Acquire::HashSumMismatch", false) == false)
+ return;
+ std::cerr << std::endl << URI << ":" << std::endl << " Expected Hash: " << std::endl;
+ for (HashStringList::const_iterator hs = Expected.begin(); hs != Expected.end(); ++hs)
+ std::cerr << "\t- " << hs->toStr() << std::endl;
+ std::cerr << " Actual Hash: " << std::endl;
+ for (HashStringList::const_iterator hs = Actual.begin(); hs != Actual.end(); ++hs)
+ std::cerr << "\t- " << hs->toStr() << std::endl;
+}
+ /*}}}*/
+static void ChangeOwnerAndPermissionOfFile(char const * const requester, char const * const file, char const * const user, char const * const group, mode_t const mode)
+{
+ // ensure the file is owned by root and has good permissions
+ struct passwd const * const pw = getpwnam(user);
+ struct group const * const gr = getgrnam(group);
+ if (getuid() == 0) // if we aren't root, we can't chown, so don't try it
+ {
+ if (pw != NULL && gr != NULL && chown(file, pw->pw_uid, gr->gr_gid) != 0)
+ _error->WarningE(requester, "chown to %s:%s of file %s failed", user, group, file);
+ }
+ if (chmod(file, mode) != 0)
+ _error->WarningE(requester, "chmod 0%o of file %s failed", mode, file);
+}
+static std::string GetPartialFileName(std::string const &file)
+{
+ std::string DestFile = _config->FindDir("Dir::State::lists") + "partial/";
+ DestFile += file;
+ return DestFile;
+}
+static std::string GetPartialFileNameFromURI(std::string const &uri)
+{
+ return GetPartialFileName(URItoFileName(uri));
+}
+
+
// Acquire::Item::Item - Constructor /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-pkgAcquire::Item::Item(pkgAcquire *Owner) : Owner(Owner), FileSize(0),
- PartialSize(0), Mode(0), ID(0), Complete(false),
- Local(false), QueueCounter(0)
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
+pkgAcquire::Item::Item(pkgAcquire *Owner,
+ HashStringList const &ExpectedHashes,
+ pkgAcqMetaBase *TransactionManager)
+ : Owner(Owner), FileSize(0), PartialSize(0), Mode(0), ID(0), Complete(false),
+ Local(false), QueueCounter(0), TransactionManager(TransactionManager),
+ ExpectedAdditionalItems(0), ExpectedHashes(ExpectedHashes)
{
Owner->Add(this);
Status = StatIdle;
+ if(TransactionManager != NULL)
+ TransactionManager->Add(this);
}
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
/*}}}*/
// Acquire::Item::~Item - Destructor /*{{{*/
// ---------------------------------------------------------------------
@@ -75,15 +125,15 @@ pkgAcquire::Item::~Item()
fetch this object */
void pkgAcquire::Item::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
{
- Status = StatIdle;
- ErrorText = LookupTag(Message,"Message");
+ if(ErrorText == "")
+ ErrorText = LookupTag(Message,"Message");
UsedMirror = LookupTag(Message,"UsedMirror");
if (QueueCounter <= 1)
{
/* This indicates that the file is not available right now but might
be sometime later. If we do a retry cycle then this should be
retried [CDROMs] */
- if (Cnf->LocalOnly == true &&
+ if (Cnf != NULL && Cnf->LocalOnly == true &&
StringToBool(LookupTag(Message,"Transient-Failure"),false) == true)
{
Status = StatIdle;
@@ -92,11 +142,18 @@ void pkgAcquire::Item::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
}
Status = StatError;
+ Complete = false;
Dequeue();
- }
+ }
+ else
+ Status = StatIdle;
- // report mirror failure back to LP if we actually use a mirror
+ // check fail reason
string FailReason = LookupTag(Message, "FailReason");
+ if(FailReason == "MaximumSizeExceeded")
+ Rename(DestFile, DestFile+".FAILED");
+
+ // report mirror failure back to LP if we actually use a mirror
if(FailReason.size() != 0)
ReportMirrorFailure(FailReason);
else
@@ -117,12 +174,12 @@ void pkgAcquire::Item::Start(string /*Message*/,unsigned long long Size)
// Acquire::Item::Done - Item downloaded OK /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgAcquire::Item::Done(string Message,unsigned long long Size,string /*Hash*/,
+void pkgAcquire::Item::Done(string Message,unsigned long long Size,HashStringList const &/*Hash*/,
pkgAcquire::MethodConfig * /*Cnf*/)
{
// We just downloaded something..
string FileName = LookupTag(Message,"Filename");
- UsedMirror = LookupTag(Message,"UsedMirror");
+ UsedMirror = LookupTag(Message,"UsedMirror");
if (Complete == false && !Local && FileName == DestFile)
{
if (Owner->Log != 0)
@@ -140,7 +197,7 @@ void pkgAcquire::Item::Done(string Message,unsigned long long Size,string /*Hash
// ---------------------------------------------------------------------
/* This helper function is used by a lot of item methods as their final
step */
-void pkgAcquire::Item::Rename(string From,string To)
+bool pkgAcquire::Item::Rename(string From,string To)
{
if (rename(From.c_str(),To.c_str()) != 0)
{
@@ -148,10 +205,28 @@ void pkgAcquire::Item::Rename(string From,string To)
snprintf(S,sizeof(S),_("rename failed, %s (%s -> %s)."),strerror(errno),
From.c_str(),To.c_str());
Status = StatError;
- ErrorText = S;
+ ErrorText += S;
+ return false;
}
+ return true;
}
/*}}}*/
+
+void pkgAcquire::Item::QueueURI(ItemDesc &Item)
+{
+ if (RealFileExists(DestFile))
+ {
+ std::string SandboxUser = _config->Find("APT::Sandbox::User");
+ ChangeOwnerAndPermissionOfFile("GetPartialFileName", DestFile.c_str(),
+ SandboxUser.c_str(), "root", 0600);
+ }
+ Owner->Enqueue(Item);
+}
+void pkgAcquire::Item::Dequeue()
+{
+ Owner->Dequeue(this);
+}
+
bool pkgAcquire::Item::RenameOnError(pkgAcquire::Item::RenameOnErrorState const error)/*{{{*/
{
if(FileExists(DestFile))
@@ -174,10 +249,31 @@ bool pkgAcquire::Item::RenameOnError(pkgAcquire::Item::RenameOnErrorState const
Status = StatError;
// do not report as usually its not the mirrors fault, but Portal/Proxy
break;
+ case SignatureError:
+ ErrorText = _("Signature error");
+ Status = StatError;
+ break;
+ case NotClearsigned:
+ ErrorText = _("Does not start with a cleartext signature");
+ Status = StatError;
+ break;
}
return false;
}
/*}}}*/
+void pkgAcquire::Item::SetActiveSubprocess(const std::string &subprocess)/*{{{*/
+{
+ ActiveSubprocess = subprocess;
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
+ Mode = ActiveSubprocess.c_str();
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
+}
+ /*}}}*/
// Acquire::Item::ReportMirrorFailure /*{{{*/
// ---------------------------------------------------------------------
void pkgAcquire::Item::ReportMirrorFailure(string FailCode)
@@ -222,118 +318,6 @@ void pkgAcquire::Item::ReportMirrorFailure(string FailCode)
}
}
/*}}}*/
-// AcqSubIndex::AcqSubIndex - Constructor /*{{{*/
-// ---------------------------------------------------------------------
-/* Get a sub-index file based on checksums from a 'master' file and
- possibly query additional files */
-pkgAcqSubIndex::pkgAcqSubIndex(pkgAcquire *Owner, string const &URI,
- string const &URIDesc, string const &ShortDesc,
- HashString const &ExpectedHash)
- : Item(Owner), ExpectedHash(ExpectedHash)
-{
- /* XXX: Beware: Currently this class does nothing (of value) anymore ! */
- Debug = _config->FindB("Debug::pkgAcquire::SubIndex",false);
-
- DestFile = _config->FindDir("Dir::State::lists") + "partial/";
- DestFile += URItoFileName(URI);
-
- Desc.URI = URI;
- Desc.Description = URIDesc;
- Desc.Owner = this;
- Desc.ShortDesc = ShortDesc;
-
- QueueURI(Desc);
-
- if(Debug)
- std::clog << "pkgAcqSubIndex: " << Desc.URI << std::endl;
-}
- /*}}}*/
-// AcqSubIndex::Custom600Headers - Insert custom request headers /*{{{*/
-// ---------------------------------------------------------------------
-/* The only header we use is the last-modified header. */
-string pkgAcqSubIndex::Custom600Headers()
-{
- string Final = _config->FindDir("Dir::State::lists");
- Final += URItoFileName(Desc.URI);
-
- struct stat Buf;
- if (stat(Final.c_str(),&Buf) != 0)
- return "\nIndex-File: true\nFail-Ignore: true\n";
- return "\nIndex-File: true\nFail-Ignore: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
-}
- /*}}}*/
-void pkgAcqSubIndex::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)/*{{{*/
-{
- if(Debug)
- std::clog << "pkgAcqSubIndex failed: " << Desc.URI << " with " << Message << std::endl;
-
- Complete = false;
- Status = StatDone;
- Dequeue();
-
- // No good Index is provided
-}
- /*}}}*/
-void pkgAcqSubIndex::Done(string Message,unsigned long long Size,string Md5Hash, /*{{{*/
- pkgAcquire::MethodConfig *Cnf)
-{
- if(Debug)
- std::clog << "pkgAcqSubIndex::Done(): " << Desc.URI << std::endl;
-
- string FileName = LookupTag(Message,"Filename");
- if (FileName.empty() == true)
- {
- Status = StatError;
- ErrorText = "Method gave a blank filename";
- return;
- }
-
- if (FileName != DestFile)
- {
- Local = true;
- Desc.URI = "copy:" + FileName;
- QueueURI(Desc);
- return;
- }
-
- Item::Done(Message,Size,Md5Hash,Cnf);
-
- string FinalFile = _config->FindDir("Dir::State::lists")+URItoFileName(Desc.URI);
-
- /* Downloaded invalid transindex => Error (LP: #346386) (Closes: #627642) */
- indexRecords SubIndexParser;
- if (FileExists(DestFile) == true && !SubIndexParser.Load(DestFile)) {
- Status = StatError;
- ErrorText = SubIndexParser.ErrorText;
- return;
- }
-
- // success in downloading the index
- // rename the index
- if(Debug)
- std::clog << "Renaming: " << DestFile << " -> " << FinalFile << std::endl;
- Rename(DestFile,FinalFile);
- chmod(FinalFile.c_str(),0644);
- DestFile = FinalFile;
-
- if(ParseIndex(DestFile) == false)
- return Failed("", NULL);
-
- Complete = true;
- Status = StatDone;
- Dequeue();
- return;
-}
- /*}}}*/
-bool pkgAcqSubIndex::ParseIndex(string const &IndexFile) /*{{{*/
-{
- indexRecords SubIndexParser;
- if (FileExists(IndexFile) == false || SubIndexParser.Load(IndexFile) == false)
- return false;
- // so something with the downloaded index
- return true;
-}
- /*}}}*/
// AcqDiffIndex::AcqDiffIndex - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* Get the DiffIndex file first and see if there are patches available
@@ -342,21 +326,23 @@ bool pkgAcqSubIndex::ParseIndex(string const &IndexFile) /*{{{*/
* the original packages file
*/
pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire *Owner,
- string URI,string URIDesc,string ShortDesc,
- HashString ExpectedHash)
- : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash),
- Description(URIDesc)
+ pkgAcqMetaBase *TransactionManager,
+ IndexTarget const * const Target,
+ HashStringList const &ExpectedHashes,
+ indexRecords *MetaIndexParser)
+ : pkgAcqBaseIndex(Owner, TransactionManager, Target, ExpectedHashes,
+ MetaIndexParser), PackagesFileReadyInPartial(false)
{
Debug = _config->FindB("Debug::pkgAcquire::Diffs",false);
- Desc.Description = URIDesc + "/DiffIndex";
+ RealURI = Target->URI;
Desc.Owner = this;
- Desc.ShortDesc = ShortDesc;
- Desc.URI = URI + ".diff/Index";
+ Desc.Description = Target->Description + ".diff/Index";
+ Desc.ShortDesc = Target->ShortDesc;
+ Desc.URI = Target->URI + ".diff/Index";
- DestFile = _config->FindDir("Dir::State::lists") + "partial/";
- DestFile += URItoFileName(Desc.URI);
+ DestFile = GetPartialFileNameFromURI(Desc.URI);
if(Debug)
std::clog << "pkgAcqDiffIndex: " << Desc.URI << std::endl;
@@ -372,9 +358,7 @@ pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire *Owner,
Desc.URI.substr(0,strlen("file:/")) == "file:/")
{
// we don't have a pkg file or we don't want to queue
- if(Debug)
- std::clog << "No index file, local or canceld by user" << std::endl;
- Failed("", NULL);
+ Failed("No index file, local or canceld by user", NULL);
return;
}
@@ -389,11 +373,11 @@ pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire *Owner,
// AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/
// ---------------------------------------------------------------------
/* The only header we use is the last-modified header. */
-string pkgAcqDiffIndex::Custom600Headers()
+string pkgAcqDiffIndex::Custom600Headers() const
{
string Final = _config->FindDir("Dir::State::lists");
Final += URItoFileName(Desc.URI);
-
+
if(Debug)
std::clog << "Custom600Header-IMS: " << Final << std::endl;
@@ -406,190 +390,333 @@ string pkgAcqDiffIndex::Custom600Headers()
/*}}}*/
bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile) /*{{{*/
{
+ // failing here is fine: our caller will take care of trying to
+ // get the complete file if patching fails
if(Debug)
std::clog << "pkgAcqDiffIndex::ParseIndexDiff() " << IndexDiffFile
<< std::endl;
- pkgTagSection Tags;
- string ServerSha1;
- vector<DiffInfo> available_patches;
-
FileFd Fd(IndexDiffFile,FileFd::ReadOnly);
pkgTagFile TF(&Fd);
if (_error->PendingError() == true)
return false;
- if(TF.Step(Tags) == true)
+ pkgTagSection Tags;
+ if(unlikely(TF.Step(Tags) == false))
+ return false;
+
+ HashStringList ServerHashes;
+ unsigned long long ServerSize = 0;
+
+ for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type)
{
- bool found = false;
- DiffInfo d;
- string size;
+ std::string tagname = *type;
+ tagname.append("-Current");
+ std::string const tmp = Tags.FindS(tagname.c_str());
+ if (tmp.empty() == true)
+ continue;
- string const tmp = Tags.FindS("SHA1-Current");
+ string hash;
+ unsigned long long size;
std::stringstream ss(tmp);
- ss >> ServerSha1 >> size;
- unsigned long const ServerSize = atol(size.c_str());
+ ss >> hash >> size;
+ if (unlikely(hash.empty() == true))
+ continue;
+ if (unlikely(ServerSize != 0 && ServerSize != size))
+ continue;
+ ServerHashes.push_back(HashString(*type, hash));
+ ServerSize = size;
+ }
- FileFd fd(CurrentPackagesFile, FileFd::ReadOnly);
- SHA1Summation SHA1;
- SHA1.AddFD(fd);
- string const local_sha1 = SHA1.Result();
+ if (ServerHashes.usable() == false)
+ {
+ if (Debug == true)
+ std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": Did not find a good hashsum in the index" << std::endl;
+ return false;
+ }
- if(local_sha1 == ServerSha1)
+ if (ServerHashes != HashSums())
+ {
+ if (Debug == true)
{
- // we have the same sha1 as the server so we are done here
- if(Debug)
- std::clog << "Package file is up-to-date" << std::endl;
- // list cleanup needs to know that this file as well as the already
- // present index is ours, so we create an empty diff to save it for us
- new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc,
- ExpectedHash, ServerSha1, available_patches);
- return true;
+ std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": Index has different hashes than parser, probably older, so fail pdiffing" << std::endl;
+ printHashSumComparision(CurrentPackagesFile, ServerHashes, HashSums());
}
- else
+ return false;
+ }
+
+ if (ServerHashes.VerifyFile(CurrentPackagesFile) == true)
+ {
+ // we have the same sha1 as the server so we are done here
+ if(Debug)
+ std::clog << "pkgAcqDiffIndex: Package file " << CurrentPackagesFile << " is up-to-date" << std::endl;
+
+ // list cleanup needs to know that this file as well as the already
+ // present index is ours, so we create an empty diff to save it for us
+ new pkgAcqIndexDiffs(Owner, TransactionManager, Target,
+ ExpectedHashes, MetaIndexParser);
+ return true;
+ }
+
+ FileFd fd(CurrentPackagesFile, FileFd::ReadOnly);
+ Hashes LocalHashesCalc;
+ LocalHashesCalc.AddFD(fd);
+ HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList();
+
+ if(Debug)
+ std::clog << "Server-Current: " << ServerHashes.find(NULL)->toStr() << " and we start at "
+ << fd.Name() << " " << fd.FileSize() << " " << LocalHashes.find(NULL)->toStr() << std::endl;
+
+ // parse all of (provided) history
+ vector<DiffInfo> available_patches;
+ bool firstAcceptedHashes = true;
+ for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type)
+ {
+ if (LocalHashes.find(*type) == NULL)
+ continue;
+
+ std::string tagname = *type;
+ tagname.append("-History");
+ std::string const tmp = Tags.FindS(tagname.c_str());
+ if (tmp.empty() == true)
+ continue;
+
+ string hash, filename;
+ unsigned long long size;
+ std::stringstream ss(tmp);
+
+ while (ss >> hash >> size >> filename)
{
- if(Debug)
- std::clog << "SHA1-Current: " << ServerSha1 << " and we start at "<< fd.Name() << " " << fd.Size() << " " << local_sha1 << std::endl;
+ if (unlikely(hash.empty() == true || filename.empty() == true))
+ continue;
- // check the historie and see what patches we need
- string const history = Tags.FindS("SHA1-History");
- std::stringstream hist(history);
- while(hist >> d.sha1 >> size >> d.file)
+ // see if we have a record for this file already
+ std::vector<DiffInfo>::iterator cur = available_patches.begin();
+ for (; cur != available_patches.end(); ++cur)
{
- // read until the first match is found
- // from that point on, we probably need all diffs
- if(d.sha1 == local_sha1)
- found=true;
- else if (found == false)
+ if (cur->file != filename || unlikely(cur->result_size != size))
continue;
-
- if(Debug)
- std::clog << "Need to get diff: " << d.file << std::endl;
- available_patches.push_back(d);
+ cur->result_hashes.push_back(HashString(*type, hash));
+ break;
}
-
- if (available_patches.empty() == false)
+ if (cur != available_patches.end())
+ continue;
+ if (firstAcceptedHashes == true)
{
- // patching with too many files is rather slow compared to a fast download
- unsigned long const fileLimit = _config->FindI("Acquire::PDiffs::FileLimit", 0);
- if (fileLimit != 0 && fileLimit < available_patches.size())
- {
- if (Debug)
- std::clog << "Need " << available_patches.size() << " diffs (Limit is " << fileLimit
- << ") so fallback to complete download" << std::endl;
- return false;
- }
-
- // see if the patches are too big
- found = false; // it was true and it will be true again at the end
- d = *available_patches.begin();
- string const firstPatch = d.file;
- unsigned long patchesSize = 0;
- std::stringstream patches(Tags.FindS("SHA1-Patches"));
- while(patches >> d.sha1 >> size >> d.file)
- {
- if (firstPatch == d.file)
- found = true;
- else if (found == false)
- continue;
-
- patchesSize += atol(size.c_str());
- }
- unsigned long const sizeLimit = ServerSize * _config->FindI("Acquire::PDiffs::SizeLimit", 100);
- if (sizeLimit > 0 && (sizeLimit/100) < patchesSize)
- {
- if (Debug)
- std::clog << "Need " << patchesSize << " bytes (Limit is " << sizeLimit/100
- << ") so fallback to complete download" << std::endl;
- return false;
- }
+ DiffInfo next;
+ next.file = filename;
+ next.result_hashes.push_back(HashString(*type, hash));
+ next.result_size = size;
+ next.patch_size = 0;
+ available_patches.push_back(next);
+ }
+ else
+ {
+ if (Debug == true)
+ std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": File " << filename
+ << " wasn't in the list for the first parsed hash! (history)" << std::endl;
+ break;
}
}
+ firstAcceptedHashes = false;
+ }
+
+ if (unlikely(available_patches.empty() == true))
+ {
+ if (Debug)
+ std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": "
+ << "Couldn't find any patches for the patch series." << std::endl;
+ return false;
+ }
+
+ for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type)
+ {
+ if (LocalHashes.find(*type) == NULL)
+ continue;
+
+ std::string tagname = *type;
+ tagname.append("-Patches");
+ std::string const tmp = Tags.FindS(tagname.c_str());
+ if (tmp.empty() == true)
+ continue;
+
+ string hash, filename;
+ unsigned long long size;
+ std::stringstream ss(tmp);
- // we have something, queue the next diff
- if(found)
+ while (ss >> hash >> size >> filename)
{
- // queue the diffs
- string::size_type const last_space = Description.rfind(" ");
- if(last_space != string::npos)
- Description.erase(last_space, Description.size()-last_space);
-
- /* decide if we should download patches one by one or in one go:
- The first is good if the server merges patches, but many don't so client
- based merging can be attempt in which case the second is better.
- "bad things" will happen if patches are merged on the server,
- but client side merging is attempt as well */
- bool pdiff_merge = _config->FindB("Acquire::PDiffs::Merge", true);
- if (pdiff_merge == true)
- {
- // reprepro adds this flag if it has merged patches on the server
- std::string const precedence = Tags.FindS("X-Patch-Precedence");
- pdiff_merge = (precedence != "merged");
- }
+ if (unlikely(hash.empty() == true || filename.empty() == true))
+ continue;
- if (pdiff_merge == false)
- new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc,
- ExpectedHash, ServerSha1, available_patches);
- else
+ // see if we have a record for this file already
+ std::vector<DiffInfo>::iterator cur = available_patches.begin();
+ for (; cur != available_patches.end(); ++cur)
{
- std::vector<pkgAcqIndexMergeDiffs*> *diffs = new std::vector<pkgAcqIndexMergeDiffs*>(available_patches.size());
- for(size_t i = 0; i < available_patches.size(); ++i)
- (*diffs)[i] = new pkgAcqIndexMergeDiffs(Owner, RealURI, Description, Desc.ShortDesc, ExpectedHash,
- available_patches[i], diffs);
+ if (cur->file != filename)
+ continue;
+ if (unlikely(cur->patch_size != 0 && cur->patch_size != size))
+ continue;
+ cur->patch_hashes.push_back(HashString(*type, hash));
+ cur->patch_size = size;
+ break;
}
-
- Complete = false;
- Status = StatDone;
- Dequeue();
- return true;
+ if (cur != available_patches.end())
+ continue;
+ if (Debug == true)
+ std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": File " << filename
+ << " wasn't in the list for the first parsed hash! (patches)" << std::endl;
+ break;
}
}
+
+ bool foundStart = false;
+ for (std::vector<DiffInfo>::iterator cur = available_patches.begin();
+ cur != available_patches.end(); ++cur)
+ {
+ if (LocalHashes != cur->result_hashes)
+ continue;
+
+ available_patches.erase(available_patches.begin(), cur);
+ foundStart = true;
+ break;
+ }
+
+ if (foundStart == false || unlikely(available_patches.empty() == true))
+ {
+ if (Debug)
+ std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": "
+ << "Couldn't find the start of the patch series." << std::endl;
+ return false;
+ }
+
+ // patching with too many files is rather slow compared to a fast download
+ unsigned long const fileLimit = _config->FindI("Acquire::PDiffs::FileLimit", 0);
+ if (fileLimit != 0 && fileLimit < available_patches.size())
+ {
+ if (Debug)
+ std::clog << "Need " << available_patches.size() << " diffs (Limit is " << fileLimit
+ << ") so fallback to complete download" << std::endl;
+ return false;
+ }
+
+ // calculate the size of all patches we have to get
+ // note that all sizes are uncompressed, while we download compressed files
+ unsigned long long patchesSize = 0;
+ for (std::vector<DiffInfo>::const_iterator cur = available_patches.begin();
+ cur != available_patches.end(); ++cur)
+ patchesSize += cur->patch_size;
+ unsigned long long const sizeLimit = ServerSize * _config->FindI("Acquire::PDiffs::SizeLimit", 100);
+ if (false && sizeLimit > 0 && (sizeLimit/100) < patchesSize)
+ {
+ if (Debug)
+ std::clog << "Need " << patchesSize << " bytes (Limit is " << sizeLimit/100
+ << ") so fallback to complete download" << std::endl;
+ return false;
+ }
+
+ // FIXME: make this use the method
+ PackagesFileReadyInPartial = true;
+ std::string const Partial = GetPartialFileNameFromURI(RealURI);
+
+ FileFd From(CurrentPackagesFile, FileFd::ReadOnly);
+ FileFd To(Partial, FileFd::WriteEmpty);
+ if(CopyFile(From, To) == false)
+ return _error->Errno("CopyFile", "failed to copy");
- // Nothing found, report and return false
- // Failing here is ok, if we return false later, the full
- // IndexFile is queued
if(Debug)
- std::clog << "Can't find a patch in the index file" << std::endl;
- return false;
+ std::cerr << "Done copying " << CurrentPackagesFile
+ << " -> " << Partial
+ << std::endl;
+
+ // we have something, queue the diffs
+ string::size_type const last_space = Description.rfind(" ");
+ if(last_space != string::npos)
+ Description.erase(last_space, Description.size()-last_space);
+
+ /* decide if we should download patches one by one or in one go:
+ The first is good if the server merges patches, but many don't so client
+ based merging can be attempt in which case the second is better.
+ "bad things" will happen if patches are merged on the server,
+ but client side merging is attempt as well */
+ bool pdiff_merge = _config->FindB("Acquire::PDiffs::Merge", true);
+ if (pdiff_merge == true)
+ {
+ // reprepro adds this flag if it has merged patches on the server
+ std::string const precedence = Tags.FindS("X-Patch-Precedence");
+ pdiff_merge = (precedence != "merged");
+ }
+
+ if (pdiff_merge == false)
+ {
+ new pkgAcqIndexDiffs(Owner, TransactionManager, Target, ExpectedHashes,
+ MetaIndexParser, available_patches);
+ }
+ else
+ {
+ std::vector<pkgAcqIndexMergeDiffs*> *diffs = new std::vector<pkgAcqIndexMergeDiffs*>(available_patches.size());
+ for(size_t i = 0; i < available_patches.size(); ++i)
+ (*diffs)[i] = new pkgAcqIndexMergeDiffs(Owner, TransactionManager,
+ Target,
+ ExpectedHashes,
+ MetaIndexParser,
+ available_patches[i],
+ diffs);
+ }
+
+ Complete = false;
+ Status = StatDone;
+ Dequeue();
+ return true;
}
/*}}}*/
-void pkgAcqDiffIndex::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)/*{{{*/
+void pkgAcqDiffIndex::Failed(string Message,pkgAcquire::MethodConfig * Cnf)/*{{{*/
{
if(Debug)
std::clog << "pkgAcqDiffIndex failed: " << Desc.URI << " with " << Message << std::endl
<< "Falling back to normal index file acquire" << std::endl;
- new pkgAcqIndex(Owner, RealURI, Description, Desc.ShortDesc,
- ExpectedHash);
+ new pkgAcqIndex(Owner, TransactionManager, Target, ExpectedHashes, MetaIndexParser);
- Complete = false;
+ Item::Failed(Message,Cnf);
Status = StatDone;
- Dequeue();
}
/*}}}*/
-void pkgAcqDiffIndex::Done(string Message,unsigned long long Size,string Md5Hash, /*{{{*/
+void pkgAcqDiffIndex::Done(string Message,unsigned long long Size,HashStringList const &Hashes, /*{{{*/
pkgAcquire::MethodConfig *Cnf)
{
if(Debug)
std::clog << "pkgAcqDiffIndex::Done(): " << Desc.URI << std::endl;
- Item::Done(Message,Size,Md5Hash,Cnf);
+ Item::Done(Message, Size, Hashes, Cnf);
+
+ // verify the index target
+ if(Target && Target->MetaKey != "" && MetaIndexParser && Hashes.usable())
+ {
+ std::string IndexMetaKey = Target->MetaKey + ".diff/Index";
+ indexRecords::checkSum *Record = MetaIndexParser->Lookup(IndexMetaKey);
+ if(Record && Record->Hashes.usable() && Hashes != Record->Hashes)
+ {
+ RenameOnError(HashSumMismatch);
+ printHashSumComparision(RealURI, Record->Hashes, Hashes);
+ Failed(Message, Cnf);
+ return;
+ }
+
+ }
string FinalFile;
- FinalFile = _config->FindDir("Dir::State::lists")+URItoFileName(RealURI);
+ FinalFile = _config->FindDir("Dir::State::lists");
+ FinalFile += URItoFileName(Desc.URI);
- // success in downloading the index
- // rename the index
- FinalFile += string(".IndexDiff");
- if(Debug)
- std::clog << "Renaming: " << DestFile << " -> " << FinalFile
- << std::endl;
- Rename(DestFile,FinalFile);
- chmod(FinalFile.c_str(),0644);
- DestFile = FinalFile;
+ if(StringToBool(LookupTag(Message,"IMS-Hit"),false))
+ DestFile = FinalFile;
if(!ParseDiffIndex(DestFile))
- return Failed("", NULL);
+ return Failed("Message: Couldn't parse pdiff index", Cnf);
+
+ // queue for final move
+ TransactionManager->TransactionStageCopy(this, DestFile, FinalFile);
Complete = true;
Status = StatDone;
@@ -603,26 +730,28 @@ void pkgAcqDiffIndex::Done(string Message,unsigned long long Size,string Md5Hash
* for each diff and the index
*/
pkgAcqIndexDiffs::pkgAcqIndexDiffs(pkgAcquire *Owner,
- string URI,string URIDesc,string ShortDesc,
- HashString ExpectedHash,
- string ServerSha1,
+ pkgAcqMetaBase *TransactionManager,
+ struct IndexTarget const * const Target,
+ HashStringList const &ExpectedHashes,
+ indexRecords *MetaIndexParser,
vector<DiffInfo> diffs)
- : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash),
- available_patches(diffs), ServerSha1(ServerSha1)
+ : pkgAcqBaseIndex(Owner, TransactionManager, Target, ExpectedHashes, MetaIndexParser),
+ available_patches(diffs)
{
-
- DestFile = _config->FindDir("Dir::State::lists") + "partial/";
- DestFile += URItoFileName(URI);
+ DestFile = GetPartialFileNameFromURI(Target->URI);
Debug = _config->FindB("Debug::pkgAcquire::Diffs",false);
- Description = URIDesc;
+ RealURI = Target->URI;
Desc.Owner = this;
- Desc.ShortDesc = ShortDesc;
+ Description = Target->Description;
+ Desc.ShortDesc = Target->ShortDesc;
if(available_patches.empty() == true)
{
- // we are done (yeah!)
+ // we are done (yeah!), check hashes against the final file
+ DestFile = _config->FindDir("Dir::State::lists");
+ DestFile += URItoFileName(Target->URI);
Finish(true);
}
else
@@ -638,28 +767,34 @@ void pkgAcqIndexDiffs::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)
if(Debug)
std::clog << "pkgAcqIndexDiffs failed: " << Desc.URI << " with " << Message << std::endl
<< "Falling back to normal index file acquire" << std::endl;
- new pkgAcqIndex(Owner, RealURI, Description,Desc.ShortDesc,
- ExpectedHash);
+ new pkgAcqIndex(Owner, TransactionManager, Target, ExpectedHashes, MetaIndexParser);
Finish();
}
/*}}}*/
// Finish - helper that cleans the item out of the fetcher queue /*{{{*/
void pkgAcqIndexDiffs::Finish(bool allDone)
{
+ if(Debug)
+ std::clog << "pkgAcqIndexDiffs::Finish(): "
+ << allDone << " "
+ << Desc.URI << std::endl;
+
// we restore the original name, this is required, otherwise
// the file will be cleaned
if(allDone)
{
- DestFile = _config->FindDir("Dir::State::lists");
- DestFile += URItoFileName(RealURI);
-
- if(!ExpectedHash.empty() && !ExpectedHash.VerifyFile(DestFile))
+ if(HashSums().usable() && !HashSums().VerifyFile(DestFile))
{
RenameOnError(HashSumMismatch);
Dequeue();
return;
}
+ // queue for copy
+ std::string FinalFile = _config->FindDir("Dir::State::lists");
+ FinalFile += URItoFileName(RealURI);
+ TransactionManager->TransactionStageCopy(this, DestFile, FinalFile);
+
// this is for the "real" finish
Complete = true;
Status = StatDone;
@@ -679,21 +814,32 @@ void pkgAcqIndexDiffs::Finish(bool allDone)
/*}}}*/
bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/
{
-
// calc sha1 of the just patched file
- string FinalFile = _config->FindDir("Dir::State::lists");
- FinalFile += URItoFileName(RealURI);
+ std::string const FinalFile = GetPartialFileNameFromURI(RealURI);
+
+ if(!FileExists(FinalFile))
+ {
+ Failed("Message: No FinalFile " + FinalFile + " available", NULL);
+ return false;
+ }
FileFd fd(FinalFile, FileFd::ReadOnly);
- SHA1Summation SHA1;
- SHA1.AddFD(fd);
- string local_sha1 = string(SHA1.Result());
+ Hashes LocalHashesCalc;
+ LocalHashesCalc.AddFD(fd);
+ HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList();
+
if(Debug)
- std::clog << "QueueNextDiff: "
- << FinalFile << " (" << local_sha1 << ")"<<std::endl;
+ std::clog << "QueueNextDiff: " << FinalFile << " (" << LocalHashes.find(NULL)->toStr() << ")" << std::endl;
+
+ if (unlikely(LocalHashes.usable() == false || ExpectedHashes.usable() == false))
+ {
+ Failed("Local/Expected hashes are not usable", NULL);
+ return false;
+ }
+
// final file reached before all patches are applied
- if(local_sha1 == ServerSha1)
+ if(LocalHashes == ExpectedHashes)
{
Finish(true);
return true;
@@ -701,10 +847,10 @@ bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/
// remove all patches until the next matching patch is found
// this requires the Index file to be ordered
- for(vector<DiffInfo>::iterator I=available_patches.begin();
+ for(vector<DiffInfo>::iterator I = available_patches.begin();
available_patches.empty() == false &&
I != available_patches.end() &&
- I->sha1 != local_sha1;
+ I->result_hashes != LocalHashes;
++I)
{
available_patches.erase(I);
@@ -713,38 +859,48 @@ bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/
// error checking and falling back if no patch was found
if(available_patches.empty() == true)
{
- Failed("", NULL);
+ Failed("No patches left to reach target", NULL);
return false;
}
// queue the right diff
Desc.URI = RealURI + ".diff/" + available_patches[0].file + ".gz";
Desc.Description = Description + " " + available_patches[0].file + string(".pdiff");
- DestFile = _config->FindDir("Dir::State::lists") + "partial/";
- DestFile += URItoFileName(RealURI + ".diff/" + available_patches[0].file);
+ DestFile = GetPartialFileNameFromURI(RealURI + ".diff/" + available_patches[0].file);
if(Debug)
std::clog << "pkgAcqIndexDiffs::QueueNextDiff(): " << Desc.URI << std::endl;
-
+
QueueURI(Desc);
return true;
}
/*}}}*/
-void pkgAcqIndexDiffs::Done(string Message,unsigned long long Size,string Md5Hash, /*{{{*/
+void pkgAcqIndexDiffs::Done(string Message,unsigned long long Size, HashStringList const &Hashes, /*{{{*/
pkgAcquire::MethodConfig *Cnf)
{
if(Debug)
std::clog << "pkgAcqIndexDiffs::Done(): " << Desc.URI << std::endl;
- Item::Done(Message,Size,Md5Hash,Cnf);
+ Item::Done(Message, Size, Hashes, Cnf);
- string FinalFile;
- FinalFile = _config->FindDir("Dir::State::lists")+URItoFileName(RealURI);
+ // FIXME: verify this download too before feeding it to rred
+ std::string const FinalFile = GetPartialFileNameFromURI(RealURI);
// success in downloading a diff, enter ApplyDiff state
if(State == StateFetchDiff)
{
+ FileFd fd(DestFile, FileFd::ReadOnly, FileFd::Gzip);
+ class Hashes LocalHashesCalc;
+ LocalHashesCalc.AddFD(fd);
+ HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList();
+
+ if (fd.Size() != available_patches[0].patch_size ||
+ available_patches[0].patch_hashes != LocalHashes)
+ {
+ Failed("Patch has Size/Hashsum mismatch", NULL);
+ return;
+ }
// rred excepts the patch as $FinalFile.ed
Rename(DestFile,FinalFile+".ed");
@@ -756,7 +912,7 @@ void pkgAcqIndexDiffs::Done(string Message,unsigned long long Size,string Md5Has
Local = true;
Desc.URI = "rred:" + FinalFile;
QueueURI(Desc);
- Mode = "rred";
+ SetActiveSubprocess("rred");
return;
}
@@ -779,37 +935,39 @@ void pkgAcqIndexDiffs::Done(string Message,unsigned long long Size,string Md5Has
// see if there is more to download
if(available_patches.empty() == false) {
- new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc,
- ExpectedHash, ServerSha1, available_patches);
+ new pkgAcqIndexDiffs(Owner, TransactionManager, Target,
+ ExpectedHashes, MetaIndexParser,
+ available_patches);
return Finish();
} else
+ // update
+ DestFile = FinalFile;
return Finish(true);
}
}
/*}}}*/
// AcqIndexMergeDiffs::AcqIndexMergeDiffs - Constructor /*{{{*/
pkgAcqIndexMergeDiffs::pkgAcqIndexMergeDiffs(pkgAcquire *Owner,
- string const &URI, string const &URIDesc,
- string const &ShortDesc, HashString const &ExpectedHash,
- DiffInfo const &patch,
- std::vector<pkgAcqIndexMergeDiffs*> const * const allPatches)
- : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash),
- patch(patch),allPatches(allPatches), State(StateFetchDiff)
+ pkgAcqMetaBase *TransactionManager,
+ struct IndexTarget const * const Target,
+ HashStringList const &ExpectedHashes,
+ indexRecords *MetaIndexParser,
+ DiffInfo const &patch,
+ std::vector<pkgAcqIndexMergeDiffs*> const * const allPatches)
+ : pkgAcqBaseIndex(Owner, TransactionManager, Target, ExpectedHashes, MetaIndexParser),
+ patch(patch), allPatches(allPatches), State(StateFetchDiff)
{
-
- DestFile = _config->FindDir("Dir::State::lists") + "partial/";
- DestFile += URItoFileName(URI);
-
Debug = _config->FindB("Debug::pkgAcquire::Diffs",false);
- Description = URIDesc;
+ RealURI = Target->URI;
Desc.Owner = this;
- Desc.ShortDesc = ShortDesc;
+ Description = Target->Description;
+ Desc.ShortDesc = Target->ShortDesc;
Desc.URI = RealURI + ".diff/" + patch.file + ".gz";
Desc.Description = Description + " " + patch.file + string(".pdiff");
- DestFile = _config->FindDir("Dir::State::lists") + "partial/";
- DestFile += URItoFileName(RealURI + ".diff/" + patch.file);
+
+ DestFile = GetPartialFileNameFromURI(RealURI + ".diff/" + patch.file);
if(Debug)
std::clog << "pkgAcqIndexMergeDiffs: " << Desc.URI << std::endl;
@@ -817,13 +975,13 @@ pkgAcqIndexMergeDiffs::pkgAcqIndexMergeDiffs(pkgAcquire *Owner,
QueueURI(Desc);
}
/*}}}*/
-void pkgAcqIndexMergeDiffs::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)/*{{{*/
+void pkgAcqIndexMergeDiffs::Failed(string Message,pkgAcquire::MethodConfig * Cnf)/*{{{*/
{
if(Debug)
std::clog << "pkgAcqIndexMergeDiffs failed: " << Desc.URI << " with " << Message << std::endl;
- Complete = false;
+
+ Item::Failed(Message,Cnf);
Status = StatDone;
- Dequeue();
// check if we are the first to fail, otherwise we are done here
State = StateDoneDiff;
@@ -835,22 +993,33 @@ void pkgAcqIndexMergeDiffs::Failed(string Message,pkgAcquire::MethodConfig * /*C
// first failure means we should fallback
State = StateErrorDiff;
std::clog << "Falling back to normal index file acquire" << std::endl;
- new pkgAcqIndex(Owner, RealURI, Description,Desc.ShortDesc,
- ExpectedHash);
+ new pkgAcqIndex(Owner, TransactionManager, Target, ExpectedHashes, MetaIndexParser);
}
/*}}}*/
-void pkgAcqIndexMergeDiffs::Done(string Message,unsigned long long Size,string Md5Hash, /*{{{*/
+void pkgAcqIndexMergeDiffs::Done(string Message,unsigned long long Size,HashStringList const &Hashes, /*{{{*/
pkgAcquire::MethodConfig *Cnf)
{
if(Debug)
std::clog << "pkgAcqIndexMergeDiffs::Done(): " << Desc.URI << std::endl;
- Item::Done(Message,Size,Md5Hash,Cnf);
+ Item::Done(Message,Size,Hashes,Cnf);
- string const FinalFile = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
+ // FIXME: verify download before feeding it to rred
+ string const FinalFile = GetPartialFileNameFromURI(RealURI);
if (State == StateFetchDiff)
{
+ FileFd fd(DestFile, FileFd::ReadOnly, FileFd::Gzip);
+ class Hashes LocalHashesCalc;
+ LocalHashesCalc.AddFD(fd);
+ HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList();
+
+ if (fd.Size() != patch.patch_size || patch.patch_hashes != LocalHashes)
+ {
+ Failed("Patch has Size/Hashsum mismatch", NULL);
+ return;
+ }
+
// rred expects the patch as $FinalFile.ed.$patchname.gz
Rename(DestFile, FinalFile + ".ed." + patch.file + ".gz");
@@ -874,35 +1043,38 @@ void pkgAcqIndexMergeDiffs::Done(string Message,unsigned long long Size,string M
Local = true;
Desc.URI = "rred:" + FinalFile;
QueueURI(Desc);
- Mode = "rred";
+ SetActiveSubprocess("rred");
return;
}
// success in download/apply all diffs, clean up
else if (State == StateApplyDiff)
{
// see if we really got the expected file
- if(!ExpectedHash.empty() && !ExpectedHash.VerifyFile(DestFile))
+ if(ExpectedHashes.usable() && !ExpectedHashes.VerifyFile(DestFile))
{
RenameOnError(HashSumMismatch);
return;
}
+
+ std::string FinalFile = _config->FindDir("Dir::State::lists");
+ FinalFile += URItoFileName(RealURI);
+
// move the result into place
if(Debug)
- std::clog << "Moving patched file in place: " << std::endl
+ std::clog << "Queue patched file in place: " << std::endl
<< DestFile << " -> " << FinalFile << std::endl;
- Rename(DestFile, FinalFile);
- chmod(FinalFile.c_str(), 0644);
- // otherwise lists cleanup will eat the file
- DestFile = FinalFile;
+ // queue for copy by the transaction manager
+ TransactionManager->TransactionStageCopy(this, DestFile, FinalFile);
// ensure the ed's are gone regardless of list-cleanup
for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin();
I != allPatches->end(); ++I)
{
- std::string patch = FinalFile + ".ed." + (*I)->patch.file + ".gz";
- unlink(patch.c_str());
+ std::string const PartialFile = GetPartialFileNameFromURI(RealURI);
+ std::string patch = PartialFile + ".ed." + (*I)->patch.file + ".gz";
+ unlink(patch.c_str());
}
// all set and done
@@ -912,66 +1084,116 @@ void pkgAcqIndexMergeDiffs::Done(string Message,unsigned long long Size,string M
}
}
/*}}}*/
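/* Illustrative sketch, not part of the patch: how the staging name used in the
 * hunks above is composed -- per the comment in the code, rred expects every
 * downloaded merge patch as "<base>.ed.<patchname>.gz".  BuildMergePatchName
 * is a hypothetical helper shown only for illustration. */
#include <string>
static std::string BuildMergePatchName(std::string const &Base, std::string const &PatchName)
{
   // the same convention is used for staging (Rename) and for cleanup (unlink)
   return Base + ".ed." + PatchName + ".gz";
}
// e.g. BuildMergePatchName(GetPartialFileNameFromURI(RealURI), patch.file)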
+// AcqBaseIndex::VerifyHashByMetaKey - verify hash for the given metakey /*{{{*/
+bool pkgAcqBaseIndex::VerifyHashByMetaKey(HashStringList const &Hashes)
+{
+ if(MetaKey != "" && Hashes.usable())
+ {
+ indexRecords::checkSum *Record = MetaIndexParser->Lookup(MetaKey);
+ if(Record && Record->Hashes.usable() && Hashes != Record->Hashes)
+ {
+ printHashSumComparision(RealURI, Record->Hashes, Hashes);
+ return false;
+ }
+ }
+ return true;
+}
+ /*}}}*/
// AcqIndex::AcqIndex - Constructor /*{{{*/
// ---------------------------------------------------------------------
-/* The package file is added to the queue and a second class is
- instantiated to fetch the revision file */
+/* The package file is added to the queue and a second class is
+ instantiated to fetch the revision file */
pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner,
string URI,string URIDesc,string ShortDesc,
- HashString ExpectedHash, string comprExt)
- : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash)
+ HashStringList const &ExpectedHash)
+ : pkgAcqBaseIndex(Owner, 0, NULL, ExpectedHash, NULL)
{
- if(comprExt.empty() == true)
- {
- // autoselect the compression method
- std::vector<std::string> types = APT::Configuration::getCompressionTypes();
- for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t)
- comprExt.append(*t).append(" ");
- if (comprExt.empty() == false)
- comprExt.erase(comprExt.end()-1);
- }
- CompressionExtension = comprExt;
+ RealURI = URI;
+ AutoSelectCompression();
Init(URI, URIDesc, ShortDesc);
+
+ if(_config->FindB("Debug::Acquire::Transaction", false) == true)
+ std::clog << "New pkgIndex with TransactionManager "
+ << TransactionManager << std::endl;
}
-pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner, IndexTarget const *Target,
- HashString const &ExpectedHash, indexRecords const *MetaIndexParser)
- : Item(Owner), RealURI(Target->URI), ExpectedHash(ExpectedHash)
+ /*}}}*/
+// AcqIndex::AcqIndex - Constructor /*{{{*/
+pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner,
+ pkgAcqMetaBase *TransactionManager,
+ IndexTarget const *Target,
+ HashStringList const &ExpectedHash,
+ indexRecords *MetaIndexParser)
+ : pkgAcqBaseIndex(Owner, TransactionManager, Target, ExpectedHash,
+ MetaIndexParser)
{
+ RealURI = Target->URI;
+
// autoselect the compression method
+ AutoSelectCompression();
+ Init(Target->URI, Target->Description, Target->ShortDesc);
+
+ if(_config->FindB("Debug::Acquire::Transaction", false) == true)
+ std::clog << "New pkgIndex with TransactionManager "
+ << TransactionManager << std::endl;
+}
+ /*}}}*/
+// AcqIndex::AutoSelectCompression - Select compression /*{{{*/
+void pkgAcqIndex::AutoSelectCompression()
+{
std::vector<std::string> types = APT::Configuration::getCompressionTypes();
- CompressionExtension = "";
- if (ExpectedHash.empty() == false)
+ CompressionExtensions = "";
+ if (ExpectedHashes.usable())
{
- for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t)
- if (*t == "uncompressed" || MetaIndexParser->Exists(string(Target->MetaKey).append(".").append(*t)) == true)
- CompressionExtension.append(*t).append(" ");
+ for (std::vector<std::string>::const_iterator t = types.begin();
+ t != types.end(); ++t)
+ {
+ std::string CompressedMetaKey = string(Target->MetaKey).append(".").append(*t);
+ if (*t == "uncompressed" ||
+ MetaIndexParser->Exists(CompressedMetaKey) == true)
+ CompressionExtensions.append(*t).append(" ");
+ }
}
else
{
for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t)
- CompressionExtension.append(*t).append(" ");
+ CompressionExtensions.append(*t).append(" ");
}
- if (CompressionExtension.empty() == false)
- CompressionExtension.erase(CompressionExtension.end()-1);
-
- Init(Target->URI, Target->Description, Target->ShortDesc);
+ if (CompressionExtensions.empty() == false)
+ CompressionExtensions.erase(CompressionExtensions.end()-1);
}
/*}}}*/
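/* Illustrative sketch, not part of the patch: how the space-separated list
 * built by AutoSelectCompression() above is consumed.  Init() uses the first
 * word as CurrentCompressionExtension and Failed() drops that word to retry
 * with the next one.  Both helpers below are hypothetical. */
#include <string>
static std::string FirstExtension(std::string const &Extensions)
{
   // e.g. "xz" out of "xz bz2 gz uncompressed"
   return Extensions.substr(0, Extensions.find(' '));
}
static std::string DropFirstExtension(std::string const &Extensions)
{
   std::string::size_type const Space = Extensions.find(' ');
   // an empty result means there is nothing left to fall back to
   return Space == std::string::npos ? "" : Extensions.substr(Space + 1);
}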
// AcqIndex::Init - deferred Constructor					/*{{{*/
-void pkgAcqIndex::Init(string const &URI, string const &URIDesc, string const &ShortDesc) {
- Decompression = false;
- Erase = false;
+void pkgAcqIndex::Init(string const &URI, string const &URIDesc,
+ string const &ShortDesc)
+{
+ Stage = STAGE_DOWNLOAD;
- DestFile = _config->FindDir("Dir::State::lists") + "partial/";
- DestFile += URItoFileName(URI);
+ DestFile = GetPartialFileNameFromURI(URI);
- std::string const comprExt = CompressionExtension.substr(0, CompressionExtension.find(' '));
- if (comprExt == "uncompressed")
+ CurrentCompressionExtension = CompressionExtensions.substr(0, CompressionExtensions.find(' '));
+ if (CurrentCompressionExtension == "uncompressed")
+ {
Desc.URI = URI;
- else {
- Desc.URI = URI + '.' + comprExt;
- DestFile = DestFile + '.' + comprExt;
+ if(Target)
+ MetaKey = string(Target->MetaKey);
+ }
+ else
+ {
+ Desc.URI = URI + '.' + CurrentCompressionExtension;
+ DestFile = DestFile + '.' + CurrentCompressionExtension;
+ if(Target)
+ MetaKey = string(Target->MetaKey) + '.' + CurrentCompressionExtension;
+ }
+
+ // load the filesize
+ if(MetaIndexParser)
+ {
+ indexRecords::checkSum *Record = MetaIndexParser->Lookup(MetaKey);
+ if(Record)
+ FileSize = Record->Size;
+
+ InitByHashIfNeeded(MetaKey);
}
Desc.Description = URIDesc;
@@ -981,22 +1203,44 @@ void pkgAcqIndex::Init(string const &URI, string const &URIDesc, string const &S
QueueURI(Desc);
}
/*}}}*/
+// AcqIndex::InitByHashIfNeeded - modify URI for by-hash support	/*{{{*/
+void pkgAcqIndex::InitByHashIfNeeded(const std::string MetaKey)
+{
+ // TODO:
+ // - (maybe?) add support for by-hash into the sources.list as flag
+ // - make apt-ftparchive generate the hashes (and expire?)
+ std::string HostKnob = "APT::Acquire::" + ::URI(Desc.URI).Host + "::By-Hash";
+ if(_config->FindB("APT::Acquire::By-Hash", false) == true ||
+ _config->FindB(HostKnob, false) == true ||
+ MetaIndexParser->GetSupportsAcquireByHash())
+ {
+ indexRecords::checkSum *Record = MetaIndexParser->Lookup(MetaKey);
+ if(Record)
+ {
+ // FIXME: should we really use the best hash here? or a fixed one?
+ const HashString *TargetHash = Record->Hashes.find("");
+ std::string ByHash = "/by-hash/" + TargetHash->HashType() + "/" + TargetHash->HashValue();
+ size_t trailing_slash = Desc.URI.find_last_of("/");
+ Desc.URI = Desc.URI.replace(
+ trailing_slash,
+ Desc.URI.substr(trailing_slash+1).size()+1,
+ ByHash);
+ } else {
+ _error->Warning(
+ "Fetching ByHash requested but can not find record for %s",
+ MetaKey.c_str());
+ }
+ }
+}
+ /*}}}*/
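/* Illustrative sketch, not part of the patch: the URI rewrite done by
 * InitByHashIfNeeded() above -- the last path component is swapped for
 * "by-hash/<HashType>/<HashValue>".  RewriteToByHash is a hypothetical helper
 * and the example URI below is made up. */
#include <string>
static std::string RewriteToByHash(std::string const &URI, std::string const &Type, std::string const &Value)
{
   std::string::size_type const Slash = URI.find_last_of('/');
   // keep everything up to and including the last '/', then append the by-hash path
   return URI.substr(0, Slash + 1) + "by-hash/" + Type + "/" + Value;
}
// ".../dists/sid/main/binary-amd64/Packages.xz"
//    -> ".../dists/sid/main/binary-amd64/by-hash/SHA256/<hexdigest>"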
// AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/
// ---------------------------------------------------------------------
/* The only header we use is the last-modified header. */
-string pkgAcqIndex::Custom600Headers()
+string pkgAcqIndex::Custom600Headers() const
{
- std::string const compExt = CompressionExtension.substr(0, CompressionExtension.find(' '));
- string Final = _config->FindDir("Dir::State::lists");
- Final += URItoFileName(RealURI);
- if (_config->FindB("Acquire::GzipIndexes",false))
- Final += compExt;
-
+ string Final = GetFinalFilename();
+
string msg = "\nIndex-File: true";
- // FIXME: this really should use "IndexTarget::IsOptional()" but that
- // seems to be difficult without breaking ABI
- if (ShortDesc().find("Translation") != 0)
- msg += "\nFail-Ignore: true";
struct stat Buf;
if (stat(Final.c_str(),&Buf) == 0)
msg += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
@@ -1004,131 +1248,140 @@ string pkgAcqIndex::Custom600Headers()
return msg;
}
/*}}}*/
-void pkgAcqIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/
+// pkgAcqIndex::Failed - getting the indexfile failed /*{{{*/
+void pkgAcqIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
{
- size_t const nextExt = CompressionExtension.find(' ');
+ size_t const nextExt = CompressionExtensions.find(' ');
if (nextExt != std::string::npos)
{
- CompressionExtension = CompressionExtension.substr(nextExt+1);
+ CompressionExtensions = CompressionExtensions.substr(nextExt+1);
Init(RealURI, Desc.Description, Desc.ShortDesc);
return;
}
// on decompression failure, remove bad versions in partial/
- if (Decompression && Erase) {
- string s = _config->FindDir("Dir::State::lists") + "partial/";
- s.append(URItoFileName(RealURI));
- unlink(s.c_str());
+ if (Stage == STAGE_DECOMPRESS_AND_VERIFY)
+ {
+ unlink(EraseFileName.c_str());
}
Item::Failed(Message,Cnf);
+
+ /// cancel the entire transaction
+ TransactionManager->AbortTransaction();
}
/*}}}*/
-// pkgAcqIndex::GetFinalFilename - Return the full final file path /*{{{*/
-std::string pkgAcqIndex::GetFinalFilename(std::string const &URI,
- std::string const &compExt)
+// pkgAcqIndex::GetFinalFilename - Return the full final file path /*{{{*/
+std::string pkgAcqIndex::GetFinalFilename() const
{
std::string FinalFile = _config->FindDir("Dir::State::lists");
- FinalFile += URItoFileName(URI);
+ FinalFile += URItoFileName(RealURI);
if (_config->FindB("Acquire::GzipIndexes",false) == true)
- FinalFile += '.' + compExt;
+ FinalFile += '.' + CurrentCompressionExtension;
return FinalFile;
}
/*}}}*/
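/* Illustrative sketch, not part of the patch: the two shapes of name
 * GetFinalFilename() above can return, shown with a made-up URI flattened into
 * the usual /var/lib/apt/lists/ filename (slashes turned into underscores). */
#include <string>
static std::string ExampleFinalName(bool const GzipIndexes)
{
   std::string Name = "/var/lib/apt/lists/archive.example.org_debian_dists_sid_main_binary-amd64_Packages";
   if (GzipIndexes == true)
      Name += ".gz";   // mirrors the Acquire::GzipIndexes branch above
   return Name;
}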
// AcqIndex::ReverifyAfterIMS - Reverify index after an ims-hit /*{{{*/
-void pkgAcqIndex::ReverifyAfterIMS(std::string const &FileName)
+void pkgAcqIndex::ReverifyAfterIMS()
{
- std::string const compExt = CompressionExtension.substr(0, CompressionExtension.find(' '));
+   // update DestFile to *not* include the compression extension when doing
+   // a reverify (as it's uncompressed on disk already)
+ DestFile = GetPartialFileNameFromURI(RealURI);
+
+ // do not reverify cdrom sources as apt-cdrom may rewrite the Packages
+   // file when it's doing the indexcopy
+ if (RealURI.substr(0,6) == "cdrom:")
+ return;
+
+   // adjust DestFile if it's compressed on disk
if (_config->FindB("Acquire::GzipIndexes",false) == true)
- DestFile += compExt;
+ DestFile += '.' + CurrentCompressionExtension;
- string FinalFile = GetFinalFilename(RealURI, compExt);
- Rename(FinalFile, FileName);
- Decompression = true;
- Desc.URI = "copy:" + FileName;
+ // copy FinalFile into partial/ so that we check the hash again
+ string FinalFile = GetFinalFilename();
+ Stage = STAGE_DECOMPRESS_AND_VERIFY;
+ Desc.URI = "copy:" + FinalFile;
QueueURI(Desc);
}
/*}}}*/
+// AcqIndex::ValidateFile - Validate the content of the downloaded file /*{{{*/
+bool pkgAcqIndex::ValidateFile(const std::string &FileName)
+{
+ // FIXME: this can go away once we only ever download stuff that
+ // has a valid hash and we never do GET based probing
+   // FIXME2: this also leaks debian-isms into the code and should therefore go away
+
+ /* Always validate the index file for correctness (all indexes must
+ * have a Package field) (LP: #346386) (Closes: #627642)
+ */
+ FileFd fd(FileName, FileFd::ReadOnly, FileFd::Extension);
+ // Only test for correctness if the content of the file is not empty
+ // (empty is ok)
+ if (fd.Size() > 0)
+ {
+ pkgTagSection sec;
+ pkgTagFile tag(&fd);
+
+ // all our current indexes have a field 'Package' in each section
+ if (_error->PendingError() == true ||
+ tag.Step(sec) == false ||
+ sec.Exists("Package") == false)
+ return false;
+ }
+ return true;
+}
+ /*}}}*/
// AcqIndex::Done - Finished a fetch /*{{{*/
// ---------------------------------------------------------------------
/* This goes through a number of states.. On the initial fetch the
method could possibly return an alternate filename which points
to the uncompressed version of the file. If this is so the file
is copied into the partial directory. In all other cases the file
- is decompressed with a gzip uri. */
-void pkgAcqIndex::Done(string Message,unsigned long long Size,string Hash,
+ is decompressed with a compressed uri. */
+void pkgAcqIndex::Done(string Message,
+ unsigned long long Size,
+ HashStringList const &Hashes,
pkgAcquire::MethodConfig *Cfg)
{
- Item::Done(Message,Size,Hash,Cfg);
- std::string const compExt = CompressionExtension.substr(0, CompressionExtension.find(' '));
+ Item::Done(Message,Size,Hashes,Cfg);
- if (Decompression == true)
+ switch(Stage)
{
- if (_config->FindB("Debug::pkgAcquire::Auth", false))
- {
- std::cerr << std::endl << RealURI << ": Computed Hash: " << Hash;
- std::cerr << " Expected Hash: " << ExpectedHash.toStr() << std::endl;
- }
-
- if (!ExpectedHash.empty() && ExpectedHash.toStr() != Hash)
- {
- Desc.URI = RealURI;
- RenameOnError(HashSumMismatch);
- return;
- }
-
- // FIXME: this can go away once we only ever download stuff that
- // has a valid hash and we never do GET based probing
- //
- /* Always verify the index file for correctness (all indexes must
- * have a Package field) (LP: #346386) (Closes: #627642)
- */
- FileFd fd(DestFile, FileFd::ReadOnly, FileFd::Extension);
- // Only test for correctness if the file is not empty (empty is ok)
- if (fd.Size() > 0)
- {
- pkgTagSection sec;
- pkgTagFile tag(&fd);
-
- // all our current indexes have a field 'Package' in each section
- if (_error->PendingError() == true || tag.Step(sec) == false || sec.Exists("Package") == false)
- {
- RenameOnError(InvalidFormat);
- return;
- }
- }
-
- // Done, move it into position
- string FinalFile = GetFinalFilename(RealURI, compExt);
- Rename(DestFile,FinalFile);
- chmod(FinalFile.c_str(),0644);
-
- /* We restore the original name to DestFile so that the clean operation
- will work OK */
- DestFile = _config->FindDir("Dir::State::lists") + "partial/";
- DestFile += URItoFileName(RealURI);
- if (_config->FindB("Acquire::GzipIndexes",false))
- DestFile += '.' + compExt;
-
- // Remove the compressed version.
- if (Erase == true)
- unlink(DestFile.c_str());
+ case STAGE_DOWNLOAD:
+ StageDownloadDone(Message, Hashes, Cfg);
+ break;
+ case STAGE_DECOMPRESS_AND_VERIFY:
+ StageDecompressDone(Message, Hashes, Cfg);
+ break;
+ }
+}
+ /*}}}*/
+// AcqIndex::StageDownloadDone - Queue for decompress and verify /*{{{*/
+void pkgAcqIndex::StageDownloadDone(string Message,
+ HashStringList const &Hashes,
+ pkgAcquire::MethodConfig *Cfg)
+{
+   // First check if the calculated Hash of the (compressed) downloaded
+ // file matches the hash we have in the MetaIndexRecords for this file
+ if(VerifyHashByMetaKey(Hashes) == false)
+ {
+ RenameOnError(HashSumMismatch);
+ Failed(Message, Cfg);
return;
}
- Erase = false;
Complete = true;
-
+
   // Handle the unzipped case
string FileName = LookupTag(Message,"Alt-Filename");
if (FileName.empty() == false)
{
- Decompression = true;
+ Stage = STAGE_DECOMPRESS_AND_VERIFY;
Local = true;
DestFile += ".decomp";
Desc.URI = "copy:" + FileName;
QueueURI(Desc);
- Mode = "copy";
+ SetActiveSubprocess("copy");
return;
}
@@ -1139,88 +1392,106 @@ void pkgAcqIndex::Done(string Message,unsigned long long Size,string Hash,
ErrorText = "Method gave a blank filename";
}
- if (FileName == DestFile)
- Erase = true;
- else
+   // Methods like e.g. "file:" will give us a (compressed) FileName that is
+   // not the "DestFile" we set; in this case we uncompress from the local file
+ if (FileName != DestFile)
Local = true;
+ else
+ EraseFileName = FileName;
- // The files timestamp matches, for non-local URLs reverify the local
- // file, for local file, uncompress again to ensure the hashsum is still
- // matching the Release file
- bool const IsCDROM = RealURI.substr(0,6) == "cdrom:";
- if ((Local == false || IsCDROM == true) &&
- StringToBool(LookupTag(Message,"IMS-Hit"),false) == true)
+ // we need to verify the file against the current Release file again
+   // on an if-modified-since hit to avoid a stale attack against us
+ if(StringToBool(LookupTag(Message,"IMS-Hit"),false) == true)
{
- // set destfile to the final destfile
- if(_config->FindB("Acquire::GzipIndexes",false) == false)
- {
- DestFile = _config->FindDir("Dir::State::lists") + "partial/";
- DestFile += URItoFileName(RealURI);
- }
-
- // do not reverify cdrom sources as apt-cdrom may rewrite the Packages
- // file when its doing the indexcopy
- if (IsCDROM == false)
- ReverifyAfterIMS(FileName);
+      // The file's timestamp matches, reverify by copy into partial/
+ EraseFileName = "";
+ ReverifyAfterIMS();
return;
}
- string decompProg;
- // If we enable compressed indexes, queue for hash verification
+ // If we have compressed indexes enabled, queue for hash verification
if (_config->FindB("Acquire::GzipIndexes",false))
{
- DestFile = _config->FindDir("Dir::State::lists");
- DestFile += URItoFileName(RealURI) + '.' + compExt;
-
- Decompression = true;
+ DestFile = GetPartialFileNameFromURI(RealURI + '.' + CurrentCompressionExtension);
+ EraseFileName = "";
+ Stage = STAGE_DECOMPRESS_AND_VERIFY;
Desc.URI = "copy:" + FileName;
QueueURI(Desc);
-
+ SetActiveSubprocess("copy");
return;
}
   // get the binary name for the compression type in use
- decompProg = _config->Find(string("Acquire::CompressionTypes::").append(compExt),"");
- if(decompProg.empty() == false);
- else if(compExt == "uncompressed")
+ string decompProg;
+ if(CurrentCompressionExtension == "uncompressed")
decompProg = "copy";
- else {
- _error->Error("Unsupported extension: %s", compExt.c_str());
+ else
+ decompProg = _config->Find(string("Acquire::CompressionTypes::").append(CurrentCompressionExtension),"");
+ if(decompProg.empty() == true)
+ {
+ _error->Error("Unsupported extension: %s", CurrentCompressionExtension.c_str());
return;
}
- Decompression = true;
+   // queue URI for the next stage
+ Stage = STAGE_DECOMPRESS_AND_VERIFY;
DestFile += ".decomp";
Desc.URI = decompProg + ":" + FileName;
QueueURI(Desc);
+ SetActiveSubprocess(decompProg);
+}
+ /*}}}*/
+// pkgAcqIndex::StageDecompressDone - Final verification /*{{{*/
+void pkgAcqIndex::StageDecompressDone(string Message,
+ HashStringList const &Hashes,
+ pkgAcquire::MethodConfig *Cfg)
+{
+ if (ExpectedHashes.usable() && ExpectedHashes != Hashes)
+ {
+ Desc.URI = RealURI;
+ RenameOnError(HashSumMismatch);
+ printHashSumComparision(RealURI, ExpectedHashes, Hashes);
+ Failed(Message, Cfg);
+ return;
+ }
- // FIXME: this points to a c++ string that goes out of scope
- Mode = decompProg.c_str();
+ if(!ValidateFile(DestFile))
+ {
+ RenameOnError(InvalidFormat);
+ Failed(Message, Cfg);
+ return;
+ }
+
+ // remove the compressed version of the file
+ unlink(EraseFileName.c_str());
+
+   // Done, queue for rename when the transaction is finished
+ TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename());
+
+ return;
}
/*}}}*/
// AcqIndexTrans::pkgAcqIndexTrans - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* The Translation file is added to the queue */
pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire *Owner,
- string URI,string URIDesc,string ShortDesc)
- : pkgAcqIndex(Owner, URI, URIDesc, ShortDesc, HashString(), "")
+ string URI,string URIDesc,string ShortDesc)
+ : pkgAcqIndex(Owner, URI, URIDesc, ShortDesc, HashStringList())
{
}
-pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire *Owner, IndexTarget const *Target,
- HashString const &ExpectedHash, indexRecords const *MetaIndexParser)
- : pkgAcqIndex(Owner, Target, ExpectedHash, MetaIndexParser)
+pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire *Owner,
+ pkgAcqMetaBase *TransactionManager,
+ IndexTarget const * const Target,
+ HashStringList const &ExpectedHashes,
+ indexRecords *MetaIndexParser)
+ : pkgAcqIndex(Owner, TransactionManager, Target, ExpectedHashes, MetaIndexParser)
{
}
/*}}}*/
// AcqIndexTrans::Custom600Headers - Insert custom request headers /*{{{*/
-// ---------------------------------------------------------------------
-string pkgAcqIndexTrans::Custom600Headers()
+string pkgAcqIndexTrans::Custom600Headers() const
{
- std::string const compExt = CompressionExtension.substr(0, CompressionExtension.find(' '));
- string Final = _config->FindDir("Dir::State::lists");
- Final += URItoFileName(RealURI);
- if (_config->FindB("Acquire::GzipIndexes",false))
- Final += compExt;
+ string Final = GetFinalFilename();
struct stat Buf;
if (stat(Final.c_str(),&Buf) != 0)
@@ -1229,265 +1500,447 @@ string pkgAcqIndexTrans::Custom600Headers()
}
/*}}}*/
// AcqIndexTrans::Failed - Silence failure messages for missing files /*{{{*/
-// ---------------------------------------------------------------------
-/* */
void pkgAcqIndexTrans::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
{
- size_t const nextExt = CompressionExtension.find(' ');
+ size_t const nextExt = CompressionExtensions.find(' ');
if (nextExt != std::string::npos)
{
- CompressionExtension = CompressionExtension.substr(nextExt+1);
+ CompressionExtensions = CompressionExtensions.substr(nextExt+1);
Init(RealURI, Desc.Description, Desc.ShortDesc);
Status = StatIdle;
return;
}
- if (Cnf->LocalOnly == true ||
+ Item::Failed(Message,Cnf);
+
+ // FIXME: this is used often (e.g. in pkgAcqIndexTrans) so refactor
+ if (Cnf->LocalOnly == true ||
StringToBool(LookupTag(Message,"Transient-Failure"),false) == false)
- {
+ {
// Ignore this
Status = StatDone;
- Complete = false;
- Dequeue();
- return;
}
+}
+ /*}}}*/
+// AcqMetaBase::Add - Add an item to the current Transaction		/*{{{*/
+void pkgAcqMetaBase::Add(Item *I)
+{
+ Transaction.push_back(I);
+}
+ /*}}}*/
+// AcqMetaBase::AbortTransaction - Abort the current Transaction /*{{{*/
+void pkgAcqMetaBase::AbortTransaction()
+{
+ if(_config->FindB("Debug::Acquire::Transaction", false) == true)
+ std::clog << "AbortTransaction: " << TransactionManager << std::endl;
- Item::Failed(Message,Cnf);
+ // ensure the toplevel is in error state too
+ for (std::vector<Item*>::iterator I = Transaction.begin();
+ I != Transaction.end(); ++I)
+ {
+ if(_config->FindB("Debug::Acquire::Transaction", false) == true)
+ std::clog << " Cancel: " << (*I)->DestFile << std::endl;
+ // the transaction will abort, so stop anything that is idle
+ if ((*I)->Status == pkgAcquire::Item::StatIdle)
+ (*I)->Status = pkgAcquire::Item::StatDone;
+
+ // kill failed files in partial
+ if ((*I)->Status == pkgAcquire::Item::StatError)
+ {
+ std::string const PartialFile = GetPartialFileName(flNotDir((*I)->DestFile));
+ if(FileExists(PartialFile))
+ Rename(PartialFile, PartialFile + ".FAILED");
+ }
+ }
+}
+ /*}}}*/
+// AcqMetaBase::TransactionHasError - Check for errors in Transaction /*{{{*/
+bool pkgAcqMetaBase::TransactionHasError()
+{
+ for (pkgAcquire::ItemIterator I = Transaction.begin();
+ I != Transaction.end(); ++I)
+ if((*I)->Status != pkgAcquire::Item::StatDone &&
+ (*I)->Status != pkgAcquire::Item::StatIdle)
+ return true;
+
+ return false;
+}
+ /*}}}*/
+// AcqMetaBase::CommitTransaction - Commit a transaction /*{{{*/
+void pkgAcqMetaBase::CommitTransaction()
+{
+ if(_config->FindB("Debug::Acquire::Transaction", false) == true)
+ std::clog << "CommitTransaction: " << this << std::endl;
+
+ // move new files into place *and* remove files that are not
+ // part of the transaction but are still on disk
+ for (std::vector<Item*>::iterator I = Transaction.begin();
+ I != Transaction.end(); ++I)
+ {
+ if((*I)->PartialFile != "")
+ {
+ if(_config->FindB("Debug::Acquire::Transaction", false) == true)
+ std::clog << "mv " << (*I)->PartialFile << " -> "<< (*I)->DestFile << " "
+ << (*I)->DescURI() << std::endl;
+
+ Rename((*I)->PartialFile, (*I)->DestFile);
+ ChangeOwnerAndPermissionOfFile("CommitTransaction", (*I)->DestFile.c_str(), "root", "root", 0644);
+
+ } else {
+ if(_config->FindB("Debug::Acquire::Transaction", false) == true)
+ std::clog << "rm "
+ << (*I)->DestFile
+ << " "
+ << (*I)->DescURI()
+ << std::endl;
+ unlink((*I)->DestFile.c_str());
+ }
+ // mark that this transaction is finished
+ (*I)->TransactionManager = 0;
+ }
+}
+ /*}}}*/
+// AcqMetaBase::TransactionStageCopy - Stage a file for copying /*{{{*/
+void pkgAcqMetaBase::TransactionStageCopy(Item *I,
+ const std::string &From,
+ const std::string &To)
+{
+ I->PartialFile = From;
+ I->DestFile = To;
+}
+ /*}}}*/
+// AcqMetaBase::TransactionStageRemoval - Stage a file for removal	/*{{{*/
+void pkgAcqMetaBase::TransactionStageRemoval(Item *I,
+ const std::string &FinalFile)
+{
+ I->PartialFile = "";
+ I->DestFile = FinalFile;
}
/*}}}*/
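/* Illustrative sketch, not part of the patch: the per-item contract the two
 * staging helpers above establish for CommitTransaction().  StagedItem is a
 * hypothetical stand-in for pkgAcquire::Item, reduced to the two fields the
 * transaction code reads. */
#include <string>
struct StagedItem
{
   std::string PartialFile;   // source of a staged copy, empty for a staged removal
   std::string DestFile;      // final location, or the file to be removed
};
// commit rule applied per item in CommitTransaction():
//   PartialFile != ""  ->  Rename(PartialFile, DestFile)    (TransactionStageCopy)
//   PartialFile == ""  ->  unlink(DestFile)                 (TransactionStageRemoval)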
-pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire *Owner, /*{{{*/
+// AcqMetaBase::CheckStopAuthentication - Check gpg authentication error	/*{{{*/
+bool pkgAcqMetaBase::CheckStopAuthentication(const std::string &RealURI,
+ const std::string &Message)
+{
+   // FIXME: this entire function can go away now that we disallow going to
+   // an unauthenticated state and can cleanly roll back
+
+ string Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
+
+ if(FileExists(Final))
+ {
+ Status = StatTransientNetworkError;
+ _error->Warning(_("An error occurred during the signature "
+ "verification. The repository is not updated "
+ "and the previous index files will be used. "
+ "GPG error: %s: %s\n"),
+ Desc.Description.c_str(),
+ LookupTag(Message,"Message").c_str());
+ RunScripts("APT::Update::Auth-Failure");
+ return true;
+ } else if (LookupTag(Message,"Message").find("NODATA") != string::npos) {
+ /* Invalid signature file, reject (LP: #346386) (Closes: #627642) */
+ _error->Error(_("GPG error: %s: %s"),
+ Desc.Description.c_str(),
+ LookupTag(Message,"Message").c_str());
+ Status = StatError;
+ return true;
+ } else {
+ _error->Warning(_("GPG error: %s: %s"),
+ Desc.Description.c_str(),
+ LookupTag(Message,"Message").c_str());
+ }
+ // gpgv method failed
+ ReportMirrorFailure("GPGFailure");
+ return false;
+}
+ /*}}}*/
+// AcqMetaSig::AcqMetaSig - Constructor /*{{{*/
+pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire *Owner,
+ pkgAcqMetaBase *TransactionManager,
string URI,string URIDesc,string ShortDesc,
- string MetaIndexURI, string MetaIndexURIDesc,
- string MetaIndexShortDesc,
+ string MetaIndexFile,
const vector<IndexTarget*>* IndexTargets,
indexRecords* MetaIndexParser) :
- Item(Owner), RealURI(URI), MetaIndexURI(MetaIndexURI),
- MetaIndexURIDesc(MetaIndexURIDesc), MetaIndexShortDesc(MetaIndexShortDesc),
- MetaIndexParser(MetaIndexParser), IndexTargets(IndexTargets)
+ pkgAcqMetaBase(Owner, IndexTargets, MetaIndexParser,
+ HashStringList(), TransactionManager),
+ RealURI(URI), MetaIndexFile(MetaIndexFile), URIDesc(URIDesc),
+ ShortDesc(ShortDesc)
{
DestFile = _config->FindDir("Dir::State::lists") + "partial/";
- DestFile += URItoFileName(URI);
+ DestFile += URItoFileName(RealURI);
- // remove any partial downloaded sig-file in partial/.
- // it may confuse proxies and is too small to warrant a
+   // remove any partially downloaded sig-file in partial/.
+ // it may confuse proxies and is too small to warrant a
// partial download anyway
unlink(DestFile.c_str());
+ // set the TransactionManager
+ if(_config->FindB("Debug::Acquire::Transaction", false) == true)
+ std::clog << "New pkgAcqMetaSig with TransactionManager "
+ << TransactionManager << std::endl;
+
// Create the item
Desc.Description = URIDesc;
Desc.Owner = this;
Desc.ShortDesc = ShortDesc;
Desc.URI = URI;
-
- string Final = _config->FindDir("Dir::State::lists");
- Final += URItoFileName(RealURI);
- if (RealFileExists(Final) == true)
- {
- // File was already in place. It needs to be re-downloaded/verified
- // because Release might have changed, we do give it a different
- // name than DestFile because otherwise the http method will
- // send If-Range requests and there are too many broken servers
- // out there that do not understand them
- LastGoodSig = DestFile+".reverify";
- Rename(Final,LastGoodSig);
- }
QueueURI(Desc);
}
/*}}}*/
pkgAcqMetaSig::~pkgAcqMetaSig() /*{{{*/
{
- // if the file was never queued undo file-changes done in the constructor
- if (QueueCounter == 1 && Status == StatIdle && FileSize == 0 && Complete == false &&
- LastGoodSig.empty() == false)
- {
- string const Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
- if (RealFileExists(Final) == false && RealFileExists(LastGoodSig) == true)
- Rename(LastGoodSig, Final);
- }
-
}
/*}}}*/
// pkgAcqMetaSig::Custom600Headers - Insert custom request headers /*{{{*/
// ---------------------------------------------------------------------
-/* The only header we use is the last-modified header. */
-string pkgAcqMetaSig::Custom600Headers()
+string pkgAcqMetaSig::Custom600Headers() const
{
- struct stat Buf;
- if (stat(LastGoodSig.c_str(),&Buf) != 0)
- return "\nIndex-File: true";
-
- return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+ std::string Header = GetCustom600Headers(RealURI);
+ return Header;
}
-
-void pkgAcqMetaSig::Done(string Message,unsigned long long Size,string MD5,
+ /*}}}*/
+// pkgAcqMetaSig::Done - The signature was downloaded/verified /*{{{*/
+// ---------------------------------------------------------------------
+/* The only header we use is the last-modified header. */
+void pkgAcqMetaSig::Done(string Message,unsigned long long Size,
+ HashStringList const &Hashes,
pkgAcquire::MethodConfig *Cfg)
{
- Item::Done(Message,Size,MD5,Cfg);
+ Item::Done(Message, Size, Hashes, Cfg);
- string FileName = LookupTag(Message,"Filename");
- if (FileName.empty() == true)
+ if(AuthPass == false)
{
- Status = StatError;
- ErrorText = "Method gave a blank filename";
+ if(CheckDownloadDone(Message, RealURI) == true)
+ {
+ // destfile will be modified to point to MetaIndexFile for the
+ // gpgv method, so we need to save it here
+ MetaIndexFileSignature = DestFile;
+ QueueForSignatureVerify(MetaIndexFile, MetaIndexFileSignature);
+ }
return;
}
-
- if (FileName != DestFile)
+ else
{
- // We have to copy it into place
- Local = true;
- Desc.URI = "copy:" + FileName;
- QueueURI(Desc);
- return;
+ if(CheckAuthDone(Message, RealURI) == true)
+ {
+ std::string FinalFile = _config->FindDir("Dir::State::lists");
+ FinalFile += URItoFileName(RealURI);
+ TransactionManager->TransactionStageCopy(this, MetaIndexFileSignature, FinalFile);
+ }
}
-
- Complete = true;
-
- // put the last known good file back on i-m-s hit (it will
- // be re-verified again)
- // Else do nothing, we have the new file in DestFile then
- if(StringToBool(LookupTag(Message,"IMS-Hit"),false) == true)
- Rename(LastGoodSig, DestFile);
-
- // queue a pkgAcqMetaIndex to be verified against the sig we just retrieved
- new pkgAcqMetaIndex(Owner, MetaIndexURI, MetaIndexURIDesc,
- MetaIndexShortDesc, DestFile, IndexTargets,
- MetaIndexParser);
-
}
/*}}}*/
void pkgAcqMetaSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf)/*{{{*/
{
string Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
- // if we get a network error we fail gracefully
- if(Status == StatTransientNetworkError)
- {
- Item::Failed(Message,Cnf);
- // move the sigfile back on transient network failures
- if(FileExists(LastGoodSig))
- Rename(LastGoodSig,Final);
+ // check if we need to fail at this point
+ if (AuthPass == true && CheckStopAuthentication(RealURI, Message))
+ return;
- // set the status back to , Item::Failed likes to reset it
- Status = pkgAcquire::Item::StatTransientNetworkError;
- return;
- }
+ // FIXME: meh, this is not really elegant
+ string InReleaseURI = RealURI.replace(RealURI.rfind("Release.gpg"), 12,
+ "InRelease");
+ string FinalInRelease = _config->FindDir("Dir::State::lists") + URItoFileName(InReleaseURI);
- // Delete any existing sigfile when the acquire failed
- unlink(Final.c_str());
+ if (RealFileExists(Final) || RealFileExists(FinalInRelease))
+ {
+ std::string downgrade_msg;
+ strprintf(downgrade_msg, _("The repository '%s' is no longer signed."),
+ URIDesc.c_str());
+ if(_config->FindB("Acquire::AllowDowngradeToInsecureRepositories"))
+ {
+      // meh, the user wants to take risks (we still mark the packages
+ // from this repository as unauthenticated)
+ _error->Warning("%s", downgrade_msg.c_str());
+ _error->Warning(_("This is normally not allowed, but the option "
+ "Acquire::AllowDowngradeToInsecureRepositories was "
+ "given to override it."));
+
+ } else {
+ _error->Error("%s", downgrade_msg.c_str());
+ Rename(MetaIndexFile, MetaIndexFile+".FAILED");
+ Item::Failed("Message: " + downgrade_msg, Cnf);
+ TransactionManager->AbortTransaction();
+ return;
+ }
+ }
+ else
+ _error->Warning(_("The data from '%s' is not signed. Packages "
+ "from that repository can not be authenticated."),
+ URIDesc.c_str());
+
+ // this ensures that any file in the lists/ dir is removed by the
+ // transaction
+ DestFile = GetPartialFileNameFromURI(RealURI);
+ TransactionManager->TransactionStageRemoval(this, DestFile);
+
+   // only allow going further if the user explicitly wants it
+ if(MetaIndexParser->IsAlwaysTrusted() || _config->FindB("Acquire::AllowInsecureRepositories") == true)
+ {
+ // we parse the indexes here because at this point the user wanted
+ // a repository that may potentially harm him
+ MetaIndexParser->Load(MetaIndexFile);
+ QueueIndexes(true);
+ }
+ else
+ {
+ _error->Error("Use --allow-insecure-repositories to force the update");
+ }
- // queue a pkgAcqMetaIndex with no sigfile
- new pkgAcqMetaIndex(Owner, MetaIndexURI, MetaIndexURIDesc, MetaIndexShortDesc,
- "", IndexTargets, MetaIndexParser);
+ Item::Failed(Message,Cnf);
- if (Cnf->LocalOnly == true ||
+ // FIXME: this is used often (e.g. in pkgAcqIndexTrans) so refactor
+ if (Cnf->LocalOnly == true ||
StringToBool(LookupTag(Message,"Transient-Failure"),false) == false)
- {
+ {
// Ignore this
Status = StatDone;
- Complete = false;
- Dequeue();
- return;
}
-
- Item::Failed(Message,Cnf);
}
/*}}}*/
pkgAcqMetaIndex::pkgAcqMetaIndex(pkgAcquire *Owner, /*{{{*/
+ pkgAcqMetaBase *TransactionManager,
string URI,string URIDesc,string ShortDesc,
- string SigFile,
- const vector<struct IndexTarget*>* IndexTargets,
+ string MetaIndexSigURI,string MetaIndexSigURIDesc, string MetaIndexSigShortDesc,
+ const vector<IndexTarget*>* IndexTargets,
indexRecords* MetaIndexParser) :
- Item(Owner), RealURI(URI), SigFile(SigFile), IndexTargets(IndexTargets),
- MetaIndexParser(MetaIndexParser), AuthPass(false), IMSHit(false)
+ pkgAcqMetaBase(Owner, IndexTargets, MetaIndexParser, HashStringList(),
+ TransactionManager),
+ RealURI(URI), URIDesc(URIDesc), ShortDesc(ShortDesc),
+ MetaIndexSigURI(MetaIndexSigURI), MetaIndexSigURIDesc(MetaIndexSigURIDesc),
+ MetaIndexSigShortDesc(MetaIndexSigShortDesc)
{
- DestFile = _config->FindDir("Dir::State::lists") + "partial/";
- DestFile += URItoFileName(URI);
+ if(TransactionManager == NULL)
+ {
+ this->TransactionManager = this;
+ this->TransactionManager->Add(this);
+ }
+
+ if(_config->FindB("Debug::Acquire::Transaction", false) == true)
+ std::clog << "New pkgAcqMetaIndex with TransactionManager "
+ << this->TransactionManager << std::endl;
+
+
+ Init(URIDesc, ShortDesc);
+}
+ /*}}}*/
+// pkgAcqMetaIndex::Init - Delayed constructor /*{{{*/
+void pkgAcqMetaIndex::Init(std::string URIDesc, std::string ShortDesc)
+{
+ DestFile = GetPartialFileNameFromURI(RealURI);
// Create the item
Desc.Description = URIDesc;
Desc.Owner = this;
Desc.ShortDesc = ShortDesc;
- Desc.URI = URI;
+ Desc.URI = RealURI;
+   // we expect more items
+ ExpectedAdditionalItems = IndexTargets->size();
QueueURI(Desc);
}
/*}}}*/
// pkgAcqMetaIndex::Custom600Headers - Insert custom request headers /*{{{*/
// ---------------------------------------------------------------------
-/* The only header we use is the last-modified header. */
-string pkgAcqMetaIndex::Custom600Headers()
+string pkgAcqMetaIndex::Custom600Headers() const
{
- string Final = _config->FindDir("Dir::State::lists");
- Final += URItoFileName(RealURI);
-
- struct stat Buf;
- if (stat(Final.c_str(),&Buf) != 0)
- return "\nIndex-File: true";
-
- return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+ return GetCustom600Headers(RealURI);
}
/*}}}*/
-void pkgAcqMetaIndex::Done(string Message,unsigned long long Size,string Hash, /*{{{*/
+void pkgAcqMetaIndex::Done(string Message,unsigned long long Size, /*{{{*/
+ HashStringList const &Hashes,
pkgAcquire::MethodConfig *Cfg)
{
- Item::Done(Message,Size,Hash,Cfg);
+ Item::Done(Message,Size,Hashes,Cfg);
- // MetaIndexes are done in two passes: one to download the
- // metaindex with an appropriate method, and a second to verify it
- // with the gpgv method
-
- if (AuthPass == true)
+ if(CheckDownloadDone(Message, RealURI))
{
- AuthDone(Message);
+      // we have a Release file, now download the Signature; all further
+      // verification and queueing of additional downloads is done in the
+      // pkgAcqMetaSig::Done() code
+ std::string MetaIndexFile = DestFile;
+ new pkgAcqMetaSig(Owner, TransactionManager,
+ MetaIndexSigURI, MetaIndexSigURIDesc,
+ MetaIndexSigShortDesc, MetaIndexFile, IndexTargets,
+ MetaIndexParser);
- // all cool, move Release file into place
- Complete = true;
+ string FinalFile = _config->FindDir("Dir::State::lists");
+ FinalFile += URItoFileName(RealURI);
+ TransactionManager->TransactionStageCopy(this, DestFile, FinalFile);
}
- else
- {
- RetrievalDone(Message);
- if (!Complete)
- // Still more retrieving to do
- return;
+}
+ /*}}}*/
+bool pkgAcqMetaBase::CheckAuthDone(string Message, const string &RealURI) /*{{{*/
+{
+ // At this point, the gpgv method has succeeded, so there is a
+ // valid signature from a key in the trusted keyring. We
+ // perform additional verification of its contents, and use them
+ // to verify the indexes we are about to download
- if (SigFile == "")
- {
- // There was no signature file, so we are finished. Download
- // the indexes and do only hashsum verification if possible
- MetaIndexParser->Load(DestFile);
- QueueIndexes(false);
- }
- else
- {
- // There was a signature file, so pass it to gpgv for
- // verification
-
- if (_config->FindB("Debug::pkgAcquire::Auth", false))
- std::cerr << "Metaindex acquired, queueing gpg verification ("
- << SigFile << "," << DestFile << ")\n";
- AuthPass = true;
- Desc.URI = "gpgv:" + SigFile;
- QueueURI(Desc);
- Mode = "gpgv";
- return;
- }
+ if (!MetaIndexParser->Load(DestFile))
+ {
+ Status = StatAuthError;
+ ErrorText = MetaIndexParser->ErrorText;
+ return false;
}
- if (Complete == true)
+ if (!VerifyVendor(Message, RealURI))
{
- string FinalFile = _config->FindDir("Dir::State::lists");
- FinalFile += URItoFileName(RealURI);
- if (SigFile == DestFile)
- SigFile = FinalFile;
- Rename(DestFile,FinalFile);
- chmod(FinalFile.c_str(),0644);
- DestFile = FinalFile;
+ return false;
}
+
+ if (_config->FindB("Debug::pkgAcquire::Auth", false))
+ std::cerr << "Signature verification succeeded: "
+ << DestFile << std::endl;
+
+ // Download further indexes with verification
+ //
+ // it would be really nice if we could simply do
+ // if (IMSHit == false) QueueIndexes(true)
+ // and skip the download if the Release file has not changed
+   // - but right now the list cleaner needs to be tricked
+ // to not delete all our packages/source indexes in this case
+ QueueIndexes(true);
+
+ return true;
}
/*}}}*/
-void pkgAcqMetaIndex::RetrievalDone(string Message) /*{{{*/
+// pkgAcqMetaBase::GetCustom600Headers - Get header for AcqMetaBase /*{{{*/
+// ---------------------------------------------------------------------
+string pkgAcqMetaBase::GetCustom600Headers(const string &RealURI) const
+{
+ std::string Header = "\nIndex-File: true";
+ std::string MaximumSize;
+ strprintf(MaximumSize, "\nMaximum-Size: %i",
+ _config->FindI("Acquire::MaxReleaseFileSize", 10*1000*1000));
+ Header += MaximumSize;
+
+ string FinalFile = _config->FindDir("Dir::State::lists");
+ FinalFile += URItoFileName(RealURI);
+
+ struct stat Buf;
+ if (stat(FinalFile.c_str(),&Buf) == 0)
+ Header += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+
+ return Header;
+}
+ /*}}}*/
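/* Illustrative sketch, not part of the patch: the header block
 * GetCustom600Headers() above produces, with example values -- the size limit
 * is the 10*1000*1000 default from the code, and the Last-Modified line only
 * appears when the file already exists on disk. */
static char const * const ExampleReleaseHeaders =
   "\nIndex-File: true"
   "\nMaximum-Size: 10000000"
   "\nLast-Modified: Thu, 05 Jun 2014 10:14:28 GMT";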
+// pkgAcqMetaBase::QueueForSignatureVerify /*{{{*/
+void pkgAcqMetaBase::QueueForSignatureVerify(const std::string &MetaIndexFile,
+ const std::string &MetaIndexFileSignature)
+{
+ AuthPass = true;
+ Desc.URI = "gpgv:" + MetaIndexFileSignature;
+ DestFile = MetaIndexFile;
+ QueueURI(Desc);
+ SetActiveSubprocess("gpgv");
+}
+ /*}}}*/
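/* Illustrative sketch, not part of the patch: the two-pass flow that
 * QueueForSignatureVerify() above belongs to, condensed from the Done()
 * implementations in this file. */
// pass 1 (AuthPass == false): CheckDownloadDone() succeeded
//    -> Desc.URI = "gpgv:<signature>", DestFile = <metaindex>, re-queue
// pass 2 (AuthPass == true):  CheckAuthDone() loads the verified Release file,
//    runs VerifyVendor() and QueueIndexes(true), and the caller stages the
//    result with TransactionManager->TransactionStageCopy()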
+// pkgAcqMetaBase::CheckDownloadDone /*{{{*/
+bool pkgAcqMetaBase::CheckDownloadDone(const std::string &Message,
+ const std::string &RealURI)
{
// We have just finished downloading a Release file (it is not
// verified yet)
@@ -1497,7 +1950,7 @@ void pkgAcqMetaIndex::RetrievalDone(string Message) /*{{{*/
{
Status = StatError;
ErrorText = "Method gave a blank filename";
- return;
+ return false;
}
if (FileName != DestFile)
@@ -1505,7 +1958,7 @@ void pkgAcqMetaIndex::RetrievalDone(string Message) /*{{{*/
Local = true;
Desc.URI = "copy:" + FileName;
QueueURI(Desc);
- return;
+ return false;
}
// make sure to verify against the right file on I-M-S hit
@@ -1514,95 +1967,17 @@ void pkgAcqMetaIndex::RetrievalDone(string Message) /*{{{*/
{
string FinalFile = _config->FindDir("Dir::State::lists");
FinalFile += URItoFileName(RealURI);
- if (SigFile == DestFile)
- {
- SigFile = FinalFile;
- // constructor of pkgAcqMetaClearSig moved it out of the way,
- // now move it back in on IMS hit for the 'old' file
- string const OldClearSig = DestFile + ".reverify";
- if (RealFileExists(OldClearSig) == true)
- Rename(OldClearSig, FinalFile);
- }
DestFile = FinalFile;
}
- Complete = true;
-}
- /*}}}*/
-void pkgAcqMetaIndex::AuthDone(string Message) /*{{{*/
-{
- // At this point, the gpgv method has succeeded, so there is a
- // valid signature from a key in the trusted keyring. We
- // perform additional verification of its contents, and use them
- // to verify the indexes we are about to download
-
- if (!MetaIndexParser->Load(DestFile))
- {
- Status = StatAuthError;
- ErrorText = MetaIndexParser->ErrorText;
- return;
- }
-
- if (!VerifyVendor(Message))
- {
- return;
- }
-
- if (_config->FindB("Debug::pkgAcquire::Auth", false))
- std::cerr << "Signature verification succeeded: "
- << DestFile << std::endl;
- // do not trust any previously unverified content that we may have
- string LastGoodSigFile = _config->FindDir("Dir::State::lists").append("partial/").append(URItoFileName(RealURI));
- if (DestFile != SigFile)
- LastGoodSigFile.append(".gpg");
- LastGoodSigFile.append(".reverify");
- if(IMSHit == false && RealFileExists(LastGoodSigFile) == false)
- {
- for (vector <struct IndexTarget*>::const_iterator Target = IndexTargets->begin();
- Target != IndexTargets->end();
- ++Target)
- {
- // remove old indexes
- std::string index = _config->FindDir("Dir::State::lists") +
- URItoFileName((*Target)->URI);
- unlink(index.c_str());
- // and also old gzipindexes
- std::vector<std::string> types = APT::Configuration::getCompressionTypes();
- for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t)
- {
- index += '.' + (*t);
- unlink(index.c_str());
- }
- }
- }
-
-
- // Download further indexes with verification
- QueueIndexes(true);
-
- // is it a clearsigned MetaIndex file?
- if (DestFile == SigFile)
- return;
+ // set Item to complete as the remaining work is all local (verify etc)
+ Complete = true;
- // Done, move signature file into position
- string VerifiedSigFile = _config->FindDir("Dir::State::lists") +
- URItoFileName(RealURI) + ".gpg";
- Rename(SigFile,VerifiedSigFile);
- chmod(VerifiedSigFile.c_str(),0644);
+ return true;
}
/*}}}*/
-void pkgAcqMetaIndex::QueueIndexes(bool verify) /*{{{*/
+void pkgAcqMetaBase::QueueIndexes(bool verify) /*{{{*/
{
-#if 0
- /* Reject invalid, existing Release files (LP: #346386) (Closes: #627642)
- * FIXME: Disabled; it breaks unsigned repositories without hashes */
- if (!verify && FileExists(DestFile) && !MetaIndexParser->Load(DestFile))
- {
- Status = StatError;
- ErrorText = MetaIndexParser->ErrorText;
- return;
- }
-#endif
bool transInRelease = false;
{
std::vector<std::string> const keys = MetaIndexParser->MetaKeys();
@@ -1615,11 +1990,13 @@ void pkgAcqMetaIndex::QueueIndexes(bool verify) /*{{{*/
}
}
- for (vector <struct IndexTarget*>::const_iterator Target = IndexTargets->begin();
+ // at this point the real Items are loaded in the fetcher
+ ExpectedAdditionalItems = 0;
+ for (vector <IndexTarget*>::const_iterator Target = IndexTargets->begin();
Target != IndexTargets->end();
++Target)
{
- HashString ExpectedIndexHash;
+ HashStringList ExpectedIndexHashes;
const indexRecords::checkSum *Record = MetaIndexParser->Lookup((*Target)->MetaKey);
bool compressedAvailable = false;
if (Record == NULL)
@@ -1643,14 +2020,16 @@ void pkgAcqMetaIndex::QueueIndexes(bool verify) /*{{{*/
}
else
{
- ExpectedIndexHash = Record->Hash;
+ ExpectedIndexHashes = Record->Hashes;
if (_config->FindB("Debug::pkgAcquire::Auth", false))
{
- std::cerr << "Queueing: " << (*Target)->URI << std::endl;
- std::cerr << "Expected Hash: " << ExpectedIndexHash.toStr() << std::endl;
+ std::cerr << "Queueing: " << (*Target)->URI << std::endl
+ << "Expected Hash:" << std::endl;
+ for (HashStringList::const_iterator hs = ExpectedIndexHashes.begin(); hs != ExpectedIndexHashes.end(); ++hs)
+ std::cerr << "\t- " << hs->toStr() << std::endl;
std::cerr << "For: " << Record->MetaKeyFilename << std::endl;
}
- if (verify == true && ExpectedIndexHash.empty() == true && (*Target)->IsOptional() == false)
+ if (verify == true && ExpectedIndexHashes.empty() == true && (*Target)->IsOptional() == false)
{
Status = StatAuthError;
strprintf(ErrorText, _("Unable to find hash sum for '%s' in Release file"), (*Target)->MetaKey.c_str());
@@ -1660,17 +2039,13 @@ void pkgAcqMetaIndex::QueueIndexes(bool verify) /*{{{*/
if ((*Target)->IsOptional() == true)
{
- if ((*Target)->IsSubIndex() == true)
- new pkgAcqSubIndex(Owner, (*Target)->URI, (*Target)->Description,
- (*Target)->ShortDesc, ExpectedIndexHash);
- else if (transInRelease == false || Record != NULL || compressedAvailable == true)
+ if (transInRelease == false || Record != NULL || compressedAvailable == true)
{
if (_config->FindB("Acquire::PDiffs",true) == true && transInRelease == true &&
MetaIndexParser->Exists((*Target)->MetaKey + ".diff/Index") == true)
- new pkgAcqDiffIndex(Owner, (*Target)->URI, (*Target)->Description,
- (*Target)->ShortDesc, ExpectedIndexHash);
+ new pkgAcqDiffIndex(Owner, TransactionManager, *Target, ExpectedIndexHashes, MetaIndexParser);
else
- new pkgAcqIndexTrans(Owner, *Target, ExpectedIndexHash, MetaIndexParser);
+ new pkgAcqIndexTrans(Owner, TransactionManager, *Target, ExpectedIndexHashes, MetaIndexParser);
}
continue;
}
@@ -1681,14 +2056,13 @@ void pkgAcqMetaIndex::QueueIndexes(bool verify) /*{{{*/
      instead, but passing the required info to it is too much hassle */
if(_config->FindB("Acquire::PDiffs",true) == true && (verify == false ||
MetaIndexParser->Exists((*Target)->MetaKey + ".diff/Index") == true))
- new pkgAcqDiffIndex(Owner, (*Target)->URI, (*Target)->Description,
- (*Target)->ShortDesc, ExpectedIndexHash);
+ new pkgAcqDiffIndex(Owner, TransactionManager, *Target, ExpectedIndexHashes, MetaIndexParser);
else
- new pkgAcqIndex(Owner, *Target, ExpectedIndexHash, MetaIndexParser);
+ new pkgAcqIndex(Owner, TransactionManager, *Target, ExpectedIndexHashes, MetaIndexParser);
}
}
/*}}}*/
-bool pkgAcqMetaIndex::VerifyVendor(string Message) /*{{{*/
+bool pkgAcqMetaBase::VerifyVendor(string Message, const string &RealURI)/*{{{*/
{
string::size_type pos;
@@ -1765,147 +2139,174 @@ bool pkgAcqMetaIndex::VerifyVendor(string Message) /*{{{*/
return true;
}
/*}}}*/
-// pkgAcqMetaIndex::Failed - no Release file present or no signature file present /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-void pkgAcqMetaIndex::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)
+// pkgAcqMetaIndex::Failed - no Release file present /*{{{*/
+void pkgAcqMetaIndex::Failed(string Message,
+ pkgAcquire::MethodConfig * Cnf)
{
- if (AuthPass == true)
- {
- // gpgv method failed, if we have a good signature
- string LastGoodSigFile = _config->FindDir("Dir::State::lists").append("partial/").append(URItoFileName(RealURI));
- if (DestFile != SigFile)
- LastGoodSigFile.append(".gpg");
- LastGoodSigFile.append(".reverify");
+ pkgAcquire::Item::Failed(Message, Cnf);
+ Status = StatDone;
- if(FileExists(LastGoodSigFile))
- {
- string VerifiedSigFile = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
- if (DestFile != SigFile)
- VerifiedSigFile.append(".gpg");
- Rename(LastGoodSigFile, VerifiedSigFile);
- Status = StatTransientNetworkError;
- _error->Warning(_("An error occurred during the signature "
- "verification. The repository is not updated "
- "and the previous index files will be used. "
- "GPG error: %s: %s\n"),
- Desc.Description.c_str(),
- LookupTag(Message,"Message").c_str());
- RunScripts("APT::Update::Auth-Failure");
- return;
- } else if (LookupTag(Message,"Message").find("NODATA") != string::npos) {
- /* Invalid signature file, reject (LP: #346386) (Closes: #627642) */
- _error->Error(_("GPG error: %s: %s"),
- Desc.Description.c_str(),
- LookupTag(Message,"Message").c_str());
- return;
- } else {
- _error->Warning(_("GPG error: %s: %s"),
- Desc.Description.c_str(),
- LookupTag(Message,"Message").c_str());
- }
- // gpgv method failed
- ReportMirrorFailure("GPGFailure");
- }
+ string FinalFile = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
- /* Always move the meta index, even if gpgv failed. This ensures
- * that PackageFile objects are correctly filled in */
- if (FileExists(DestFile)) {
- string FinalFile = _config->FindDir("Dir::State::lists");
- FinalFile += URItoFileName(RealURI);
- /* InRelease files become Release files, otherwise
- * they would be considered as trusted later on */
- if (SigFile == DestFile) {
- RealURI = RealURI.replace(RealURI.rfind("InRelease"), 9,
- "Release");
- FinalFile = FinalFile.replace(FinalFile.rfind("InRelease"), 9,
- "Release");
- SigFile = FinalFile;
- }
- Rename(DestFile,FinalFile);
- chmod(FinalFile.c_str(),0644);
+ _error->Warning(_("The repository '%s' does not have a Release file. "
+ "This is deprecated, please contact the owner of the "
+ "repository."), URIDesc.c_str());
- DestFile = FinalFile;
- }
-
- // No Release file was present, or verification failed, so fall
+ // No Release file was present so fall
// back to queueing Packages files without verification
- QueueIndexes(false);
+   // only allow going further if the user explicitly wants it
+ if(MetaIndexParser->IsAlwaysTrusted() || _config->FindB("Acquire::AllowInsecureRepositories") == true)
+ {
+      // Done, queue for rename when the transaction is finished
+ if (FileExists(DestFile))
+ TransactionManager->TransactionStageCopy(this, DestFile, FinalFile);
+
+ // queue without any kind of hashsum support
+ QueueIndexes(false);
+ } else {
+      // warn if the repository is unsigned
+ _error->Error("Use --allow-insecure-repositories to force the update");
+ TransactionManager->AbortTransaction();
+ Status = StatError;
+ return;
+ }
+}
+ /*}}}*/
+void pkgAcqMetaIndex::Finished() /*{{{*/
+{
+ if(_config->FindB("Debug::Acquire::Transaction", false) == true)
+ std::clog << "Finished: " << DestFile <<std::endl;
+ if(TransactionManager != NULL &&
+ TransactionManager->TransactionHasError() == false)
+ TransactionManager->CommitTransaction();
}
/*}}}*/
pkgAcqMetaClearSig::pkgAcqMetaClearSig(pkgAcquire *Owner, /*{{{*/
string const &URI, string const &URIDesc, string const &ShortDesc,
string const &MetaIndexURI, string const &MetaIndexURIDesc, string const &MetaIndexShortDesc,
string const &MetaSigURI, string const &MetaSigURIDesc, string const &MetaSigShortDesc,
- const vector<struct IndexTarget*>* IndexTargets,
+ const vector<IndexTarget*>* IndexTargets,
indexRecords* MetaIndexParser) :
- pkgAcqMetaIndex(Owner, URI, URIDesc, ShortDesc, "", IndexTargets, MetaIndexParser),
- MetaIndexURI(MetaIndexURI), MetaIndexURIDesc(MetaIndexURIDesc), MetaIndexShortDesc(MetaIndexShortDesc),
- MetaSigURI(MetaSigURI), MetaSigURIDesc(MetaSigURIDesc), MetaSigShortDesc(MetaSigShortDesc)
+ pkgAcqMetaIndex(Owner, NULL, URI, URIDesc, ShortDesc, MetaSigURI, MetaSigURIDesc,MetaSigShortDesc, IndexTargets, MetaIndexParser),
+ MetaIndexURI(MetaIndexURI), MetaIndexURIDesc(MetaIndexURIDesc), MetaIndexShortDesc(MetaIndexShortDesc),
+ MetaSigURI(MetaSigURI), MetaSigURIDesc(MetaSigURIDesc), MetaSigShortDesc(MetaSigShortDesc)
{
- SigFile = DestFile;
+ // index targets + (worst case:) Release/Release.gpg
+ ExpectedAdditionalItems = IndexTargets->size() + 2;
- // keep the old InRelease around in case of transistent network errors
- string const Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
- if (RealFileExists(Final) == true)
- {
- string const LastGoodSig = DestFile + ".reverify";
- Rename(Final,LastGoodSig);
- }
}
/*}}}*/
pkgAcqMetaClearSig::~pkgAcqMetaClearSig() /*{{{*/
{
- // if the file was never queued undo file-changes done in the constructor
- if (QueueCounter == 1 && Status == StatIdle && FileSize == 0 && Complete == false)
- {
- string const Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
- string const LastGoodSig = DestFile + ".reverify";
- if (RealFileExists(Final) == false && RealFileExists(LastGoodSig) == true)
- Rename(LastGoodSig, Final);
- }
}
/*}}}*/
// pkgAcqMetaClearSig::Custom600Headers - Insert custom request headers /*{{{*/
// ---------------------------------------------------------------------
-// FIXME: this can go away once the InRelease file is used widely
-string pkgAcqMetaClearSig::Custom600Headers()
+string pkgAcqMetaClearSig::Custom600Headers() const
{
- string Final = _config->FindDir("Dir::State::lists");
- Final += URItoFileName(RealURI);
+ string Header = GetCustom600Headers(RealURI);
+ Header += "\nFail-Ignore: true";
+ return Header;
+}
+ /*}}}*/
+// pkgAcqMetaClearSig::Done - We got a file /*{{{*/
+// ---------------------------------------------------------------------
+void pkgAcqMetaClearSig::Done(std::string Message,unsigned long long /*Size*/,
+ HashStringList const &/*Hashes*/,
+ pkgAcquire::MethodConfig *Cnf)
+{
+   // if we expect a ClearTextSignature (InRelease), ensure that
+   // this is what we got and, if not, fail so that a
+   // Release/Release.gpg pair is queued instead, see #346386
+ if (FileExists(DestFile) && !StartsWithGPGClearTextSignature(DestFile))
+ {
+ pkgAcquire::Item::Failed(Message, Cnf);
+ RenameOnError(NotClearsigned);
+ TransactionManager->AbortTransaction();
+ return;
+ }
- struct stat Buf;
- if (stat(Final.c_str(),&Buf) != 0)
+ if(AuthPass == false)
{
- Final = DestFile + ".reverify";
- if (stat(Final.c_str(),&Buf) != 0)
- return "\nIndex-File: true\nFail-Ignore: true\n";
+ if(CheckDownloadDone(Message, RealURI) == true)
+ QueueForSignatureVerify(DestFile, DestFile);
+ return;
}
+ else
+ {
+ if(CheckAuthDone(Message, RealURI) == true)
+ {
+ string FinalFile = _config->FindDir("Dir::State::lists");
+ FinalFile += URItoFileName(RealURI);
- return "\nIndex-File: true\nFail-Ignore: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+ // queue for copy in place
+ TransactionManager->TransactionStageCopy(this, DestFile, FinalFile);
+ }
+ }
}
/*}}}*/
void pkgAcqMetaClearSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/
{
+ Item::Failed(Message, Cnf);
+
+   // we failed, so we will not get additional items from this method
+ ExpectedAdditionalItems = 0;
+
if (AuthPass == false)
{
- // Remove the 'old' InRelease file if we try Release.gpg now as otherwise
- // the file will stay around and gives a false-auth impression (CVE-2012-0214)
+ // Queue the 'old' InRelease file for removal if we try Release.gpg
+      // as otherwise the file would stay around and give a false-auth
+ // impression (CVE-2012-0214)
string FinalFile = _config->FindDir("Dir::State::lists");
FinalFile.append(URItoFileName(RealURI));
- if (FileExists(FinalFile))
- unlink(FinalFile.c_str());
+ TransactionManager->TransactionStageRemoval(this, FinalFile);
+ Status = StatDone;
- new pkgAcqMetaSig(Owner,
- MetaSigURI, MetaSigURIDesc, MetaSigShortDesc,
+ new pkgAcqMetaIndex(Owner, TransactionManager,
MetaIndexURI, MetaIndexURIDesc, MetaIndexShortDesc,
+ MetaSigURI, MetaSigURIDesc, MetaSigShortDesc,
IndexTargets, MetaIndexParser);
- if (Cnf->LocalOnly == true ||
- StringToBool(LookupTag(Message, "Transient-Failure"), false) == false)
- Dequeue();
}
else
- pkgAcqMetaIndex::Failed(Message, Cnf);
+ {
+ if(CheckStopAuthentication(RealURI, Message))
+ return;
+
+ _error->Warning(_("The data from '%s' is not signed. Packages "
+ "from that repository can not be authenticated."),
+ URIDesc.c_str());
+
+ // No Release file was present, or verification failed, so fall
+ // back to queueing Packages files without verification
+      // only allow going further if the user explicitly wants it
+ if(MetaIndexParser->IsAlwaysTrusted() || _config->FindB("Acquire::AllowInsecureRepositories") == true)
+ {
+ Status = StatDone;
+
+ /* Always move the meta index, even if gpgv failed. This ensures
+ * that PackageFile objects are correctly filled in */
+ if (FileExists(DestFile))
+ {
+ string FinalFile = _config->FindDir("Dir::State::lists");
+ FinalFile += URItoFileName(RealURI);
+ /* InRelease files become Release files, otherwise
+ * they would be considered trusted later on */
+ RealURI = RealURI.replace(RealURI.rfind("InRelease"), 9,
+ "Release");
+ FinalFile = FinalFile.replace(FinalFile.rfind("InRelease"), 9,
+ "Release");
+
+ // Done, queue for rename on transaction finished
+ TransactionManager->TransactionStageCopy(this, DestFile, FinalFile);
+ }
+ QueueIndexes(false);
+ } else {
+ // refuse to continue: the repository is unsigned and the user did not allow it
+ _error->Error("Use --allow-insecure-repositories to force the update");
+ TransactionManager->AbortTransaction();
+ Status = StatError;
+ }
+ }
}
/*}}}*/
// AcqArchive::AcqArchive - Constructor /*{{{*/
@@ -1915,7 +2316,7 @@ void pkgAcqMetaClearSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*
pkgAcqArchive::pkgAcqArchive(pkgAcquire *Owner,pkgSourceList *Sources,
pkgRecords *Recs,pkgCache::VerIterator const &Version,
string &StoreFilename) :
- Item(Owner), Version(Version), Sources(Sources), Recs(Recs),
+ Item(Owner, HashStringList()), Version(Version), Sources(Sources), Recs(Recs),
StoreFilename(StoreFilename), Vf(Version.FileList()),
Trusted(false)
{
@@ -2000,7 +2401,6 @@ pkgAcqArchive::pkgAcqArchive(pkgAcquire *Owner,pkgSourceList *Sources,
checking later. */
bool pkgAcqArchive::QueueNext()
{
- string const ForceHash = _config->Find("Acquire::ForceHash");
for (; Vf.end() == false; ++Vf)
{
// Ignore not source sources
@@ -2021,31 +2421,10 @@ bool pkgAcqArchive::QueueNext()
pkgRecords::Parser &Parse = Recs->Lookup(Vf);
if (_error->PendingError() == true)
return false;
-
+
string PkgFile = Parse.FileName();
- if (ForceHash.empty() == false)
- {
- if(stringcasecmp(ForceHash, "sha512") == 0)
- ExpectedHash = HashString("SHA512", Parse.SHA512Hash());
- else if(stringcasecmp(ForceHash, "sha256") == 0)
- ExpectedHash = HashString("SHA256", Parse.SHA256Hash());
- else if (stringcasecmp(ForceHash, "sha1") == 0)
- ExpectedHash = HashString("SHA1", Parse.SHA1Hash());
- else
- ExpectedHash = HashString("MD5Sum", Parse.MD5Hash());
- }
- else
- {
- string Hash;
- if ((Hash = Parse.SHA512Hash()).empty() == false)
- ExpectedHash = HashString("SHA512", Hash);
- else if ((Hash = Parse.SHA256Hash()).empty() == false)
- ExpectedHash = HashString("SHA256", Hash);
- else if ((Hash = Parse.SHA1Hash()).empty() == false)
- ExpectedHash = HashString("SHA1", Hash);
- else
- ExpectedHash = HashString("MD5Sum", Parse.MD5Hash());
- }
+ ExpectedHashes = Parse.Hashes();
+
if (PkgFile.empty() == true)
return _error->Error(_("The package index files are corrupted. No Filename: "
"field for package %s."),
@@ -2105,7 +2484,11 @@ bool pkgAcqArchive::QueueNext()
if ((unsigned long long)Buf.st_size > Version->Size)
unlink(DestFile.c_str());
else
+ {
PartialSize = Buf.st_size;
+ std::string SandboxUser = _config->Find("APT::Sandbox::User");
+ ChangeOwnerAndPermissionOfFile("pkgAcqArchive::QueueNext",DestFile.c_str(), SandboxUser.c_str(), "root", 0600);
+ }
}
// Disables download of archives - useful if no real installation follows,
@@ -2132,10 +2515,10 @@ bool pkgAcqArchive::QueueNext()
// AcqArchive::Done - Finished fetching /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgAcqArchive::Done(string Message,unsigned long long Size,string CalcHash,
+void pkgAcqArchive::Done(string Message,unsigned long long Size, HashStringList const &CalcHashes,
pkgAcquire::MethodConfig *Cfg)
{
- Item::Done(Message,Size,CalcHash,Cfg);
+ Item::Done(Message, Size, CalcHashes, Cfg);
// Check the size
if (Size != Version->Size)
@@ -2143,11 +2526,12 @@ void pkgAcqArchive::Done(string Message,unsigned long long Size,string CalcHash,
RenameOnError(SizeMismatch);
return;
}
-
- // Check the hash
- if(ExpectedHash.toStr() != CalcHash)
+
+ // FIXME: could this empty() check impose *any* sort of security issue?
+ if(ExpectedHashes.usable() && ExpectedHashes != CalcHashes)
{
RenameOnError(HashSumMismatch);
+ printHashSumComparision(DestFile, ExpectedHashes, CalcHashes);
return;
}
@@ -2160,21 +2544,20 @@ void pkgAcqArchive::Done(string Message,unsigned long long Size,string CalcHash,
return;
}
- Complete = true;
-
// Reference filename
if (FileName != DestFile)
{
StoreFilename = DestFile = FileName;
Local = true;
+ Complete = true;
return;
}
-
+
// Done, move it into position
string FinalFile = _config->FindDir("Dir::Cache::Archives");
FinalFile += flNotDir(StoreFilename);
Rename(DestFile,FinalFile);
-
+ ChangeOwnerAndPermissionOfFile("pkgAcqArchive::Done", FinalFile.c_str(), "root", "root", 0644);
StoreFilename = DestFile = FinalFile;
Complete = true;
}
@@ -2219,7 +2602,7 @@ void pkgAcqArchive::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
/*}}}*/
// AcqArchive::IsTrusted - Determine whether this archive comes from a trusted source /*{{{*/
// ---------------------------------------------------------------------
-APT_PURE bool pkgAcqArchive::IsTrusted()
+APT_PURE bool pkgAcqArchive::IsTrusted() const
{
return Trusted;
}
@@ -2238,11 +2621,11 @@ void pkgAcqArchive::Finished()
// AcqFile::pkgAcqFile - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* The file is added to the queue */
-pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI,string Hash,
+pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI, HashStringList const &Hashes,
unsigned long long Size,string Dsc,string ShortDesc,
const string &DestDir, const string &DestFilename,
bool IsIndexFile) :
- Item(Owner), ExpectedHash(Hash), IsIndexFile(IsIndexFile)
+ Item(Owner, Hashes), IsIndexFile(IsIndexFile)
{
Retries = _config->FindI("Acquire::Retries",0);
@@ -2270,7 +2653,11 @@ pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI,string Hash,
if ((Size > 0) && (unsigned long long)Buf.st_size > Size)
unlink(DestFile.c_str());
else
+ {
PartialSize = Buf.st_size;
+ std::string SandboxUser = _config->Find("APT::Sandbox::User");
+ ChangeOwnerAndPermissionOfFile("pkgAcqFile", DestFile.c_str(), SandboxUser.c_str(), "root", 0600);
+ }
}
QueueURI(Desc);
@@ -2279,15 +2666,16 @@ pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI,string Hash,
// AcqFile::Done - Item downloaded OK /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgAcqFile::Done(string Message,unsigned long long Size,string CalcHash,
+void pkgAcqFile::Done(string Message,unsigned long long Size,HashStringList const &CalcHashes,
pkgAcquire::MethodConfig *Cnf)
{
- Item::Done(Message,Size,CalcHash,Cnf);
+ Item::Done(Message,Size,CalcHashes,Cnf);
// Check the hash
- if(!ExpectedHash.empty() && ExpectedHash.toStr() != CalcHash)
+ if(ExpectedHashes.usable() && ExpectedHashes != CalcHashes)
{
RenameOnError(HashSumMismatch);
+ printHashSumComparision(DestFile, ExpectedHashes, CalcHashes);
return;
}
@@ -2358,7 +2746,7 @@ void pkgAcqFile::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
// AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/
// ---------------------------------------------------------------------
/* The only header we use is the last-modified header. */
-string pkgAcqFile::Custom600Headers()
+string pkgAcqFile::Custom600Headers() const
{
if (IsIndexFile)
return "\nIndex-File: true";
diff --git a/apt-pkg/acquire-item.h b/apt-pkg/acquire-item.h
index 384c5ee2b..68d5a01ce 100644
--- a/apt-pkg/acquire-item.h
+++ b/apt-pkg/acquire-item.h
@@ -46,6 +46,8 @@
class indexRecords;
class pkgRecords;
class pkgSourceList;
+class IndexTarget;
+class pkgAcqMetaBase;
/** \brief Represents the process by which a pkgAcquire object should {{{
* retrieve a file or a collection of files.
@@ -61,6 +63,8 @@ class pkgSourceList;
*/
class pkgAcquire::Item : public WeakPointable
{
+ void *d;
+
protected:
/** \brief The acquire object with which this item is associated. */
@@ -71,12 +75,11 @@ class pkgAcquire::Item : public WeakPointable
* \param Item Metadata about this item (its URI and
* description).
*/
- inline void QueueURI(ItemDesc &Item)
- {Owner->Enqueue(Item);};
+ void QueueURI(ItemDesc &Item);
/** \brief Remove this item from its owner's queue. */
- inline void Dequeue() {Owner->Dequeue(this);};
-
+ void Dequeue();
+
/** \brief Rename a file without modifying its timestamp.
*
* Many item methods call this as their final action.
@@ -86,7 +89,7 @@ class pkgAcquire::Item : public WeakPointable
* \param To The new name of \a From. If \a To exists it will be
* overwritten.
*/
- void Rename(std::string From,std::string To);
+ bool Rename(std::string From,std::string To);
public:
@@ -115,7 +118,7 @@ class pkgAcquire::Item : public WeakPointable
 /** \brief The item could not be downloaded because of
* a transient network error (e.g. network down)
*/
- StatTransientNetworkError
+ StatTransientNetworkError,
} Status;
/** \brief Contains a textual description of the error encountered
@@ -132,7 +135,12 @@ class pkgAcquire::Item : public WeakPointable
/** \brief If not \b NULL, contains the name of a subprocess that
* is operating on this object (for instance, "gzip" or "gpgv").
*/
- const char *Mode;
+ APT_DEPRECATED const char *Mode;
+
+ /** \brief contains the name of the subprocess that is operating on this object
+ * (for instance, "gzip", "rred" or "gpgv"). This obsoletes #Mode above,
+ * as a std::string can manage the lifetime of the included string properly. */
+ std::string ActiveSubprocess;
/** \brief A client-supplied unique identifier.
*
@@ -166,12 +174,28 @@ class pkgAcquire::Item : public WeakPointable
* \sa pkgAcquire
*/
unsigned int QueueCounter;
+
+ /** \brief The transaction manager (a pkgAcqMetaBase) this item belongs to, if any */
+ pkgAcqMetaBase *TransactionManager;
+
+ /** \brief The number of additional fetch items that are expected
+ * once this item is done.
+ *
+ * Some items like pkgAcqMeta{Index,Sig} will queue additional
+ * items. This variable can be set by such items if they know
+ * in advance how many items to expect, to allow more accurate
+ * progress reporting.
+ */
+ unsigned int ExpectedAdditionalItems;
/** \brief The name of the file into which the retrieved object
* will be written.
*/
std::string DestFile;
+ /** \brief storage name until the transaction is finished */
+ std::string PartialFile;
+
/** \brief Invoked by the acquire worker when the object couldn't
* be fetched.
*
@@ -201,12 +225,12 @@ class pkgAcquire::Item : public WeakPointable
* \param Message Data from the acquire method. Use LookupTag()
* to parse it.
* \param Size The size of the object that was fetched.
- * \param Hash The HashSum of the object that was fetched.
+ * \param Hashes The HashSums of the object that was fetched.
* \param Cnf The method via which the object was fetched.
*
* \sa pkgAcqMethod
*/
- virtual void Done(std::string Message,unsigned long long Size,std::string Hash,
+ virtual void Done(std::string Message, unsigned long long Size, HashStringList const &Hashes,
pkgAcquire::MethodConfig *Cnf);
/** \brief Invoked when the worker starts to fetch this object.
@@ -228,7 +252,7 @@ class pkgAcquire::Item : public WeakPointable
* line, so they should (if nonempty) have a leading newline and
* no trailing newline.
*/
- virtual std::string Custom600Headers() {return std::string();};
+ virtual std::string Custom600Headers() const {return std::string();};
/** \brief A "descriptive" URI-like string.
*
@@ -244,20 +268,20 @@ class pkgAcquire::Item : public WeakPointable
/** \brief Invoked by the worker when the download is completely done. */
virtual void Finished() {};
- /** \brief HashSum
+ /** \brief HashSums
*
- * \return the HashSum of this object, if applicable; otherwise, an
- * empty string.
+ * \return the HashSums of this object, if applicable; otherwise, an
+ * empty list.
*/
- virtual std::string HashSum() {return std::string();};
+ HashStringList HashSums() const {return ExpectedHashes;};
+ std::string HashSum() const {HashStringList const hashes = HashSums(); HashString const * const hs = hashes.find(NULL); return hs != NULL ? hs->toStr() : ""; };
/** \return the acquire process with which this item is associated. */
- pkgAcquire *GetOwner() {return Owner;};
+ pkgAcquire *GetOwner() const {return Owner;};
/** \return \b true if this object is being fetched from a trusted source. */
- virtual bool IsTrusted() {return false;};
+ virtual bool IsTrusted() const {return false;};
- // report mirror problems
/** \brief Report mirror problem
*
* This allows reporting mirror failures back to a centralized
@@ -267,6 +291,11 @@ class pkgAcquire::Item : public WeakPointable
*/
void ReportMirrorFailure(std::string FailCode);
+ /** \brief Set the name of the current active subprocess
+ *
+ * See also #ActiveSubprocess
+ */
+ void SetActiveSubprocess(const std::string &subprocess);
/** \brief Initialize an item.
*
@@ -274,12 +303,12 @@ class pkgAcquire::Item : public WeakPointable
* process, but does not place it into any fetch queues (you must
* manually invoke QueueURI() to do so).
*
- * Initializes all fields of the item other than Owner to 0,
- * false, or the empty string.
- *
* \param Owner The new owner of this item.
+ * \param ExpectedHashes The expected hashes of the file represented by this item
*/
- Item(pkgAcquire *Owner);
+ Item(pkgAcquire *Owner,
+ HashStringList const &ExpectedHashes=HashStringList(),
+ pkgAcqMetaBase *TransactionManager=NULL);
/** \brief Remove this item from its owner's queue by invoking
* pkgAcquire::Remove.
@@ -291,7 +320,9 @@ class pkgAcquire::Item : public WeakPointable
enum RenameOnErrorState {
HashSumMismatch,
SizeMismatch,
- InvalidFormat
+ InvalidFormat,
+ SignatureError,
+ NotClearsigned,
};
/** \brief Rename failed file and set error
@@ -299,6 +330,12 @@ class pkgAcquire::Item : public WeakPointable
 * \param state representing the error we encountered
*/
bool RenameOnError(RenameOnErrorState const state);
+
+ /** \brief The HashSums the item is supposed to have once the download is done */
+ HashStringList ExpectedHashes;
+
+ /** \brief The item that is currently being downloaded. */
+ pkgAcquire::ItemDesc Desc;
};
/*}}}*/
/** \brief Information about an index patch (aka diff). */ /*{{{*/
@@ -306,55 +343,301 @@ struct DiffInfo {
/** The filename of the diff. */
std::string file;
- /** The sha1 hash of the diff. */
- std::string sha1;
+ /** The hashes of the diff */
+ HashStringList result_hashes;
+
+ /** The hashes of the file after the diff is applied */
+ HashStringList patch_hashes;
+
+ /** The size of the file after the diff is applied */
+ unsigned long long result_size;
- /** The size of the diff. */
- unsigned long size;
+ /** The size of the diff itself */
+ unsigned long long patch_size;
};
/*}}}*/
-/** \brief An item that is responsible for fetching a SubIndex {{{
- *
- * The MetaIndex file includes only records for important indexes
- * and records for these SubIndex files so these can carry records
- * for addition files like PDiffs and Translations
- */
-class pkgAcqSubIndex : public pkgAcquire::Item
+ /*}}}*/
+
+class pkgAcqMetaBase : public pkgAcquire::Item
{
+ void *d;
+
protected:
- /** \brief If \b true, debugging information will be written to std::clog. */
- bool Debug;
+ std::vector<Item*> Transaction;
- /** \brief The item that is currently being downloaded. */
- pkgAcquire::ItemDesc Desc;
+ /** \brief A package-system-specific parser for the meta-index file. */
+ indexRecords *MetaIndexParser;
- /** \brief The Hash that this file should have after download
+ /** \brief The index files which should be looked up in the meta-index
+ * and then downloaded.
*/
- HashString ExpectedHash;
+ const std::vector<IndexTarget*>* IndexTargets;
- public:
- // Specialized action members
- virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf);
- virtual void Done(std::string Message,unsigned long long Size,std::string Md5Hash,
- pkgAcquire::MethodConfig *Cnf);
- virtual std::string DescURI() {return Desc.URI;};
- virtual std::string Custom600Headers();
- virtual bool ParseIndex(std::string const &IndexFile);
+ /** \brief If \b true, the index's signature is currently being verified.
+ */
+ bool AuthPass;
- /** \brief Create a new pkgAcqSubIndex.
+ // required to deal gracefully with problems caused by incorrect ims hits
+ bool IMSHit;
+
+ /** \brief Starts downloading the individual index files.
*
- * \param Owner The Acquire object that owns this item.
+ * \param verify If \b true, only indices whose expected hashsum
+ * can be determined from the meta-index will be downloaded, and
+ * the hashsums of indices will be checked (reporting
+ * #StatAuthError if there is a mismatch). If verify is \b false,
+ * no hashsum checking will be performed.
+ */
+ void QueueIndexes(bool verify);
+
+ /** \brief Called when a file is finished being retrieved.
*
- * \param URI The URI of the list file to download.
+ * If the file was not downloaded to DestFile, a copy process is
+ * set up to copy it to DestFile; otherwise, Complete is set to \b
+ * true and the file is moved to its final location.
*
- * \param URIDesc A long description of the list file to download.
+ * \param Message The message block received from the fetch
+ * subprocess.
+ */
+ bool CheckDownloadDone(const std::string &Message,
+ const std::string &RealURI);
+
+ /** \brief Queue the downloaded Signature for verification */
+ void QueueForSignatureVerify(const std::string &MetaIndexFile,
+ const std::string &MetaIndexFileSignature);
+
+ /** \brief Get the Custom600Headers common to all pkgAcqMeta items */
+ std::string GetCustom600Headers(const std::string &RealURI) const;
+
+ /** \brief Called when authentication succeeded.
*
- * \param ShortDesc A short description of the list file to download.
+ * Sanity-checks the authenticated file, queues up the individual
+ * index files for download, and saves the signature in the lists
+ * directory next to the authenticated list file.
+ *
+ * \param Message The message block received from the fetch
+ * subprocess.
+ */
+ bool CheckAuthDone(std::string Message, const std::string &RealURI);
+
+ /** Check if the current item should fail at this point */
+ bool CheckStopAuthentication(const std::string &RealURI,
+ const std::string &Message);
+
+ /** \brief Check that the release file is a release file for the
+ * correct distribution.
*
- * \param ExpectedHash The list file's MD5 signature.
+ * \return \b true if no fatal errors were encountered.
+ */
+ bool VerifyVendor(std::string Message, const std::string &RealURI);
+
+ public:
+ // transaction code
+ void Add(Item *I);
+ void AbortTransaction();
+ bool TransactionHasError() APT_PURE;
+ void CommitTransaction();
+
+ /** \brief Stage (queue) a copy action when the transaction is committed
+ */
+ void TransactionStageCopy(Item *I,
+ const std::string &From,
+ const std::string &To);
+ /** \brief Stage (queue) a removal action when the transaction is committed
+ */
+ void TransactionStageRemoval(Item *I, const std::string &FinalFile);
+
+ pkgAcqMetaBase(pkgAcquire *Owner,
+ const std::vector<IndexTarget*>* IndexTargets,
+ indexRecords* MetaIndexParser,
+ HashStringList const &ExpectedHashes=HashStringList(),
+ pkgAcqMetaBase *TransactionManager=NULL)
+ : Item(Owner, ExpectedHashes, TransactionManager),
+ MetaIndexParser(MetaIndexParser), IndexTargets(IndexTargets),
+ AuthPass(false), IMSHit(false) {};
+};
+
+/** \brief An acquire item that downloads the detached signature {{{
+ * of a meta-index (Release) file, then queues up the release
+ * file itself.
+ *
+ * \todo Why protected members?
+ *
+ * \sa pkgAcqMetaIndex
+ */
+class pkgAcqMetaSig : public pkgAcqMetaBase
+{
+ void *d;
+
+ protected:
+
+ /** \brief The URI of the signature file. Unlike Desc.URI, this is
+ * never modified; it is used to determine the file that is being
+ * downloaded.
+ */
+ std::string RealURI;
+
+ /** \brief The file we need to verify */
+ std::string MetaIndexFile;
+
+ /** \brief The file we use to verify the MetaIndexFile with */
+ std::string MetaIndexFileSignature;
+
+ /** \brief Long URI description used in the acquire system */
+ std::string URIDesc;
+
+ /** \brief Short URI description used in the acquire system */
+ std::string ShortDesc;
+
+ public:
+
+ // Specialized action members
+ virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf);
+ virtual void Done(std::string Message,unsigned long long Size,
+ HashStringList const &Hashes,
+ pkgAcquire::MethodConfig *Cnf);
+ virtual std::string Custom600Headers() const;
+ virtual std::string DescURI() {return RealURI; };
+
+ /** \brief Create a new pkgAcqMetaSig. */
+ pkgAcqMetaSig(pkgAcquire *Owner,
+ pkgAcqMetaBase *TransactionManager,
+ std::string URI,std::string URIDesc, std::string ShortDesc,
+ std::string MetaIndexFile,
+ const std::vector<IndexTarget*>* IndexTargets,
+ indexRecords* MetaIndexParser);
+ virtual ~pkgAcqMetaSig();
+};
+ /*}}}*/
+
+/** \brief An item that is responsible for downloading the meta-index {{{
+ * file (i.e., Release) itself and verifying its signature.
+ *
+ * Once the download and verification are complete, the downloads of
+ * the individual index files are queued up using pkgAcqDiffIndex.
+ * If the meta-index file had a valid signature, the expected hashsums
+ * of the index files will be the md5sums listed in the meta-index;
+ * otherwise, the expected hashsums will be "" (causing the
+ * authentication of the index files to be bypassed).
+ */
+class pkgAcqMetaIndex : public pkgAcqMetaBase
+{
+ void *d;
+
+ protected:
+ /** \brief The URI that is actually being downloaded; never
+ * modified by pkgAcqMetaIndex.
*/
- pkgAcqSubIndex(pkgAcquire *Owner, std::string const &URI,std::string const &URIDesc,
- std::string const &ShortDesc, HashString const &ExpectedHash);
+ std::string RealURI;
+
+ std::string URIDesc;
+ std::string ShortDesc;
+
+ /** \brief The URI of the meta-index file for the detached signature */
+ std::string MetaIndexSigURI;
+
+ /** \brief A "URI-style" description of the meta-index file */
+ std::string MetaIndexSigURIDesc;
+
+ /** \brief A brief description of the meta-index file */
+ std::string MetaIndexSigShortDesc;
+
+ /** \brief delayed constructor */
+ void Init(std::string URIDesc, std::string ShortDesc);
+
+ public:
+
+ // Specialized action members
+ virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf);
+ virtual void Done(std::string Message,unsigned long long Size, HashStringList const &Hashes,
+ pkgAcquire::MethodConfig *Cnf);
+ virtual std::string Custom600Headers() const;
+ virtual std::string DescURI() {return RealURI; };
+ virtual void Finished();
+
+ /** \brief Create a new pkgAcqMetaIndex. */
+ pkgAcqMetaIndex(pkgAcquire *Owner,
+ pkgAcqMetaBase *TransactionManager,
+ std::string URI,std::string URIDesc, std::string ShortDesc,
+ std::string MetaIndexSigURI, std::string MetaIndexSigURIDesc, std::string MetaIndexSigShortDesc,
+ const std::vector<IndexTarget*>* IndexTargets,
+ indexRecords* MetaIndexParser);
+};
+ /*}}}*/
+/** \brief An item responsible for downloading clearsigned metaindexes {{{*/
+class pkgAcqMetaClearSig : public pkgAcqMetaIndex
+{
+ void *d;
+
+ /** \brief The URI of the meta-index file for the detached signature */
+ std::string MetaIndexURI;
+
+ /** \brief A "URI-style" description of the meta-index file */
+ std::string MetaIndexURIDesc;
+
+ /** \brief A brief description of the meta-index file */
+ std::string MetaIndexShortDesc;
+
+ /** \brief The URI of the detached meta-signature file if the clearsigned one failed. */
+ std::string MetaSigURI;
+
+ /** \brief A "URI-style" description of the meta-signature file */
+ std::string MetaSigURIDesc;
+
+ /** \brief A brief description of the meta-signature file */
+ std::string MetaSigShortDesc;
+
+public:
+ virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf);
+ virtual std::string Custom600Headers() const;
+ virtual void Done(std::string Message,unsigned long long Size,
+ HashStringList const &Hashes,
+ pkgAcquire::MethodConfig *Cnf);
+
+ /** \brief Create a new pkgAcqMetaClearSig. */
+ pkgAcqMetaClearSig(pkgAcquire *Owner,
+ std::string const &URI, std::string const &URIDesc, std::string const &ShortDesc,
+ std::string const &MetaIndexURI, std::string const &MetaIndexURIDesc, std::string const &MetaIndexShortDesc,
+ std::string const &MetaSigURI, std::string const &MetaSigURIDesc, std::string const &MetaSigShortDesc,
+ const std::vector<IndexTarget*>* IndexTargets,
+ indexRecords* MetaIndexParser);
+ virtual ~pkgAcqMetaClearSig();
+};
+ /*}}}*/
+
+
+/** \brief Common base class for all classes that deal with fetching {{{
+ indexes
+ */
+class pkgAcqBaseIndex : public pkgAcquire::Item
+{
+ void *d;
+
+ protected:
+ /** \brief Pointer to the IndexTarget data
+ */
+ const struct IndexTarget * Target;
+
+ /** \brief Pointer to the indexRecords parser */
+ indexRecords *MetaIndexParser;
+
+ /** \brief The MetaIndex Key */
+ std::string MetaKey;
+
+ /** \brief The URI of the index file to recreate at our end (either
+ * by downloading it or by applying partial patches).
+ */
+ std::string RealURI;
+
+ bool VerifyHashByMetaKey(HashStringList const &Hashes);
+
+ pkgAcqBaseIndex(pkgAcquire *Owner,
+ pkgAcqMetaBase *TransactionManager,
+ struct IndexTarget const * const Target,
+ HashStringList const &ExpectedHashes,
+ indexRecords *MetaIndexParser)
+ : Item(Owner, ExpectedHashes, TransactionManager), Target(Target),
+ MetaIndexParser(MetaIndexParser) {};
};
/*}}}*/
/** \brief An item that is responsible for fetching an index file of {{{
@@ -366,25 +649,14 @@ class pkgAcqSubIndex : public pkgAcquire::Item
*
* \sa pkgAcqIndexDiffs, pkgAcqIndex
*/
-class pkgAcqDiffIndex : public pkgAcquire::Item
+class pkgAcqDiffIndex : public pkgAcqBaseIndex
{
+ void *d;
+
protected:
/** \brief If \b true, debugging information will be written to std::clog. */
bool Debug;
- /** \brief The item that is currently being downloaded. */
- pkgAcquire::ItemDesc Desc;
-
- /** \brief The URI of the index file to recreate at our end (either
- * by downloading it or by applying partial patches).
- */
- std::string RealURI;
-
- /** \brief The Hash that the real index file should have after
- * all patches have been applied.
- */
- HashString ExpectedHash;
-
/** \brief The index file which will be patched to generate the new
* file.
*/
@@ -395,13 +667,17 @@ class pkgAcqDiffIndex : public pkgAcquire::Item
*/
std::string Description;
+ /** \brief Whether the copy step of the Packages file is done
+ */
+ bool PackagesFileReadyInPartial;
+
public:
// Specialized action members
virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf);
- virtual void Done(std::string Message,unsigned long long Size,std::string Md5Hash,
+ virtual void Done(std::string Message,unsigned long long Size, HashStringList const &Hashes,
pkgAcquire::MethodConfig *Cnf);
virtual std::string DescURI() {return RealURI + "Index";};
- virtual std::string Custom600Headers();
+ virtual std::string Custom600Headers() const;
/** \brief Parse the Index file for a set of Packages diffs.
*
@@ -426,10 +702,13 @@ class pkgAcqDiffIndex : public pkgAcquire::Item
*
* \param ShortDesc A short description of the list file to download.
*
- * \param ExpectedHash The list file's MD5 signature.
+ * \param ExpectedHashes The list file's hashsums which are expected.
*/
- pkgAcqDiffIndex(pkgAcquire *Owner,std::string URI,std::string URIDesc,
- std::string ShortDesc, HashString ExpectedHash);
+ pkgAcqDiffIndex(pkgAcquire *Owner,
+ pkgAcqMetaBase *TransactionManager,
+ struct IndexTarget const * const Target,
+ HashStringList const &ExpectedHashes,
+ indexRecords *MetaIndexParser);
};
/*}}}*/
/** \brief An item that is responsible for fetching client-merge patches {{{
@@ -443,8 +722,10 @@ class pkgAcqDiffIndex : public pkgAcquire::Item
*
* \sa pkgAcqDiffIndex, pkgAcqIndex
*/
-class pkgAcqIndexMergeDiffs : public pkgAcquire::Item
+class pkgAcqIndexMergeDiffs : public pkgAcqBaseIndex
{
+ void *d;
+
protected:
/** \brief If \b true, debugging output will be written to
@@ -452,21 +733,6 @@ class pkgAcqIndexMergeDiffs : public pkgAcquire::Item
*/
bool Debug;
- /** \brief description of the item that is currently being
- * downloaded.
- */
- pkgAcquire::ItemDesc Desc;
-
- /** \brief URI of the package index file that is being
- * reconstructed.
- */
- std::string RealURI;
-
- /** \brief HashSum of the package index file that is being
- * reconstructed.
- */
- HashString ExpectedHash;
-
/** \brief description of the file being downloaded. */
std::string Description;
@@ -499,9 +765,8 @@ class pkgAcqIndexMergeDiffs : public pkgAcquire::Item
* outright; its arguments are ignored.
*/
virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf);
-
- virtual void Done(std::string Message,unsigned long long Size,std::string Md5Hash,
- pkgAcquire::MethodConfig *Cnf);
+ virtual void Done(std::string Message,unsigned long long Size, HashStringList const &Hashes,
+ pkgAcquire::MethodConfig *Cnf);
virtual std::string DescURI() {return RealURI + "Index";};
/** \brief Create an index merge-diff item.
@@ -515,7 +780,7 @@ class pkgAcqIndexMergeDiffs : public pkgAcquire::Item
*
* \param ShortDesc A brief description of this item.
*
- * \param ExpectedHash The expected md5sum of the completely
+ * \param ExpectedHashes The expected hashsums of the completely
* reconstructed package index file; the index file will be tested
* against this value when it is entirely reconstructed.
*
@@ -525,9 +790,13 @@ class pkgAcqIndexMergeDiffs : public pkgAcquire::Item
* \param allPatches contains all related items so that each item can
* check if it was the last one to complete the download step
*/
- pkgAcqIndexMergeDiffs(pkgAcquire *Owner,std::string const &URI,std::string const &URIDesc,
- std::string const &ShortDesc, HashString const &ExpectedHash,
- DiffInfo const &patch, std::vector<pkgAcqIndexMergeDiffs*> const * const allPatches);
+ pkgAcqIndexMergeDiffs(pkgAcquire *Owner,
+ pkgAcqMetaBase *TransactionManager,
+ struct IndexTarget const * const Target,
+ HashStringList const &ExpectedHash,
+ indexRecords *MetaIndexParser,
+ DiffInfo const &patch,
+ std::vector<pkgAcqIndexMergeDiffs*> const * const allPatches);
};
/*}}}*/
/** \brief An item that is responsible for fetching server-merge patches {{{
@@ -541,8 +810,10 @@ class pkgAcqIndexMergeDiffs : public pkgAcquire::Item
*
* \sa pkgAcqDiffIndex, pkgAcqIndex
*/
-class pkgAcqIndexDiffs : public pkgAcquire::Item
+class pkgAcqIndexDiffs : public pkgAcqBaseIndex
{
+ void *d;
+
private:
/** \brief Queue up the next diff download.
@@ -554,20 +825,20 @@ class pkgAcqIndexDiffs : public pkgAcquire::Item
* \return \b true if an applicable diff was found, \b false
* otherwise.
*/
- bool QueueNextDiff();
+ APT_HIDDEN bool QueueNextDiff();
/** \brief Handle tasks that must be performed after the item
* finishes downloading.
*
- * Dequeues the item and checks the resulting file's md5sum
- * against ExpectedHash after the last patch was applied.
+ * Dequeues the item and checks the resulting file's hashsums
+ * against ExpectedHashes after the last patch was applied.
* There is no need to check the md5/sha1 after a "normal"
* patch because QueueNextDiff() will check the sha1 later.
*
* \param allDone If \b true, the file was entirely reconstructed,
* and its md5sum is verified.
*/
- void Finish(bool allDone=false);
+ APT_HIDDEN void Finish(bool allDone=false);
protected:
@@ -576,21 +847,6 @@ class pkgAcqIndexDiffs : public pkgAcquire::Item
*/
bool Debug;
- /** \brief A description of the item that is currently being
- * downloaded.
- */
- pkgAcquire::ItemDesc Desc;
-
- /** \brief The URI of the package index file that is being
- * reconstructed.
- */
- std::string RealURI;
-
- /** \brief The HashSum of the package index file that is being
- * reconstructed.
- */
- HashString ExpectedHash;
-
/** A description of the file being downloaded. */
std::string Description;
@@ -604,9 +860,6 @@ class pkgAcqIndexDiffs : public pkgAcquire::Item
*/
std::vector<DiffInfo> available_patches;
- /** Stop applying patches when reaching that sha1 */
- std::string ServerSha1;
-
/** The current status of this patch. */
enum DiffState
{
@@ -632,9 +885,9 @@ class pkgAcqIndexDiffs : public pkgAcquire::Item
*/
virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf);
- virtual void Done(std::string Message,unsigned long long Size,std::string Md5Hash,
+ virtual void Done(std::string Message,unsigned long long Size, HashStringList const &Hashes,
pkgAcquire::MethodConfig *Cnf);
- virtual std::string DescURI() {return RealURI + "Index";};
+ virtual std::string DescURI() {return RealURI + "IndexDiffs";};
/** \brief Create an index diff item.
*
@@ -650,19 +903,19 @@ class pkgAcqIndexDiffs : public pkgAcquire::Item
*
* \param ShortDesc A brief description of this item.
*
- * \param ExpectedHash The expected md5sum of the completely
+ * \param ExpectedHashes The expected hashsums of the completely
* reconstructed package index file; the index file will be tested
* against this value when it is entirely reconstructed.
*
- * \param ServerSha1 is the sha1sum of the current file on the server
- *
* \param diffs The remaining diffs from the index of diffs. They
* should be ordered so that each diff appears before any diff
* that depends on it.
*/
- pkgAcqIndexDiffs(pkgAcquire *Owner,std::string URI,std::string URIDesc,
- std::string ShortDesc, HashString ExpectedHash,
- std::string ServerSha1,
+ pkgAcqIndexDiffs(pkgAcquire *Owner,
+ pkgAcqMetaBase *TransactionManager,
+ struct IndexTarget const * const Target,
+ HashStringList const &ExpectedHash,
+ indexRecords *MetaIndexParser,
std::vector<DiffInfo> diffs=std::vector<DiffInfo>());
};
/*}}}*/
@@ -673,56 +926,73 @@ class pkgAcqIndexDiffs : public pkgAcquire::Item
*
* \todo Why does pkgAcqIndex have protected members?
*/
-class pkgAcqIndex : public pkgAcquire::Item
+class pkgAcqIndex : public pkgAcqBaseIndex
{
- protected:
+ void *d;
- /** \brief If \b true, the index file has been decompressed. */
- bool Decompression;
+ protected:
- /** \brief If \b true, the partially downloaded file will be
- * removed when the download completes.
+ /** \brief The stages the method goes through
+ *
+ * The method first downloads the index file, then it is decompressed (or
+ * copied) and verified
*/
- bool Erase;
+ enum AllStages {
+ STAGE_DOWNLOAD,
+ STAGE_DECOMPRESS_AND_VERIFY,
+ };
+ AllStages Stage;
- // Unused, used to be used to verify that "Packages: " header was there
- bool __DELME_ON_NEXT_ABI_BREAK_Verify;
+ /** \brief Handle what needs to be done when the download is done */
+ void StageDownloadDone(std::string Message,
+ HashStringList const &Hashes,
+ pkgAcquire::MethodConfig *Cfg);
- /** \brief The download request that is currently being
- * processed.
+ /** \brief Handle what needs to be done when the decompression/copy is
+ * done
*/
- pkgAcquire::ItemDesc Desc;
+ void StageDecompressDone(std::string Message,
+ HashStringList const &Hashes,
+ pkgAcquire::MethodConfig *Cfg);
- /** \brief The object that is actually being fetched (minus any
- * compression-related extensions).
+ /** \brief If \b set, this partially downloaded file will be
+ * removed when the download completes.
*/
- std::string RealURI;
-
- /** \brief The expected hashsum of the decompressed index file. */
- HashString ExpectedHash;
+ std::string EraseFileName;
/** \brief The compression-related file extensions that are being
 * added to the downloaded file one by one if the first fails (e.g., "gz bz2").
*/
- std::string CompressionExtension;
+ std::string CompressionExtensions;
+
+ /** \brief The actual compression extension currently used */
+ std::string CurrentCompressionExtension;
+
+ /** \brief Do the changes needed to fetch via AptByHash (if needed) */
+ void InitByHashIfNeeded(const std::string MetaKey);
+
+ /** \brief Auto select the right compression to use */
+ void AutoSelectCompression();
- /** \brief Get the full pathname of the final file for the given URI
+ /** \brief Get the full pathname of the final file for the current URI
*/
- std::string GetFinalFilename(std::string const &URI,
- std::string const &compExt);
+ std::string GetFinalFilename() const;
/** \brief Schedule file for verification after a IMS hit */
- void ReverifyAfterIMS(std::string const &FileName);
+ void ReverifyAfterIMS();
+
+ /** \brief Validate the downloaded index file */
+ bool ValidateFile(const std::string &FileName);
public:
// Specialized action members
virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf);
- virtual void Done(std::string Message,unsigned long long Size,std::string Md5Hash,
+ virtual void Done(std::string Message,unsigned long long Size,
+ HashStringList const &Hashes,
pkgAcquire::MethodConfig *Cnf);
- virtual std::string Custom600Headers();
+ virtual std::string Custom600Headers() const;
virtual std::string DescURI() {return Desc.URI;};
- virtual std::string HashSum() {return ExpectedHash.toStr(); };
/** \brief Create a pkgAcqIndex.
*
@@ -735,7 +1005,7 @@ class pkgAcqIndex : public pkgAcquire::Item
*
* \param ShortDesc A brief description of this index file.
*
- * \param ExpectedHash The expected hashsum of this index file.
+ * \param ExpectedHashes The expected hashsums of this index file.
*
* \param compressExt The compression-related extension with which
* this index file should be downloaded, or "" to autodetect
@@ -744,11 +1014,14 @@ class pkgAcqIndex : public pkgAcquire::Item
* fallback is ".gz" or none.
*/
pkgAcqIndex(pkgAcquire *Owner,std::string URI,std::string URIDesc,
- std::string ShortDesc, HashString ExpectedHash,
- std::string compressExt="");
- pkgAcqIndex(pkgAcquire *Owner, struct IndexTarget const * const Target,
- HashString const &ExpectedHash, indexRecords const *MetaIndexParser);
- void Init(std::string const &URI, std::string const &URIDesc, std::string const &ShortDesc);
+ std::string ShortDesc, HashStringList const &ExpectedHashes);
+ pkgAcqIndex(pkgAcquire *Owner, pkgAcqMetaBase *TransactionManager,
+ IndexTarget const * const Target,
+ HashStringList const &ExpectedHash,
+ indexRecords *MetaIndexParser);
+
+ void Init(std::string const &URI, std::string const &URIDesc,
+ std::string const &ShortDesc);
};
/*}}}*/
/** \brief An acquire item that is responsible for fetching a {{{
@@ -760,10 +1033,12 @@ class pkgAcqIndex : public pkgAcquire::Item
*/
class pkgAcqIndexTrans : public pkgAcqIndex
{
+ void *d;
+
public:
virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf);
- virtual std::string Custom600Headers();
+ virtual std::string Custom600Headers() const;
/** \brief Create a pkgAcqIndexTrans.
*
@@ -776,15 +1051,21 @@ class pkgAcqIndexTrans : public pkgAcqIndex
*
* \param ShortDesc A brief description of this index file.
*/
- pkgAcqIndexTrans(pkgAcquire *Owner,std::string URI,std::string URIDesc,
+ pkgAcqIndexTrans(pkgAcquire *Owner,
+ std::string URI,std::string URIDesc,
std::string ShortDesc);
- pkgAcqIndexTrans(pkgAcquire *Owner, struct IndexTarget const * const Target,
- HashString const &ExpectedHash, indexRecords const *MetaIndexParser);
+ pkgAcqIndexTrans(pkgAcquire *Owner,
+ pkgAcqMetaBase *TransactionManager,
+ IndexTarget const * const Target,
+ HashStringList const &ExpectedHashes,
+ indexRecords *MetaIndexParser);
};
/*}}}*/
/** \brief Information about an index file. */ /*{{{*/
class IndexTarget
{
+ void *d;
+
public:
/** \brief A URI from which the index file can be downloaded. */
std::string URI;
@@ -803,230 +1084,18 @@ class IndexTarget
virtual bool IsOptional() const {
return false;
}
- virtual bool IsSubIndex() const {
- return false;
- }
};
/*}}}*/
/** \brief Information about an optional index file. */ /*{{{*/
class OptionalIndexTarget : public IndexTarget
{
+ void *d;
+
virtual bool IsOptional() const {
return true;
}
};
/*}}}*/
-/** \brief Information about an subindex index file. */ /*{{{*/
-class SubIndexTarget : public IndexTarget
-{
- virtual bool IsSubIndex() const {
- return true;
- }
-};
- /*}}}*/
-/** \brief Information about an subindex index file. */ /*{{{*/
-class OptionalSubIndexTarget : public OptionalIndexTarget
-{
- virtual bool IsSubIndex() const {
- return true;
- }
-};
- /*}}}*/
-
-/** \brief An acquire item that downloads the detached signature {{{
- * of a meta-index (Release) file, then queues up the release
- * file itself.
- *
- * \todo Why protected members?
- *
- * \sa pkgAcqMetaIndex
- */
-class pkgAcqMetaSig : public pkgAcquire::Item
-{
- protected:
- /** \brief The last good signature file */
- std::string LastGoodSig;
-
- /** \brief The fetch request that is currently being processed. */
- pkgAcquire::ItemDesc Desc;
-
- /** \brief The URI of the signature file. Unlike Desc.URI, this is
- * never modified; it is used to determine the file that is being
- * downloaded.
- */
- std::string RealURI;
-
- /** \brief The URI of the meta-index file to be fetched after the signature. */
- std::string MetaIndexURI;
-
- /** \brief A "URI-style" description of the meta-index file to be
- * fetched after the signature.
- */
- std::string MetaIndexURIDesc;
-
- /** \brief A brief description of the meta-index file to be fetched
- * after the signature.
- */
- std::string MetaIndexShortDesc;
-
- /** \brief A package-system-specific parser for the meta-index file. */
- indexRecords* MetaIndexParser;
-
- /** \brief The index files which should be looked up in the meta-index
- * and then downloaded.
- *
- * \todo Why a list of pointers instead of a list of structs?
- */
- const std::vector<struct IndexTarget*>* IndexTargets;
-
- public:
-
- // Specialized action members
- virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf);
- virtual void Done(std::string Message,unsigned long long Size,std::string Md5Hash,
- pkgAcquire::MethodConfig *Cnf);
- virtual std::string Custom600Headers();
- virtual std::string DescURI() {return RealURI; };
-
- /** \brief Create a new pkgAcqMetaSig. */
- pkgAcqMetaSig(pkgAcquire *Owner,std::string URI,std::string URIDesc, std::string ShortDesc,
- std::string MetaIndexURI, std::string MetaIndexURIDesc, std::string MetaIndexShortDesc,
- const std::vector<struct IndexTarget*>* IndexTargets,
- indexRecords* MetaIndexParser);
- virtual ~pkgAcqMetaSig();
-};
- /*}}}*/
-/** \brief An item that is responsible for downloading the meta-index {{{
- * file (i.e., Release) itself and verifying its signature.
- *
- * Once the download and verification are complete, the downloads of
- * the individual index files are queued up using pkgAcqDiffIndex.
- * If the meta-index file had a valid signature, the expected hashsums
- * of the index files will be the md5sums listed in the meta-index;
- * otherwise, the expected hashsums will be "" (causing the
- * authentication of the index files to be bypassed).
- */
-class pkgAcqMetaIndex : public pkgAcquire::Item
-{
- protected:
- /** \brief The fetch command that is currently being processed. */
- pkgAcquire::ItemDesc Desc;
-
- /** \brief The URI that is actually being downloaded; never
- * modified by pkgAcqMetaIndex.
- */
- std::string RealURI;
-
- /** \brief The file in which the signature for this index was stored.
- *
- * If empty, the signature and the md5sums of the individual
- * indices will not be checked.
- */
- std::string SigFile;
-
- /** \brief The index files to download. */
- const std::vector<struct IndexTarget*>* IndexTargets;
-
- /** \brief The parser for the meta-index file. */
- indexRecords* MetaIndexParser;
-
- /** \brief If \b true, the index's signature is currently being verified.
- */
- bool AuthPass;
- // required to deal gracefully with problems caused by incorrect ims hits
- bool IMSHit;
-
- /** \brief Check that the release file is a release file for the
- * correct distribution.
- *
- * \return \b true if no fatal errors were encountered.
- */
- bool VerifyVendor(std::string Message);
-
- /** \brief Called when a file is finished being retrieved.
- *
- * If the file was not downloaded to DestFile, a copy process is
- * set up to copy it to DestFile; otherwise, Complete is set to \b
- * true and the file is moved to its final location.
- *
- * \param Message The message block received from the fetch
- * subprocess.
- */
- void RetrievalDone(std::string Message);
-
- /** \brief Called when authentication succeeded.
- *
- * Sanity-checks the authenticated file, queues up the individual
- * index files for download, and saves the signature in the lists
- * directory next to the authenticated list file.
- *
- * \param Message The message block received from the fetch
- * subprocess.
- */
- void AuthDone(std::string Message);
-
- /** \brief Starts downloading the individual index files.
- *
- * \param verify If \b true, only indices whose expected hashsum
- * can be determined from the meta-index will be downloaded, and
- * the hashsums of indices will be checked (reporting
- * #StatAuthError if there is a mismatch). If verify is \b false,
- * no hashsum checking will be performed.
- */
- void QueueIndexes(bool verify);
-
- public:
-
- // Specialized action members
- virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf);
- virtual void Done(std::string Message,unsigned long long Size, std::string Hash,
- pkgAcquire::MethodConfig *Cnf);
- virtual std::string Custom600Headers();
- virtual std::string DescURI() {return RealURI; };
-
- /** \brief Create a new pkgAcqMetaIndex. */
- pkgAcqMetaIndex(pkgAcquire *Owner,
- std::string URI,std::string URIDesc, std::string ShortDesc,
- std::string SigFile,
- const std::vector<struct IndexTarget*>* IndexTargets,
- indexRecords* MetaIndexParser);
-};
- /*}}}*/
-/** \brief An item repsonsible for downloading clearsigned metaindexes {{{*/
-class pkgAcqMetaClearSig : public pkgAcqMetaIndex
-{
- /** \brief The URI of the meta-index file for the detached signature */
- std::string MetaIndexURI;
-
- /** \brief A "URI-style" description of the meta-index file */
- std::string MetaIndexURIDesc;
-
- /** \brief A brief description of the meta-index file */
- std::string MetaIndexShortDesc;
-
- /** \brief The URI of the detached meta-signature file if the clearsigned one failed. */
- std::string MetaSigURI;
-
- /** \brief A "URI-style" description of the meta-signature file */
- std::string MetaSigURIDesc;
-
- /** \brief A brief description of the meta-signature file */
- std::string MetaSigShortDesc;
-
-public:
- void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf);
- virtual std::string Custom600Headers();
-
- /** \brief Create a new pkgAcqMetaClearSig. */
- pkgAcqMetaClearSig(pkgAcquire *Owner,
- std::string const &URI, std::string const &URIDesc, std::string const &ShortDesc,
- std::string const &MetaIndexURI, std::string const &MetaIndexURIDesc, std::string const &MetaIndexShortDesc,
- std::string const &MetaSigURI, std::string const &MetaSigURIDesc, std::string const &MetaSigShortDesc,
- const std::vector<struct IndexTarget*>* IndexTargets,
- indexRecords* MetaIndexParser);
- virtual ~pkgAcqMetaClearSig();
-};
- /*}}}*/
/** \brief An item that is responsible for fetching a package file. {{{
*
* If the package file already exists in the cache, nothing will be
@@ -1034,13 +1103,12 @@ public:
*/
class pkgAcqArchive : public pkgAcquire::Item
{
+ void *d;
+
protected:
/** \brief The package version being fetched. */
pkgCache::VerIterator Version;
- /** \brief The fetch command that is currently being processed. */
- pkgAcquire::ItemDesc Desc;
-
/** \brief The list of sources from which to pick archives to
* download this package from.
*/
@@ -1051,9 +1119,6 @@ class pkgAcqArchive : public pkgAcquire::Item
*/
pkgRecords *Recs;
- /** \brief The hashsum of this package. */
- HashString ExpectedHash;
-
/** \brief A location in which the actual filename of the package
* should be stored.
*/
@@ -1080,13 +1145,12 @@ class pkgAcqArchive : public pkgAcquire::Item
public:
virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf);
- virtual void Done(std::string Message,unsigned long long Size,std::string Hash,
+ virtual void Done(std::string Message,unsigned long long Size, HashStringList const &Hashes,
pkgAcquire::MethodConfig *Cnf);
virtual std::string DescURI() {return Desc.URI;};
virtual std::string ShortDesc() {return Desc.ShortDesc;};
virtual void Finished();
- virtual std::string HashSum() {return ExpectedHash.toStr(); };
- virtual bool IsTrusted();
+ virtual bool IsTrusted() const;
/** \brief Create a new pkgAcqArchive.
*
@@ -1119,11 +1183,7 @@ class pkgAcqArchive : public pkgAcquire::Item
*/
class pkgAcqFile : public pkgAcquire::Item
{
- /** \brief The currently active download process. */
- pkgAcquire::ItemDesc Desc;
-
- /** \brief The hashsum of the file to download, if it is known. */
- HashString ExpectedHash;
+ void *d;
/** \brief How many times to retry the download, set from
* Acquire::Retries.
@@ -1137,11 +1197,10 @@ class pkgAcqFile : public pkgAcquire::Item
// Specialized action members
virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf);
- virtual void Done(std::string Message,unsigned long long Size,std::string CalcHash,
+ virtual void Done(std::string Message,unsigned long long Size, HashStringList const &CalcHashes,
pkgAcquire::MethodConfig *Cnf);
virtual std::string DescURI() {return Desc.URI;};
- virtual std::string HashSum() {return ExpectedHash.toStr(); };
- virtual std::string Custom600Headers();
+ virtual std::string Custom600Headers() const;
/** \brief Create a new pkgAcqFile object.
*
@@ -1150,8 +1209,8 @@ class pkgAcqFile : public pkgAcquire::Item
*
* \param URI The URI to download.
*
- * \param Hash The hashsum of the file to download, if it is known;
- * otherwise "".
+ * \param Hashes The hashsums of the file to download, if they are known;
+ * otherwise an empty list.
*
* \param Size The size of the file to download, if it is known;
* otherwise 0.
@@ -1174,7 +1233,7 @@ class pkgAcqFile : public pkgAcquire::Item
* is the absolute name to which the file should be downloaded.
*/
- pkgAcqFile(pkgAcquire *Owner, std::string URI, std::string Hash, unsigned long long Size,
+ pkgAcqFile(pkgAcquire *Owner, std::string URI, HashStringList const &Hashes, unsigned long long Size,
std::string Desc, std::string ShortDesc,
const std::string &DestDir="", const std::string &DestFilename="",
bool IsIndexFile=false);
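The header above documents Custom600Headers() as returning extra request headers that are appended to the 600 URI Acquire message: each header starts with a leading newline and the result carries no trailing newline. A sketch of a helper following that convention (the helper itself is hypothetical; TimeRFC1123() and the Index-File/Last-Modified headers appear elsewhere in this patch):

#include <apt-pkg/strutl.h>   // TimeRFC1123()
#include <string>
#include <sys/stat.h>

// Hypothetical helper: always emit the Index-File marker and, if a previously
// stored copy exists, a Last-Modified header so the server can answer with
// an IMS hit instead of resending the whole file.
static std::string HeadersForIndexFile(std::string const &FinalFile)
{
   std::string Header = "\nIndex-File: true";
   struct stat Buf;
   if (stat(FinalFile.c_str(), &Buf) == 0)
      Header += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
   return Header;
}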
diff --git a/apt-pkg/acquire-method.cc b/apt-pkg/acquire-method.cc
index 746c553f1..c29ef469e 100644
--- a/apt-pkg/acquire-method.cc
+++ b/apt-pkg/acquire-method.cc
@@ -102,7 +102,10 @@ void pkgAcqMethod::Fail(string Err,bool Transient)
if (Queue != 0)
{
std::cout << "400 URI Failure\nURI: " << Queue->Uri << "\n"
- << "Message: " << Err << " " << IP << "\n";
+ << "Message: " << Err;
+ if (IP.empty() == false && _config->FindB("Acquire::Failure::ShowIP", true) == true)
+ std::cout << " " << IP;
+ std::cout << "\n";
Dequeue();
}
else
@@ -119,6 +122,18 @@ void pkgAcqMethod::Fail(string Err,bool Transient)
std::cout << "\n" << std::flush;
}
/*}}}*/
+// AcqMethod::DropPrivsOrDie - Drop privileges or die /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+void pkgAcqMethod::DropPrivsOrDie()
+{
+ if (!DropPrivileges()) {
+ Fail(false);
+ exit(112); /* call the european emergency number */
+ }
+}
+
+ /*}}}*/
// AcqMethod::URIStart - Indicate a download is starting /*{{{*/
// ---------------------------------------------------------------------
/* */
@@ -147,6 +162,16 @@ void pkgAcqMethod::URIStart(FetchResult &Res)
// AcqMethod::URIDone - A URI is finished /*{{{*/
// ---------------------------------------------------------------------
/* */
+static void printHashStringList(HashStringList const * const list)
+{
+ for (HashStringList::const_iterator hash = list->begin(); hash != list->end(); ++hash)
+ {
+ // very old compatibility name for MD5Sum
+ if (hash->HashType() == "MD5Sum")
+ std::cout << "MD5-Hash: " << hash->HashValue() << "\n";
+ std::cout << hash->HashType() << "-Hash: " << hash->HashValue() << "\n";
+ }
+}
void pkgAcqMethod::URIDone(FetchResult &Res, FetchResult *Alt)
{
if (Queue == 0)
@@ -164,15 +189,8 @@ void pkgAcqMethod::URIDone(FetchResult &Res, FetchResult *Alt)
if (Res.LastModified != 0)
std::cout << "Last-Modified: " << TimeRFC1123(Res.LastModified) << "\n";
- if (Res.MD5Sum.empty() == false)
- std::cout << "MD5-Hash: " << Res.MD5Sum << "\n"
- << "MD5Sum-Hash: " << Res.MD5Sum << "\n";
- if (Res.SHA1Sum.empty() == false)
- std::cout << "SHA1-Hash: " << Res.SHA1Sum << "\n";
- if (Res.SHA256Sum.empty() == false)
- std::cout << "SHA256-Hash: " << Res.SHA256Sum << "\n";
- if (Res.SHA512Sum.empty() == false)
- std::cout << "SHA512-Hash: " << Res.SHA512Sum << "\n";
+ printHashStringList(&Res.Hashes);
+
if (UsedMirror.empty() == false)
std::cout << "UsedMirror: " << UsedMirror << "\n";
if (Res.GPGVOutput.empty() == false)
@@ -200,15 +218,8 @@ void pkgAcqMethod::URIDone(FetchResult &Res, FetchResult *Alt)
if (Alt->LastModified != 0)
std::cout << "Alt-Last-Modified: " << TimeRFC1123(Alt->LastModified) << "\n";
- if (Alt->MD5Sum.empty() == false)
- std::cout << "Alt-MD5-Hash: " << Alt->MD5Sum << "\n";
- if (Alt->SHA1Sum.empty() == false)
- std::cout << "Alt-SHA1-Hash: " << Alt->SHA1Sum << "\n";
- if (Alt->SHA256Sum.empty() == false)
- std::cout << "Alt-SHA256-Hash: " << Alt->SHA256Sum << "\n";
- if (Alt->SHA512Sum.empty() == false)
- std::cout << "Alt-SHA512-Hash: " << Alt->SHA512Sum << "\n";
-
+ printHashStringList(&Alt->Hashes);
+
if (Alt->IMSHit == true)
std::cout << "Alt-IMS-Hit: true\n";
}
@@ -355,6 +366,17 @@ int pkgAcqMethod::Run(bool Single)
Tmp->LastModified = 0;
Tmp->IndexFile = StringToBool(LookupTag(Message,"Index-File"),false);
Tmp->FailIgnore = StringToBool(LookupTag(Message,"Fail-Ignore"),false);
+ Tmp->ExpectedHashes = HashStringList();
+ for (char const * const * t = HashString::SupportedHashes(); *t != NULL; ++t)
+ {
+ std::string tag = "Expected-";
+ tag.append(*t);
+ std::string const hash = LookupTag(Message, tag.c_str());
+ if (hash.empty() == false)
+ Tmp->ExpectedHashes.push_back(HashString(*t, hash));
+ }
+ char *End;
+ Tmp->MaximumSize = strtoll(LookupTag(Message, "Maximum-Size", "0").c_str(), &End, 10);
Tmp->Next = 0;
// Append it to the list
@@ -442,12 +464,9 @@ pkgAcqMethod::FetchResult::FetchResult() : LastModified(0),
// ---------------------------------------------------------------------
/* This hides the number of hashes we are supporting from the caller.
It just deals with the hash class. */
-void pkgAcqMethod::FetchResult::TakeHashes(Hashes &Hash)
+void pkgAcqMethod::FetchResult::TakeHashes(class Hashes &Hash)
{
- MD5Sum = Hash.MD5.Result();
- SHA1Sum = Hash.SHA1.Result();
- SHA256Sum = Hash.SHA256.Result();
- SHA512Sum = Hash.SHA512.Result();
+ Hashes = Hash.GetHashStringList();
}
/*}}}*/
void pkgAcqMethod::Dequeue() { /*{{{*/
@@ -458,3 +477,5 @@ void pkgAcqMethod::Dequeue() { /*{{{*/
delete Tmp;
}
/*}}}*/
+
+pkgAcqMethod::~pkgAcqMethod() {}
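With printHashStringList() in place, URIDone() reports one "<Type>-Hash" line per computed digest, plus the old "MD5-Hash" alias for MD5Sum. Purely for illustration (the URI, sizes and digests below are invented and the field set is abridged), a 201 reply then looks roughly like:

201 URI Done
URI: http://deb.example.org/dists/stable/InRelease
Filename: /var/lib/apt/lists/partial/deb.example.org_dists_stable_InRelease
Size: 73425
Last-Modified: Tue, 01 Apr 2014 10:00:00 GMT
MD5-Hash: 9e107d9d372bb6826bd81d3542a419d6
MD5Sum-Hash: 9e107d9d372bb6826bd81d3542a419d6
SHA1-Hash: 2fd4e1c67a2d28fced849ee1bb76e7391b93eb12
SHA256-Hash: d7a8fbb307d7809469ca9abcb0082e4f8d5651e46d3cdb762d02d0bf37c9e592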
diff --git a/apt-pkg/acquire-method.h b/apt-pkg/acquire-method.h
index 221ccf273..48bd95672 100644
--- a/apt-pkg/acquire-method.h
+++ b/apt-pkg/acquire-method.h
@@ -20,6 +20,7 @@
#ifndef PKGLIB_ACQUIRE_METHOD_H
#define PKGLIB_ACQUIRE_METHOD_H
+#include <apt-pkg/hashes.h>
#include <apt-pkg/macros.h>
#include <stdarg.h>
@@ -33,7 +34,6 @@
#include <apt-pkg/strutl.h>
#endif
-class Hashes;
class pkgAcqMethod
{
protected:
@@ -47,14 +47,16 @@ class pkgAcqMethod
time_t LastModified;
bool IndexFile;
bool FailIgnore;
+ HashStringList ExpectedHashes;
+ // the maximum size we will download; this can be the exact filesize
+ // when we know it, or an arbitrary limit when we don't know the
+ // filesize (like for an InRelease file)
+ unsigned long long MaximumSize;
};
struct FetchResult
{
- std::string MD5Sum;
- std::string SHA1Sum;
- std::string SHA256Sum;
- std::string SHA512Sum;
+ HashStringList Hashes;
std::vector<std::string> GPGVOutput;
time_t LastModified;
bool IMSHit;
@@ -62,7 +64,7 @@ class pkgAcqMethod
unsigned long long Size;
unsigned long long ResumePoint;
- void TakeHashes(Hashes &Hash);
+ void TakeHashes(class Hashes &Hash);
FetchResult();
};
@@ -106,8 +108,8 @@ class pkgAcqMethod
inline void SetIP(std::string aIP) {IP = aIP;};
pkgAcqMethod(const char *Ver,unsigned long Flags = 0);
- virtual ~pkgAcqMethod() {};
-
+ virtual ~pkgAcqMethod();
+ void DropPrivsOrDie();
private:
APT_HIDDEN void Dequeue();
};
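FetchItem::MaximumSize above is parsed from the Maximum-Size field of the 600 message (see the Run() hunk in acquire-method.cc). How a method enforces it is not part of this hunk; as an assumption, the intended use is roughly a check of the running byte count against the limit, with 0 meaning no limit was communicated:

// Minimal sketch (assumption): abort a transfer once more data arrived
// than the worker said we should accept; 0 means no limit is known.
static bool WithinMaximumSize(unsigned long long Written,
                              unsigned long long MaximumSize)
{
   return MaximumSize == 0 || Written <= MaximumSize;
}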
diff --git a/apt-pkg/acquire-worker.cc b/apt-pkg/acquire-worker.cc
index 047a655ce..64df3c80f 100644
--- a/apt-pkg/acquire-worker.cc
+++ b/apt-pkg/acquire-worker.cc
@@ -326,25 +326,30 @@ bool pkgAcquire::Worker::RunMessages()
Owner->DestFile.c_str(), LookupTag(Message,"Size","0").c_str(),TotalSize);
// see if there is a hash to verify
- string RecivedHash;
- HashString expectedHash(Owner->HashSum());
- if(!expectedHash.empty())
+ HashStringList RecivedHashes;
+ HashStringList expectedHashes = Owner->HashSums();
+ for (HashStringList::const_iterator hs = expectedHashes.begin(); hs != expectedHashes.end(); ++hs)
{
- string hashTag = expectedHash.HashType()+"-Hash";
- string hashSum = LookupTag(Message, hashTag.c_str());
- if(!hashSum.empty())
- RecivedHash = expectedHash.HashType() + ":" + hashSum;
- if(_config->FindB("Debug::pkgAcquire::Auth", false) == true)
- {
- clog << "201 URI Done: " << Owner->DescURI() << endl
- << "RecivedHash: " << RecivedHash << endl
- << "ExpectedHash: " << expectedHash.toStr()
- << endl << endl;
- }
+ std::string const tagname = hs->HashType() + "-Hash";
+ std::string const hashsum = LookupTag(Message, tagname.c_str());
+ if (hashsum.empty() == false)
+ RecivedHashes.push_back(HashString(hs->HashType(), hashsum));
+ }
+
+ if(_config->FindB("Debug::pkgAcquire::Auth", false) == true)
+ {
+ std::clog << "201 URI Done: " << Owner->DescURI() << endl
+ << "RecivedHash:" << endl;
+ for (HashStringList::const_iterator hs = RecivedHashes.begin(); hs != RecivedHashes.end(); ++hs)
+ std::clog << "\t- " << hs->toStr() << std::endl;
+ std::clog << "ExpectedHash:" << endl;
+ for (HashStringList::const_iterator hs = expectedHashes.begin(); hs != expectedHashes.end(); ++hs)
+ std::clog << "\t- " << hs->toStr() << std::endl;
+ std::clog << endl;
}
- Owner->Done(Message, ServerSize, RecivedHash.c_str(), Config);
+ Owner->Done(Message, ServerSize, RecivedHashes, Config);
ItemDone();
-
+
// Log that we are done
if (Log != 0)
{
@@ -366,7 +371,8 @@ bool pkgAcquire::Worker::RunMessages()
{
if (Itm == 0)
{
- _error->Error("Method gave invalid 400 URI Failure message");
+ std::string const msg = LookupTag(Message,"Message");
+ _error->Error("Method gave invalid 400 URI Failure message: %s", msg.c_str());
break;
}
@@ -525,6 +531,15 @@ bool pkgAcquire::Worker::QueueItem(pkgAcquire::Queue::QItem *Item)
Message.reserve(300);
Message += "URI: " + Item->URI;
Message += "\nFilename: " + Item->Owner->DestFile;
+ HashStringList const hsl = Item->Owner->HashSums();
+ for (HashStringList::const_iterator hs = hsl.begin(); hs != hsl.end(); ++hs)
+ Message += "\nExpected-" + hs->HashType() + ": " + hs->HashValue();
+ if(Item->Owner->FileSize > 0)
+ {
+ string MaximumSize;
+ strprintf(MaximumSize, "%llu", Item->Owner->FileSize);
+ Message += "\nMaximum-Size: " + MaximumSize;
+ }
Message += Item->Owner->Custom600Headers();
Message += "\n\n";
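With the QueueItem() change the worker now advertises the expected checksums and a size cap to the method. Assuming the usual "600 URI Acquire" header, the request for an index file might look roughly like this (all values purely illustrative):

600 URI Acquire
URI: http://deb.example.org/debian/dists/stable/main/binary-amd64/Packages.xz
Filename: /var/lib/apt/lists/partial/deb.example.org_dists_stable_main_binary-amd64_Packages.xz
Expected-SHA256: <digest taken from the repository metadata>
Expected-MD5Sum: <digest taken from the repository metadata>
Maximum-Size: 1398044

Maximum-Size is only emitted when the item knows its FileSize, so methods can treat 0 as "no limit".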
diff --git a/apt-pkg/acquire.cc b/apt-pkg/acquire.cc
index 057bc24cd..1aa709381 100644
--- a/apt-pkg/acquire.cc
+++ b/apt-pkg/acquire.cc
@@ -27,15 +27,20 @@
#include <vector>
#include <iostream>
#include <sstream>
+#include <iomanip>
+
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
-
+#include <pwd.h>
+#include <grp.h>
#include <dirent.h>
#include <sys/time.h>
#include <sys/select.h>
#include <errno.h>
+#include <sys/stat.h>
+#include <sys/types.h>
#include <apti18n.h>
/*}}}*/
@@ -55,8 +60,8 @@ pkgAcquire::pkgAcquire() : LockFD(-1), Queues(0), Workers(0), Configs(0), Log(NU
if (strcasecmp(Mode.c_str(),"access") == 0)
QueueMode = QueueAccess;
}
-pkgAcquire::pkgAcquire(pkgAcquireStatus *Progress) : LockFD(-1), Queues(0), Workers(0),
- Configs(0), Log(Progress), ToFetch(0),
+pkgAcquire::pkgAcquire(pkgAcquireStatus *Progress) : LockFD(-1), Queues(0), Workers(0),
+ Configs(0), Log(NULL), ToFetch(0),
Debug(_config->FindB("Debug::pkgAcquire",false)),
Running(false)
{
@@ -65,36 +70,70 @@ pkgAcquire::pkgAcquire(pkgAcquireStatus *Progress) : LockFD(-1), Queues(0), Wor
QueueMode = QueueHost;
if (strcasecmp(Mode.c_str(),"access") == 0)
QueueMode = QueueAccess;
- Setup(Progress, "");
+ SetLog(Progress);
}
/*}}}*/
-// Acquire::Setup - Delayed Constructor /*{{{*/
-// ---------------------------------------------------------------------
-/* Do everything needed to be a complete Acquire object and report the
- success (or failure) back so the user knows that something is wrong… */
+// Acquire::GetLock - lock directory and prepare for action /*{{{*/
+static bool SetupAPTPartialDirectory(std::string const &grand, std::string const &parent)
+{
+ std::string const partial = parent + "partial";
+ if (CreateAPTDirectoryIfNeeded(grand, partial) == false &&
+ CreateAPTDirectoryIfNeeded(parent, partial) == false)
+ return false;
+
+ if (getuid() == 0) // if we aren't root, we can't chown, so don't try it
+ {
+ std::string SandboxUser = _config->Find("APT::Sandbox::User");
+ struct passwd *pw = getpwnam(SandboxUser.c_str());
+ struct group *gr = getgrnam("root");
+ if (pw != NULL && gr != NULL && chown(partial.c_str(), pw->pw_uid, gr->gr_gid) != 0)
+ _error->WarningE("SetupAPTPartialDirectory", "chown to %s:root of directory %s failed", SandboxUser.c_str(), partial.c_str());
+ }
+ if (chmod(partial.c_str(), 0700) != 0)
+ _error->WarningE("SetupAPTPartialDirectory", "chmod 0700 of directory %s failed", partial.c_str());
+
+ return true;
+}
bool pkgAcquire::Setup(pkgAcquireStatus *Progress, string const &Lock)
{
Log = Progress;
+ if (Lock.empty())
+ {
+ string const listDir = _config->FindDir("Dir::State::lists");
+ if (SetupAPTPartialDirectory(_config->FindDir("Dir::State"), listDir) == false)
+ return _error->Errno("Acquire", _("List directory %spartial is missing."), listDir.c_str());
+ string const archivesDir = _config->FindDir("Dir::Cache::Archives");
+ if (SetupAPTPartialDirectory(_config->FindDir("Dir::Cache"), archivesDir) == false)
+ return _error->Errno("Acquire", _("Archives directory %spartial is missing."), archivesDir.c_str());
+ return true;
+ }
+ return GetLock(Lock);
+}
+bool pkgAcquire::GetLock(std::string const &Lock)
+{
+ if (Lock.empty() == true)
+ return false;
// check for existence and possibly create auxiliary directories
string const listDir = _config->FindDir("Dir::State::lists");
- string const partialListDir = listDir + "partial/";
string const archivesDir = _config->FindDir("Dir::Cache::Archives");
- string const partialArchivesDir = archivesDir + "partial/";
- if (CreateAPTDirectoryIfNeeded(_config->FindDir("Dir::State"), partialListDir) == false &&
- CreateAPTDirectoryIfNeeded(listDir, partialListDir) == false)
- return _error->Errno("Acquire", _("List directory %spartial is missing."), listDir.c_str());
-
- if (CreateAPTDirectoryIfNeeded(_config->FindDir("Dir::Cache"), partialArchivesDir) == false &&
- CreateAPTDirectoryIfNeeded(archivesDir, partialArchivesDir) == false)
- return _error->Errno("Acquire", _("Archives directory %spartial is missing."), archivesDir.c_str());
+ if (Lock == listDir)
+ {
+ if (SetupAPTPartialDirectory(_config->FindDir("Dir::State"), listDir) == false)
+ return _error->Errno("Acquire", _("List directory %spartial is missing."), listDir.c_str());
+ }
+ if (Lock == archivesDir)
+ {
+ if (SetupAPTPartialDirectory(_config->FindDir("Dir::Cache"), archivesDir) == false)
+ return _error->Errno("Acquire", _("Archives directory %spartial is missing."), archivesDir.c_str());
+ }
- if (Lock.empty() == true || _config->FindB("Debug::NoLocking", false) == true)
+ if (_config->FindB("Debug::NoLocking", false) == true)
return true;
// Lock the directory this acquire object will work in
- LockFD = GetLock(flCombine(Lock, "lock"));
+ LockFD = ::GetLock(flCombine(Lock, "lock"));
if (LockFD == -1)
return _error->Error(_("Unable to lock directory %s"), Lock.c_str());
@@ -580,27 +619,18 @@ pkgAcquire::UriIterator pkgAcquire::UriEnd()
// Acquire::MethodConfig::MethodConfig - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
-pkgAcquire::MethodConfig::MethodConfig()
+pkgAcquire::MethodConfig::MethodConfig() : d(NULL), Next(0), SingleInstance(false),
+ Pipeline(false), SendConfig(false), LocalOnly(false), NeedsCleanup(false),
+ Removable(false)
{
- SingleInstance = false;
- Pipeline = false;
- SendConfig = false;
- LocalOnly = false;
- Removable = false;
- Next = 0;
}
/*}}}*/
// Queue::Queue - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
-pkgAcquire::Queue::Queue(string Name,pkgAcquire *Owner) : Name(Name),
- Owner(Owner)
+pkgAcquire::Queue::Queue(string Name,pkgAcquire *Owner) : d(NULL), Next(0),
+ Name(Name), Items(0), Workers(0), Owner(Owner), PipeDepth(0), MaxPipeDepth(1)
{
- Items = 0;
- Next = 0;
- Workers = 0;
- MaxPipeDepth = 1;
- PipeDepth = 0;
}
/*}}}*/
// Queue::~Queue - Destructor /*{{{*/
@@ -804,7 +834,7 @@ void pkgAcquire::Queue::Bump()
// AcquireStatus::pkgAcquireStatus - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
-pkgAcquireStatus::pkgAcquireStatus() : d(NULL), Update(true), MorePulses(false)
+pkgAcquireStatus::pkgAcquireStatus() : d(NULL), Percent(0), Update(true), MorePulses(false)
{
Start();
}
@@ -824,7 +854,9 @@ bool pkgAcquireStatus::Pulse(pkgAcquire *Owner)
// Compute the total number of bytes to fetch
unsigned int Unknown = 0;
unsigned int Count = 0;
- for (pkgAcquire::ItemCIterator I = Owner->ItemsBegin(); I != Owner->ItemsEnd();
+ bool UnfetchedReleaseFiles = false;
+ for (pkgAcquire::ItemCIterator I = Owner->ItemsBegin();
+ I != Owner->ItemsEnd();
++I, ++Count)
{
TotalItems++;
@@ -835,6 +867,13 @@ bool pkgAcquireStatus::Pulse(pkgAcquire *Owner)
if ((*I)->Local == true)
continue;
+ // see if the method tells us to expect more
+ TotalItems += (*I)->ExpectedAdditionalItems;
+
+ // check if there are unfetched Release files
+ if ((*I)->Complete == false && (*I)->ExpectedAdditionalItems > 0)
+ UnfetchedReleaseFiles = true;
+
TotalBytes += (*I)->FileSize;
if ((*I)->Complete == true)
CurrentBytes += (*I)->FileSize;
@@ -846,6 +885,7 @@ bool pkgAcquireStatus::Pulse(pkgAcquire *Owner)
unsigned long long ResumeSize = 0;
for (pkgAcquire::Worker *I = Owner->WorkersBegin(); I != 0;
I = Owner->WorkerStep(I))
+ {
if (I->CurrentItem != 0 && I->CurrentItem->Owner->Complete == false)
{
CurrentBytes += I->CurrentSize;
@@ -856,6 +896,7 @@ bool pkgAcquireStatus::Pulse(pkgAcquire *Owner)
I->CurrentItem->Owner->Complete == false)
TotalBytes += I->CurrentSize;
}
+ }
// Normalize the figures and account for unknown size downloads
if (TotalBytes <= 0)
@@ -866,6 +907,12 @@ bool pkgAcquireStatus::Pulse(pkgAcquire *Owner)
// Wha?! Is not supposed to happen.
if (CurrentBytes > TotalBytes)
CurrentBytes = TotalBytes;
+
+ // debug
+ if (_config->FindB("Debug::acquire::progress", false) == true)
+ std::clog << " Bytes: "
+ << SizeToStr(CurrentBytes) << " / " << SizeToStr(TotalBytes)
+ << std::endl;
// Compute the CPS
struct timeval NewTime;
@@ -886,6 +933,14 @@ bool pkgAcquireStatus::Pulse(pkgAcquire *Owner)
Time = NewTime;
}
+ // calculate the percentage, if we have too little data assume 1%
+ if (TotalBytes > 0 && UnfetchedReleaseFiles)
+ Percent = 0;
+ else
+ // use both files and bytes because bytes can be unreliable
+ Percent = (0.8 * (CurrentBytes/float(TotalBytes)*100.0) +
+ 0.2 * (CurrentItems/float(TotalItems)*100.0));
+
int fd = _config->FindI("APT::Status-Fd",-1);
if(fd > 0)
{
@@ -903,13 +958,11 @@ bool pkgAcquireStatus::Pulse(pkgAcquire *Owner)
else
snprintf(msg,sizeof(msg), _("Retrieving file %li of %li"), i, TotalItems);
-
-
// build the status str
status << "dlstatus:" << i
- << ":" << (CurrentBytes/float(TotalBytes)*100.0)
- << ":" << msg
- << endl;
+ << ":" << std::setprecision(3) << Percent
+ << ":" << msg
+ << endl;
std::string const dlstatus = status.str();
FileFd::Write(fd, dlstatus.c_str(), dlstatus.size());
@@ -964,3 +1017,7 @@ void pkgAcquireStatus::Fetched(unsigned long long Size,unsigned long long Resume
FetchedBytes += Size - Resume;
}
/*}}}*/
+
+APT_CONST pkgAcquire::UriIterator::~UriIterator() {}
+APT_CONST pkgAcquire::MethodConfig::~MethodConfig() {}
+APT_CONST pkgAcquireStatus::~pkgAcquireStatus() {}
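Setup() is split in two here: SetLog() only stores the progress object, while the new GetLock() creates the partial directories (chowned to APT::Sandbox::User and chmoded 0700 when running as root) and takes the lock. A sketch of the new calling convention, replacing the old delayed-constructor style; Progress is an assumed pkgAcquireStatus instance:

// Sketch, previously: Fetcher.Setup(&Progress, archivesDir);
pkgAcquire Fetcher;
Fetcher.SetLog(&Progress);
std::string const archivesDir = _config->FindDir("Dir::Cache::Archives");
if (Fetcher.GetLock(archivesDir) == false)
   return _error->Error("Couldn't lock the download directory %s", archivesDir.c_str());

Pulse() additionally records an overall Percent estimate, weighted 80% by bytes and 20% by item count and held at 0 while Release files announcing ExpectedAdditionalItems are still outstanding, so the status fd no longer jumps around early in an update.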
diff --git a/apt-pkg/acquire.h b/apt-pkg/acquire.h
index ef16d8556..a1a192d5f 100644
--- a/apt-pkg/acquire.h
+++ b/apt-pkg/acquire.h
@@ -351,14 +351,24 @@ class pkgAcquire
* long as the pkgAcquire object does.
* \param Lock defines a lock file that should be acquired to ensure
* only one Acquire class is in action at the time or an empty string
- * if no lock file should be used.
+ * if no lock file should be used. If set also all needed directories
+ * will be created.
*/
- bool Setup(pkgAcquireStatus *Progress = NULL, std::string const &Lock = "");
+ APT_DEPRECATED bool Setup(pkgAcquireStatus *Progress = NULL, std::string const &Lock = "");
void SetLog(pkgAcquireStatus *Progress) { Log = Progress; }
+ /** \brief acquire lock and perform directory setup
+ *
+ * \param Lock defines a lock file that should be acquired to ensure
+ * only one Acquire class is in action at the time or an empty string
+ * if no lock file should be used. If set also all needed directories
+ * will be created and setup.
+ */
+ bool GetLock(std::string const &Lock);
+
/** \brief Construct a new pkgAcquire. */
- pkgAcquire(pkgAcquireStatus *Log) APT_DEPRECATED;
+ pkgAcquire(pkgAcquireStatus *Log);
pkgAcquire();
/** \brief Destroy this pkgAcquire object.
@@ -585,7 +595,7 @@ class pkgAcquire::UriIterator
*
* \param Q The queue over which this UriIterator should iterate.
*/
- UriIterator(pkgAcquire::Queue *Q) : CurQ(Q), CurItem(0)
+ UriIterator(pkgAcquire::Queue *Q) : d(NULL), CurQ(Q), CurItem(0)
{
while (CurItem == 0 && CurQ != 0)
{
@@ -593,7 +603,7 @@ class pkgAcquire::UriIterator
CurQ = CurQ->Next;
}
}
- virtual ~UriIterator() {};
+ virtual ~UriIterator();
};
/*}}}*/
/** \brief Information about the properties of a single acquire method. {{{*/
@@ -651,8 +661,7 @@ struct pkgAcquire::MethodConfig
*/
MethodConfig();
- /* \brief Destructor, empty currently */
- virtual ~MethodConfig() {};
+ virtual ~MethodConfig();
};
/*}}}*/
/** \brief A monitor object for downloads controlled by the pkgAcquire class. {{{
@@ -714,6 +723,10 @@ class pkgAcquireStatus
/** \brief The number of items that have been successfully downloaded. */
unsigned long CurrentItems;
+ /** \brief The estimated percentage of the download (0-100)
+ */
+ double Percent;
+
public:
/** \brief If \b true, the download scheduler should call Pulse()
@@ -794,7 +807,7 @@ class pkgAcquireStatus
/** \brief Initialize all counters to 0 and the time to the current time. */
pkgAcquireStatus();
- virtual ~pkgAcquireStatus() {};
+ virtual ~pkgAcquireStatus();
};
/*}}}*/
/** @} */
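On the header side pkgAcquireStatus gains the Percent member that Pulse() keeps current, so frontends can report progress without redoing the byte math. A minimal sketch of a subclass reading it; the member appears to sit in the protected section next to CurrentItems, and the stubbed MediaChange() is assumed to be the usual pure-virtual of this class:

// Illustrative subclass, not part of the patch.
class SimpleStatus : public pkgAcquireStatus
{
   public:
   virtual bool MediaChange(std::string /*Media*/, std::string /*Drive*/) { return false; }
   virtual bool Pulse(pkgAcquire *Owner)
   {
      pkgAcquireStatus::Pulse(Owner);
      std::clog << "overall progress: " << Percent << "%" << std::endl;
      return true;
   }
};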
diff --git a/apt-pkg/algorithms.cc b/apt-pkg/algorithms.cc
index 608ec7fce..71b5ac2c1 100644
--- a/apt-pkg/algorithms.cc
+++ b/apt-pkg/algorithms.cc
@@ -640,13 +640,11 @@ bool pkgProblemResolver::DoUpgrade(pkgCache::PkgIterator Pkg)
// ProblemResolver::Resolve - calls a resolver to fix the situation /*{{{*/
// ---------------------------------------------------------------------
/* */
-bool pkgProblemResolver::Resolve(bool BrokenFix)
+bool pkgProblemResolver::Resolve(bool BrokenFix, OpProgress * const Progress)
{
std::string const solver = _config->Find("APT::Solver", "internal");
- if (solver != "internal") {
- OpTextProgress Prog(*_config);
- return EDSP::ResolveExternal(solver.c_str(), Cache, false, false, false, &Prog);
- }
+ if (solver != "internal")
+ return EDSP::ResolveExternal(solver.c_str(), Cache, false, false, false, Progress);
return ResolveInternal(BrokenFix);
}
/*}}}*/
@@ -1140,13 +1138,11 @@ bool pkgProblemResolver::InstOrNewPolicyBroken(pkgCache::PkgIterator I)
/* This is the work horse of the soft upgrade routine. It is very gental
in that it does not install or remove any packages. It is assumed that the
system was non-broken previously. */
-bool pkgProblemResolver::ResolveByKeep()
+bool pkgProblemResolver::ResolveByKeep(OpProgress * const Progress)
{
std::string const solver = _config->Find("APT::Solver", "internal");
- if (solver != "internal") {
- OpTextProgress Prog(*_config);
- return EDSP::ResolveExternal(solver.c_str(), Cache, true, false, false, &Prog);
- }
+ if (solver != "internal")
+ return EDSP::ResolveExternal(solver.c_str(), Cache, true, false, false, Progress);
return ResolveByKeepInternal();
}
/*}}}*/
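Resolve() and ResolveByKeep() now take an optional OpProgress that is handed straight to the external EDSP solver; the internally constructed OpTextProgress is gone, so callers wanting the old behaviour supply one themselves. Sketch, with DepCache standing in for an already-built pkgDepCache:

// Illustrative fragment only.
OpTextProgress Prog(*_config);
pkgProblemResolver Fix(&DepCache);
if (Fix.Resolve(true, &Prog) == false)     // Prog is only used when APT::Solver != "internal"
   return _error->Error("Unable to resolve dependencies");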
diff --git a/apt-pkg/algorithms.h b/apt-pkg/algorithms.h
index f35bd9a13..b6da1f2bf 100644
--- a/apt-pkg/algorithms.h
+++ b/apt-pkg/algorithms.h
@@ -82,9 +82,9 @@ class pkgSimulate : public pkgPackageManager /*{{{*/
virtual bool Remove(PkgIterator Pkg,bool Purge);
private:
- void ShortBreaks();
- void Describe(PkgIterator iPkg,std::ostream &out,bool Current,bool Candidate);
-
+ APT_HIDDEN void ShortBreaks();
+ APT_HIDDEN void Describe(PkgIterator iPkg,std::ostream &out,bool Current,bool Candidate);
+
public:
pkgSimulate(pkgDepCache *Cache);
@@ -114,7 +114,7 @@ class pkgProblemResolver /*{{{*/
// Sort stuff
static pkgProblemResolver *This;
- static int ScoreSort(const void *a,const void *b) APT_PURE;
+ APT_HIDDEN static int ScoreSort(const void *a,const void *b) APT_PURE;
struct PackageKill
{
@@ -122,12 +122,12 @@ class pkgProblemResolver /*{{{*/
DepIterator Dep;
};
- void MakeScores();
- bool DoUpgrade(pkgCache::PkgIterator Pkg);
+ APT_HIDDEN void MakeScores();
+ APT_HIDDEN bool DoUpgrade(pkgCache::PkgIterator Pkg);
+
+ APT_HIDDEN bool ResolveInternal(bool const BrokenFix = false);
+ APT_HIDDEN bool ResolveByKeepInternal();
- bool ResolveInternal(bool const BrokenFix = false);
- bool ResolveByKeepInternal();
-
protected:
bool InstOrNewPolicyBroken(pkgCache::PkgIterator Pkg);
@@ -136,12 +136,12 @@ class pkgProblemResolver /*{{{*/
inline void Protect(pkgCache::PkgIterator Pkg) {Flags[Pkg->ID] |= Protected; Cache.MarkProtected(Pkg);};
inline void Remove(pkgCache::PkgIterator Pkg) {Flags[Pkg->ID] |= ToRemove;};
inline void Clear(pkgCache::PkgIterator Pkg) {Flags[Pkg->ID] &= ~(Protected | ToRemove);};
-
- // Try to intelligently resolve problems by installing and removing packages
- bool Resolve(bool BrokenFix = false);
-
+
+ // Try to intelligently resolve problems by installing and removing packages
+ bool Resolve(bool BrokenFix = false, OpProgress * const Progress = NULL);
+
// Try to resolve problems only by using keep
- bool ResolveByKeep();
+ bool ResolveByKeep(OpProgress * const Progress = NULL);
APT_DEPRECATED void InstallProtect();
diff --git a/apt-pkg/aptconfiguration.cc b/apt-pkg/aptconfiguration.cc
index 9982759c6..01b85a74e 100644
--- a/apt-pkg/aptconfiguration.cc
+++ b/apt-pkg/aptconfiguration.cc
@@ -32,6 +32,35 @@
#include <apti18n.h>
/*}}}*/
namespace APT {
+// setDefaultConfigurationForCompressors /*{{{*/
+static void setDefaultConfigurationForCompressors() {
+ // Set default application paths to check for optional compression types
+ _config->CndSet("Dir::Bin::bzip2", "/bin/bzip2");
+ _config->CndSet("Dir::Bin::xz", "/usr/bin/xz");
+ if (FileExists(_config->FindFile("Dir::Bin::xz")) == true) {
+ _config->Set("Dir::Bin::lzma", _config->FindFile("Dir::Bin::xz"));
+ _config->Set("APT::Compressor::lzma::Binary", "xz");
+ if (_config->Exists("APT::Compressor::lzma::CompressArg") == false) {
+ _config->Set("APT::Compressor::lzma::CompressArg::", "--format=lzma");
+ _config->Set("APT::Compressor::lzma::CompressArg::", "-9");
+ }
+ if (_config->Exists("APT::Compressor::lzma::UncompressArg") == false) {
+ _config->Set("APT::Compressor::lzma::UncompressArg::", "--format=lzma");
+ _config->Set("APT::Compressor::lzma::UncompressArg::", "-d");
+ }
+ } else {
+ _config->CndSet("Dir::Bin::lzma", "/usr/bin/lzma");
+ if (_config->Exists("APT::Compressor::lzma::CompressArg") == false) {
+ _config->Set("APT::Compressor::lzma::CompressArg::", "--suffix=");
+ _config->Set("APT::Compressor::lzma::CompressArg::", "-9");
+ }
+ if (_config->Exists("APT::Compressor::lzma::UncompressArg") == false) {
+ _config->Set("APT::Compressor::lzma::UncompressArg::", "--suffix=");
+ _config->Set("APT::Compressor::lzma::UncompressArg::", "-d");
+ }
+ }
+}
+ /*}}}*/
// getCompressionTypes - Return Vector of usable compressiontypes /*{{{*/
// ---------------------------------------------------------------------
/* return a vector of compression types in the preferred order. */
@@ -402,35 +431,6 @@ bool Configuration::checkArchitecture(std::string const &Arch) {
return (std::find(archs.begin(), archs.end(), Arch) != archs.end());
}
/*}}}*/
-// setDefaultConfigurationForCompressors /*{{{*/
-void Configuration::setDefaultConfigurationForCompressors() {
- // Set default application paths to check for optional compression types
- _config->CndSet("Dir::Bin::bzip2", "/bin/bzip2");
- _config->CndSet("Dir::Bin::xz", "/usr/bin/xz");
- if (FileExists(_config->FindFile("Dir::Bin::xz")) == true) {
- _config->Set("Dir::Bin::lzma", _config->FindFile("Dir::Bin::xz"));
- _config->Set("APT::Compressor::lzma::Binary", "xz");
- if (_config->Exists("APT::Compressor::lzma::CompressArg") == false) {
- _config->Set("APT::Compressor::lzma::CompressArg::", "--format=lzma");
- _config->Set("APT::Compressor::lzma::CompressArg::", "-9");
- }
- if (_config->Exists("APT::Compressor::lzma::UncompressArg") == false) {
- _config->Set("APT::Compressor::lzma::UncompressArg::", "--format=lzma");
- _config->Set("APT::Compressor::lzma::UncompressArg::", "-d");
- }
- } else {
- _config->CndSet("Dir::Bin::lzma", "/usr/bin/lzma");
- if (_config->Exists("APT::Compressor::lzma::CompressArg") == false) {
- _config->Set("APT::Compressor::lzma::CompressArg::", "--suffix=");
- _config->Set("APT::Compressor::lzma::CompressArg::", "-9");
- }
- if (_config->Exists("APT::Compressor::lzma::UncompressArg") == false) {
- _config->Set("APT::Compressor::lzma::UncompressArg::", "--suffix=");
- _config->Set("APT::Compressor::lzma::UncompressArg::", "-d");
- }
- }
-}
- /*}}}*/
// getCompressors - Return Vector of usealbe compressors /*{{{*/
// ---------------------------------------------------------------------
/* return a vector of compressors used by apt-ftparchive in the
@@ -540,7 +540,7 @@ std::string const Configuration::getBuildProfilesString() {
return "";
std::vector<std::string>::const_iterator p = profiles.begin();
std::string list = *p;
- for (; p != profiles.end(); ++p)
+ for (++p; p != profiles.end(); ++p)
list.append(",").append(*p);
return list;
}
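Besides demoting setDefaultConfigurationForCompressors() to a file-local helper, this hunk fixes getBuildProfilesString(): the join loop now steps past the first element before appending, so the first profile is no longer emitted twice. The corrected loop in isolation:

// Worked example of the fixed join (illustrative).
std::vector<std::string> profiles;
profiles.push_back("nocheck");
profiles.push_back("nodoc");
std::vector<std::string>::const_iterator p = profiles.begin();
std::string list = *p;
for (++p; p != profiles.end(); ++p)   // old loop restarted at begin(): "nocheck,nocheck,nodoc"
   list.append(",").append(*p);
// list == "nocheck,nodoc"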
diff --git a/apt-pkg/aptconfiguration.h b/apt-pkg/aptconfiguration.h
index dfed194ae..c7b8d2d73 100644
--- a/apt-pkg/aptconfiguration.h
+++ b/apt-pkg/aptconfiguration.h
@@ -123,9 +123,6 @@ public: /*{{{*/
/** \return Return a comma-separated list of enabled build profile specifications */
std::string static const getBuildProfilesString();
/*}}}*/
- private: /*{{{*/
- void static setDefaultConfigurationForCompressors();
- /*}}}*/
};
/*}}}*/
}
diff --git a/apt-pkg/cachefilter.cc b/apt-pkg/cachefilter.cc
index e388f2450..4362f43e3 100644
--- a/apt-pkg/cachefilter.cc
+++ b/apt-pkg/cachefilter.cc
@@ -6,6 +6,7 @@
// Include Files /*{{{*/
#include <config.h>
+#include <apt-pkg/cachefile.h>
#include <apt-pkg/cachefilter.h>
#include <apt-pkg/error.h>
#include <apt-pkg/pkgcache.h>
@@ -22,7 +23,11 @@
/*}}}*/
namespace APT {
namespace CacheFilter {
-PackageNameMatchesRegEx::PackageNameMatchesRegEx(std::string const &Pattern) : d(NULL) {/*{{{*/
+APT_CONST Matcher::~Matcher() {}
+APT_CONST PackageMatcher::~PackageMatcher() {}
+
+// Name matches RegEx /*{{{*/
+PackageNameMatchesRegEx::PackageNameMatchesRegEx(std::string const &Pattern) {
pattern = new regex_t;
int const Res = regcomp(pattern, Pattern.c_str(), REG_EXTENDED | REG_ICASE | REG_NOSUB);
if (Res == 0)
@@ -34,41 +39,36 @@ PackageNameMatchesRegEx::PackageNameMatchesRegEx(std::string const &Pattern) : d
regerror(Res, pattern, Error, sizeof(Error));
_error->Error(_("Regex compilation error - %s"), Error);
}
- /*}}}*/
-bool PackageNameMatchesRegEx::operator() (pkgCache::PkgIterator const &Pkg) {/*{{{*/
+bool PackageNameMatchesRegEx::operator() (pkgCache::PkgIterator const &Pkg) {
if (unlikely(pattern == NULL))
return false;
else
return regexec(pattern, Pkg.Name(), 0, 0, 0) == 0;
}
- /*}}}*/
-bool PackageNameMatchesRegEx::operator() (pkgCache::GrpIterator const &Grp) {/*{{{*/
+bool PackageNameMatchesRegEx::operator() (pkgCache::GrpIterator const &Grp) {
if (unlikely(pattern == NULL))
return false;
else
return regexec(pattern, Grp.Name(), 0, 0, 0) == 0;
}
- /*}}}*/
-PackageNameMatchesRegEx::~PackageNameMatchesRegEx() { /*{{{*/
+PackageNameMatchesRegEx::~PackageNameMatchesRegEx() {
if (pattern == NULL)
return;
regfree(pattern);
delete pattern;
}
/*}}}*/
-
-// Fnmatch support /*{{{*/
-//----------------------------------------------------------------------
-bool PackageNameMatchesFnmatch::operator() (pkgCache::PkgIterator const &Pkg) {/*{{{*/
+// Name matches Fnmatch /*{{{*/
+PackageNameMatchesFnmatch::PackageNameMatchesFnmatch(std::string const &Pattern) :
+ Pattern(Pattern) {}
+bool PackageNameMatchesFnmatch::operator() (pkgCache::PkgIterator const &Pkg) {
return fnmatch(Pattern.c_str(), Pkg.Name(), FNM_CASEFOLD) == 0;
}
- /*}}}*/
-bool PackageNameMatchesFnmatch::operator() (pkgCache::GrpIterator const &Grp) {/*{{{*/
+bool PackageNameMatchesFnmatch::operator() (pkgCache::GrpIterator const &Grp) {
return fnmatch(Pattern.c_str(), Grp.Name(), FNM_CASEFOLD) == 0;
}
/*}}}*/
-
-// CompleteArch to <kernel>-<cpu> tuple /*{{{*/
+// Architecture matches <kernel>-<cpu> specification /*{{{*/
//----------------------------------------------------------------------
/* The complete architecture, consisting of <kernel>-<cpu>. */
static std::string CompleteArch(std::string const &arch) {
@@ -82,12 +82,10 @@ static std::string CompleteArch(std::string const &arch) {
else if (arch == "any") return "*-*";
else return "linux-" + arch;
}
- /*}}}*/
-PackageArchitectureMatchesSpecification::PackageArchitectureMatchesSpecification(std::string const &pattern, bool const isPattern) :/*{{{*/
- literal(pattern), complete(CompleteArch(pattern)), isPattern(isPattern), d(NULL) {
+PackageArchitectureMatchesSpecification::PackageArchitectureMatchesSpecification(std::string const &pattern, bool const isPattern) :
+ literal(pattern), complete(CompleteArch(pattern)), isPattern(isPattern) {
}
- /*}}}*/
-bool PackageArchitectureMatchesSpecification::operator() (char const * const &arch) {/*{{{*/
+bool PackageArchitectureMatchesSpecification::operator() (char const * const &arch) {
if (strcmp(literal.c_str(), arch) == 0 ||
strcmp(complete.c_str(), arch) == 0)
return true;
@@ -96,16 +94,112 @@ bool PackageArchitectureMatchesSpecification::operator() (char const * const &ar
return fnmatch(complete.c_str(), pkgarch.c_str(), 0) == 0;
return fnmatch(pkgarch.c_str(), complete.c_str(), 0) == 0;
}
- /*}}}*/
-bool PackageArchitectureMatchesSpecification::operator() (pkgCache::PkgIterator const &Pkg) {/*{{{*/
+bool PackageArchitectureMatchesSpecification::operator() (pkgCache::PkgIterator const &Pkg) {
return (*this)(Pkg.Arch());
}
+PackageArchitectureMatchesSpecification::~PackageArchitectureMatchesSpecification() {
+}
/*}}}*/
-bool PackageArchitectureMatchesSpecification::operator() (pkgCache::VerIterator const &Ver) {/*{{{*/
- return (*this)(Ver.ParentPkg());
+// Package is new install /*{{{*/
+PackageIsNewInstall::PackageIsNewInstall(pkgCacheFile * const Cache) : Cache(Cache) {}
+APT_PURE bool PackageIsNewInstall::operator() (pkgCache::PkgIterator const &Pkg) {
+ return (*Cache)[Pkg].NewInstall();
}
+PackageIsNewInstall::~PackageIsNewInstall() {}
/*}}}*/
-PackageArchitectureMatchesSpecification::~PackageArchitectureMatchesSpecification() { /*{{{*/
+// Generica like True, False, NOT, AND, OR /*{{{*/
+APT_CONST bool TrueMatcher::operator() (pkgCache::PkgIterator const &) { return true; }
+APT_CONST bool TrueMatcher::operator() (pkgCache::GrpIterator const &) { return true; }
+APT_CONST bool TrueMatcher::operator() (pkgCache::VerIterator const &) { return true; }
+
+APT_CONST bool FalseMatcher::operator() (pkgCache::PkgIterator const &) { return false; }
+APT_CONST bool FalseMatcher::operator() (pkgCache::GrpIterator const &) { return false; }
+APT_CONST bool FalseMatcher::operator() (pkgCache::VerIterator const &) { return false; }
+
+NOTMatcher::NOTMatcher(Matcher * const matcher) : matcher(matcher) {}
+bool NOTMatcher::operator() (pkgCache::PkgIterator const &Pkg) { return ! (*matcher)(Pkg); }
+bool NOTMatcher::operator() (pkgCache::GrpIterator const &Grp) { return ! (*matcher)(Grp); }
+bool NOTMatcher::operator() (pkgCache::VerIterator const &Ver) { return ! (*matcher)(Ver); }
+NOTMatcher::~NOTMatcher() { delete matcher; }
+
+ANDMatcher::ANDMatcher() {}
+ANDMatcher::ANDMatcher(Matcher * const matcher1) {
+ AND(matcher1);
+}
+ANDMatcher::ANDMatcher(Matcher * const matcher1, Matcher * const matcher2) {
+ AND(matcher1).AND(matcher2);
+}
+ANDMatcher::ANDMatcher(Matcher * const matcher1, Matcher * const matcher2, Matcher * const matcher3) {
+ AND(matcher1).AND(matcher2).AND(matcher3);
+}
+ANDMatcher::ANDMatcher(Matcher * const matcher1, Matcher * const matcher2, Matcher * const matcher3, Matcher * const matcher4) {
+ AND(matcher1).AND(matcher2).AND(matcher3).AND(matcher4);
+}
+ANDMatcher::ANDMatcher(Matcher * const matcher1, Matcher * const matcher2, Matcher * const matcher3, Matcher * const matcher4, Matcher * const matcher5) {
+ AND(matcher1).AND(matcher2).AND(matcher3).AND(matcher4).AND(matcher5);
+}
+ANDMatcher& ANDMatcher::AND(Matcher * const matcher) { matchers.push_back(matcher); return *this; }
+bool ANDMatcher::operator() (pkgCache::PkgIterator const &Pkg) {
+ for (std::vector<Matcher *>::const_iterator M = matchers.begin(); M != matchers.end(); ++M)
+ if ((**M)(Pkg) == false)
+ return false;
+ return true;
+}
+bool ANDMatcher::operator() (pkgCache::GrpIterator const &Grp) {
+ for (std::vector<Matcher *>::const_iterator M = matchers.begin(); M != matchers.end(); ++M)
+ if ((**M)(Grp) == false)
+ return false;
+ return true;
+}
+bool ANDMatcher::operator() (pkgCache::VerIterator const &Ver) {
+ for (std::vector<Matcher *>::const_iterator M = matchers.begin(); M != matchers.end(); ++M)
+ if ((**M)(Ver) == false)
+ return false;
+ return true;
+}
+ANDMatcher::~ANDMatcher() {
+ for (std::vector<Matcher *>::iterator M = matchers.begin(); M != matchers.end(); ++M)
+ delete *M;
+}
+
+ORMatcher::ORMatcher() {}
+ORMatcher::ORMatcher(Matcher * const matcher1) {
+ OR(matcher1);
+}
+ORMatcher::ORMatcher(Matcher * const matcher1, Matcher * const matcher2) {
+ OR(matcher1).OR(matcher2);
+}
+ORMatcher::ORMatcher(Matcher * const matcher1, Matcher * const matcher2, Matcher * const matcher3) {
+ OR(matcher1).OR(matcher2).OR(matcher3);
+}
+ORMatcher::ORMatcher(Matcher * const matcher1, Matcher * const matcher2, Matcher * const matcher3, Matcher * const matcher4) {
+ OR(matcher1).OR(matcher2).OR(matcher3).OR(matcher4);
+}
+ORMatcher::ORMatcher(Matcher * const matcher1, Matcher * const matcher2, Matcher * const matcher3, Matcher * const matcher4, Matcher * const matcher5) {
+ OR(matcher1).OR(matcher2).OR(matcher3).OR(matcher4).OR(matcher5);
+}
+ORMatcher& ORMatcher::OR(Matcher * const matcher) { matchers.push_back(matcher); return *this; }
+bool ORMatcher::operator() (pkgCache::PkgIterator const &Pkg) {
+ for (std::vector<Matcher *>::const_iterator M = matchers.begin(); M != matchers.end(); ++M)
+ if ((**M)(Pkg) == true)
+ return true;
+ return false;
+}
+bool ORMatcher::operator() (pkgCache::GrpIterator const &Grp) {
+ for (std::vector<Matcher *>::const_iterator M = matchers.begin(); M != matchers.end(); ++M)
+ if ((**M)(Grp) == true)
+ return true;
+ return false;
+}
+bool ORMatcher::operator() (pkgCache::VerIterator const &Ver) {
+ for (std::vector<Matcher *>::const_iterator M = matchers.begin(); M != matchers.end(); ++M)
+ if ((**M)(Ver) == true)
+ return true;
+ return false;
+}
+ORMatcher::~ORMatcher() {
+ for (std::vector<Matcher *>::iterator M = matchers.begin(); M != matchers.end(); ++M)
+ delete *M;
}
/*}}}*/
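The filters are rebuilt around an abstract Matcher with True/False constants and NOT/AND/OR combinators that take ownership of (and delete) their children, plus a PackageIsNewInstall filter over a pkgCacheFile. A hedged composition sketch; CacheFile is an assumed, already-opened pkgCacheFile with its dep cache built:

// Sketch only: packages matching a glob that are not scheduled as new installs.
using namespace APT::CacheFilter;
Matcher * const m = new ANDMatcher(
   new PackageNameMatchesFnmatch("linux-image-*"),
   new NOTMatcher(new PackageIsNewInstall(&CacheFile)));
for (pkgCache::PkgIterator P = CacheFile.GetPkgCache()->PkgBegin(); P.end() == false; ++P)
   if ((*m)(P) == true)
      std::cout << P.FullName(true) << std::endl;
delete m;   // also frees the nested matchers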
diff --git a/apt-pkg/cachefilter.h b/apt-pkg/cachefilter.h
index 6d10d1163..b4697b773 100644
--- a/apt-pkg/cachefilter.h
+++ b/apt-pkg/cachefilter.h
@@ -10,86 +10,90 @@
#include <apt-pkg/cacheiterators.h>
#include <string>
+#include <vector>
#include <regex.h>
+
+class pkgCacheFile;
/*}}}*/
namespace APT {
namespace CacheFilter {
-#define PACKAGE_MATCHER_ABI_COMPAT 1
-#ifdef PACKAGE_MATCHER_ABI_COMPAT
-
-// PackageNameMatchesRegEx /*{{{*/
-class PackageNameMatchesRegEx {
- /** \brief dpointer placeholder (for later in case we need it) */
- void *d;
- regex_t* pattern;
+class Matcher {
public:
- PackageNameMatchesRegEx(std::string const &Pattern);
- bool operator() (pkgCache::PkgIterator const &Pkg);
- bool operator() (pkgCache::GrpIterator const &Grp);
- ~PackageNameMatchesRegEx();
+ virtual bool operator() (pkgCache::PkgIterator const &/*Pkg*/) = 0;
+ virtual bool operator() (pkgCache::GrpIterator const &/*Grp*/) = 0;
+ virtual bool operator() (pkgCache::VerIterator const &/*Ver*/) = 0;
+ virtual ~Matcher();
};
- /*}}}*/
-// PackageNameMatchesFnmatch /*{{{*/
- class PackageNameMatchesFnmatch {
- /** \brief dpointer placeholder (for later in case we need it) */
- void *d;
- const std::string Pattern;
+
+class PackageMatcher : public Matcher {
public:
- PackageNameMatchesFnmatch(std::string const &Pattern)
- : Pattern(Pattern) {};
- bool operator() (pkgCache::PkgIterator const &Pkg);
- bool operator() (pkgCache::GrpIterator const &Grp);
- ~PackageNameMatchesFnmatch() {};
+ virtual bool operator() (pkgCache::PkgIterator const &Pkg) = 0;
+ virtual bool operator() (pkgCache::VerIterator const &Ver) { return (*this)(Ver.ParentPkg()); }
+ virtual bool operator() (pkgCache::GrpIterator const &/*Grp*/) { return false; }
+ virtual ~PackageMatcher();
};
- /*}}}*/
-// PackageArchitectureMatchesSpecification /*{{{*/
-/** \class PackageArchitectureMatchesSpecification
- \brief matching against architecture specification strings
- The strings are of the format \<kernel\>-\<cpu\> where either component,
- or the whole string, can be the wildcard "any" as defined in
- debian-policy §11.1 "Architecture specification strings".
-
- Examples: i386, mipsel, linux-any, any-amd64, any */
-class PackageArchitectureMatchesSpecification {
- std::string literal;
- std::string complete;
- bool isPattern;
- /** \brief dpointer placeholder (for later in case we need it) */
- void *d;
+// Generica like True, False, NOT, AND, OR /*{{{*/
+class TrueMatcher : public Matcher {
public:
- /** \brief matching against architecture specification strings
- *
- * @param pattern is the architecture specification string
- * @param isPattern defines if the given \b pattern is a
- * architecture specification pattern to match others against
- * or if it is the fixed string and matched against patterns
- */
- PackageArchitectureMatchesSpecification(std::string const &pattern, bool const isPattern = true);
- bool operator() (char const * const &arch);
- bool operator() (pkgCache::PkgIterator const &Pkg);
- bool operator() (pkgCache::VerIterator const &Ver);
- ~PackageArchitectureMatchesSpecification();
+ virtual bool operator() (pkgCache::PkgIterator const &Pkg);
+ virtual bool operator() (pkgCache::GrpIterator const &Grp);
+ virtual bool operator() (pkgCache::VerIterator const &Ver);
};
- /*}}}*/
-#else
+class FalseMatcher : public Matcher {
+public:
+ virtual bool operator() (pkgCache::PkgIterator const &Pkg);
+ virtual bool operator() (pkgCache::GrpIterator const &Grp);
+ virtual bool operator() (pkgCache::VerIterator const &Ver);
+};
-class PackageMatcher {
- public:
- virtual bool operator() (pkgCache::PkgIterator const &Pkg) { return false; };
- virtual bool operator() (pkgCache::GrpIterator const &Grp) { return false; };
- virtual bool operator() (pkgCache::VerIterator const &Ver) { return false; };
-
- virtual ~PackageMatcher() {};
+class NOTMatcher : public Matcher {
+ Matcher * const matcher;
+public:
+ NOTMatcher(Matcher * const matcher);
+ virtual bool operator() (pkgCache::PkgIterator const &Pkg);
+ virtual bool operator() (pkgCache::GrpIterator const &Grp);
+ virtual bool operator() (pkgCache::VerIterator const &Ver);
+ virtual ~NOTMatcher();
};
-// PackageNameMatchesRegEx /*{{{*/
-class PackageNameMatchesRegEx : public PackageMatcher {
- /** \brief dpointer placeholder (for later in case we need it) */
- void *d;
+class ANDMatcher : public Matcher {
+ std::vector<Matcher *> matchers;
+public:
+ // 5 ought to be enough for everybody… c++11 variadic templates would be nice
+ ANDMatcher();
+ ANDMatcher(Matcher * const matcher1);
+ ANDMatcher(Matcher * const matcher1, Matcher * const matcher2);
+ ANDMatcher(Matcher * const matcher1, Matcher * const matcher2, Matcher * const matcher3);
+ ANDMatcher(Matcher * const matcher1, Matcher * const matcher2, Matcher * const matcher3, Matcher * const matcher4);
+ ANDMatcher(Matcher * const matcher1, Matcher * const matcher2, Matcher * const matcher3, Matcher * const matcher4, Matcher * const matcher5);
+ ANDMatcher& AND(Matcher * const matcher);
+ virtual bool operator() (pkgCache::PkgIterator const &Pkg);
+ virtual bool operator() (pkgCache::GrpIterator const &Grp);
+ virtual bool operator() (pkgCache::VerIterator const &Ver);
+ virtual ~ANDMatcher();
+};
+class ORMatcher : public Matcher {
+ std::vector<Matcher *> matchers;
+public:
+ // 5 ought to be enough for everybody… c++11 variadic templates would be nice
+ ORMatcher();
+ ORMatcher(Matcher * const matcher1);
+ ORMatcher(Matcher * const matcher1, Matcher * const matcher2);
+ ORMatcher(Matcher * const matcher1, Matcher * const matcher2, Matcher * const matcher3);
+ ORMatcher(Matcher * const matcher1, Matcher * const matcher2, Matcher * const matcher3, Matcher * const matcher4);
+ ORMatcher(Matcher * const matcher1, Matcher * const matcher2, Matcher * const matcher3, Matcher * const matcher4, Matcher * const matcher5);
+ ORMatcher& OR(Matcher * const matcher);
+ virtual bool operator() (pkgCache::PkgIterator const &Pkg);
+ virtual bool operator() (pkgCache::GrpIterator const &Grp);
+ virtual bool operator() (pkgCache::VerIterator const &Ver);
+ virtual ~ORMatcher();
+};
+ /*}}}*/
+class PackageNameMatchesRegEx : public PackageMatcher { /*{{{*/
regex_t* pattern;
public:
PackageNameMatchesRegEx(std::string const &Pattern);
@@ -98,20 +102,16 @@ public:
virtual ~PackageNameMatchesRegEx();
};
/*}}}*/
-// PackageNameMatchesFnmatch /*{{{*/
- class PackageNameMatchesFnmatch : public PackageMatcher{
- /** \brief dpointer placeholder (for later in case we need it) */
- void *d;
- const std::string Pattern;
+class PackageNameMatchesFnmatch : public PackageMatcher { /*{{{*/
+ const std::string Pattern;
public:
- PackageNameMatchesFnmatch(std::string const &Pattern)
- : Pattern(Pattern) {};
- virtual bool operator() (pkgCache::PkgIterator const &Pkg);
+ PackageNameMatchesFnmatch(std::string const &Pattern);
+ virtual bool operator() (pkgCache::PkgIterator const &Pkg);
virtual bool operator() (pkgCache::GrpIterator const &Grp);
virtual ~PackageNameMatchesFnmatch() {};
};
/*}}}*/
-// PackageArchitectureMatchesSpecification /*{{{*/
+class PackageArchitectureMatchesSpecification : public PackageMatcher { /*{{{*/
/** \class PackageArchitectureMatchesSpecification
\brief matching against architecture specification strings
@@ -120,12 +120,9 @@ public:
debian-policy §11.1 "Architecture specification strings".
Examples: i386, mipsel, linux-any, any-amd64, any */
-class PackageArchitectureMatchesSpecification : public PackageMatcher {
std::string literal;
std::string complete;
bool isPattern;
- /** \brief dpointer placeholder (for later in case we need it) */
- void *d;
public:
/** \brief matching against architecture specification strings
*
@@ -137,11 +134,18 @@ public:
PackageArchitectureMatchesSpecification(std::string const &pattern, bool const isPattern = true);
bool operator() (char const * const &arch);
virtual bool operator() (pkgCache::PkgIterator const &Pkg);
- virtual bool operator() (pkgCache::VerIterator const &Ver);
virtual ~PackageArchitectureMatchesSpecification();
};
-#endif
/*}}}*/
+class PackageIsNewInstall : public PackageMatcher { /*{{{*/
+ pkgCacheFile * const Cache;
+public:
+ PackageIsNewInstall(pkgCacheFile * const Cache);
+ virtual bool operator() (pkgCache::PkgIterator const &Pkg);
+ virtual ~PackageIsNewInstall();
+};
+ /*}}}*/
+
}
}
#endif
diff --git a/apt-pkg/cacheiterators.h b/apt-pkg/cacheiterators.h
index 2fdf8404d..b0c02d4a2 100644
--- a/apt-pkg/cacheiterators.h
+++ b/apt-pkg/cacheiterators.h
@@ -159,8 +159,12 @@ class pkgCache::PkgIterator: public Iterator<Package, PkgIterator> {
enum OkState {NeedsNothing,NeedsUnpack,NeedsConfigure};
// Accessors
- inline const char *Name() const {return S->Name == 0?0:Owner->StrP + S->Name;}
- inline const char *Section() const {return S->Section == 0?0:Owner->StrP + S->Section;}
+ inline const char *Name() const { return Group().Name(); }
+ // Versions have sections - and packages can have different versions with different sections
+ // so this interface is broken by design. It used to return the section of the "first parsed
+ // package stanza", but as this can potentially be anything it now returns the section of the
+ // newest version instead (if any). aka: Run as fast as you can to Version.Section().
+ APT_DEPRECATED const char *Section() const;
inline bool Purge() const {return S->CurrentState == pkgCache::State::Purge ||
(S->CurrentVer == 0 && S->CurrentState == pkgCache::State::NotInstalled);}
inline const char *Arch() const {return S->Arch == 0?0:Owner->StrP + S->Arch;}
@@ -211,6 +215,12 @@ class pkgCache::VerIterator : public Iterator<Version, VerIterator> {
// Accessors
inline const char *VerStr() const {return S->VerStr == 0?0:Owner->StrP + S->VerStr;}
inline const char *Section() const {return S->Section == 0?0:Owner->StrP + S->Section;}
+ /** \brief source package name this version comes from
+ Always contains the name, even if it is the same as the binary name */
+ inline const char *SourcePkgName() const {return Owner->StrP + S->SourcePkgName;}
+ /** \brief source version this version comes from
+ Always contains the version string, even if it is the same as the binary version */
+ inline const char *SourceVerStr() const {return Owner->StrP + S->SourceVerStr;}
inline const char *Arch() const {
if ((S->MultiArch & pkgCache::Version::All) == pkgCache::Version::All)
return "all";
@@ -332,7 +342,7 @@ class pkgCache::PrvIterator : public Iterator<Provides, PrvIterator> {
inline void operator ++() {operator ++(0);}
// Accessors
- inline const char *Name() const {return Owner->StrP + Owner->PkgP[S->ParentPkg].Name;}
+ inline const char *Name() const {return ParentPkg().Name();}
inline const char *ProvideVersion() const {return S->ProvideVersion == 0?0:Owner->StrP + S->ProvideVersion;}
inline PkgIterator ParentPkg() const {return PkgIterator(*Owner,Owner->PkgP + S->ParentPkg);}
inline VerIterator OwnerVer() const {return VerIterator(*Owner,Owner->VerP + S->Version);}
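PkgIterator::Name() now delegates to the group, Section() is deprecated in favour of the per-version accessor, and VerIterator gains SourcePkgName()/SourceVerStr(), which are always populated. Sketch of the preferred access pattern for code that used Pkg.Section():

// Illustrative fragment only.
pkgCache::VerIterator const Ver = Pkg.VersionList();          // newest known version, if any
if (Ver.end() == false)
{
   const char * const Section = Ver.Section();                // may still be NULL
   const char * const SrcName = Ver.SourcePkgName();          // always set, possibly == Pkg.Name()
   const char * const SrcVer  = Ver.SourceVerStr();           // always set, possibly == Ver.VerStr()
}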
diff --git a/apt-pkg/cacheset.cc b/apt-pkg/cacheset.cc
index 2ed6a96da..76a7e717e 100644
--- a/apt-pkg/cacheset.cc
+++ b/apt-pkg/cacheset.cc
@@ -24,6 +24,7 @@
#include <apt-pkg/depcache.h>
#include <apt-pkg/macros.h>
#include <apt-pkg/pkgcache.h>
+#include <apt-pkg/fileutl.h>
#include <stddef.h>
#include <stdio.h>
@@ -36,8 +37,23 @@
#include <apti18n.h>
/*}}}*/
namespace APT {
-// FromTask - Return all packages in the cache from a specific task /*{{{*/
-bool PackageContainerInterface::FromTask(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern, CacheSetHelper &helper) {
+
+// PackageFrom - selecting the appropriate method for package selection /*{{{*/
+bool CacheSetHelper::PackageFrom(enum PkgSelector const select, PackageContainerInterface * const pci,
+ pkgCacheFile &Cache, std::string const &pattern) {
+ switch (select) {
+ case UNKNOWN: return false;
+ case REGEX: return PackageFromRegEx(pci, Cache, pattern);
+ case TASK: return PackageFromTask(pci, Cache, pattern);
+ case FNMATCH: return PackageFromFnmatch(pci, Cache, pattern);
+ case PACKAGENAME: return PackageFromPackageName(pci, Cache, pattern);
+ case STRING: return PackageFromString(pci, Cache, pattern);
+ }
+ return false;
+}
+ /*}}}*/
+// PackageFromTask - Return all packages in the cache from a specific task /*{{{*/
+bool CacheSetHelper::PackageFromTask(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern) {
size_t const archfound = pattern.find_last_of(':');
std::string arch = "native";
if (archfound != std::string::npos) {
@@ -54,7 +70,7 @@ bool PackageContainerInterface::FromTask(PackageContainerInterface * const pci,
bool const wasEmpty = pci->empty();
if (wasEmpty == true)
- pci->setConstructor(TASK);
+ pci->setConstructor(CacheSetHelper::TASK);
// get the records
pkgRecords Recs(Cache);
@@ -90,32 +106,32 @@ bool PackageContainerInterface::FromTask(PackageContainerInterface * const pci,
continue;
pci->insert(Pkg);
- helper.showTaskSelection(Pkg, pattern);
+ showPackageSelection(Pkg, CacheSetHelper::TASK, pattern);
found = true;
}
regfree(&Pattern);
if (found == false) {
- helper.canNotFindTask(pci, Cache, pattern);
- pci->setConstructor(UNKNOWN);
+ canNotFindPackage(CacheSetHelper::TASK, pci, Cache, pattern);
+ pci->setConstructor(CacheSetHelper::UNKNOWN);
return false;
}
- if (wasEmpty == false && pci->getConstructor() != UNKNOWN)
- pci->setConstructor(UNKNOWN);
+ if (wasEmpty == false && pci->getConstructor() != CacheSetHelper::UNKNOWN)
+ pci->setConstructor(CacheSetHelper::UNKNOWN);
return true;
}
/*}}}*/
-// FromRegEx - Return all packages in the cache matching a pattern /*{{{*/
-bool PackageContainerInterface::FromRegEx(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern, CacheSetHelper &helper) {
+// PackageFromRegEx - Return all packages in the cache matching a pattern /*{{{*/
+bool CacheSetHelper::PackageFromRegEx(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern) {
static const char * const isregex = ".?+*|[^$";
if (pattern.find_first_of(isregex) == std::string::npos)
return false;
bool const wasEmpty = pci->empty();
if (wasEmpty == true)
- pci->setConstructor(REGEX);
+ pci->setConstructor(CacheSetHelper::REGEX);
size_t archfound = pattern.find_last_of(':');
std::string arch = "native";
@@ -149,28 +165,25 @@ bool PackageContainerInterface::FromRegEx(PackageContainerInterface * const pci,
}
pci->insert(Pkg);
- helper.showRegExSelection(Pkg, pattern);
+ showPackageSelection(Pkg, CacheSetHelper::REGEX, pattern);
found = true;
}
if (found == false) {
- helper.canNotFindRegEx(pci, Cache, pattern);
- pci->setConstructor(UNKNOWN);
+ canNotFindPackage(CacheSetHelper::REGEX, pci, Cache, pattern);
+ pci->setConstructor(CacheSetHelper::UNKNOWN);
return false;
}
- if (wasEmpty == false && pci->getConstructor() != UNKNOWN)
- pci->setConstructor(UNKNOWN);
+ if (wasEmpty == false && pci->getConstructor() != CacheSetHelper::UNKNOWN)
+ pci->setConstructor(CacheSetHelper::UNKNOWN);
return true;
}
/*}}}*/
-// FromFnmatch - Returns the package defined by this fnmatch /*{{{*/
-bool
-PackageContainerInterface::FromFnmatch(PackageContainerInterface * const pci,
- pkgCacheFile &Cache,
- std::string pattern,
- CacheSetHelper &helper)
+// PackageFromFnmatch - Returns the package defined by this fnmatch /*{{{*/
+bool CacheSetHelper::PackageFromFnmatch(PackageContainerInterface * const pci,
+ pkgCacheFile &Cache, std::string pattern)
{
static const char * const isfnmatch = ".?*[]!";
if (pattern.find_first_of(isfnmatch) == std::string::npos)
@@ -178,7 +191,7 @@ PackageContainerInterface::FromFnmatch(PackageContainerInterface * const pci,
bool const wasEmpty = pci->empty();
if (wasEmpty == true)
- pci->setConstructor(FNMATCH);
+ pci->setConstructor(CacheSetHelper::FNMATCH);
size_t archfound = pattern.find_last_of(':');
std::string arch = "native";
@@ -212,33 +225,25 @@ PackageContainerInterface::FromFnmatch(PackageContainerInterface * const pci,
}
pci->insert(Pkg);
-#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13)
- helper.showFnmatchSelection(Pkg, pattern);
-#else
- helper.showRegExSelection(Pkg, pattern);
-#endif
+ showPackageSelection(Pkg, CacheSetHelper::FNMATCH, pattern);
found = true;
}
if (found == false) {
-#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13)
- helper.canNotFindFnmatch(pci, Cache, pattern);
-#else
- helper.canNotFindRegEx(pci, Cache, pattern);
-#endif
- pci->setConstructor(UNKNOWN);
+ canNotFindPackage(CacheSetHelper::FNMATCH, pci, Cache, pattern);
+ pci->setConstructor(CacheSetHelper::UNKNOWN);
return false;
}
- if (wasEmpty == false && pci->getConstructor() != UNKNOWN)
- pci->setConstructor(UNKNOWN);
+ if (wasEmpty == false && pci->getConstructor() != CacheSetHelper::UNKNOWN)
+ pci->setConstructor(CacheSetHelper::UNKNOWN);
return true;
}
/*}}}*/
-// FromName - Returns the package defined by this string /*{{{*/
-pkgCache::PkgIterator PackageContainerInterface::FromName(pkgCacheFile &Cache,
- std::string const &str, CacheSetHelper &helper) {
+// PackageFromName - Returns the package defined by this string /*{{{*/
+pkgCache::PkgIterator CacheSetHelper::PackageFromName(pkgCacheFile &Cache,
+ std::string const &str) {
std::string pkg = str;
size_t archfound = pkg.find_last_of(':');
std::string arch;
@@ -259,13 +264,13 @@ pkgCache::PkgIterator PackageContainerInterface::FromName(pkgCacheFile &Cache,
Pkg = Cache.GetPkgCache()->FindPkg(pkg, arch);
if (Pkg.end() == true)
- return helper.canNotFindPkgName(Cache, str);
+ return canNotFindPkgName(Cache, str);
return Pkg;
}
/*}}}*/
-// FromGroup - Returns the package defined by this string /*{{{*/
-bool PackageContainerInterface::FromGroup(PackageContainerInterface * const pci, pkgCacheFile &Cache,
- std::string pkg, CacheSetHelper &helper) {
+// PackageFromPackageName - Returns the package defined by this string /*{{{*/
+bool CacheSetHelper::PackageFromPackageName(PackageContainerInterface * const pci, pkgCacheFile &Cache,
+ std::string pkg) {
if (unlikely(Cache.GetPkgCache() == 0))
return false;
@@ -305,7 +310,7 @@ bool PackageContainerInterface::FromGroup(PackageContainerInterface * const pci,
}
}
- pkgCache::PkgIterator Pkg = helper.canNotFindPkgName(Cache, pkg);
+ pkgCache::PkgIterator Pkg = canNotFindPkgName(Cache, pkg);
if (Pkg.end() == true)
return false;
@@ -313,19 +318,18 @@ bool PackageContainerInterface::FromGroup(PackageContainerInterface * const pci,
return true;
}
/*}}}*/
-// FromString - Return all packages matching a specific string /*{{{*/
-bool PackageContainerInterface::FromString(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string const &str, CacheSetHelper &helper) {
+// PackageFromString - Return all packages matching a specific string /*{{{*/
+bool CacheSetHelper::PackageFromString(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string const &str) {
bool found = true;
_error->PushToStack();
- if (FromGroup(pci, Cache, str, helper) == false &&
- FromTask(pci, Cache, str, helper) == false &&
-#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13)
- FromFnmatch(pci, Cache, str, helper) == false)
-#endif
- FromRegEx(pci, Cache, str, helper) == false)
+ if (PackageFrom(CacheSetHelper::PACKAGENAME, pci, Cache, str) == false &&
+ PackageFrom(CacheSetHelper::TASK, pci, Cache, str) == false &&
+ // FIXME: hm, hm, regexp/fnmatch incompatible?
+ PackageFrom(CacheSetHelper::FNMATCH, pci, Cache, str) == false &&
+ PackageFrom(CacheSetHelper::REGEX, pci, Cache, str) == false)
{
- helper.canNotFindPackage(pci, Cache, str);
+ canNotFindPackage(CacheSetHelper::PACKAGENAME, pci, Cache, str);
found = false;
}
@@ -336,51 +340,50 @@ bool PackageContainerInterface::FromString(PackageContainerInterface * const pci
return found;
}
/*}}}*/
-// FromCommandLine - Return all packages specified on commandline /*{{{*/
-bool PackageContainerInterface::FromCommandLine(PackageContainerInterface * const pci, pkgCacheFile &Cache, const char **cmdline, CacheSetHelper &helper) {
+// PackageFromCommandLine - Return all packages specified on commandline /*{{{*/
+bool CacheSetHelper::PackageFromCommandLine(PackageContainerInterface * const pci, pkgCacheFile &Cache, const char **cmdline) {
bool found = false;
for (const char **I = cmdline; *I != 0; ++I)
- found |= PackageContainerInterface::FromString(pci, Cache, *I, helper);
+ found |= PackageFrom(CacheSetHelper::PACKAGENAME, pci, Cache, *I);
return found;
}
/*}}}*/
// FromModifierCommandLine - helper doing the work for PKG:GroupedFromCommandLine /*{{{*/
-bool PackageContainerInterface::FromModifierCommandLine(unsigned short &modID, PackageContainerInterface * const pci,
+bool CacheSetHelper::PackageFromModifierCommandLine(unsigned short &modID, PackageContainerInterface * const pci,
pkgCacheFile &Cache, const char * cmdline,
- std::list<Modifier> const &mods, CacheSetHelper &helper) {
+ std::list<PkgModifier> const &mods) {
std::string str = cmdline;
unsigned short fallback = modID;
bool modifierPresent = false;
- for (std::list<Modifier>::const_iterator mod = mods.begin();
+ for (std::list<PkgModifier>::const_iterator mod = mods.begin();
mod != mods.end(); ++mod) {
size_t const alength = strlen(mod->Alias);
switch(mod->Pos) {
- case Modifier::POSTFIX:
+ case PkgModifier::POSTFIX:
if (str.compare(str.length() - alength, alength,
mod->Alias, 0, alength) != 0)
continue;
str.erase(str.length() - alength);
modID = mod->ID;
break;
- case Modifier::PREFIX:
+ case PkgModifier::PREFIX:
continue;
- case Modifier::NONE:
+ case PkgModifier::NONE:
continue;
}
modifierPresent = true;
break;
}
if (modifierPresent == true) {
- bool const errors = helper.showErrors(false);
- pkgCache::PkgIterator Pkg = FromName(Cache, cmdline, helper);
- helper.showErrors(errors);
- if (Pkg.end() == false) {
- pci->insert(Pkg);
+ bool const errors = showErrors(false);
+ bool const found = PackageFrom(PACKAGENAME, pci, Cache, cmdline);
+ showErrors(errors);
+ if (found == true) {
modID = fallback;
return true;
}
}
- return FromString(pci, Cache, str, helper);
+ return PackageFrom(CacheSetHelper::PACKAGENAME, pci, Cache, str);
}
/*}}}*/
// FromModifierCommandLine - helper doing the work for VER:GroupedFromCommandLine /*{{{*/
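Up to this point the free-standing PackageContainerInterface::From*() helpers have become CacheSetHelper methods keyed by the PkgSelector enum, so one PackageFrom() entry point dispatches to task, regex, fnmatch, plain-name or combined string matching. Sketch of the new call style, with Cache standing in for an assumed pkgCacheFile:

// Illustrative fragment; old style shown in the comment.
APT::CacheSetHelper helper;
APT::PackageSet pkgs;
// old: PackageContainerInterface::FromRegEx(&pkgs, Cache, "^apt-.*", helper);
helper.PackageFrom(APT::CacheSetHelper::REGEX, &pkgs, Cache, "^apt-.*");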
@@ -389,7 +392,7 @@ bool VersionContainerInterface::FromModifierCommandLine(unsigned short &modID,
pkgCacheFile &Cache, const char * cmdline,
std::list<Modifier> const &mods,
CacheSetHelper &helper) {
- Version select = NEWEST;
+ CacheSetHelper::VerSelector select = CacheSetHelper::NEWEST;
std::string str = cmdline;
if (unlikely(str.empty() == true))
return false;
@@ -432,7 +435,8 @@ bool VersionContainerInterface::FromModifierCommandLine(unsigned short &modID,
// FromCommandLine - Return all versions specified on commandline /*{{{*/
bool VersionContainerInterface::FromCommandLine(VersionContainerInterface * const vci,
pkgCacheFile &Cache, const char **cmdline,
- Version const &fallback, CacheSetHelper &helper) {
+ CacheSetHelper::VerSelector const fallback,
+ CacheSetHelper &helper) {
bool found = false;
for (const char **I = cmdline; *I != 0; ++I)
found |= VersionContainerInterface::FromString(vci, Cache, *I, fallback, helper);
@@ -442,8 +446,17 @@ bool VersionContainerInterface::FromCommandLine(VersionContainerInterface * cons
// FromString - Returns all versions spedcified by a string /*{{{*/
bool VersionContainerInterface::FromString(VersionContainerInterface * const vci,
pkgCacheFile &Cache, std::string pkg,
- Version const &fallback, CacheSetHelper &helper,
+ CacheSetHelper::VerSelector const fallback,
+ CacheSetHelper &helper,
bool const onlyFromName) {
+ PackageSet pkgset;
+ if(FileExists(pkg)) {
+ helper.PackageFrom(CacheSetHelper::STRING, &pkgset, Cache, pkg);
+ if(pkgset.empty() == true)
+ return false;
+ return VersionContainerInterface::FromPackage(vci, Cache, pkgset.begin(), fallback, helper);
+ }
+
std::string ver;
bool verIsRel = false;
size_t const vertag = pkg.find_last_of("/=");
@@ -452,15 +465,14 @@ bool VersionContainerInterface::FromString(VersionContainerInterface * const vci
verIsRel = (pkg[vertag] == '/');
pkg.erase(vertag);
}
- PackageSet pkgset;
if (onlyFromName == false)
- PackageContainerInterface::FromString(&pkgset, Cache, pkg, helper);
+ helper.PackageFrom(CacheSetHelper::STRING, &pkgset, Cache, pkg);
else {
- pkgset.insert(PackageContainerInterface::FromName(Cache, pkg, helper));
+ helper.PackageFrom(CacheSetHelper::PACKAGENAME, &pkgset, Cache, pkg);
}
bool errors = true;
- if (pkgset.getConstructor() != PackageSet::UNKNOWN)
+ if (pkgset.getConstructor() != CacheSetHelper::UNKNOWN)
errors = helper.showErrors(false);
bool found = false;
@@ -479,7 +491,7 @@ bool VersionContainerInterface::FromString(VersionContainerInterface * const vci
if (P->VersionList != 0)
V = P.VersionList();
else
- V = helper.canNotFindNewestVer(Cache, P);
+ V = helper.canNotGetVersion(CacheSetHelper::NEWEST, Cache, P);
} else {
pkgVersionMatch Match(ver, (verIsRel == true ? pkgVersionMatch::Release :
pkgVersionMatch::Version));
@@ -496,11 +508,14 @@ bool VersionContainerInterface::FromString(VersionContainerInterface * const vci
}
if (V.end() == true)
continue;
- helper.showSelectedVersion(P, V, ver, verIsRel);
+ if (verIsRel == true)
+ helper.showVersionSelection(P, V, CacheSetHelper::RELEASE, ver);
+ else
+ helper.showVersionSelection(P, V, CacheSetHelper::VERSIONNUMBER, ver);
vci->insert(V);
found = true;
}
- if (pkgset.getConstructor() != PackageSet::UNKNOWN)
+ if (pkgset.getConstructor() != CacheSetHelper::UNKNOWN)
helper.showErrors(errors);
return found;
}
@@ -509,30 +524,30 @@ bool VersionContainerInterface::FromString(VersionContainerInterface * const vci
bool VersionContainerInterface::FromPackage(VersionContainerInterface * const vci,
pkgCacheFile &Cache,
pkgCache::PkgIterator const &P,
- Version const &fallback,
+ CacheSetHelper::VerSelector const fallback,
CacheSetHelper &helper) {
pkgCache::VerIterator V;
bool showErrors;
bool found = false;
switch(fallback) {
- case ALL:
+ case CacheSetHelper::ALL:
if (P->VersionList != 0)
for (V = P.VersionList(); V.end() != true; ++V)
found |= vci->insert(V);
else
- helper.canNotFindAllVer(vci, Cache, P);
+ helper.canNotFindVersion(CacheSetHelper::ALL, vci, Cache, P);
break;
- case CANDANDINST:
+ case CacheSetHelper::CANDANDINST:
found |= vci->insert(getInstalledVer(Cache, P, helper));
found |= vci->insert(getCandidateVer(Cache, P, helper));
break;
- case CANDIDATE:
+ case CacheSetHelper::CANDIDATE:
found |= vci->insert(getCandidateVer(Cache, P, helper));
break;
- case INSTALLED:
+ case CacheSetHelper::INSTALLED:
found |= vci->insert(getInstalledVer(Cache, P, helper));
break;
- case CANDINST:
+ case CacheSetHelper::CANDINST:
showErrors = helper.showErrors(false);
V = getCandidateVer(Cache, P, helper);
if (V.end() == true)
@@ -541,9 +556,9 @@ bool VersionContainerInterface::FromPackage(VersionContainerInterface * const vc
if (V.end() == false)
found |= vci->insert(V);
else
- helper.canNotFindInstCandVer(vci, Cache, P);
+ helper.canNotFindVersion(CacheSetHelper::CANDINST, vci, Cache, P);
break;
- case INSTCAND:
+ case CacheSetHelper::INSTCAND:
showErrors = helper.showErrors(false);
V = getInstalledVer(Cache, P, helper);
if (V.end() == true)
@@ -552,14 +567,18 @@ bool VersionContainerInterface::FromPackage(VersionContainerInterface * const vc
if (V.end() == false)
found |= vci->insert(V);
else
- helper.canNotFindInstCandVer(vci, Cache, P);
+ helper.canNotFindVersion(CacheSetHelper::INSTCAND, vci, Cache, P);
break;
- case NEWEST:
+ case CacheSetHelper::NEWEST:
if (P->VersionList != 0)
found |= vci->insert(P.VersionList());
else
- helper.canNotFindNewestVer(Cache, P);
+ helper.canNotFindVersion(CacheSetHelper::NEWEST, vci, Cache, P);
break;
+ case CacheSetHelper::RELEASE:
+ case CacheSetHelper::VERSIONNUMBER:
+ // both make no sense here, so always false
+ return false;
}
return found;
}
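For reference, a sketch (not part of the patch) of how a caller exercises the fallback switch above: with CANDINST the candidate version wins and the installed one is only used if no candidate exists. The package name is made up.

#include <apt-pkg/cachefile.h>
#include <apt-pkg/cacheset.h>

void ShowFallback(pkgCacheFile &Cache)
{
   APT::CacheSetHelper helper;
   pkgCache::PkgIterator const Pkg = Cache->FindPkg("apt"); // illustrative
   if (Pkg.end() == true)
      return;
   // at most one version ends up in the set, chosen by the switch above
   APT::VersionSet const vset = APT::VersionSet::FromPackage(Cache, Pkg,
         APT::CacheSetHelper::CANDINST, helper);
}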
@@ -576,7 +595,7 @@ pkgCache::VerIterator VersionContainerInterface::getCandidateVer(pkgCacheFile &C
Cand = Cache[Pkg].CandidateVerIter(Cache);
}
if (Cand.end() == true)
- return helper.canNotFindCandidateVer(Cache, Pkg);
+ return helper.canNotGetVersion(CacheSetHelper::CANDIDATE, Cache, Pkg);
return Cand;
}
/*}}}*/
@@ -584,19 +603,31 @@ pkgCache::VerIterator VersionContainerInterface::getCandidateVer(pkgCacheFile &C
pkgCache::VerIterator VersionContainerInterface::getInstalledVer(pkgCacheFile &Cache,
pkgCache::PkgIterator const &Pkg, CacheSetHelper &helper) {
if (Pkg->CurrentVer == 0)
- return helper.canNotFindInstalledVer(Cache, Pkg);
+ return helper.canNotGetVersion(CacheSetHelper::INSTALLED, Cache, Pkg);
return Pkg.CurrentVer();
}
/*}}}*/
-// canNotFindPkgName - handle the case no package has this name /*{{{*/
-pkgCache::PkgIterator CacheSetHelper::canNotFindPkgName(pkgCacheFile &Cache,
- std::string const &str) {
- if (ShowError == true)
- _error->Insert(ErrorType, _("Unable to locate package %s"), str.c_str());
- return pkgCache::PkgIterator(Cache, 0);
+// canNotFindPackage - with the given selector and pattern /*{{{*/
+void CacheSetHelper::canNotFindPackage(enum PkgSelector const select,
+ PackageContainerInterface * const pci, pkgCacheFile &Cache,
+ std::string const &pattern) {
+ switch (select) {
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
+ case REGEX: canNotFindRegEx(pci, Cache, pattern); break;
+ case TASK: canNotFindTask(pci, Cache, pattern); break;
+ case FNMATCH: canNotFindFnmatch(pci, Cache, pattern); break;
+ case PACKAGENAME: canNotFindPackage(pci, Cache, pattern); break;
+ case STRING: canNotFindPackage(pci, Cache, pattern); break;
+ case UNKNOWN: break;
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
+ }
}
- /*}}}*/
// canNotFindTask - handle the case no package is found for a task /*{{{*/
void CacheSetHelper::canNotFindTask(PackageContainerInterface * const /*pci*/, pkgCacheFile &/*Cache*/, std::string pattern) {
if (ShowError == true)
@@ -608,17 +639,50 @@ void CacheSetHelper::canNotFindRegEx(PackageContainerInterface * const /*pci*/,
if (ShowError == true)
_error->Insert(ErrorType, _("Couldn't find any package by regex '%s'"), pattern.c_str());
}
-#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13)
+ /*}}}*/
// canNotFindFnmatch - handle the case no package is found by a fnmatch /*{{{*/
-void CacheSetHelper::canNotFindFnmatch(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern) {
+ void CacheSetHelper::canNotFindFnmatch(PackageContainerInterface * const /*pci*/, pkgCacheFile &/*Cache*/, std::string pattern) {
if (ShowError == true)
_error->Insert(ErrorType, _("Couldn't find any package by glob '%s'"), pattern.c_str());
}
-#endif /*}}}*/
+ /*}}}*/
// canNotFindPackage - handle the case no package is found from a string/*{{{*/
APT_CONST void CacheSetHelper::canNotFindPackage(PackageContainerInterface * const /*pci*/, pkgCacheFile &/*Cache*/, std::string const &/*str*/) {
}
/*}}}*/
+ /*}}}*/
+// canNotFindPkgName - handle the case no package has this name /*{{{*/
+pkgCache::PkgIterator CacheSetHelper::canNotFindPkgName(pkgCacheFile &Cache,
+ std::string const &str) {
+ if (ShowError == true)
+ _error->Insert(ErrorType, _("Unable to locate package %s"), str.c_str());
+ return pkgCache::PkgIterator(Cache, 0);
+}
+ /*}}}*/
+// canNotFindVersion - for package by selector /*{{{*/
+void CacheSetHelper::canNotFindVersion(enum VerSelector const select, VersionContainerInterface * const vci, pkgCacheFile &Cache, pkgCache::PkgIterator const &Pkg)
+{
+ switch (select) {
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
+ case ALL: canNotFindAllVer(vci, Cache, Pkg); break;
+ case INSTCAND: canNotFindInstCandVer(vci, Cache, Pkg); break;
+ case CANDINST: canNotFindCandInstVer(vci, Cache, Pkg); break;
+ case NEWEST: canNotFindNewestVer(Cache, Pkg); break;
+ case CANDIDATE: canNotFindCandidateVer(Cache, Pkg); break;
+ case INSTALLED: canNotFindInstalledVer(Cache, Pkg); break;
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
+ case CANDANDINST: canNotGetCandInstVer(Cache, Pkg); break;
+ case RELEASE:
+ case VERSIONNUMBER:
+ // invalid in this branch
+ break;
+ }
+}
// canNotFindAllVer /*{{{*/
void CacheSetHelper::canNotFindAllVer(VersionContainerInterface * const /*vci*/, pkgCacheFile &/*Cache*/,
pkgCache::PkgIterator const &Pkg) {
@@ -627,19 +691,42 @@ void CacheSetHelper::canNotFindAllVer(VersionContainerInterface * const /*vci*/,
}
/*}}}*/
// canNotFindInstCandVer /*{{{*/
-void CacheSetHelper::canNotFindInstCandVer(VersionContainerInterface * const /*vci*/, pkgCacheFile &/*Cache*/,
+void CacheSetHelper::canNotFindInstCandVer(VersionContainerInterface * const /*vci*/, pkgCacheFile &Cache,
pkgCache::PkgIterator const &Pkg) {
- if (ShowError == true)
- _error->Insert(ErrorType, _("Can't select installed nor candidate version from package '%s' as it has neither of them"), Pkg.FullName(true).c_str());
+ canNotGetInstCandVer(Cache, Pkg);
}
/*}}}*/
// canNotFindInstCandVer /*{{{*/
-void CacheSetHelper::canNotFindCandInstVer(VersionContainerInterface * const /*vci*/, pkgCacheFile &/*Cache*/,
+void CacheSetHelper::canNotFindCandInstVer(VersionContainerInterface * const /*vci*/, pkgCacheFile &Cache,
pkgCache::PkgIterator const &Pkg) {
- if (ShowError == true)
- _error->Insert(ErrorType, _("Can't select installed nor candidate version from package '%s' as it has neither of them"), Pkg.FullName(true).c_str());
+ canNotGetCandInstVer(Cache, Pkg);
}
/*}}}*/
+ /*}}}*/
+// canNotGetVersion - for package by selector /*{{{*/
+pkgCache::VerIterator CacheSetHelper::canNotGetVersion(enum VerSelector const select, pkgCacheFile &Cache, pkgCache::PkgIterator const &Pkg) {
+ switch (select) {
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
+ case NEWEST: return canNotFindNewestVer(Cache, Pkg);
+ case CANDIDATE: return canNotFindCandidateVer(Cache, Pkg);
+ case INSTALLED: return canNotFindInstalledVer(Cache, Pkg);
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
+ case CANDINST: return canNotGetCandInstVer(Cache, Pkg);
+ case INSTCAND: return canNotGetInstCandVer(Cache, Pkg);
+ case ALL:
+ case CANDANDINST:
+ case RELEASE:
+ case VERSIONNUMBER:
+ // invalid in this branch
+ return pkgCache::VerIterator(Cache, 0);
+ }
+ return pkgCache::VerIterator(Cache, 0);
+}
// canNotFindNewestVer /*{{{*/
pkgCache::VerIterator CacheSetHelper::canNotFindNewestVer(pkgCacheFile &Cache,
pkgCache::PkgIterator const &Pkg) {
@@ -664,6 +751,42 @@ pkgCache::VerIterator CacheSetHelper::canNotFindInstalledVer(pkgCacheFile &Cache
return pkgCache::VerIterator(Cache, 0);
}
/*}}}*/
+// canNotGetInstCandVer						/*{{{*/
+pkgCache::VerIterator CacheSetHelper::canNotGetInstCandVer(pkgCacheFile &Cache,
+ pkgCache::PkgIterator const &Pkg) {
+ if (ShowError == true)
+ _error->Insert(ErrorType, _("Can't select installed nor candidate version from package '%s' as it has neither of them"), Pkg.FullName(true).c_str());
+ return pkgCache::VerIterator(Cache, 0);
+}
+ /*}}}*/
+// canNotGetCandInstVer						/*{{{*/
+pkgCache::VerIterator CacheSetHelper::canNotGetCandInstVer(pkgCacheFile &Cache,
+ pkgCache::PkgIterator const &Pkg) {
+ if (ShowError == true)
+ _error->Insert(ErrorType, _("Can't select installed nor candidate version from package '%s' as it has neither of them"), Pkg.FullName(true).c_str());
+ return pkgCache::VerIterator(Cache, 0);
+}
+ /*}}}*/
+ /*}}}*/
+// showPackageSelection - by selector and given pattern /*{{{*/
+APT_CONST void CacheSetHelper::showPackageSelection(pkgCache::PkgIterator const &pkg, enum PkgSelector const select,
+ std::string const &pattern) {
+ switch (select) {
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
+ case REGEX: showRegExSelection(pkg, pattern); break;
+ case TASK: showTaskSelection(pkg, pattern); break;
+ case FNMATCH: showFnmatchSelection(pkg, pattern); break;
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
+	 case PACKAGENAME: /* no surprises here */ break;
+ case STRING: /* handled by the special cases */ break;
+ case UNKNOWN: break;
+ }
+}
// showTaskSelection /*{{{*/
APT_CONST void CacheSetHelper::showTaskSelection(pkgCache::PkgIterator const &/*pkg*/,
std::string const &/*pattern*/) {
@@ -674,14 +797,41 @@ APT_CONST void CacheSetHelper::showRegExSelection(pkgCache::PkgIterator const &/
std::string const &/*pattern*/) {
}
/*}}}*/
-#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13)
// showFnmatchSelection /*{{{*/
-APT_CONST void CacheSetHelper::showFnmatchSelection(pkgCache::PkgIterator const &pkg,
- std::string const &pattern) {
+APT_CONST void CacheSetHelper::showFnmatchSelection(pkgCache::PkgIterator const &/*pkg*/,
+ std::string const &/*pattern*/) {
}
/*}}}*/
+ /*}}}*/
+// showVersionSelection /*{{{*/
+APT_CONST void CacheSetHelper::showVersionSelection(pkgCache::PkgIterator const &Pkg,
+ pkgCache::VerIterator const &Ver, enum VerSelector const select, std::string const &pattern) {
+ switch (select) {
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
#endif
-// showSelectedVersion /*{{{*/
+ case RELEASE:
+ showSelectedVersion(Pkg, Ver, pattern, true);
+ break;
+ case VERSIONNUMBER:
+ showSelectedVersion(Pkg, Ver, pattern, false);
+ break;
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
+ case NEWEST:
+ case CANDIDATE:
+ case INSTALLED:
+ case CANDINST:
+ case INSTCAND:
+ case ALL:
+ case CANDANDINST:
+	 // not really surprising, just not implemented
+ break;
+ }
+}
APT_CONST void CacheSetHelper::showSelectedVersion(pkgCache::PkgIterator const &/*Pkg*/,
pkgCache::VerIterator const /*Ver*/,
std::string const &/*ver*/,
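The net effect of these cacheset.cc changes is that front-ends override one dispatching hook per concern instead of one virtual method per selection technique. A sketch of such a subclass, assuming only the interfaces declared in cacheset.h below; the message text is invented:

#include <apt-pkg/cachefile.h>
#include <apt-pkg/cacheset.h>
#include <apt-pkg/pkgcache.h>
#include <iostream>
#include <string>

class VerboseCacheSetHelper : public APT::CacheSetHelper
{
public:
   VerboseCacheSetHelper() : APT::CacheSetHelper(true) {}

   // one hook per package-selection method instead of showRegExSelection & friends
   virtual void showPackageSelection(pkgCache::PkgIterator const &Pkg,
         PkgSelector const select, std::string const &pattern)
   {
      if (select == REGEX || select == FNMATCH)
         std::cout << "Note, selecting '" << Pkg.FullName(true)
                   << "' for pattern '" << pattern << "'" << std::endl;
   }

   // one hook for failed selections instead of canNotFindTask & friends
   virtual void canNotFindPackage(enum PkgSelector const select,
         APT::PackageContainerInterface * const pci, pkgCacheFile &Cache,
         std::string const &pattern)
   {
      // keep the stock error messages for everything not special-cased here
      APT::CacheSetHelper::canNotFindPackage(select, pci, Cache, pattern);
   }
};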
diff --git a/apt-pkg/cacheset.h b/apt-pkg/cacheset.h
index 16a3daa42..f3f1d1fc6 100644
--- a/apt-pkg/cacheset.h
+++ b/apt-pkg/cacheset.h
@@ -13,8 +13,10 @@
#include <map>
#include <set>
#include <list>
+#include <vector>
#include <string>
#include <iterator>
+#include <algorithm>
#include <stddef.h>
@@ -51,36 +53,127 @@ public: /*{{{*/
ShowError(ShowError), ErrorType(ErrorType) {}
virtual ~CacheSetHelper() {}
- virtual void showTaskSelection(pkgCache::PkgIterator const &pkg, std::string const &pattern);
- virtual void showRegExSelection(pkgCache::PkgIterator const &pkg, std::string const &pattern);
-#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13)
- virtual void showFnmatchSelection(pkgCache::PkgIterator const &pkg, std::string const &pattern);
-#endif
- virtual void showSelectedVersion(pkgCache::PkgIterator const &Pkg, pkgCache::VerIterator const Ver,
- std::string const &ver, bool const verIsRel);
+ enum PkgSelector { UNKNOWN, REGEX, TASK, FNMATCH, PACKAGENAME, STRING };
- virtual void canNotFindTask(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern);
- virtual void canNotFindRegEx(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern);
-#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13)
- virtual void canNotFindFnmatch(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern);
-#endif
- virtual void canNotFindPackage(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string const &str);
+ virtual bool PackageFrom(enum PkgSelector const select, PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string const &pattern);
+
+ virtual bool PackageFromCommandLine(PackageContainerInterface * const pci, pkgCacheFile &Cache, const char **cmdline);
+
+ struct PkgModifier {
+ enum Position { NONE, PREFIX, POSTFIX };
+ unsigned short ID;
+ const char * const Alias;
+ Position Pos;
+ PkgModifier (unsigned short const &id, const char * const alias, Position const &pos) : ID(id), Alias(alias), Pos(pos) {}
+ };
+ virtual bool PackageFromModifierCommandLine(unsigned short &modID, PackageContainerInterface * const pci,
+ pkgCacheFile &Cache, const char * cmdline,
+ std::list<PkgModifier> const &mods);
+
+ // use PackageFrom(PACKAGENAME, …) instead
+ APT_DEPRECATED pkgCache::PkgIterator PackageFromName(pkgCacheFile &Cache, std::string const &pattern);
+
+ /** \brief be notified about the package being selected via pattern
+ *
+	 * Main use is probably to show the user a message about what happened
+ *
+ * \param pkg is the package which was selected
+	 * \param select is the selection method which chose the package
+ * \param pattern is the string used by the selection method to pick the package
+ */
+ virtual void showPackageSelection(pkgCache::PkgIterator const &pkg, PkgSelector const select, std::string const &pattern);
+	// use the method above instead, react only to the type you need and let the base handle the rest if need be
+	// this allows us to add new selection methods without constantly breaking the ABI with new virtual methods
+ APT_DEPRECATED virtual void showTaskSelection(pkgCache::PkgIterator const &pkg, std::string const &pattern);
+ APT_DEPRECATED virtual void showRegExSelection(pkgCache::PkgIterator const &pkg, std::string const &pattern);
+ APT_DEPRECATED virtual void showFnmatchSelection(pkgCache::PkgIterator const &pkg, std::string const &pattern);
+
+ /** \brief be notified if a package can't be found via pattern
+ *
+ * Can be used to show a message as well as to try something else to make it match
+ *
+ * \param select is the method tried for selection
+ * \param pci is the container the package should be inserted in
+ * \param Cache is the package universe available
+ * \param pattern is the string not matching anything
+ */
+ virtual void canNotFindPackage(enum PkgSelector const select, PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string const &pattern);
+ // same as above for showPackageSelection
+ APT_DEPRECATED virtual void canNotFindTask(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern);
+ APT_DEPRECATED virtual void canNotFindRegEx(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern);
+ APT_DEPRECATED virtual void canNotFindFnmatch(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern);
+ APT_DEPRECATED virtual void canNotFindPackage(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string const &str);
+
+ /** \brief specifies which version(s) we want to refer to */
+ enum VerSelector {
+ /** by release string */
+ RELEASE,
+ /** by version number string */
+ VERSIONNUMBER,
+ /** All versions */
+ ALL,
+ /** Candidate and installed version */
+ CANDANDINST,
+ /** Candidate version */
+ CANDIDATE,
+ /** Installed version */
+ INSTALLED,
+	   /** Candidate or, if that does not exist, installed version */
+ CANDINST,
+	   /** Installed or, if that does not exist, candidate version */
+ INSTCAND,
+ /** Newest version */
+ NEWEST
+ };
+
+ /** \brief be notified about the version being selected via pattern
+ *
+	 * Main use is probably to show the user a message about what happened
+ * Note that at the moment this method is only called for RELEASE
+	 * and VERSIONNUMBER selections, not for the others.
+ *
+	 * \param Pkg is the package the version was selected for
+ * \param Ver is the version selected
+	 * \param select is the selection method which chose the version
+ * \param pattern is the string used by the selection method to pick the version
+ */
+ virtual void showVersionSelection(pkgCache::PkgIterator const &Pkg, pkgCache::VerIterator const &Ver,
+ enum VerSelector const select, std::string const &pattern);
+ // renamed to have a similar interface to showPackageSelection
+ APT_DEPRECATED virtual void showSelectedVersion(pkgCache::PkgIterator const &Pkg, pkgCache::VerIterator const Ver,
+ std::string const &ver, bool const verIsRel);
- virtual void canNotFindAllVer(VersionContainerInterface * const vci, pkgCacheFile &Cache, pkgCache::PkgIterator const &Pkg);
- virtual void canNotFindInstCandVer(VersionContainerInterface * const vci, pkgCacheFile &Cache,
+ /** \brief be notified if a version can't be found for a package
+ *
+	 * Main use is probably to show the user a message about what happened
+ *
+ * \param select is the method tried for selection
+ * \param vci is the container the version should be inserted in
+ * \param Cache is the package universe available
+ * \param Pkg is the package we wanted a version from
+ */
+ virtual void canNotFindVersion(enum VerSelector const select, VersionContainerInterface * const vci, pkgCacheFile &Cache, pkgCache::PkgIterator const &Pkg);
+ // same as above for showPackageSelection
+ APT_DEPRECATED virtual void canNotFindAllVer(VersionContainerInterface * const vci, pkgCacheFile &Cache, pkgCache::PkgIterator const &Pkg);
+ APT_DEPRECATED virtual void canNotFindInstCandVer(VersionContainerInterface * const vci, pkgCacheFile &Cache,
pkgCache::PkgIterator const &Pkg);
- virtual void canNotFindCandInstVer(VersionContainerInterface * const vci,
+ APT_DEPRECATED virtual void canNotFindCandInstVer(VersionContainerInterface * const vci,
pkgCacheFile &Cache,
pkgCache::PkgIterator const &Pkg);
- virtual pkgCache::PkgIterator canNotFindPkgName(pkgCacheFile &Cache, std::string const &str);
- virtual pkgCache::VerIterator canNotFindNewestVer(pkgCacheFile &Cache,
+	// the difference between canNotFind and canNotGet is that the latter is more low-level
+	// and called from other places: in this case looking into the code is the only real answer…
+ virtual pkgCache::VerIterator canNotGetVersion(enum VerSelector const select, pkgCacheFile &Cache, pkgCache::PkgIterator const &Pkg);
+ // same as above for showPackageSelection
+ APT_DEPRECATED virtual pkgCache::VerIterator canNotFindNewestVer(pkgCacheFile &Cache,
pkgCache::PkgIterator const &Pkg);
- virtual pkgCache::VerIterator canNotFindCandidateVer(pkgCacheFile &Cache,
+ APT_DEPRECATED virtual pkgCache::VerIterator canNotFindCandidateVer(pkgCacheFile &Cache,
pkgCache::PkgIterator const &Pkg);
- virtual pkgCache::VerIterator canNotFindInstalledVer(pkgCacheFile &Cache,
+ APT_DEPRECATED virtual pkgCache::VerIterator canNotFindInstalledVer(pkgCacheFile &Cache,
pkgCache::PkgIterator const &Pkg);
+ virtual pkgCache::PkgIterator canNotFindPkgName(pkgCacheFile &Cache, std::string const &str);
+
bool showErrors() const { return ShowError; }
bool showErrors(bool const newValue) { if (ShowError == newValue) return ShowError; else return ((ShowError = newValue) == false); }
GlobalError::MsgType errorType() const { return ErrorType; }
@@ -98,7 +191,19 @@ public: /*{{{*/
protected:
bool ShowError;
GlobalError::MsgType ErrorType;
+
+ pkgCache::VerIterator canNotGetInstCandVer(pkgCacheFile &Cache,
+ pkgCache::PkgIterator const &Pkg);
+ pkgCache::VerIterator canNotGetCandInstVer(pkgCacheFile &Cache,
+ pkgCache::PkgIterator const &Pkg);
+
+ bool PackageFromTask(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern);
+ bool PackageFromRegEx(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern);
+ bool PackageFromFnmatch(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern);
+ bool PackageFromPackageName(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern);
+ bool PackageFromString(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string const &pattern);
}; /*}}}*/
+
class PackageContainerInterface { /*{{{*/
/** \class PackageContainerInterface
@@ -118,7 +223,16 @@ public:
inline const char *Name() const {return getPkg().Name(); }
inline std::string FullName(bool const Pretty) const { return getPkg().FullName(Pretty); }
inline std::string FullName() const { return getPkg().FullName(); }
- inline const char *Section() const {return getPkg().Section(); }
+ APT_DEPRECATED inline const char *Section() const {
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
+ return getPkg().Section();
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
+ }
inline bool Purge() const {return getPkg().Purge(); }
inline const char *Arch() const {return getPkg().Arch(); }
inline pkgCache::GrpIterator Group() const { return getPkg().Group(); }
@@ -142,29 +256,56 @@ public:
virtual bool empty() const = 0;
virtual void clear() = 0;
- enum Constructor { UNKNOWN, REGEX, TASK, FNMATCH };
- virtual void setConstructor(Constructor const &con) = 0;
- virtual Constructor getConstructor() const = 0;
-
- static bool FromTask(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern, CacheSetHelper &helper);
- static bool FromRegEx(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern, CacheSetHelper &helper);
- static pkgCache::PkgIterator FromName(pkgCacheFile &Cache, std::string const &pattern, CacheSetHelper &helper);
- static bool FromFnmatch(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern, CacheSetHelper &helper);
- static bool FromGroup(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern, CacheSetHelper &helper);
- static bool FromString(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string const &pattern, CacheSetHelper &helper);
- static bool FromCommandLine(PackageContainerInterface * const pci, pkgCacheFile &Cache, const char **cmdline, CacheSetHelper &helper);
+	// FIXME: This is a bloody hack that will be removed soon. Use CacheSetHelper::PkgSelector !
+ enum APT_DEPRECATED Constructor { UNKNOWN = CacheSetHelper::UNKNOWN,
+ REGEX = CacheSetHelper::REGEX,
+ TASK = CacheSetHelper::TASK,
+ FNMATCH = CacheSetHelper::FNMATCH };
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
+ void setConstructor(Constructor const by) { ConstructedBy = (CacheSetHelper::PkgSelector)by; }
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
- struct Modifier {
- enum Position { NONE, PREFIX, POSTFIX };
- unsigned short ID;
- const char * const Alias;
- Position Pos;
- Modifier (unsigned short const &id, const char * const alias, Position const &pos) : ID(id), Alias(alias), Pos(pos) {}
- };
+ void setConstructor(CacheSetHelper::PkgSelector const by) { ConstructedBy = by; }
+ CacheSetHelper::PkgSelector getConstructor() const { return ConstructedBy; }
+ PackageContainerInterface() : ConstructedBy(CacheSetHelper::UNKNOWN) {}
+ PackageContainerInterface(CacheSetHelper::PkgSelector const by) : ConstructedBy(by) {}
+
+ APT_DEPRECATED static bool FromTask(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern, CacheSetHelper &helper) {
+ return helper.PackageFrom(CacheSetHelper::TASK, pci, Cache, pattern); }
+ APT_DEPRECATED static bool FromRegEx(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern, CacheSetHelper &helper) {
+ return helper.PackageFrom(CacheSetHelper::REGEX, pci, Cache, pattern); }
+ APT_DEPRECATED static bool FromFnmatch(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern, CacheSetHelper &helper) {
+ return helper.PackageFrom(CacheSetHelper::FNMATCH, pci, Cache, pattern); }
+ APT_DEPRECATED static bool FromGroup(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern, CacheSetHelper &helper) {
+ return helper.PackageFrom(CacheSetHelper::PACKAGENAME, pci, Cache, pattern); }
+ APT_DEPRECATED static bool FromString(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string const &pattern, CacheSetHelper &helper) {
+ return helper.PackageFrom(CacheSetHelper::STRING, pci, Cache, pattern); }
+ APT_DEPRECATED static bool FromCommandLine(PackageContainerInterface * const pci, pkgCacheFile &Cache, const char **cmdline, CacheSetHelper &helper) {
+ return helper.PackageFromCommandLine(pci, Cache, cmdline); }
+
+ APT_DEPRECATED typedef CacheSetHelper::PkgModifier Modifier;
+
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
+ APT_DEPRECATED static pkgCache::PkgIterator FromName(pkgCacheFile &Cache, std::string const &pattern, CacheSetHelper &helper) {
+ return helper.PackageFromName(Cache, pattern); }
+ APT_DEPRECATED static bool FromModifierCommandLine(unsigned short &modID, PackageContainerInterface * const pci,
+ pkgCacheFile &Cache, const char * cmdline,
+ std::list<Modifier> const &mods, CacheSetHelper &helper) {
+ return helper.PackageFromModifierCommandLine(modID, pci, Cache, cmdline, mods); }
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
- static bool FromModifierCommandLine(unsigned short &modID, PackageContainerInterface * const pci,
- pkgCacheFile &Cache, const char * cmdline,
- std::list<Modifier> const &mods, CacheSetHelper &helper);
+private:
+ CacheSetHelper::PkgSelector ConstructedBy;
};
/*}}}*/
template<class Container> class PackageContainer : public PackageContainerInterface {/*{{{*/
@@ -228,11 +369,28 @@ public: /*{{{*/
iterator end() { return iterator(_cont.end()); }
const_iterator find(pkgCache::PkgIterator const &P) const { return const_iterator(_cont.find(P)); }
- void setConstructor(Constructor const &by) { ConstructedBy = by; }
- Constructor getConstructor() const { return ConstructedBy; }
+ PackageContainer() : PackageContainerInterface() {}
+ PackageContainer(CacheSetHelper::PkgSelector const &by) : PackageContainerInterface(by) {}
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
+ APT_DEPRECATED PackageContainer(Constructor const &by) : PackageContainerInterface((CacheSetHelper::PkgSelector)by) {}
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
+
+	/** \brief sort all included packages with given comparer
- PackageContainer() : ConstructedBy(UNKNOWN) {}
- PackageContainer(Constructor const &by) : ConstructedBy(by) {}
+	    Some containers are sorted by default, some are not and can't be,
+	    but a few like std::vector can be sorted if need be, so this method
+	    can be specialized later on. The default is that sorting fails.
+	    Already sorted containers like std::set also return false, as there
+	    is no easy way to check that the given comparer would sort in the
+	    same way the set is currently sorted.
+
+ \return \b true if the set was sorted, \b false if not. */
+ template<class Compare> bool sort(Compare /*Comp*/) { return false; }
/** \brief returns all packages in the cache who belong to the given task
@@ -243,8 +401,8 @@ public: /*{{{*/
\param pattern name of the task
\param helper responsible for error and message handling */
static PackageContainer FromTask(pkgCacheFile &Cache, std::string const &pattern, CacheSetHelper &helper) {
- PackageContainer cont(TASK);
- PackageContainerInterface::FromTask(&cont, Cache, pattern, helper);
+ PackageContainer cont(CacheSetHelper::TASK);
+ helper.PackageFrom(CacheSetHelper::TASK, &cont, Cache, pattern);
return cont;
}
static PackageContainer FromTask(pkgCacheFile &Cache, std::string const &pattern) {
@@ -260,9 +418,9 @@ public: /*{{{*/
\param Cache the packages are in
\param pattern regular expression for package names
\param helper responsible for error and message handling */
- static PackageContainer FromRegEx(pkgCacheFile &Cache, std::string pattern, CacheSetHelper &helper) {
- PackageContainer cont(REGEX);
- PackageContainerInterface::FromRegEx(&cont, Cache, pattern, helper);
+ static PackageContainer FromRegEx(pkgCacheFile &Cache, std::string const &pattern, CacheSetHelper &helper) {
+ PackageContainer cont(CacheSetHelper::REGEX);
+ helper.PackageFrom(CacheSetHelper::REGEX, &cont, Cache, pattern);
return cont;
}
@@ -271,9 +429,9 @@ public: /*{{{*/
return FromRegEx(Cache, pattern, helper);
}
- static PackageContainer FromFnmatch(pkgCacheFile &Cache, std::string pattern, CacheSetHelper &helper) {
- PackageContainer cont(FNMATCH);
- PackageContainerInterface::FromFnmatch(&cont, Cache, pattern, helper);
+ static PackageContainer FromFnmatch(pkgCacheFile &Cache, std::string const &pattern, CacheSetHelper &helper) {
+ PackageContainer cont(CacheSetHelper::FNMATCH);
+ helper.PackageFrom(CacheSetHelper::FNMATCH, &cont, Cache, pattern);
return cont;
}
static PackageContainer FromFnMatch(pkgCacheFile &Cache, std::string const &pattern) {
@@ -281,18 +439,25 @@ public: /*{{{*/
return FromFnmatch(Cache, pattern, helper);
}
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
/** \brief returns a package specified by a string
\param Cache the package is in
\param pattern String the package name should be extracted from
\param helper responsible for error and message handling */
- static pkgCache::PkgIterator FromName(pkgCacheFile &Cache, std::string const &pattern, CacheSetHelper &helper) {
- return PackageContainerInterface::FromName(Cache, pattern, helper);
+ APT_DEPRECATED static pkgCache::PkgIterator FromName(pkgCacheFile &Cache, std::string const &pattern, CacheSetHelper &helper) {
+ return helper.PackageFromName(Cache, pattern);
}
- static pkgCache::PkgIterator FromName(pkgCacheFile &Cache, std::string const &pattern) {
+ APT_DEPRECATED static pkgCache::PkgIterator FromName(pkgCacheFile &Cache, std::string const &pattern) {
CacheSetHelper helper;
- return PackageContainerInterface::FromName(Cache, pattern, helper);
+ return FromName(Cache, pattern, helper);
}
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
/** \brief returns all packages specified by a string
@@ -301,7 +466,7 @@ public: /*{{{*/
\param helper responsible for error and message handling */
static PackageContainer FromString(pkgCacheFile &Cache, std::string const &pattern, CacheSetHelper &helper) {
PackageContainer cont;
- PackageContainerInterface::FromString(&cont, Cache, pattern, helper);
+ helper.PackageFrom(CacheSetHelper::PACKAGENAME, &cont, Cache, pattern);
return cont;
}
static PackageContainer FromString(pkgCacheFile &Cache, std::string const &pattern) {
@@ -318,7 +483,7 @@ public: /*{{{*/
\param helper responsible for error and message handling */
static PackageContainer FromCommandLine(pkgCacheFile &Cache, const char **cmdline, CacheSetHelper &helper) {
PackageContainer cont;
- PackageContainerInterface::FromCommandLine(&cont, Cache, cmdline, helper);
+ helper.PackageFromCommandLine(&cont, Cache, cmdline);
return cont;
}
static PackageContainer FromCommandLine(pkgCacheFile &Cache, const char **cmdline) {
@@ -340,14 +505,14 @@ public: /*{{{*/
static std::map<unsigned short, PackageContainer> GroupedFromCommandLine(
pkgCacheFile &Cache,
const char **cmdline,
- std::list<Modifier> const &mods,
+ std::list<CacheSetHelper::PkgModifier> const &mods,
unsigned short const &fallback,
CacheSetHelper &helper) {
std::map<unsigned short, PackageContainer> pkgsets;
for (const char **I = cmdline; *I != 0; ++I) {
unsigned short modID = fallback;
PackageContainer pkgset;
- PackageContainerInterface::FromModifierCommandLine(modID, &pkgset, Cache, *I, mods, helper);
+ helper.PackageFromModifierCommandLine(modID, &pkgset, Cache, *I, mods);
pkgsets[modID].insert(pkgset);
}
return pkgsets;
@@ -355,22 +520,23 @@ public: /*{{{*/
static std::map<unsigned short, PackageContainer> GroupedFromCommandLine(
pkgCacheFile &Cache,
const char **cmdline,
- std::list<Modifier> const &mods,
+ std::list<CacheSetHelper::PkgModifier> const &mods,
unsigned short const &fallback) {
CacheSetHelper helper;
return GroupedFromCommandLine(Cache, cmdline,
mods, fallback, helper);
}
/*}}}*/
-private: /*{{{*/
- Constructor ConstructedBy;
- /*}}}*/
}; /*}}}*/
-
+// specialisations for push_back containers: std::list & std::vector /*{{{*/
template<> template<class Cont> void PackageContainer<std::list<pkgCache::PkgIterator> >::insert(PackageContainer<Cont> const &pkgcont) {
for (typename PackageContainer<Cont>::const_iterator p = pkgcont.begin(); p != pkgcont.end(); ++p)
_cont.push_back(*p);
}
+template<> template<class Cont> void PackageContainer<std::vector<pkgCache::PkgIterator> >::insert(PackageContainer<Cont> const &pkgcont) {
+ for (typename PackageContainer<Cont>::const_iterator p = pkgcont.begin(); p != pkgcont.end(); ++p)
+ _cont.push_back(*p);
+}
// these two are 'inline' as otherwise the linker has problems with seeing these untemplated
// specializations again and again - but we need to see them, so that library users can use them
template<> inline bool PackageContainer<std::list<pkgCache::PkgIterator> >::insert(pkgCache::PkgIterator const &P) {
@@ -379,12 +545,65 @@ template<> inline bool PackageContainer<std::list<pkgCache::PkgIterator> >::inse
_cont.push_back(P);
return true;
}
+template<> inline bool PackageContainer<std::vector<pkgCache::PkgIterator> >::insert(pkgCache::PkgIterator const &P) {
+ if (P.end() == true)
+ return false;
+ _cont.push_back(P);
+ return true;
+}
template<> inline void PackageContainer<std::list<pkgCache::PkgIterator> >::insert(const_iterator begin, const_iterator end) {
for (const_iterator p = begin; p != end; ++p)
_cont.push_back(*p);
}
+template<> inline void PackageContainer<std::vector<pkgCache::PkgIterator> >::insert(const_iterator begin, const_iterator end) {
+ for (const_iterator p = begin; p != end; ++p)
+ _cont.push_back(*p);
+}
+ /*}}}*/
+
+template<> template<class Compare> inline bool PackageContainer<std::vector<pkgCache::PkgIterator> >::sort(Compare Comp) {
+ std::sort(_cont.begin(), _cont.end(), Comp);
+ return true;
+}
+
+// class PackageUniverse - pkgCache as PackageContainerInterface /*{{{*/
+/** \class PackageUniverse
+
+ Wraps around our usual pkgCache, so that it can be stuffed into methods
+ expecting a PackageContainer.
+
+    The wrapping is read-only in practice, modeled by making erase and
+    friends private methods. */
+class APT_HIDDEN PackageUniverse : public PackageContainerInterface {
+ pkgCache * const _cont;
+public:
+ typedef pkgCache::PkgIterator iterator;
+ typedef pkgCache::PkgIterator const_iterator;
+
+ APT_PUBLIC bool empty() const { return false; }
+ APT_PUBLIC size_t size() const { return _cont->Head().PackageCount; }
+
+ APT_PUBLIC const_iterator begin() const { return _cont->PkgBegin(); }
+ APT_PUBLIC const_iterator end() const { return _cont->PkgEnd(); }
+ APT_PUBLIC iterator begin() { return _cont->PkgBegin(); }
+ APT_PUBLIC iterator end() { return _cont->PkgEnd(); }
+
+ APT_PUBLIC PackageUniverse(pkgCache * const Owner) : _cont(Owner) { }
+
+private:
+ bool insert(pkgCache::PkgIterator const &) { return true; }
+ template<class Cont> void insert(PackageContainer<Cont> const &) { }
+ void insert(const_iterator, const_iterator) { }
+
+ void clear() { }
+ iterator& erase(iterator &iter) { return iter; }
+ size_t erase(const pkgCache::PkgIterator) { return 0; }
+ void erase(iterator, iterator) { }
+};
+ /*}}}*/
typedef PackageContainer<std::set<pkgCache::PkgIterator> > PackageSet;
typedef PackageContainer<std::list<pkgCache::PkgIterator> > PackageList;
+typedef PackageContainer<std::vector<pkgCache::PkgIterator> > PackageVector;
class VersionContainerInterface { /*{{{*/
/** \class APT::VersionContainerInterface
@@ -426,45 +645,83 @@ public:
virtual void clear() = 0;
/** \brief specifies which version(s) will be returned if non is given */
- enum Version {
- /** All versions */
- ALL,
- /** Candidate and installed version */
- CANDANDINST,
- /** Candidate version */
- CANDIDATE,
- /** Installed version */
- INSTALLED,
- /** Candidate or if non installed version */
- CANDINST,
- /** Installed or if non candidate version */
- INSTCAND,
- /** Newest version */
- NEWEST
+ enum APT_DEPRECATED Version {
+ ALL = CacheSetHelper::ALL,
+ CANDANDINST = CacheSetHelper::CANDANDINST,
+ CANDIDATE = CacheSetHelper::CANDIDATE,
+ INSTALLED = CacheSetHelper::INSTALLED,
+ CANDINST = CacheSetHelper::CANDINST,
+ INSTCAND = CacheSetHelper::INSTCAND,
+ NEWEST = CacheSetHelper::NEWEST
};
struct Modifier {
- enum Position { NONE, PREFIX, POSTFIX };
- unsigned short ID;
+ unsigned short const ID;
const char * const Alias;
- Position Pos;
- Version SelectVersion;
+ enum Position { NONE, PREFIX, POSTFIX } const Pos;
+ enum CacheSetHelper::VerSelector const SelectVersion;
Modifier (unsigned short const &id, const char * const alias, Position const &pos,
- Version const &select) : ID(id), Alias(alias), Pos(pos),
+ enum CacheSetHelper::VerSelector const select) : ID(id), Alias(alias), Pos(pos),
SelectVersion(select) {}
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
+ APT_DEPRECATED Modifier(unsigned short const &id, const char * const alias, Position const &pos,
+ Version const &select) : ID(id), Alias(alias), Pos(pos),
+ SelectVersion((CacheSetHelper::VerSelector)select) {}
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
};
static bool FromCommandLine(VersionContainerInterface * const vci, pkgCacheFile &Cache,
- const char **cmdline, Version const &fallback,
+ const char **cmdline, CacheSetHelper::VerSelector const fallback,
CacheSetHelper &helper);
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
+ APT_DEPRECATED static bool FromCommandLine(VersionContainerInterface * const vci, pkgCacheFile &Cache,
+ const char **cmdline, Version const &fallback,
+ CacheSetHelper &helper) {
+ return FromCommandLine(vci, Cache, cmdline, (CacheSetHelper::VerSelector)fallback, helper);
+ }
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
static bool FromString(VersionContainerInterface * const vci, pkgCacheFile &Cache,
- std::string pkg, Version const &fallback, CacheSetHelper &helper,
+ std::string pkg, CacheSetHelper::VerSelector const fallback, CacheSetHelper &helper,
bool const onlyFromName = false);
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
+ APT_DEPRECATED static bool FromString(VersionContainerInterface * const vci, pkgCacheFile &Cache,
+ std::string pkg, Version const &fallback, CacheSetHelper &helper,
+ bool const onlyFromName = false) {
+ return FromString(vci, Cache, pkg, (CacheSetHelper::VerSelector)fallback, helper, onlyFromName);
+ }
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
static bool FromPackage(VersionContainerInterface * const vci, pkgCacheFile &Cache,
- pkgCache::PkgIterator const &P, Version const &fallback,
+ pkgCache::PkgIterator const &P, CacheSetHelper::VerSelector const fallback,
CacheSetHelper &helper);
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
+ APT_DEPRECATED static bool FromPackage(VersionContainerInterface * const vci, pkgCacheFile &Cache,
+ pkgCache::PkgIterator const &P, Version const &fallback,
+ CacheSetHelper &helper) {
+ return FromPackage(vci, Cache, P, (CacheSetHelper::VerSelector)fallback, helper);
+ }
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
static bool FromModifierCommandLine(unsigned short &modID,
VersionContainerInterface * const vci,
@@ -476,8 +733,22 @@ public:
static bool FromDependency(VersionContainerInterface * const vci,
pkgCacheFile &Cache,
pkgCache::DepIterator const &D,
- Version const &selector,
+ CacheSetHelper::VerSelector const selector,
CacheSetHelper &helper);
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
+ APT_DEPRECATED static bool FromDependency(VersionContainerInterface * const vci,
+ pkgCacheFile &Cache,
+ pkgCache::DepIterator const &D,
+ Version const &selector,
+ CacheSetHelper &helper) {
+ return FromDependency(vci, Cache, D, (CacheSetHelper::VerSelector)selector, helper);
+ }
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
protected: /*{{{*/
@@ -559,6 +830,18 @@ public: /*{{{*/
iterator end() { return iterator(_cont.end()); }
const_iterator find(pkgCache::VerIterator const &V) const { return const_iterator(_cont.find(V)); }
+ /** \brief sort all included versions with given comparer
+
+	    Some containers are sorted by default, some are not and can't be,
+	    but a few like std::vector can be sorted if need be, so this method
+	    can be specialized later on. The default is that sorting fails.
+	    Already sorted containers like std::set also return false, as there
+	    is no easy way to check that the given comparer would sort in the
+	    same way the set is currently sorted.
+
+ \return \b true if the set was sorted, \b false if not. */
+ template<class Compare> bool sort(Compare /*Comp*/) { return false; }
+
/** \brief returns all versions specified on the commandline
Get all versions from the commandline, uses given default version if
@@ -568,35 +851,64 @@ public: /*{{{*/
\param fallback version specification
\param helper responsible for error and message handling */
static VersionContainer FromCommandLine(pkgCacheFile &Cache, const char **cmdline,
- Version const &fallback, CacheSetHelper &helper) {
+ CacheSetHelper::VerSelector const fallback, CacheSetHelper &helper) {
VersionContainer vercon;
VersionContainerInterface::FromCommandLine(&vercon, Cache, cmdline, fallback, helper);
return vercon;
}
static VersionContainer FromCommandLine(pkgCacheFile &Cache, const char **cmdline,
- Version const &fallback) {
+ CacheSetHelper::VerSelector const fallback) {
CacheSetHelper helper;
return FromCommandLine(Cache, cmdline, fallback, helper);
}
static VersionContainer FromCommandLine(pkgCacheFile &Cache, const char **cmdline) {
- return FromCommandLine(Cache, cmdline, CANDINST);
+ return FromCommandLine(Cache, cmdline, CacheSetHelper::CANDINST);
}
-
static VersionContainer FromString(pkgCacheFile &Cache, std::string const &pkg,
- Version const &fallback, CacheSetHelper &helper,
- bool const onlyFromName = false) {
+ CacheSetHelper::VerSelector const fallback, CacheSetHelper &helper,
+ bool const /*onlyFromName = false*/) {
VersionContainer vercon;
VersionContainerInterface::FromString(&vercon, Cache, pkg, fallback, helper);
return vercon;
}
static VersionContainer FromString(pkgCacheFile &Cache, std::string pkg,
- Version const &fallback) {
+ CacheSetHelper::VerSelector const fallback) {
CacheSetHelper helper;
return FromString(Cache, pkg, fallback, helper);
}
static VersionContainer FromString(pkgCacheFile &Cache, std::string pkg) {
- return FromString(Cache, pkg, CANDINST);
+ return FromString(Cache, pkg, CacheSetHelper::CANDINST);
}
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
+ static VersionContainer FromCommandLine(pkgCacheFile &Cache, const char **cmdline,
+ Version const &fallback, CacheSetHelper &helper) {
+ VersionContainer vercon;
+ VersionContainerInterface::FromCommandLine(&vercon, Cache, cmdline, (CacheSetHelper::VerSelector)fallback, helper);
+ return vercon;
+ }
+ static VersionContainer FromCommandLine(pkgCacheFile &Cache, const char **cmdline,
+ Version const &fallback) {
+ CacheSetHelper helper;
+ return FromCommandLine(Cache, cmdline, (CacheSetHelper::VerSelector)fallback, helper);
+ }
+ static VersionContainer FromString(pkgCacheFile &Cache, std::string const &pkg,
+ Version const &fallback, CacheSetHelper &helper,
+ bool const /*onlyFromName = false*/) {
+ VersionContainer vercon;
+ VersionContainerInterface::FromString(&vercon, Cache, pkg, (CacheSetHelper::VerSelector)fallback, helper);
+ return vercon;
+ }
+ static VersionContainer FromString(pkgCacheFile &Cache, std::string pkg,
+ Version const &fallback) {
+ CacheSetHelper helper;
+ return FromString(Cache, pkg, (CacheSetHelper::VerSelector)fallback, helper);
+ }
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
/** \brief returns all versions specified for the package
@@ -605,18 +917,36 @@ public: /*{{{*/
\param fallback the version(s) you want to get
\param helper the helper used for display and error handling */
static VersionContainer FromPackage(pkgCacheFile &Cache, pkgCache::PkgIterator const &P,
- Version const &fallback, CacheSetHelper &helper) {
+ CacheSetHelper::VerSelector const fallback, CacheSetHelper &helper) {
VersionContainer vercon;
VersionContainerInterface::FromPackage(&vercon, Cache, P, fallback, helper);
return vercon;
}
static VersionContainer FromPackage(pkgCacheFile &Cache, pkgCache::PkgIterator const &P,
- Version const &fallback) {
+ CacheSetHelper::VerSelector const fallback) {
CacheSetHelper helper;
return FromPackage(Cache, P, fallback, helper);
}
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
+ static VersionContainer FromPackage(pkgCacheFile &Cache, pkgCache::PkgIterator const &P,
+ Version const &fallback, CacheSetHelper &helper) {
+ VersionContainer vercon;
+ VersionContainerInterface::FromPackage(&vercon, Cache, P, (CacheSetHelper::VerSelector)fallback, helper);
+ return vercon;
+ }
+ static VersionContainer FromPackage(pkgCacheFile &Cache, pkgCache::PkgIterator const &P,
+ Version const &fallback) {
+ CacheSetHelper helper;
+ return FromPackage(Cache, P, (CacheSetHelper::VerSelector)fallback, helper);
+ }
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
static VersionContainer FromPackage(pkgCacheFile &Cache, pkgCache::PkgIterator const &P) {
- return FromPackage(Cache, P, CANDIDATE);
+ return FromPackage(Cache, P, CacheSetHelper::CANDIDATE);
}
static std::map<unsigned short, VersionContainer> GroupedFromCommandLine(
@@ -645,26 +975,48 @@ public: /*{{{*/
}
static VersionContainer FromDependency(pkgCacheFile &Cache, pkgCache::DepIterator const &D,
- Version const &selector, CacheSetHelper &helper) {
+ CacheSetHelper::VerSelector const selector, CacheSetHelper &helper) {
VersionContainer vercon;
VersionContainerInterface::FromDependency(&vercon, Cache, D, selector, helper);
return vercon;
}
static VersionContainer FromDependency(pkgCacheFile &Cache, pkgCache::DepIterator const &D,
- Version const &selector) {
+ CacheSetHelper::VerSelector const selector) {
CacheSetHelper helper;
return FromPackage(Cache, D, selector, helper);
}
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
+ static VersionContainer FromDependency(pkgCacheFile &Cache, pkgCache::DepIterator const &D,
+ Version const &selector, CacheSetHelper &helper) {
+ VersionContainer vercon;
+ VersionContainerInterface::FromDependency(&vercon, Cache, D, (CacheSetHelper::VerSelector)selector, helper);
+ return vercon;
+ }
+ static VersionContainer FromDependency(pkgCacheFile &Cache, pkgCache::DepIterator const &D,
+ Version const &selector) {
+ CacheSetHelper helper;
+ return FromPackage(Cache, D, (CacheSetHelper::VerSelector)selector, helper);
+ }
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
static VersionContainer FromDependency(pkgCacheFile &Cache, pkgCache::DepIterator const &D) {
- return FromPackage(Cache, D, CANDIDATE);
+ return FromPackage(Cache, D, CacheSetHelper::CANDIDATE);
}
/*}}}*/
}; /*}}}*/
-
+// specialisations for push_back containers: std::list & std::vector /*{{{*/
template<> template<class Cont> void VersionContainer<std::list<pkgCache::VerIterator> >::insert(VersionContainer<Cont> const &vercont) {
for (typename VersionContainer<Cont>::const_iterator v = vercont.begin(); v != vercont.end(); ++v)
_cont.push_back(*v);
}
+template<> template<class Cont> void VersionContainer<std::vector<pkgCache::VerIterator> >::insert(VersionContainer<Cont> const &vercont) {
+ for (typename VersionContainer<Cont>::const_iterator v = vercont.begin(); v != vercont.end(); ++v)
+ _cont.push_back(*v);
+}
// these two are 'inline' as otherwise the linker has problems with seeing these untemplated
// specializations again and again - but we need to see them, so that library users can use them
template<> inline bool VersionContainer<std::list<pkgCache::VerIterator> >::insert(pkgCache::VerIterator const &V) {
@@ -673,11 +1025,29 @@ template<> inline bool VersionContainer<std::list<pkgCache::VerIterator> >::inse
_cont.push_back(V);
return true;
}
+template<> inline bool VersionContainer<std::vector<pkgCache::VerIterator> >::insert(pkgCache::VerIterator const &V) {
+ if (V.end() == true)
+ return false;
+ _cont.push_back(V);
+ return true;
+}
template<> inline void VersionContainer<std::list<pkgCache::VerIterator> >::insert(const_iterator begin, const_iterator end) {
for (const_iterator v = begin; v != end; ++v)
_cont.push_back(*v);
}
+template<> inline void VersionContainer<std::vector<pkgCache::VerIterator> >::insert(const_iterator begin, const_iterator end) {
+ for (const_iterator v = begin; v != end; ++v)
+ _cont.push_back(*v);
+}
+ /*}}}*/
+
+template<> template<class Compare> inline bool VersionContainer<std::vector<pkgCache::VerIterator> >::sort(Compare Comp) {
+ std::sort(_cont.begin(), _cont.end(), Comp);
+ return true;
+}
+
typedef VersionContainer<std::set<pkgCache::VerIterator> > VersionSet;
typedef VersionContainer<std::list<pkgCache::VerIterator> > VersionList;
+typedef VersionContainer<std::vector<pkgCache::VerIterator> > VersionVector;
}
#endif
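The new vector-backed typedefs together with the sort() specialisations above let callers order a selection, which the set-based containers cannot offer. A sketch under those assumptions; the comparison functor is illustrative:

#include <apt-pkg/cachefile.h>
#include <apt-pkg/cacheset.h>
#include <cstring>

struct PkgNameLess
{
   bool operator() (pkgCache::PkgIterator const &A, pkgCache::PkgIterator const &B) const
   {
      return strcmp(A.Name(), B.Name()) < 0;
   }
};

void SortedPackages(pkgCacheFile &Cache, const char **cmdline)
{
   APT::CacheSetHelper helper;
   APT::PackageVector pkgs = APT::PackageVector::FromCommandLine(Cache, cmdline, helper);
   // the std::set based containers return false here; the std::vector
   // specialisation sorts in place and returns true
   pkgs.sort(PkgNameLess());
}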
diff --git a/apt-pkg/cdrom.cc b/apt-pkg/cdrom.cc
index a5ad6a9ff..b97f7b036 100644
--- a/apt-pkg/cdrom.cc
+++ b/apt-pkg/cdrom.cc
@@ -913,10 +913,14 @@ bool pkgCdrom::Add(pkgCdromStatus *log) /*{{{*/
return true;
}
/*}}}*/
-pkgUdevCdromDevices::pkgUdevCdromDevices() /*{{{*/
- : libudev_handle(NULL)
+pkgUdevCdromDevices::pkgUdevCdromDevices() /*{{{*/
+: libudev_handle(NULL), udev_new(NULL), udev_enumerate_add_match_property(NULL),
+ udev_enumerate_scan_devices(NULL), udev_enumerate_get_list_entry(NULL),
+ udev_device_new_from_syspath(NULL), udev_enumerate_get_udev(NULL),
+ udev_list_entry_get_name(NULL), udev_device_get_devnode(NULL),
+ udev_enumerate_new(NULL), udev_list_entry_get_next(NULL),
+ udev_device_get_property_value(NULL), udev_enumerate_add_match_sysattr(NULL)
{
-
}
/*}}}*/
diff --git a/apt-pkg/clean.cc b/apt-pkg/clean.cc
index 37128e9aa..6edce5b6d 100644
--- a/apt-pkg/clean.cc
+++ b/apt-pkg/clean.cc
@@ -131,3 +131,5 @@ bool pkgArchiveCleaner::Go(std::string Dir,pkgCache &Cache)
return true;
}
/*}}}*/
+
+APT_CONST pkgArchiveCleaner::~pkgArchiveCleaner() {}
diff --git a/apt-pkg/clean.h b/apt-pkg/clean.h
index 930d54a7f..466cb67a9 100644
--- a/apt-pkg/clean.h
+++ b/apt-pkg/clean.h
@@ -24,13 +24,13 @@ class pkgArchiveCleaner
void *d;
protected:
-
+
virtual void Erase(const char * /*File*/,std::string /*Pkg*/,std::string /*Ver*/,struct stat & /*St*/) {};
- public:
-
+ public:
+
bool Go(std::string Dir,pkgCache &Cache);
- virtual ~pkgArchiveCleaner() {};
+ virtual ~pkgArchiveCleaner();
};
#endif
diff --git a/apt-pkg/contrib/cmndline.cc b/apt-pkg/contrib/cmndline.cc
index 3799c822d..93c1f4664 100644
--- a/apt-pkg/contrib/cmndline.cc
+++ b/apt-pkg/contrib/cmndline.cc
@@ -47,23 +47,26 @@ CommandLine::~CommandLine()
char const * CommandLine::GetCommand(Dispatch const * const Map,
unsigned int const argc, char const * const * const argv)
{
- // if there is a -- on the line there must be the word we search for around it
- // as -- marks the end of the options, just not sure if the command can be
- // considered an option or not, so accept both
+ // if there is a -- on the line there must be the word we search for either
+ // before it (as -- marks the end of the options) or right after it (as we can't
+ // decide if the command is actually an option, given that in theory, you could
+ // have parameters named like commands)
for (size_t i = 1; i < argc; ++i)
{
if (strcmp(argv[i], "--") != 0)
continue;
- ++i;
- if (i < argc)
+ // check if command is before --
+ for (size_t k = 1; k < i; ++k)
for (size_t j = 0; Map[j].Match != NULL; ++j)
- if (strcmp(argv[i], Map[j].Match) == 0)
+ if (strcmp(argv[k], Map[j].Match) == 0)
return Map[j].Match;
- i -= 2;
- if (i != 0)
+ // see if the next token after -- is the command
+ ++i;
+ if (i < argc)
for (size_t j = 0; Map[j].Match != NULL; ++j)
if (strcmp(argv[i], Map[j].Match) == 0)
return Map[j].Match;
+ // we found a --, but not a command
return NULL;
}
// no --, so search for the first word matching a command
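A sketch of the behaviour the rewritten comment describes: the command may appear before the "--" or directly after it. The dispatch table and argument vectors are illustrative:

#include <apt-pkg/cmndline.h>
#include <cstddef>

static bool DoInstall(CommandLine &) { return true; }

void FindCommand()
{
   CommandLine::Dispatch const Cmds[] = { {"install", &DoInstall}, {NULL, NULL} };
   char const * Before[] = { "apt-get", "install", "--", "./foo.deb", NULL };
   char const * After[]  = { "apt-get", "--", "install", "./foo.deb", NULL };
   // both lookups return "install": the first pass finds it before the --,
   // the second pass checks the token directly following the --
   char const * const a = CommandLine::GetCommand(Cmds, 4, Before);
   char const * const b = CommandLine::GetCommand(Cmds, 4, After);
   (void)a; (void)b;
}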
diff --git a/apt-pkg/contrib/configuration.cc b/apt-pkg/contrib/configuration.cc
index 00f6ad0f9..4380d64b9 100644
--- a/apt-pkg/contrib/configuration.cc
+++ b/apt-pkg/contrib/configuration.cc
@@ -253,9 +253,6 @@ string Configuration::FindDir(const char *Name,const char *Default) const
// Configuration::FindVector - Find a vector of values /*{{{*/
// ---------------------------------------------------------------------
/* Returns a vector of config values under the given item */
-#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR < 13)
-vector<string> Configuration::FindVector(const char *Name) const { return FindVector(Name, ""); }
-#endif
vector<string> Configuration::FindVector(const char *Name, std::string const &Default) const
{
vector<string> Vec;
diff --git a/apt-pkg/contrib/configuration.h b/apt-pkg/contrib/configuration.h
index c256139f4..2ecea8bee 100644
--- a/apt-pkg/contrib/configuration.h
+++ b/apt-pkg/contrib/configuration.h
@@ -34,6 +34,8 @@
#include <vector>
#include <iostream>
+#include <apt-pkg/macros.h>
+
#ifndef APT_8_CLEANER_HEADERS
using std::string;
#endif
@@ -59,7 +61,7 @@ class Configuration
Item *Root;
bool ToFree;
-
+
Item *Lookup(Item *Head,const char *S,unsigned long const &Len,bool const &Create);
Item *Lookup(const char *Name,const bool &Create);
inline const Item *Lookup(const char *Name) const
@@ -82,14 +84,8 @@ class Configuration
*
* \param Name of the parent node
* \param Default list of values separated by commas */
- std::vector<std::string> FindVector(const char *Name, std::string const &Default) const;
- std::vector<std::string> FindVector(std::string const &Name, std::string const &Default) const { return FindVector(Name.c_str(), Default); };
-#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13)
- std::vector<std::string> FindVector(const char *Name) const { return FindVector(Name, ""); };
-#else
- std::vector<std::string> FindVector(const char *Name) const;
-#endif
- std::vector<std::string> FindVector(std::string const &Name) const { return FindVector(Name.c_str(), ""); };
+ std::vector<std::string> FindVector(const char *Name, std::string const &Default = "") const;
+ std::vector<std::string> FindVector(std::string const &Name, std::string const &Default = "") const { return FindVector(Name.c_str(), Default); };
int FindI(const char *Name,int const &Default = 0) const;
int FindI(std::string const &Name,int const &Default = 0) const {return FindI(Name.c_str(),Default);};
bool FindB(const char *Name,bool const &Default = false) const;
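With the default argument folded into both overloads, callers no longer depend on the removed version-ifdef dance. A minimal sketch; the configuration key and fallback list are only examples:

#include <apt-pkg/configuration.h>
#include <string>
#include <vector>

std::vector<std::string> GetLanguages()
{
   // if the option is unset the Default string is split at the commas,
   // otherwise the configured values are returned
   return _config->FindVector("Acquire::Languages", "en,de");
}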
@@ -129,7 +125,7 @@ class Configuration
class MatchAgainstConfig
{
std::vector<regex_t *> patterns;
- void clearPatterns();
+ APT_HIDDEN void clearPatterns();
public:
MatchAgainstConfig(char const * Config);
diff --git a/apt-pkg/contrib/fileutl.cc b/apt-pkg/contrib/fileutl.cc
index 1ba4674e5..c51eee737 100644
--- a/apt-pkg/contrib/fileutl.cc
+++ b/apt-pkg/contrib/fileutl.cc
@@ -47,6 +47,8 @@
#include <signal.h>
#include <errno.h>
#include <glob.h>
+#include <pwd.h>
+#include <grp.h>
#include <set>
#include <algorithm>
@@ -63,6 +65,10 @@
#include <endian.h>
#include <stdint.h>
+#if __gnu_linux__
+#include <sys/prctl.h>
+#endif
+
#include <apti18n.h>
/*}}}*/
@@ -656,6 +662,22 @@ string flCombine(string Dir,string File)
return Dir + '/' + File;
}
/*}}}*/
+// flAbsPath - Return the absolute path of the filename /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+string flAbsPath(string File)
+{
+ char *p = realpath(File.c_str(), NULL);
+ if (p == NULL)
+ {
+ _error->Errno("realpath", "flAbsPath failed");
+ return "";
+ }
+ std::string AbsPath(p);
+ free(p);
+ return AbsPath;
+}
+ /*}}}*/
// SetCloseExec - Set the close on exec flag /*{{{*/
// ---------------------------------------------------------------------
/* */
@@ -835,6 +857,23 @@ bool ExecWait(pid_t Pid,const char *Name,bool Reap)
return true;
}
/*}}}*/
+// StartsWithGPGClearTextSignature - Check if a file is Pgp/GPG clearsigned /*{{{*/
+bool StartsWithGPGClearTextSignature(string const &FileName)
+{
+ static const char* SIGMSG = "-----BEGIN PGP SIGNED MESSAGE-----\n";
+ char buffer[strlen(SIGMSG)+1];
+ FILE* gpg = fopen(FileName.c_str(), "r");
+ if (gpg == NULL)
+ return false;
+
+ char const * const test = fgets(buffer, sizeof(buffer), gpg);
+ fclose(gpg);
+ if (test == NULL || strcmp(buffer, SIGMSG) != 0)
+ return false;
+
+ return true;
+}
+ /*}}}*/
class FileFdPrivate { /*{{{*/
public:
@@ -853,7 +892,7 @@ class FileFdPrivate { /*{{{*/
bool eof;
bool compressing;
- LZMAFILE() : file(NULL), eof(false), compressing(false) {}
+ LZMAFILE() : file(NULL), eof(false), compressing(false) { buffer[0] = '\0'; }
~LZMAFILE() {
if (compressing == true)
{
@@ -1482,7 +1521,7 @@ bool FileFd::Read(void *To,unsigned long long Size,unsigned long long *Actual)
int err;
char const * const errmsg = BZ2_bzerror(d->bz2, &err);
if (err != BZ_IO_ERROR)
- return FileFdError("BZ2_bzread: %s (%d: %s)", _("Read error"), err, errmsg);
+ return FileFdError("BZ2_bzread: %s %s (%d: %s)", FileName.c_str(), _("Read error"), err, errmsg);
}
#endif
#ifdef HAVE_LZMA
@@ -1913,7 +1952,6 @@ bool FileFd::Close()
{
if ((Flags & Compressed) != Compressed && iFd > 0 && close(iFd) != 0)
Res &= _error->Errno("close",_("Problem closing the file %s"), FileName.c_str());
-
if (d != NULL)
{
Res &= d->CloseDown(FileName);
@@ -1993,10 +2031,7 @@ APT_DEPRECATED gzFile FileFd::gzFd() {
#endif
}
-
-// Glob - wrapper around "glob()" /*{{{*/
-// ---------------------------------------------------------------------
-/* */
+// Glob - wrapper around "glob()" /*{{{*/
std::vector<std::string> Glob(std::string const &pattern, int flags)
{
std::vector<std::string> result;
@@ -2022,8 +2057,7 @@ std::vector<std::string> Glob(std::string const &pattern, int flags)
return result;
}
/*}}}*/
-
-std::string GetTempDir()
+std::string GetTempDir() /*{{{*/
{
const char *tmpdir = getenv("TMPDIR");
@@ -2039,14 +2073,193 @@ std::string GetTempDir()
return string(tmpdir);
}
+ /*}}}*/
+FileFd* GetTempFile(std::string const &Prefix, bool ImmediateUnlink) /*{{{*/
+{
+ char fn[512];
+ FileFd *Fd = new FileFd();
+
+ std::string tempdir = GetTempDir();
+ snprintf(fn, sizeof(fn), "%s/%s.XXXXXX",
+ tempdir.c_str(), Prefix.c_str());
+ int fd = mkstemp(fn);
+ if(ImmediateUnlink)
+ unlink(fn);
+ if (fd < 0)
+ {
+ _error->Errno("GetTempFile",_("Unable to mkstemp %s"), fn);
+ return NULL;
+ }
+ if (!Fd->OpenDescriptor(fd, FileFd::WriteOnly, FileFd::None, true))
+ {
+ _error->Errno("GetTempFile",_("Unable to write to %s"),fn);
+ return NULL;
+ }
-bool Rename(std::string From, std::string To)
+ return Fd;
+}
+ /*}}}*/
+bool Rename(std::string From, std::string To) /*{{{*/
{
if (rename(From.c_str(),To.c_str()) != 0)
{
_error->Error(_("rename failed, %s (%s -> %s)."),strerror(errno),
From.c_str(),To.c_str());
return false;
- }
+ }
return true;
}
+ /*}}}*/
+bool Popen(const char* Args[], FileFd &Fd, pid_t &Child, FileFd::OpenMode Mode)/*{{{*/
+{
+ int fd;
+ if (Mode != FileFd::ReadOnly && Mode != FileFd::WriteOnly)
+ return _error->Error("Popen supports ReadOnly (x)or WriteOnly mode only");
+
+ int Pipe[2] = {-1, -1};
+ if(pipe(Pipe) != 0)
+ return _error->Errno("pipe", _("Failed to create subprocess IPC"));
+
+ std::set<int> keep_fds;
+ keep_fds.insert(Pipe[0]);
+ keep_fds.insert(Pipe[1]);
+ Child = ExecFork(keep_fds);
+ if(Child < 0)
+ return _error->Errno("fork", "Failed to fork");
+ if(Child == 0)
+ {
+ if(Mode == FileFd::ReadOnly)
+ {
+ close(Pipe[0]);
+ fd = Pipe[1];
+ }
+ else if(Mode == FileFd::WriteOnly)
+ {
+ close(Pipe[1]);
+ fd = Pipe[0];
+ }
+
+ if(Mode == FileFd::ReadOnly)
+ {
+ dup2(fd, 1);
+ dup2(fd, 2);
+ } else if(Mode == FileFd::WriteOnly)
+ dup2(fd, 0);
+
+ execv(Args[0], (char**)Args);
+ _exit(100);
+ }
+ if(Mode == FileFd::ReadOnly)
+ {
+ close(Pipe[1]);
+ fd = Pipe[0];
+ } else if(Mode == FileFd::WriteOnly)
+ {
+ close(Pipe[0]);
+ fd = Pipe[1];
+ }
+ Fd.OpenDescriptor(fd, Mode, FileFd::None, true);
+
+ return true;
+}
+ /*}}}*/
+bool DropPrivileges() /*{{{*/
+{
+ if(_config->FindB("Debug::NoDropPrivs", false) == true)
+ return true;
+
+#if __gnu_linux__
+#if defined(PR_SET_NO_NEW_PRIVS) && ( PR_SET_NO_NEW_PRIVS != 38 )
+#error "PR_SET_NO_NEW_PRIVS is defined, but with a different value than expected!"
+#endif
+   // see prctl(2), needs Linux 3.5 at runtime - magic constant to avoid it at buildtime
+ int ret = prctl(38, 1, 0, 0, 0);
+ // ignore EINVAL - kernel is too old to understand the option
+ if(ret < 0 && errno != EINVAL)
+ _error->Warning("PR_SET_NO_NEW_PRIVS failed with %i", ret);
+#endif
+
+ // empty setting disables privilege dropping - this also ensures
+ // backward compatibility, see bug #764506
+ const std::string toUser = _config->Find("APT::Sandbox::User");
+ if (toUser.empty())
+ return true;
+
+ // uid will be 0 in the end, but gid might be different anyway
+ uid_t const old_uid = getuid();
+ gid_t const old_gid = getgid();
+
+ if (old_uid != 0)
+ return true;
+
+ struct passwd *pw = getpwnam(toUser.c_str());
+ if (pw == NULL)
+ return _error->Error("No user %s, can not drop rights", toUser.c_str());
+
+ // Do not change the order here, it might break things
+ if (setgroups(1, &pw->pw_gid))
+ return _error->Errno("setgroups", "Failed to setgroups");
+
+ if (setegid(pw->pw_gid) != 0)
+ return _error->Errno("setegid", "Failed to setegid");
+
+ if (setgid(pw->pw_gid) != 0)
+ return _error->Errno("setgid", "Failed to setgid");
+
+ if (setuid(pw->pw_uid) != 0)
+ return _error->Errno("setuid", "Failed to setuid");
+
+   // the seteuid() is probably unneeded (at least that's what the Linux
+   // man-page says about setuid(2)) but we cargo-culted it anyway
+ if (seteuid(pw->pw_uid) != 0)
+ return _error->Errno("seteuid", "Failed to seteuid");
+
+ // Verify that the user has only a single group, and the correct one
+ gid_t groups[1];
+ if (getgroups(1, groups) != 1)
+ return _error->Errno("getgroups", "Could not get new groups");
+ if (groups[0] != pw->pw_gid)
+ return _error->Error("Could not switch group");
+
+ // Verify that gid, egid, uid, and euid changed
+ if (getgid() != pw->pw_gid)
+ return _error->Error("Could not switch group");
+ if (getegid() != pw->pw_gid)
+ return _error->Error("Could not switch effective group");
+ if (getuid() != pw->pw_uid)
+ return _error->Error("Could not switch user");
+ if (geteuid() != pw->pw_uid)
+ return _error->Error("Could not switch effective user");
+
+#ifdef HAVE_GETRESUID
+ // verify that the saved set-user-id was changed as well
+ uid_t ruid = 0;
+ uid_t euid = 0;
+ uid_t suid = 0;
+ if (getresuid(&ruid, &euid, &suid))
+ return _error->Errno("getresuid", "Could not get saved set-user-ID");
+ if (suid != pw->pw_uid)
+ return _error->Error("Could not switch saved set-user-ID");
+#endif
+
+#ifdef HAVE_GETRESGID
+ // verify that the saved set-group-id was changed as well
+ gid_t rgid = 0;
+ gid_t egid = 0;
+ gid_t sgid = 0;
+ if (getresgid(&rgid, &egid, &sgid))
+ return _error->Errno("getresuid", "Could not get saved set-group-ID");
+ if (sgid != pw->pw_gid)
+ return _error->Error("Could not switch saved set-group-ID");
+#endif
+
+ // Check that uid and gid changes do not work anymore
+ if (pw->pw_gid != old_gid && (setgid(old_gid) != -1 || setegid(old_gid) != -1))
+ return _error->Error("Could restore a gid to root, privilege dropping did not work");
+
+ if (pw->pw_uid != old_uid && (setuid(old_uid) != -1 || seteuid(old_uid) != -1))
+ return _error->Error("Could restore a uid to root, privilege dropping did not work");
+
+ return true;
+}
+ /*}}}*/
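
A minimal sketch of how a caller is expected to use the new DropPrivileges() (the main() wrapper is hypothetical; in the patch the actual callers are the acquire methods):

#include <apt-pkg/fileutl.h>
#include <apt-pkg/configuration.h>
#include <apt-pkg/error.h>

int main()
{
   // privileges are only dropped when running as root; an empty
   // APT::Sandbox::User disables the feature (see bug #764506)
   _config->Set("APT::Sandbox::User", "_apt");
   if (DropPrivileges() == false)
   {
      _error->DumpErrors();
      return 100;
   }
   // from here on the process runs as _apt and, on Linux 3.5+,
   // can no longer gain new privileges (PR_SET_NO_NEW_PRIVS)
   return 0;
}
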
diff --git a/apt-pkg/contrib/fileutl.h b/apt-pkg/contrib/fileutl.h
index 667057067..a64d6cb98 100644
--- a/apt-pkg/contrib/fileutl.h
+++ b/apt-pkg/contrib/fileutl.h
@@ -170,6 +170,8 @@ time_t GetModificationTime(std::string const &Path);
bool Rename(std::string From, std::string To);
std::string GetTempDir();
+FileFd* GetTempFile(std::string const &Prefix = "",
+ bool ImmediateUnlink = true);
/** \brief Ensure the existence of the given Path
*
@@ -193,6 +195,23 @@ pid_t ExecFork(std::set<int> keep_fds);
void MergeKeepFdsFromConfiguration(std::set<int> &keep_fds);
bool ExecWait(pid_t Pid,const char *Name,bool Reap = false);
+
+// check if the given file starts with a PGP cleartext signature
+bool StartsWithGPGClearTextSignature(std::string const &FileName);
+
+/**
+ * \brief Drop privileges
+ *
+ * Drop the privileges to the user _apt (or the one specified in
+ * APT::Sandbox::User). This does not set the supplementary group
+ * ids up correctly; it only uses the default group. It also prevents
+ * the process from gaining any new privileges afterwards, at least
+ * on Linux.
+ *
+ * \return true on success, false on failure with _error set
+ */
+bool DropPrivileges();
+
// File string manipulators
std::string flNotDir(std::string File);
std::string flNotFile(std::string File);
@@ -200,7 +219,23 @@ std::string flNoLink(std::string File);
std::string flExtension(std::string File);
std::string flCombine(std::string Dir,std::string File);
+/** \brief Takes a file path and returns the absolute path
+ */
+std::string flAbsPath(std::string File);
+
// simple c++ glob
std::vector<std::string> Glob(std::string const &pattern, int flags=0);
+/** \brief Popen() implementation that uses execv() instead of a shell
+ *
+ * \param Args the execv style command to run
+ * \param Fd is a reference to the FileFd to use for input or output
+ * \param Child a reference to the integer that stores the child pid
+ *        Note that you must call ExecWait() or similar to clean up
+ * \param Mode is either FileFd::ReadOnly or FileFd::WriteOnly
+ * \return true on success, false on failure with _error set
+ */
+bool Popen(const char* Args[], FileFd &Fd, pid_t &Child, FileFd::OpenMode Mode);
+
+
#endif
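
A short usage sketch for the Popen() and GetTempFile() helpers declared above (the echo command and the temp file prefix are placeholders):

#include <apt-pkg/fileutl.h>
#include <apt-pkg/error.h>
#include <cstring>
#include <iostream>

int main()
{
   FileFd Pipe;
   pid_t Child;
   const char *Args[] = { "/bin/echo", "hello", NULL };
   if (Popen(Args, Pipe, Child, FileFd::ReadOnly) == false)
   {
      _error->DumpErrors();
      return 1;
   }
   char Buf[1024];
   unsigned long long Got = 0;
   Pipe.Read(Buf, sizeof(Buf) - 1, &Got);
   Buf[Got] = '\0';
   ExecWait(Child, "echo");            // the caller has to reap the child
   std::cout << Buf;                   // "hello"

   // GetTempFile(): scratch file, unlinked on creation, caller owns the FileFd
   FileFd * const Tmp = GetTempFile("example");
   if (Tmp != NULL)
   {
      Tmp->Write("scratch", strlen("scratch"));
      delete Tmp;
   }
   return 0;
}
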
diff --git a/apt-pkg/contrib/gpgv.cc b/apt-pkg/contrib/gpgv.cc
index f24dd9640..9d798cca9 100644
--- a/apt-pkg/contrib/gpgv.cc
+++ b/apt-pkg/contrib/gpgv.cc
@@ -32,50 +32,30 @@ static char * GenerateTemporaryFileTemplate(const char *basename) /*{{{*/
/*}}}*/
// ExecGPGV - returns the command needed for verify /*{{{*/
// ---------------------------------------------------------------------
-/* Generating the commandline for calling gpgv is somehow complicated as
+/* Generating the commandline for calling gpg is somehow complicated as
we need to add multiple keyrings and user supplied options.
- Also, as gpgv has no options to enforce a certain reduced style of
+ Also, as gpg has no options to enforce a certain reduced style of
clear-signed files (=the complete content of the file is signed and
the content isn't encoded) we do a divide and conquer approach here
- and split up the clear-signed file in message and signature for gpgv
+ and split up the clear-signed file in message and signature for gpg.
+   And as a cherry on the cake, we use our apt-key wrapper to do part
+   of the heavy lifting in regard to merging keyrings. Fun for the whole family.
*/
void ExecGPGV(std::string const &File, std::string const &FileGPG,
int const &statusfd, int fd[2])
{
#define EINTERNAL 111
- std::string const gpgvpath = _config->Find("Dir::Bin::gpg", "/usr/bin/gpgv");
- // FIXME: remove support for deprecated APT::GPGV setting
- std::string const trustedFile = _config->Find("APT::GPGV::TrustedKeyring", _config->FindFile("Dir::Etc::Trusted"));
- std::string const trustedPath = _config->FindDir("Dir::Etc::TrustedParts");
+ std::string const aptkey = _config->FindFile("Dir::Bin::apt-key", "/usr/bin/apt-key");
bool const Debug = _config->FindB("Debug::Acquire::gpgv", false);
- if (Debug == true)
- {
- std::clog << "gpgv path: " << gpgvpath << std::endl;
- std::clog << "Keyring file: " << trustedFile << std::endl;
- std::clog << "Keyring path: " << trustedPath << std::endl;
- }
-
- std::vector<std::string> keyrings;
- if (DirectoryExists(trustedPath))
- keyrings = GetListOfFilesInDir(trustedPath, "gpg", false, true);
- if (RealFileExists(trustedFile) == true)
- keyrings.push_back(trustedFile);
-
std::vector<const char *> Args;
- Args.reserve(30);
-
- if (keyrings.empty() == true)
- {
- // TRANSLATOR: %s is the trusted keyring parts directory
- ioprintf(std::cerr, _("No keyring installed in %s."),
- _config->FindDir("Dir::Etc::TrustedParts").c_str());
- exit(EINTERNAL);
- }
+ Args.reserve(10);
- Args.push_back(gpgvpath.c_str());
- Args.push_back("--ignore-time-conflict");
+ Args.push_back(aptkey.c_str());
+ Args.push_back("--quiet");
+ Args.push_back("--readonly");
+ Args.push_back("verify");
char statusfdstr[10];
if (statusfd != -1)
@@ -85,13 +65,6 @@ void ExecGPGV(std::string const &File, std::string const &FileGPG,
Args.push_back(statusfdstr);
}
- for (std::vector<std::string>::const_iterator K = keyrings.begin();
- K != keyrings.end(); ++K)
- {
- Args.push_back("--keyring");
- Args.push_back(K->c_str());
- }
-
Configuration::Item const *Opts;
Opts = _config->Tree("Acquire::gpgv::Options");
if (Opts != 0)
@@ -160,7 +133,7 @@ void ExecGPGV(std::string const &File, std::string const &FileGPG,
if (Debug == true)
{
- std::clog << "Preparing to exec: " << gpgvpath;
+ std::clog << "Preparing to exec: ";
for (std::vector<const char *>::const_iterator a = Args.begin(); *a != NULL; ++a)
std::clog << " " << *a;
std::clog << std::endl;
@@ -168,7 +141,7 @@ void ExecGPGV(std::string const &File, std::string const &FileGPG,
if (statusfd != -1)
{
- int const nullfd = open("/dev/null", O_RDONLY);
+ int const nullfd = open("/dev/null", O_WRONLY);
close(fd[0]);
// Redirect output to /dev/null; we read from the status fd
if (statusfd != STDOUT_FILENO)
@@ -185,7 +158,7 @@ void ExecGPGV(std::string const &File, std::string const &FileGPG,
if (releaseSignature == DETACHED)
{
- execvp(gpgvpath.c_str(), (char **) &Args[0]);
+ execvp(Args[0], (char **) &Args[0]);
ioprintf(std::cerr, "Couldn't execute %s to check %s", Args[0], File.c_str());
exit(EINTERNAL);
}
@@ -205,7 +178,7 @@ void ExecGPGV(std::string const &File, std::string const &FileGPG,
{
if (statusfd != -1)
dup2(fd[1], statusfd);
- execvp(gpgvpath.c_str(), (char **) &Args[0]);
+ execvp(Args[0], (char **) &Args[0]);
ioprintf(std::cerr, "Couldn't execute %s to check %s", Args[0], File.c_str());
UNLINK_EXIT(EINTERNAL);
}
@@ -216,7 +189,7 @@ void ExecGPGV(std::string const &File, std::string const &FileGPG,
{
if (errno == EINTR)
continue;
- ioprintf(std::cerr, _("Waited for %s but it wasn't there"), "gpgv");
+ ioprintf(std::cerr, _("Waited for %s but it wasn't there"), "apt-key");
UNLINK_EXIT(EINTERNAL);
}
#undef UNLINK_EXIT
@@ -229,14 +202,14 @@ void ExecGPGV(std::string const &File, std::string const &FileGPG,
// check if it exit'ed normally …
if (WIFEXITED(Status) == false)
{
- ioprintf(std::cerr, _("Sub-process %s exited unexpectedly"), "gpgv");
+ ioprintf(std::cerr, _("Sub-process %s exited unexpectedly"), "apt-key");
exit(EINTERNAL);
}
// … and with a good exit code
if (WEXITSTATUS(Status) != 0)
{
- ioprintf(std::cerr, _("Sub-process %s returned an error code (%u)"), "gpgv", WEXITSTATUS(Status));
+ ioprintf(std::cerr, _("Sub-process %s returned an error code (%u)"), "apt-key", WEXITSTATUS(Status));
exit(WEXITSTATUS(Status));
}
diff --git a/apt-pkg/contrib/hashes.cc b/apt-pkg/contrib/hashes.cc
index 15f83615d..417982343 100644
--- a/apt-pkg/contrib/hashes.cc
+++ b/apt-pkg/contrib/hashes.cc
@@ -27,7 +27,7 @@
#include <iostream>
/*}}}*/
-const char* HashString::_SupportedHashes[] =
+const char * HashString::_SupportedHashes[] =
{
"SHA512", "SHA256", "SHA1", "MD5Sum", NULL
};
@@ -42,11 +42,16 @@ HashString::HashString(std::string Type, std::string Hash) : Type(Type), Hash(Ha
HashString::HashString(std::string StringedHash) /*{{{*/
{
- // legacy: md5sum without "MD5Sum:" prefix
- if (StringedHash.find(":") == std::string::npos && StringedHash.size() == 32)
+ if (StringedHash.find(":") == std::string::npos)
{
- Type = "MD5Sum";
- Hash = StringedHash;
+ // legacy: md5sum without "MD5Sum:" prefix
+ if (StringedHash.size() == 32)
+ {
+ Type = "MD5Sum";
+ Hash = StringedHash;
+ }
+ if(_config->FindB("Debug::Hashes",false) == true)
+ std::clog << "HashString(string): invalid StringedHash " << StringedHash << std::endl;
return;
}
std::string::size_type pos = StringedHash.find(":");
@@ -82,25 +87,25 @@ std::string HashString::GetHashForFile(std::string filename) const /*{{{*/
std::string fileHash;
FileFd Fd(filename, FileFd::ReadOnly);
- if(Type == "MD5Sum")
+ if(strcasecmp(Type.c_str(), "MD5Sum") == 0)
{
MD5Summation MD5;
MD5.AddFD(Fd);
fileHash = (std::string)MD5.Result();
}
- else if (Type == "SHA1")
+ else if (strcasecmp(Type.c_str(), "SHA1") == 0)
{
SHA1Summation SHA1;
SHA1.AddFD(Fd);
fileHash = (std::string)SHA1.Result();
}
- else if (Type == "SHA256")
+ else if (strcasecmp(Type.c_str(), "SHA256") == 0)
{
SHA256Summation SHA256;
SHA256.AddFD(Fd);
fileHash = (std::string)SHA256.Result();
}
- else if (Type == "SHA512")
+ else if (strcasecmp(Type.c_str(), "SHA512") == 0)
{
SHA512Summation SHA512;
SHA512.AddFD(Fd);
@@ -111,26 +116,147 @@ std::string HashString::GetHashForFile(std::string filename) const /*{{{*/
return fileHash;
}
/*}}}*/
-const char** HashString::SupportedHashes()
+const char** HashString::SupportedHashes() /*{{{*/
{
return _SupportedHashes;
}
-
-APT_PURE bool HashString::empty() const
+ /*}}}*/
+APT_PURE bool HashString::empty() const /*{{{*/
{
return (Type.empty() || Hash.empty());
}
+ /*}}}*/
+std::string HashString::toStr() const /*{{{*/
+{
+ return Type + ":" + Hash;
+}
+ /*}}}*/
+APT_PURE bool HashString::operator==(HashString const &other) const /*{{{*/
+{
+ return (strcasecmp(Type.c_str(), other.Type.c_str()) == 0 && Hash == other.Hash);
+}
+APT_PURE bool HashString::operator!=(HashString const &other) const
+{
+ return !(*this == other);
+}
+ /*}}}*/
+
+bool HashStringList::usable() const /*{{{*/
+{
+ if (empty() == true)
+ return false;
+ std::string const forcedType = _config->Find("Acquire::ForceHash", "");
+ if (forcedType.empty() == true)
+ return true;
+ return find(forcedType) != NULL;
+}
+ /*}}}*/
+HashString const * HashStringList::find(char const * const type) const /*{{{*/
+{
+ if (type == NULL || type[0] == '\0')
+ {
+ std::string const forcedType = _config->Find("Acquire::ForceHash", "");
+ if (forcedType.empty() == false)
+ return find(forcedType.c_str());
+ for (char const * const * t = HashString::SupportedHashes(); *t != NULL; ++t)
+ for (std::vector<HashString>::const_iterator hs = list.begin(); hs != list.end(); ++hs)
+ if (strcasecmp(hs->HashType().c_str(), *t) == 0)
+ return &*hs;
+ return NULL;
+ }
+ for (std::vector<HashString>::const_iterator hs = list.begin(); hs != list.end(); ++hs)
+ if (strcasecmp(hs->HashType().c_str(), type) == 0)
+ return &*hs;
+ return NULL;
+}
+ /*}}}*/
+bool HashStringList::supported(char const * const type) /*{{{*/
+{
+ for (char const * const * t = HashString::SupportedHashes(); *t != NULL; ++t)
+ if (strcasecmp(*t, type) == 0)
+ return true;
+ return false;
+}
+ /*}}}*/
+bool HashStringList::push_back(const HashString &hashString) /*{{{*/
+{
+ if (hashString.HashType().empty() == true ||
+ hashString.HashValue().empty() == true ||
+ supported(hashString.HashType().c_str()) == false)
+ return false;
+
+ // ensure that each type is added only once
+ HashString const * const hs = find(hashString.HashType().c_str());
+ if (hs != NULL)
+ return *hs == hashString;
-std::string HashString::toStr() const
+ list.push_back(hashString);
+ return true;
+}
+ /*}}}*/
+bool HashStringList::VerifyFile(std::string filename) const /*{{{*/
{
- return Type + std::string(":") + Hash;
+ if (list.empty() == true)
+ return false;
+ HashString const * const hs = find(NULL);
+ if (hs == NULL || hs->VerifyFile(filename) == false)
+ return false;
+ return true;
}
+ /*}}}*/
+bool HashStringList::operator==(HashStringList const &other) const /*{{{*/
+{
+ std::string const forcedType = _config->Find("Acquire::ForceHash", "");
+ if (forcedType.empty() == false)
+ {
+ HashString const * const hs = find(forcedType);
+ HashString const * const ohs = other.find(forcedType);
+ if (hs == NULL || ohs == NULL)
+ return false;
+ return *hs == *ohs;
+ }
+ short matches = 0;
+ for (const_iterator hs = begin(); hs != end(); ++hs)
+ {
+ HashString const * const ohs = other.find(hs->HashType());
+ if (ohs == NULL)
+ continue;
+ if (*hs != *ohs)
+ return false;
+ ++matches;
+ }
+ if (matches == 0)
+ return false;
+ return true;
+}
+bool HashStringList::operator!=(HashStringList const &other) const
+{
+ return !(*this == other);
+}
+ /*}}}*/
-// Hashes::AddFD - Add the contents of the FD /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-bool Hashes::AddFD(int const Fd,unsigned long long Size, bool const addMD5,
- bool const addSHA1, bool const addSHA256, bool const addSHA512)
+// Hashes::Add* - Add the contents of data or FD /*{{{*/
+bool Hashes::Add(const unsigned char * const Data,unsigned long long const Size, unsigned int const Hashes)
+{
+ bool Res = true;
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
+ if ((Hashes & MD5SUM) == MD5SUM)
+ Res &= MD5.Add(Data, Size);
+ if ((Hashes & SHA1SUM) == SHA1SUM)
+ Res &= SHA1.Add(Data, Size);
+ if ((Hashes & SHA256SUM) == SHA256SUM)
+ Res &= SHA256.Add(Data, Size);
+ if ((Hashes & SHA512SUM) == SHA512SUM)
+ Res &= SHA512.Add(Data, Size);
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
+ return Res;
+}
+bool Hashes::AddFD(int const Fd,unsigned long long Size, unsigned int const Hashes)
{
unsigned char Buf[64*64];
bool const ToEOF = (Size == UntilEOF);
@@ -144,19 +270,12 @@ bool Hashes::AddFD(int const Fd,unsigned long long Size, bool const addMD5,
if (ToEOF && Res == 0) // EOF
break;
Size -= Res;
- if (addMD5 == true)
- MD5.Add(Buf,Res);
- if (addSHA1 == true)
- SHA1.Add(Buf,Res);
- if (addSHA256 == true)
- SHA256.Add(Buf,Res);
- if (addSHA512 == true)
- SHA512.Add(Buf,Res);
+ if (Add(Buf, Res, Hashes) == false)
+ return false;
}
return true;
}
-bool Hashes::AddFD(FileFd &Fd,unsigned long long Size, bool const addMD5,
- bool const addSHA1, bool const addSHA256, bool const addSHA512)
+bool Hashes::AddFD(FileFd &Fd,unsigned long long Size, unsigned int const Hashes)
{
unsigned char Buf[64*64];
bool const ToEOF = (Size == 0);
@@ -175,15 +294,35 @@ bool Hashes::AddFD(FileFd &Fd,unsigned long long Size, bool const addMD5,
else if (a == 0) // EOF
break;
Size -= a;
- if (addMD5 == true)
- MD5.Add(Buf, a);
- if (addSHA1 == true)
- SHA1.Add(Buf, a);
- if (addSHA256 == true)
- SHA256.Add(Buf, a);
- if (addSHA512 == true)
- SHA512.Add(Buf, a);
+ if (Add(Buf, a, Hashes) == false)
+ return false;
}
return true;
}
/*}}}*/
+HashStringList Hashes::GetHashStringList()
+{
+ HashStringList hashes;
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
+ hashes.push_back(HashString("MD5Sum", MD5.Result().Value()));
+ hashes.push_back(HashString("SHA1", SHA1.Result().Value()));
+ hashes.push_back(HashString("SHA256", SHA256.Result().Value()));
+ hashes.push_back(HashString("SHA512", SHA512.Result().Value()));
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
+ return hashes;
+}
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+ #pragma GCC diagnostic ignored "-Wsuggest-attribute=const"
+#endif
+Hashes::Hashes() {}
+Hashes::~Hashes() {}
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
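
The new flag-based interface lets callers skip digests they do not need; a sketch computing only SHA256 for a file (the path is just an example):

#include <apt-pkg/hashes.h>
#include <apt-pkg/fileutl.h>
#include <iostream>

int main()
{
   FileFd Fd("/etc/hostname", FileFd::ReadOnly);   // any readable file will do
   Hashes Hash;
   // restrict the work to SHA256 instead of calculating all four digests
   Hash.AddFD(Fd, 0, Hashes::SHA256SUM);
   HashStringList const list = Hash.GetHashStringList();
   HashString const * const sha256 = list.find("SHA256");
   if (sha256 != NULL)
      std::cout << sha256->toStr() << std::endl;   // "SHA256:<hex digest>"
   return 0;
}
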
diff --git a/apt-pkg/contrib/hashes.h b/apt-pkg/contrib/hashes.h
index 7a62f8a8f..caeba006d 100644
--- a/apt-pkg/contrib/hashes.h
+++ b/apt-pkg/contrib/hashes.h
@@ -17,6 +17,7 @@
#include <apt-pkg/md5.h>
#include <apt-pkg/sha1.h>
#include <apt-pkg/sha2.h>
+#include <apt-pkg/macros.h>
#include <cstring>
#include <string>
@@ -41,7 +42,7 @@ class HashString
protected:
std::string Type;
std::string Hash;
- static const char* _SupportedHashes[10];
+ static const char * _SupportedHashes[10];
// internal helper
std::string GetHashForFile(std::string filename) const;
@@ -52,7 +53,8 @@ class HashString
HashString();
// get hash type used
- std::string HashType() { return Type; };
+ std::string HashType() const { return Type; };
+ std::string HashValue() const { return Hash; };
// verify the given filename against the currently loaded hash
bool VerifyFile(std::string filename) const;
@@ -64,37 +66,160 @@ class HashString
// helper
std::string toStr() const; // convert to str as "type:hash"
bool empty() const;
+ bool operator==(HashString const &other) const;
+ bool operator!=(HashString const &other) const;
// return the list of hashes we support
static APT_CONST const char** SupportedHashes();
};
+class HashStringList
+{
+ public:
+ /** find best hash if no specific one is requested
+ *
+ * @param type of the checksum to return, can be \b NULL
+ * @return If type is \b NULL (or the empty string) it will
+ * return the 'best' hash; otherwise the hash which was
+ * specifically requested. If no hash is found \b NULL will be returned.
+ */
+ HashString const * find(char const * const type) const;
+ HashString const * find(std::string const &type) const { return find(type.c_str()); }
+ /** check if the given hash type is supported
+ *
+ * @param type to check
+ * @return true if supported, otherwise false
+ */
+ static APT_PURE bool supported(char const * const type);
+ /** add the given #HashString to the list
+ *
+ * @param hashString to add
+ * @return true if the hash was added because it is supported and
+ * no different hash of the same type was already included, otherwise false
+ */
+ bool push_back(const HashString &hashString);
+ /** @return size of the list of HashStrings */
+ size_t size() const { return list.size(); }
+
+ /** take the 'best' hash and verify file with it
+ *
+ * @param filename to verify
+ * @return true if the file matches the hashsum, otherwise false
+ */
+ bool VerifyFile(std::string filename) const;
+
+ /** is the list empty ?
+ *
+ * @return \b true if the list is empty, otherwise \b false
+ */
+ bool empty() const { return list.empty(); }
+
+ /** has the list at least one good entry
+ *
+ * similar to #empty, but handles forced hashes.
+ *
+ * @return if no hash is forced, \b true if the list is not empty;
+ * if one is forced, \b true if this hash is available, \b false otherwise
+ */
+ bool usable() const;
+
+ typedef std::vector<HashString>::const_iterator const_iterator;
+
+ /** iterator to the first element */
+ const_iterator begin() const { return list.begin(); }
+
+ /** iterator to the end element */
+ const_iterator end() const { return list.end(); }
+
+ /** start fresh with a clear list */
+ void clear() { list.clear(); }
+
+ /** compare two HashStringList for similarity.
+ *
+ * Two lists are similar if at least one hashtype is in both lists
+ * and the hashsum matches. All hashes are checked by default,
+ * if one doesn't match false is returned regardless of how many
+ * matched before. If a hash is forced, only this hash is compared,
+ * all others are ignored.
+ */
+ bool operator==(HashStringList const &other) const;
+ bool operator!=(HashStringList const &other) const;
+
+ HashStringList() {}
+
+ // simplifying API-compatibility constructors
+ HashStringList(std::string const &hash) {
+ if (hash.empty() == false)
+ list.push_back(HashString(hash));
+ }
+ HashStringList(char const * const hash) {
+ if (hash != NULL && hash[0] != '\0')
+ list.push_back(HashString(hash));
+ }
+
+ private:
+ std::vector<HashString> list;
+};
+
class Hashes
{
+ /** \brief dpointer placeholder */
+ void *d;
+
public:
+ /* those will disappear in the future as it is hard to add new ones this way.
+ * Use Add* to build the results and get them via GetHashStringList() instead */
+ APT_DEPRECATED MD5Summation MD5;
+ APT_DEPRECATED SHA1Summation SHA1;
+ APT_DEPRECATED SHA256Summation SHA256;
+ APT_DEPRECATED SHA512Summation SHA512;
- MD5Summation MD5;
- SHA1Summation SHA1;
- SHA256Summation SHA256;
- SHA512Summation SHA512;
-
static const int UntilEOF = 0;
- inline bool Add(const unsigned char *Data,unsigned long long Size)
+ bool Add(const unsigned char * const Data, unsigned long long const Size, unsigned int const Hashes = ~0);
+ inline bool Add(const char * const Data)
+ {return Add((unsigned char const * const)Data,strlen(Data));};
+ inline bool Add(const unsigned char * const Beg,const unsigned char * const End)
+ {return Add(Beg,End-Beg);};
+
+ enum SupportedHashes { MD5SUM = (1 << 0), SHA1SUM = (1 << 1), SHA256SUM = (1 << 2),
+ SHA512SUM = (1 << 3) };
+ bool AddFD(int const Fd,unsigned long long Size = 0, unsigned int const Hashes = ~0);
+ bool AddFD(FileFd &Fd,unsigned long long Size = 0, unsigned int const Hashes = ~0);
+
+ HashStringList GetHashStringList();
+
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
+ Hashes();
+ virtual ~Hashes();
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
+
+ private:
+ APT_HIDDEN APT_CONST inline unsigned int boolsToFlag(bool const addMD5, bool const addSHA1, bool const addSHA256, bool const addSHA512)
{
- return MD5.Add(Data,Size) && SHA1.Add(Data,Size) && SHA256.Add(Data,Size) && SHA512.Add(Data,Size);
+ unsigned int Hashes = ~0;
+ if (addMD5 == false) Hashes &= ~MD5SUM;
+ if (addSHA1 == false) Hashes &= ~SHA1SUM;
+ if (addSHA256 == false) Hashes &= ~SHA256SUM;
+ if (addSHA512 == false) Hashes &= ~SHA512SUM;
+ return Hashes;
+ }
+
+ public:
+ APT_DEPRECATED bool AddFD(int const Fd, unsigned long long Size, bool const addMD5,
+ bool const addSHA1, bool const addSHA256, bool const addSHA512) {
+ return AddFD(Fd, Size, boolsToFlag(addMD5, addSHA1, addSHA256, addSHA512));
+ };
+
+ APT_DEPRECATED bool AddFD(FileFd &Fd, unsigned long long Size, bool const addMD5,
+ bool const addSHA1, bool const addSHA256, bool const addSHA512) {
+ return AddFD(Fd, Size, boolsToFlag(addMD5, addSHA1, addSHA256, addSHA512));
};
- inline bool Add(const char *Data) {return Add((unsigned char const *)Data,strlen(Data));};
- inline bool AddFD(int const Fd,unsigned long long Size = 0)
- { return AddFD(Fd, Size, true, true, true, true); };
- bool AddFD(int const Fd, unsigned long long Size, bool const addMD5,
- bool const addSHA1, bool const addSHA256, bool const addSHA512);
- inline bool AddFD(FileFd &Fd,unsigned long long Size = 0)
- { return AddFD(Fd, Size, true, true, true, true); };
- bool AddFD(FileFd &Fd, unsigned long long Size, bool const addMD5,
- bool const addSHA1, bool const addSHA256, bool const addSHA512);
- inline bool Add(const unsigned char *Beg,const unsigned char *End)
- {return Add(Beg,End-Beg);};
};
#endif
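
A small sketch of the HashStringList semantics documented above — find() picks the strongest supported type, and the comparison only succeeds if at least one shared type matches and none disagrees (the hash values are shortened dummies, not real digests):

#include <apt-pkg/hashes.h>
#include <cassert>

int main()
{
   HashStringList a, b;
   a.push_back(HashString("SHA256", "aaaa"));      // dummy values for illustration
   a.push_back(HashString("MD5Sum", "bbbb"));
   b.push_back(HashString("SHA256", "aaaa"));

   assert(a.find(NULL)->HashType() == "SHA256");   // the 'best' available hash wins
   assert(a == b);                                 // the shared SHA256 entries match
   b.push_back(HashString("MD5Sum", "cccc"));
   assert(a != b);                                 // now a shared type disagrees
   return 0;
}
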
diff --git a/apt-pkg/contrib/macros.h b/apt-pkg/contrib/macros.h
index 2d6448e5e..a0573398d 100644
--- a/apt-pkg/contrib/macros.h
+++ b/apt-pkg/contrib/macros.h
@@ -138,7 +138,7 @@
// Non-ABI-Breaks should only increase RELEASE number.
// See also buildlib/libversion.mak
#define APT_PKG_MAJOR 4
-#define APT_PKG_MINOR 12
+#define APT_PKG_MINOR 14
#define APT_PKG_RELEASE 0
#endif
diff --git a/apt-pkg/contrib/netrc.cc b/apt-pkg/contrib/netrc.cc
index feaed67c8..1e3778f45 100644
--- a/apt-pkg/contrib/netrc.cc
+++ b/apt-pkg/contrib/netrc.cc
@@ -152,18 +152,6 @@ static int parsenetrc_string (char *host, std::string &login, std::string &passw
return retcode;
}
-// for some unknown reason this method is exported so keep a compatible interface for now …
-int parsenetrc (char *host, char *login, char *password, char *netrcfile = NULL)
-{
- std::string login_string, password_string;
- int const ret = parsenetrc_string(host, login_string, password_string, netrcfile);
- if (ret < 0)
- return ret;
- strncpy(login, login_string.c_str(), LOGINSIZE - 1);
- strncpy(password, password_string.c_str(), PASSWORDSIZE - 1);
- return ret;
-}
-
void maybe_add_auth (URI &Uri, string NetRCFile)
{
diff --git a/apt-pkg/contrib/netrc.h b/apt-pkg/contrib/netrc.h
index dbeb45386..b5b56f5d4 100644
--- a/apt-pkg/contrib/netrc.h
+++ b/apt-pkg/contrib/netrc.h
@@ -27,9 +27,5 @@
class URI;
-// FIXME: kill this export on the next ABI break - strongly doubt its in use anyway
-// outside of the apt itself, its really a internal interface
-APT_DEPRECATED int parsenetrc (char *host, char *login, char *password, char *filename);
-
void maybe_add_auth (URI &Uri, std::string NetRCFile);
#endif
diff --git a/apt-pkg/contrib/strutl.cc b/apt-pkg/contrib/strutl.cc
index 87f57a30e..aad358a55 100644
--- a/apt-pkg/contrib/strutl.cc
+++ b/apt-pkg/contrib/strutl.cc
@@ -74,6 +74,13 @@ bool Endswith(const std::string &s, const std::string &end)
return (s.substr(s.size() - end.size(), s.size()) == end);
}
+bool Startswith(const std::string &s, const std::string &start)
+{
+ if (start.size() > s.size())
+ return false;
+ return (s.substr(0, start.size()) == start);
+}
+
}
}
/*}}}*/
@@ -1061,7 +1068,7 @@ bool StrToNum(const char *Str,unsigned long long &Res,unsigned Len,unsigned Base
// ---------------------------------------------------------------------
/* This is used in decoding the 256bit encoded fixed length fields in
tar files */
-bool Base256ToNum(const char *Str,unsigned long &Res,unsigned int Len)
+bool Base256ToNum(const char *Str,unsigned long long &Res,unsigned int Len)
{
if ((Str[0] & 0x80) == 0)
return false;
@@ -1074,6 +1081,23 @@ bool Base256ToNum(const char *Str,unsigned long &Res,unsigned int Len)
}
}
/*}}}*/
+// Base256ToNum - Convert a fixed length binary to a number /*{{{*/
+// ---------------------------------------------------------------------
+/* This is used in decoding the 256bit encoded fixed length fields in
+ tar files */
+bool Base256ToNum(const char *Str,unsigned long &Res,unsigned int Len)
+{
+ unsigned long long Num;
+ bool rc;
+
+ rc = Base256ToNum(Str, Num, Len);
+ Res = Num;
+ if (Res != Num)
+ return false;
+
+ return rc;
+}
+ /*}}}*/
// HexDigit - Convert a hex character into an integer /*{{{*/
// ---------------------------------------------------------------------
/* Helper for Hex2Num */
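
The new unsigned long long overload does the actual decoding, while the old unsigned long variant narrows the result and reports overflow; a sketch with a fabricated tar-style base-256 field:

#include <apt-pkg/strutl.h>
#include <iostream>

int main()
{
   // base-256 tar fields are marked by the 0x80 bit in the first byte
   char Field[12] = { (char)0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x01, 0x00 };
   unsigned long long Big = 0;
   if (Base256ToNum(Field, Big, sizeof(Field)) == true)
      std::cout << Big << std::endl;               // 256

   unsigned long Small = 0;
   // the narrowing wrapper returns false if the decoded value
   // does not fit into an unsigned long on this platform
   if (Base256ToNum(Field, Small, sizeof(Field)) == true)
      std::cout << Small << std::endl;             // 256 again
   return 0;
}
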
diff --git a/apt-pkg/contrib/strutl.h b/apt-pkg/contrib/strutl.h
index 185cdc3fc..e20ddca9c 100644
--- a/apt-pkg/contrib/strutl.h
+++ b/apt-pkg/contrib/strutl.h
@@ -40,6 +40,7 @@ namespace APT {
namespace String {
std::string Strip(const std::string &s);
bool Endswith(const std::string &s, const std::string &ending);
+ bool Startswith(const std::string &s, const std::string &starting);
}
}
@@ -72,6 +73,7 @@ bool ReadMessages(int Fd, std::vector<std::string> &List);
bool StrToNum(const char *Str,unsigned long &Res,unsigned Len,unsigned Base = 0);
bool StrToNum(const char *Str,unsigned long long &Res,unsigned Len,unsigned Base = 0);
bool Base256ToNum(const char *Str,unsigned long &Res,unsigned int Len);
+bool Base256ToNum(const char *Str,unsigned long long &Res,unsigned int Len);
bool Hex2Num(const std::string &Str,unsigned char *Num,unsigned int Length);
// input changing string split
@@ -151,9 +153,9 @@ inline const char *DeNull(const char *s) {return (s == 0?"(null)":s);}
class URI
{
void CopyFrom(const std::string &From);
-
+
public:
-
+
std::string Access;
std::string User;
std::string Password;
diff --git a/apt-pkg/deb/debindexfile.cc b/apt-pkg/deb/debindexfile.cc
index 5b4289e92..cc1d94d81 100644
--- a/apt-pkg/deb/debindexfile.cc
+++ b/apt-pkg/deb/debindexfile.cc
@@ -30,6 +30,7 @@
#include <apt-pkg/pkgcachegen.h>
#include <apt-pkg/pkgrecords.h>
#include <apt-pkg/srcrecords.h>
+#include <apt-pkg/sptr.h>
#include <stdio.h>
#include <iostream>
@@ -130,7 +131,7 @@ string debSourcesIndex::Info(const char *Type) const
// SourcesIndex::Index* - Return the URI to the index files /*{{{*/
// ---------------------------------------------------------------------
/* */
-inline string debSourcesIndex::IndexFile(const char *Type) const
+string debSourcesIndex::IndexFile(const char *Type) const
{
string s = URItoFileName(IndexURI(Type));
@@ -264,7 +265,7 @@ string debPackagesIndex::Info(const char *Type) const
// PackagesIndex::Index* - Return the URI to the index files /*{{{*/
// ---------------------------------------------------------------------
/* */
-inline string debPackagesIndex::IndexFile(const char *Type) const
+string debPackagesIndex::IndexFile(const char *Type) const
{
string s =_config->FindDir("Dir::State::lists") + URItoFileName(IndexURI(Type));
@@ -420,7 +421,7 @@ debTranslationsIndex::debTranslationsIndex(string URI,string Dist,string Section
// TranslationIndex::Trans* - Return the URI to the translation files /*{{{*/
// ---------------------------------------------------------------------
/* */
-inline string debTranslationsIndex::IndexFile(const char *Type) const
+string debTranslationsIndex::IndexFile(const char *Type) const
{
string s =_config->FindDir("Dir::State::lists") + URItoFileName(IndexURI(Type));
@@ -634,7 +635,7 @@ bool debStatusIndex::Merge(pkgCacheGenerator &Gen,OpProgress *Prog) const
pkgCache::PkgFileIterator CFile = Gen.GetCurFile();
CFile->Size = Pkg.FileSize();
CFile->mtime = Pkg.ModificationTime();
- map_ptrloc const storage = Gen.WriteUniqString("now");
+ map_stringitem_t const storage = Gen.StoreString(pkgCacheGenerator::MIXED, "now");
CFile->Archive = storage;
if (Gen.MergeList(Parser) == false)
@@ -683,6 +684,136 @@ APT_CONST bool debStatusIndex::Exists() const
}
/*}}}*/
+// debDebPkgFile - Single .deb file /*{{{*/
+// ---------------------------------------------------------------------
+debDebPkgFileIndex::debDebPkgFileIndex(std::string DebFile)
+ : pkgIndexFile(true), DebFile(DebFile)
+{
+ DebFileFullPath = flAbsPath(DebFile);
+}
+
+std::string debDebPkgFileIndex::ArchiveURI(std::string /*File*/) const
+{
+ return "file:" + DebFileFullPath;
+}
+
+bool debDebPkgFileIndex::Exists() const
+{
+ return FileExists(DebFile);
+}
+bool debDebPkgFileIndex::Merge(pkgCacheGenerator& Gen, OpProgress* Prog) const
+{
+ if(Prog)
+ Prog->SubProgress(0, "Reading deb file");
+
+   // get the control data out of the deb file via dpkg -I
+ // ... can I haz libdpkg?
+ Configuration::Item const *Opts = _config->Tree("DPkg::Options");
+ std::string dpkg = _config->Find("Dir::Bin::dpkg","dpkg");
+ std::vector<const char *> Args;
+ Args.push_back(dpkg.c_str());
+ if (Opts != 0)
+ {
+ Opts = Opts->Child;
+ for (; Opts != 0; Opts = Opts->Next)
+ {
+ if (Opts->Value.empty() == true)
+ continue;
+ Args.push_back(Opts->Value.c_str());
+ }
+ }
+ Args.push_back("-I");
+ Args.push_back(DebFile.c_str());
+ Args.push_back("control");
+ Args.push_back(NULL);
+ FileFd PipeFd;
+ pid_t Child;
+ if(Popen((const char**)&Args[0], PipeFd, Child, FileFd::ReadOnly) == false)
+ return _error->Error("Popen failed");
+ // FIXME: static buffer
+ char buf[8*1024];
+ unsigned long long n = 0;
+ if(PipeFd.Read(buf, sizeof(buf)-1, &n) == false)
+ return _error->Errno("read", "Failed to read dpkg pipe");
+ ExecWait(Child, "Popen");
+
+ // now write the control data to a tempfile
+ SPtr<FileFd> DebControl = GetTempFile("deb-file-" + flNotDir(DebFile));
+ if(DebControl == NULL)
+ return false;
+ DebControl->Write(buf, n);
+ // append size of the file
+ FileFd Fd(DebFile, FileFd::ReadOnly);
+ string Size;
+ strprintf(Size, "Size: %llu\n", Fd.Size());
+ DebControl->Write(Size.c_str(), Size.size());
+ // and rewind for the listparser
+ DebControl->Seek(0);
+
+ // and give it to the list parser
+ debDebFileParser Parser(DebControl, DebFile);
+ if(Gen.SelectFile(DebFile, "local", *this) == false)
+ return _error->Error("Problem with SelectFile %s", DebFile.c_str());
+
+ pkgCache::PkgFileIterator File = Gen.GetCurFile();
+ File->Size = DebControl->Size();
+ File->mtime = DebControl->ModificationTime();
+
+ if (Gen.MergeList(Parser) == false)
+ return _error->Error("Problem with MergeLister for %s", DebFile.c_str());
+
+ return true;
+}
+pkgCache::PkgFileIterator debDebPkgFileIndex::FindInCache(pkgCache &Cache) const
+{
+ pkgCache::PkgFileIterator File = Cache.FileBegin();
+ for (; File.end() == false; ++File)
+ {
+ if (File.FileName() == NULL || DebFile != File.FileName())
+ continue;
+
+ return File;
+ }
+
+ return File;
+}
+unsigned long debDebPkgFileIndex::Size() const
+{
+ struct stat buf;
+ if(stat(DebFile.c_str(), &buf) != 0)
+ return 0;
+ return buf.st_size;
+}
+ /*}}}*/
+
+// debDscFileIndex stuff
+debDscFileIndex::debDscFileIndex(std::string &DscFile)
+ : pkgIndexFile(true), DscFile(DscFile)
+{
+}
+
+bool debDscFileIndex::Exists() const
+{
+ return FileExists(DscFile);
+}
+
+unsigned long debDscFileIndex::Size() const
+{
+ struct stat buf;
+ if(stat(DscFile.c_str(), &buf) == 0)
+ return buf.st_size;
+ return 0;
+}
+
+// DscFileIndex::CreateSrcParser - Get a parser for the .dsc file /*{{{*/
+pkgSrcRecords::Parser *debDscFileIndex::CreateSrcParser() const
+{
+ if (!FileExists(DscFile))
+ return NULL;
+
+ return new debDscRecordParser(DscFile,this);
+}
+ /*}}}*/
// Index File types for Debian /*{{{*/
class debIFTypeSrc : public pkgIndexFile::Type
{
@@ -715,10 +846,42 @@ class debIFTypeStatus : public pkgIndexFile::Type
};
debIFTypeStatus() {Label = "Debian dpkg status file";};
};
+class debIFTypeDebPkgFile : public pkgIndexFile::Type
+{
+ public:
+ virtual pkgRecords::Parser *CreatePkgParser(pkgCache::PkgFileIterator File) const
+ {
+ return new debDebFileRecordParser(File.FileName(),*File.Cache());
+ };
+ debIFTypeDebPkgFile() {Label = "deb Package file";};
+};
+class debIFTypeDscFile : public pkgIndexFile::Type
+{
+ public:
+ virtual pkgSrcRecords::Parser *CreateSrcPkgParser(std::string DscFile) const
+ {
+ return new debDscRecordParser(DscFile, NULL);
+ };
+ debIFTypeDscFile() {Label = "dsc File Source Index";};
+};
+class debIFTypeDebianSourceDir : public pkgIndexFile::Type
+{
+ public:
+ virtual pkgSrcRecords::Parser *CreateSrcPkgParser(std::string SourceDir) const
+ {
+ return new debDscRecordParser(SourceDir + string("/debian/control"), NULL);
+ };
+ debIFTypeDebianSourceDir() {Label = "debian/control File Source Index";};
+};
+
static debIFTypeSrc _apt_Src;
static debIFTypePkg _apt_Pkg;
static debIFTypeTrans _apt_Trans;
static debIFTypeStatus _apt_Status;
+static debIFTypeDebPkgFile _apt_DebPkgFile;
+// file based pseudo indexes
+static debIFTypeDscFile _apt_DscFile;
+static debIFTypeDebianSourceDir _apt_DebianSourceDir;
const pkgIndexFile::Type *debSourcesIndex::GetType() const
{
@@ -736,5 +899,23 @@ const pkgIndexFile::Type *debStatusIndex::GetType() const
{
return &_apt_Status;
}
-
+const pkgIndexFile::Type *debDebPkgFileIndex::GetType() const
+{
+ return &_apt_DebPkgFile;
+}
+const pkgIndexFile::Type *debDscFileIndex::GetType() const
+{
+ return &_apt_DscFile;
+}
+const pkgIndexFile::Type *debDebianSourceDirIndex::GetType() const
+{
+ return &_apt_DebianSourceDir;
+}
/*}}}*/
+
+debStatusIndex::~debStatusIndex() {}
+debPackagesIndex::~debPackagesIndex() {}
+debTranslationsIndex::~debTranslationsIndex() {}
+debSourcesIndex::~debSourcesIndex() {}
+
+debDebPkgFileIndex::~debDebPkgFileIndex() {}
diff --git a/apt-pkg/deb/debindexfile.h b/apt-pkg/deb/debindexfile.h
index 017c69a0a..e5a1a7873 100644
--- a/apt-pkg/deb/debindexfile.h
+++ b/apt-pkg/deb/debindexfile.h
@@ -52,7 +52,7 @@ class debStatusIndex : public pkgIndexFile
virtual pkgCache::PkgFileIterator FindInCache(pkgCache &Cache) const;
debStatusIndex(std::string File);
- virtual ~debStatusIndex() {};
+ virtual ~debStatusIndex();
};
class debPackagesIndex : public pkgIndexFile
@@ -65,10 +65,10 @@ class debPackagesIndex : public pkgIndexFile
std::string Section;
std::string Architecture;
- std::string Info(const char *Type) const;
- std::string IndexFile(const char *Type) const;
- std::string IndexURI(const char *Type) const;
-
+ APT_HIDDEN std::string Info(const char *Type) const;
+ APT_HIDDEN std::string IndexFile(const char *Type) const;
+ APT_HIDDEN std::string IndexURI(const char *Type) const;
+
public:
virtual const Type *GetType() const APT_CONST;
@@ -89,7 +89,7 @@ class debPackagesIndex : public pkgIndexFile
debPackagesIndex(std::string const &URI, std::string const &Dist, std::string const &Section,
bool const &Trusted, std::string const &Arch = "native");
- virtual ~debPackagesIndex() {};
+ virtual ~debPackagesIndex();
};
class debTranslationsIndex : public pkgIndexFile
@@ -102,11 +102,11 @@ class debTranslationsIndex : public pkgIndexFile
std::string Section;
const char * const Language;
- std::string Info(const char *Type) const;
- std::string IndexFile(const char *Type) const;
- std::string IndexURI(const char *Type) const;
+ APT_HIDDEN std::string Info(const char *Type) const;
+ APT_HIDDEN std::string IndexFile(const char *Type) const;
+ APT_HIDDEN std::string IndexURI(const char *Type) const;
- inline std::string TranslationFile() const {return std::string("Translation-").append(Language);};
+ APT_HIDDEN std::string TranslationFile() const {return std::string("Translation-").append(Language);};
public:
@@ -124,7 +124,7 @@ class debTranslationsIndex : public pkgIndexFile
virtual pkgCache::PkgFileIterator FindInCache(pkgCache &Cache) const;
debTranslationsIndex(std::string URI,std::string Dist,std::string Section, char const * const Language);
- virtual ~debTranslationsIndex() {};
+ virtual ~debTranslationsIndex();
};
class debSourcesIndex : public pkgIndexFile
@@ -136,10 +136,10 @@ class debSourcesIndex : public pkgIndexFile
std::string Dist;
std::string Section;
- std::string Info(const char *Type) const;
- std::string IndexFile(const char *Type) const;
- std::string IndexURI(const char *Type) const;
-
+ APT_HIDDEN std::string Info(const char *Type) const;
+ APT_HIDDEN std::string IndexFile(const char *Type) const;
+ APT_HIDDEN std::string IndexURI(const char *Type) const;
+
public:
virtual const Type *GetType() const APT_CONST;
@@ -161,7 +161,61 @@ class debSourcesIndex : public pkgIndexFile
virtual unsigned long Size() const;
debSourcesIndex(std::string URI,std::string Dist,std::string Section,bool Trusted);
- virtual ~debSourcesIndex() {};
+ virtual ~debSourcesIndex();
+};
+
+class debDebPkgFileIndex : public pkgIndexFile
+{
+ private:
+ void *d;
+ std::string DebFile;
+ std::string DebFileFullPath;
+
+ public:
+ virtual const Type *GetType() const APT_CONST;
+
+ virtual std::string Describe(bool /*Short*/) const {
+ return DebFile;
+ }
+
+ // Interface for the Cache Generator
+ virtual bool Exists() const;
+ virtual bool HasPackages() const {
+ return true;
+ };
+ virtual unsigned long Size() const;
+ virtual bool Merge(pkgCacheGenerator &Gen,OpProgress *Prog) const;
+ virtual pkgCache::PkgFileIterator FindInCache(pkgCache &Cache) const;
+
+ // Interface for acquire
+ virtual std::string ArchiveURI(std::string /*File*/) const;
+
+ debDebPkgFileIndex(std::string DebFile);
+ virtual ~debDebPkgFileIndex();
+};
+
+class debDscFileIndex : public pkgIndexFile
+{
+ private:
+ std::string DscFile;
+ public:
+ virtual const Type *GetType() const APT_CONST;
+ virtual pkgSrcRecords::Parser *CreateSrcParser() const;
+ virtual bool Exists() const;
+ virtual bool HasPackages() const {return false;};
+ virtual unsigned long Size() const;
+ virtual std::string Describe(bool /*Short*/) const {
+ return DscFile;
+ };
+
+ debDscFileIndex(std::string &DscFile);
+ virtual ~debDscFileIndex() {};
+};
+
+class debDebianSourceDirIndex : public debDscFileIndex
+{
+ public:
+ virtual const Type *GetType() const APT_CONST;
};
#endif
diff --git a/apt-pkg/deb/deblistparser.cc b/apt-pkg/deb/deblistparser.cc
index 4eef66c2b..616d8296d 100644
--- a/apt-pkg/deb/deblistparser.cc
+++ b/apt-pkg/deb/deblistparser.cc
@@ -58,18 +58,6 @@ debListParser::debListParser(FileFd *File, string const &Arch) : Tags(File),
MultiArchEnabled = Architectures.size() > 1;
}
/*}}}*/
-// ListParser::UniqFindTagWrite - Find the tag and write a unq string /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-unsigned long debListParser::UniqFindTagWrite(const char *Tag)
-{
- const char *Start;
- const char *Stop;
- if (Section.Find(Tag,Start,Stop) == false)
- return 0;
- return WriteUniqString(Start,Stop - Start);
-}
- /*}}}*/
// ListParser::Package - Return the package name /*{{{*/
// ---------------------------------------------------------------------
/* This is to return the name of the package this section describes */
@@ -144,9 +132,67 @@ unsigned char debListParser::ParseMultiArch(bool const showErrors) /*{{{*/
/* */
bool debListParser::NewVersion(pkgCache::VerIterator &Ver)
{
+ const char *Start;
+ const char *Stop;
+
// Parse the section
- unsigned long const idxSection = UniqFindTagWrite("Section");
- Ver->Section = idxSection;
+ if (Section.Find("Section",Start,Stop) == true)
+ {
+ map_stringitem_t const idx = StoreString(pkgCacheGenerator::SECTION, Start, Stop - Start);
+ Ver->Section = idx;
+ }
+ // Parse the source package name
+ pkgCache::GrpIterator const G = Ver.ParentPkg().Group();
+ Ver->SourcePkgName = G->Name;
+ Ver->SourceVerStr = Ver->VerStr;
+ if (Section.Find("Source",Start,Stop) == true)
+ {
+ const char * const Space = (const char * const) memchr(Start, ' ', Stop - Start);
+ pkgCache::VerIterator V;
+
+ if (Space != NULL)
+ {
+ Stop = Space;
+ const char * const Open = (const char * const) memchr(Space, '(', Stop - Space);
+ if (likely(Open != NULL))
+ {
+ const char * const Close = (const char * const) memchr(Open, ')', Stop - Open);
+ if (likely(Close != NULL))
+ {
+ std::string const version(Open + 1, (Close - Open) - 1);
+ if (version != Ver.VerStr())
+ {
+ map_stringitem_t const idx = StoreString(pkgCacheGenerator::VERSIONNUMBER, version);
+ Ver->SourceVerStr = idx;
+ }
+ }
+ }
+ }
+
+ std::string const pkgname(Start, Stop - Start);
+ if (pkgname != G.Name())
+ {
+ for (pkgCache::PkgIterator P = G.PackageList(); P.end() == false; P = G.NextPkg(P))
+ {
+ for (V = P.VersionList(); V.end() == false; ++V)
+ {
+ if (pkgname == V.SourcePkgName())
+ {
+ Ver->SourcePkgName = V->SourcePkgName;
+ break;
+ }
+ }
+ if (V.end() == false)
+ break;
+ }
+ if (V.end() == true)
+ {
+ map_stringitem_t const idx = StoreString(pkgCacheGenerator::PKGNAME, pkgname);
+ Ver->SourcePkgName = idx;
+ }
+ }
+ }
+
Ver->MultiArch = ParseMultiArch(true);
// Archive Size
Ver->Size = Section.FindULL("Size");
@@ -155,10 +201,8 @@ bool debListParser::NewVersion(pkgCache::VerIterator &Ver)
Ver->InstalledSize *= 1024;
// Priority
- const char *Start;
- const char *Stop;
if (Section.Find("Priority",Start,Stop) == true)
- {
+ {
if (GrabWord(string(Start,Stop-Start),PrioList,Ver->Priority) == false)
Ver->Priority = pkgCache::State::Extra;
}
@@ -195,35 +239,31 @@ bool debListParser::NewVersion(pkgCache::VerIterator &Ver)
/* This is to return the string describing the package in debian
form. If this returns the blank string then the entry is assumed to
only describe package properties */
-string debListParser::Description()
+string debListParser::Description(std::string const &lang)
{
- string const lang = DescriptionLanguage();
if (lang.empty())
return Section.FindS("Description");
else
return Section.FindS(string("Description-").append(lang).c_str());
}
- /*}}}*/
-// ListParser::DescriptionLanguage - Return the description lang string /*{{{*/
-// ---------------------------------------------------------------------
-/* This is to return the string describing the language of
- description. If this returns the blank string then the entry is
- assumed to describe original description. */
-string debListParser::DescriptionLanguage()
+ /*}}}*/
+// ListParser::AvailableDescriptionLanguages /*{{{*/
+std::vector<std::string> debListParser::AvailableDescriptionLanguages()
{
- if (Section.FindS("Description").empty() == false)
- return "";
-
- std::vector<string> const lang = APT::Configuration::getLanguages(true);
- for (std::vector<string>::const_iterator l = lang.begin();
- l != lang.end(); ++l)
- if (Section.FindS(string("Description-").append(*l).c_str()).empty() == false)
- return *l;
-
- return "";
+ std::vector<std::string> const understood = APT::Configuration::getLanguages();
+ std::vector<std::string> avail;
+ if (Section.Exists("Description") == true)
+ avail.push_back("");
+ for (std::vector<std::string>::const_iterator lang = understood.begin(); lang != understood.end(); ++lang)
+ {
+ std::string const tagname = "Description-" + *lang;
+ if (Section.Exists(tagname.c_str()) == true)
+ avail.push_back(*lang);
+ }
+ return avail;
}
- /*}}}*/
-// ListParser::Description - Return the description_md5 MD5SumValue /*{{{*/
+ /*}}}*/
+// ListParser::Description_md5 - Return the description_md5 MD5SumValue /*{{{*/
// ---------------------------------------------------------------------
/* This is to return the md5 string to allow the check if it is the right
description. If no Description-md5 is found in the section it will be
@@ -234,7 +274,7 @@ MD5SumValue debListParser::Description_md5()
string const value = Section.FindS("Description-md5");
if (value.empty() == true)
{
- std::string const desc = Description() + "\n";
+ std::string const desc = Description("") + "\n";
if (desc == "\n")
return MD5SumValue();
@@ -260,12 +300,6 @@ MD5SumValue debListParser::Description_md5()
bool debListParser::UsePackage(pkgCache::PkgIterator &Pkg,
pkgCache::VerIterator &Ver)
{
- if (Pkg->Section == 0)
- {
- unsigned long const idxSection = UniqFindTagWrite("Section");
- Pkg->Section = idxSection;
- }
-
string const static myArch = _config->Find("APT::Architecture");
// Possible values are: "all", "native", "installed" and "none"
// The "installed" mode is handled by ParseStatus(), See #544481 and friends.
@@ -917,7 +951,7 @@ bool debListParser::LoadReleaseInfo(pkgCache::PkgFileIterator &FileI,
{
// apt-secure does no longer download individual (per-section) Release
// file. to provide Component pinning we use the section name now
- map_ptrloc const storage = WriteUniqString(component);
+ map_stringitem_t const storage = StoreString(pkgCacheGenerator::MIXED, component);
FileI->Component = storage;
pkgTagFile TagFile(&File, File.Size());
@@ -926,19 +960,19 @@ bool debListParser::LoadReleaseInfo(pkgCache::PkgFileIterator &FileI,
return false;
std::string data;
- #define APT_INRELEASE(TAG, STORE) \
+ #define APT_INRELEASE(TYPE, TAG, STORE) \
data = Section.FindS(TAG); \
if (data.empty() == false) \
{ \
- map_ptrloc const storage = WriteUniqString(data); \
+ map_stringitem_t const storage = StoreString(pkgCacheGenerator::TYPE, data); \
STORE = storage; \
}
- APT_INRELEASE("Suite", FileI->Archive)
- APT_INRELEASE("Component", FileI->Component)
- APT_INRELEASE("Version", FileI->Version)
- APT_INRELEASE("Origin", FileI->Origin)
- APT_INRELEASE("Codename", FileI->Codename)
- APT_INRELEASE("Label", FileI->Label)
+ APT_INRELEASE(MIXED, "Suite", FileI->Archive)
+ APT_INRELEASE(MIXED, "Component", FileI->Component)
+ APT_INRELEASE(VERSIONNUMBER, "Version", FileI->Version)
+ APT_INRELEASE(MIXED, "Origin", FileI->Origin)
+ APT_INRELEASE(MIXED, "Codename", FileI->Codename)
+ APT_INRELEASE(MIXED, "Label", FileI->Label)
#undef APT_INRELEASE
Section.FindFlag("NotAutomatic", FileI->Flags, pkgCache::Flag::NotAutomatic);
Section.FindFlag("ButAutomaticUpgrades", FileI->Flags, pkgCache::Flag::ButAutomaticUpgrades);
@@ -979,3 +1013,22 @@ bool debListParser::SameVersion(unsigned short const Hash, /*{{{*/
}
/*}}}*/
#endif
+
+
+debDebFileParser::debDebFileParser(FileFd *File, std::string const &DebFile)
+ : debListParser(File, ""), DebFile(DebFile)
+{
+}
+
+bool debDebFileParser::UsePackage(pkgCache::PkgIterator &Pkg,
+ pkgCache::VerIterator &Ver)
+{
+ bool res = debListParser::UsePackage(Pkg, Ver);
+ // we use the full file path as a provides so that the file is found
+ // by its name
+ if(NewProvidesAllArch(Ver, DebFile, Ver.VerStr()) == false)
+ return false;
+ return res;
+}
+
+debListParser::~debListParser() {}
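
For reference, the Source handling added to NewVersion() above splits a "Source:" value into the source package name and an optional source version; a simplified stand-alone sketch of the intended mapping (not the patch code itself):

#include <iostream>
#include <string>

static void SplitSource(std::string const &field, std::string &name, std::string &version)
{
   name = field.substr(0, field.find(' '));
   version.clear();                                // empty: same as the binary version
   std::string::size_type const open = field.find('(');
   std::string::size_type const close = field.find(')', open);
   if (open != std::string::npos && close != std::string::npos)
      version = field.substr(open + 1, close - open - 1);
}

int main()
{
   std::string name, version;
   SplitSource("mysql-defaults (1.0.2)", name, version);
   std::cout << name << " " << version << std::endl;   // "mysql-defaults 1.0.2"
   SplitSource("coreutils", name, version);
   std::cout << name << std::endl;                      // version stays empty
   return 0;
}
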
diff --git a/apt-pkg/deb/deblistparser.h b/apt-pkg/deb/deblistparser.h
index 3b6963211..f20f7f33e 100644
--- a/apt-pkg/deb/deblistparser.h
+++ b/apt-pkg/deb/deblistparser.h
@@ -44,19 +44,19 @@ class debListParser : public pkgCacheGenerator::ListParser
protected:
pkgTagFile Tags;
pkgTagSection Section;
- unsigned long iOffset;
+ map_filesize_t iOffset;
std::string Arch;
std::vector<std::string> Architectures;
bool MultiArchEnabled;
- unsigned long UniqFindTagWrite(const char *Tag);
virtual bool ParseStatus(pkgCache::PkgIterator &Pkg,pkgCache::VerIterator &Ver);
bool ParseDepends(pkgCache::VerIterator &Ver,const char *Tag,
unsigned int Type);
bool ParseProvides(pkgCache::VerIterator &Ver);
bool NewProvidesAllArch(pkgCache::VerIterator &Ver, std::string const &Package, std::string const &Version);
static bool GrabWord(std::string Word,WordList *List,unsigned char &Out);
-
+ APT_HIDDEN unsigned char ParseMultiArch(bool const showErrors);
+
public:
static unsigned char GetPrio(std::string Str);
@@ -67,8 +67,8 @@ class debListParser : public pkgCacheGenerator::ListParser
virtual bool ArchitectureAll();
virtual std::string Version();
virtual bool NewVersion(pkgCache::VerIterator &Ver);
- virtual std::string Description();
- virtual std::string DescriptionLanguage();
+ virtual std::string Description(std::string const &lang);
+ virtual std::vector<std::string> AvailableDescriptionLanguages();
virtual MD5SumValue Description_md5();
virtual unsigned short VersionHash();
#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13)
@@ -76,8 +76,8 @@ class debListParser : public pkgCacheGenerator::ListParser
#endif
virtual bool UsePackage(pkgCache::PkgIterator &Pkg,
pkgCache::VerIterator &Ver);
- virtual unsigned long Offset() {return iOffset;};
- virtual unsigned long Size() {return Section.size();};
+ virtual map_filesize_t Offset() {return iOffset;};
+ virtual map_filesize_t Size() {return Section.size();};
virtual bool Step();
@@ -100,10 +100,18 @@ class debListParser : public pkgCacheGenerator::ListParser
static const char *ConvertRelation(const char *I,unsigned int &Op);
debListParser(FileFd *File, std::string const &Arch = "");
- virtual ~debListParser() {};
+ virtual ~debListParser();
+};
- private:
- APT_HIDDEN unsigned char ParseMultiArch(bool const showErrors);
+class debDebFileParser : public debListParser
+{
+ private:
+ std::string DebFile;
+
+ public:
+ debDebFileParser(FileFd *File, std::string const &DebFile);
+ virtual bool UsePackage(pkgCache::PkgIterator &Pkg,
+ pkgCache::VerIterator &Ver);
};
class debTranslationsParser : public debListParser
diff --git a/apt-pkg/deb/debmetaindex.cc b/apt-pkg/deb/debmetaindex.cc
index 6fd12add8..c103da8f7 100644
--- a/apt-pkg/deb/debmetaindex.cc
+++ b/apt-pkg/deb/debmetaindex.cc
@@ -186,8 +186,8 @@ debReleaseIndex::~debReleaseIndex() {
delete *S;
}
-vector <struct IndexTarget *>* debReleaseIndex::ComputeIndexTargets() const {
- vector <struct IndexTarget *>* IndexTargets = new vector <IndexTarget *>;
+vector <IndexTarget *>* debReleaseIndex::ComputeIndexTargets() const {
+ vector <IndexTarget *>* IndexTargets = new vector <IndexTarget *>;
map<string, vector<debSectionEntry const*> >::const_iterator const src = ArchEntries.find("source");
if (src != ArchEntries.end()) {
@@ -253,38 +253,44 @@ bool debReleaseIndex::GetIndexes(pkgAcquire *Owner, bool const &GetAll) const
{
bool const tryInRelease = _config->FindB("Acquire::TryInRelease", true);
+ indexRecords * const iR = new indexRecords(Dist);
+ if (Trusted == ALWAYS_TRUSTED)
+ iR->SetTrusted(true);
+ else if (Trusted == NEVER_TRUSTED)
+ iR->SetTrusted(false);
+
// special case for --print-uris
if (GetAll) {
- vector <struct IndexTarget *> *targets = ComputeIndexTargets();
- for (vector <struct IndexTarget*>::const_iterator Target = targets->begin(); Target != targets->end(); ++Target) {
+ vector <IndexTarget *> *targets = ComputeIndexTargets();
+ for (vector <IndexTarget*>::const_iterator Target = targets->begin(); Target != targets->end(); ++Target) {
new pkgAcqIndex(Owner, (*Target)->URI, (*Target)->Description,
- (*Target)->ShortDesc, HashString());
+ (*Target)->ShortDesc, HashStringList());
}
delete targets;
// this is normally created in pkgAcqMetaSig, but if we run
// in --print-uris mode, we add it here
if (tryInRelease == false)
- new pkgAcqMetaIndex(Owner, MetaIndexURI("Release"),
- MetaIndexInfo("Release"), "Release",
- MetaIndexURI("Release.gpg"),
- ComputeIndexTargets(),
- new indexRecords (Dist));
+ new pkgAcqMetaIndex(Owner, NULL,
+ MetaIndexURI("Release"),
+ MetaIndexInfo("Release"), "Release",
+ MetaIndexURI("Release.gpg"), MetaIndexInfo("Release.gpg"), "Release.gpg",
+ ComputeIndexTargets(),
+ iR);
}
-
if (tryInRelease == true)
- new pkgAcqMetaClearSig(Owner, MetaIndexURI("InRelease"),
- MetaIndexInfo("InRelease"), "InRelease",
+ new pkgAcqMetaClearSig(Owner,
+ MetaIndexURI("InRelease"), MetaIndexInfo("InRelease"), "InRelease",
MetaIndexURI("Release"), MetaIndexInfo("Release"), "Release",
MetaIndexURI("Release.gpg"), MetaIndexInfo("Release.gpg"), "Release.gpg",
ComputeIndexTargets(),
- new indexRecords (Dist));
+ iR);
else
- new pkgAcqMetaSig(Owner, MetaIndexURI("Release.gpg"),
- MetaIndexInfo("Release.gpg"), "Release.gpg",
- MetaIndexURI("Release"), MetaIndexInfo("Release"), "Release",
- ComputeIndexTargets(),
- new indexRecords (Dist));
+ new pkgAcqMetaIndex(Owner, NULL,
+ MetaIndexURI("Release"), MetaIndexInfo("Release"), "Release",
+ MetaIndexURI("Release.gpg"), MetaIndexInfo("Release.gpg"), "Release.gpg",
+ ComputeIndexTargets(),
+ iR);
return true;
}
@@ -471,6 +477,15 @@ class debSLTypeDebian : public pkgSourceList::Type
}
};
+debDebFileMetaIndex::debDebFileMetaIndex(std::string const &DebFile)
+ : metaIndex(DebFile, "local-uri", "deb-dist"), DebFile(DebFile)
+{
+ DebIndex = new debDebPkgFileIndex(DebFile);
+ Indexes = new vector<pkgIndexFile *>();
+ Indexes->push_back(DebIndex);
+}
+
+
class debSLTypeDeb : public debSLTypeDebian
{
public:
@@ -507,5 +522,25 @@ class debSLTypeDebSrc : public debSLTypeDebian
}
};
+class debSLTypeDebFile : public pkgSourceList::Type
+{
+ public:
+
+ bool CreateItem(vector<metaIndex *> &List, string const &URI,
+ string const &/*Dist*/, string const &/*Section*/,
+ std::map<string, string> const &/*Options*/) const
+ {
+ metaIndex *mi = new debDebFileMetaIndex(URI);
+ List.push_back(mi);
+ return true;
+ }
+
+ debSLTypeDebFile()
+ {
+ Name = "deb-file";
+ Label = "Debian Deb File";
+ }
+};
debSLTypeDeb _apt_DebType;
debSLTypeDebSrc _apt_DebSrcType;
+debSLTypeDebFile _apt_DebFileType;
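
With debSLTypeDebFile registered as _apt_DebFileType, a local archive can be turned into a metaIndex through the usual source-list type lookup. A hedged sketch, assuming pkgSourceList::Type::GetType(); the wrapper function is illustrative:

    #include <apt-pkg/sourcelist.h>
    #include <apt-pkg/metaindex.h>
    #include <map>
    #include <string>
    #include <vector>

    // Sketch: resolve the new "deb-file" type and let it create a
    // debDebFileMetaIndex for the given archive path.
    static bool AddLocalDeb(std::vector<metaIndex *> &List, std::string const &DebPath)
    {
       pkgSourceList::Type * const Type = pkgSourceList::Type::GetType("deb-file");
       if (Type == NULL)
          return false;
       std::map<std::string, std::string> const Options;
       return Type->CreateItem(List, DebPath, "", "", Options);
    }
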
diff --git a/apt-pkg/deb/debmetaindex.h b/apt-pkg/deb/debmetaindex.h
index 2286fa8b2..399543953 100644
--- a/apt-pkg/deb/debmetaindex.h
+++ b/apt-pkg/deb/debmetaindex.h
@@ -18,6 +18,8 @@
class pkgAcquire;
class pkgIndexFile;
+class debDebPkgFileIndex;
+class IndexTarget;
class debReleaseIndex : public metaIndex {
public:
@@ -34,7 +36,7 @@ class debReleaseIndex : public metaIndex {
/** \brief dpointer placeholder (for later in case we need it) */
void *d;
std::map<std::string, std::vector<debSectionEntry const*> > ArchEntries;
- enum { ALWAYS_TRUSTED, NEVER_TRUSTED, CHECK_TRUST } Trusted;
+ enum APT_HIDDEN { ALWAYS_TRUSTED, NEVER_TRUSTED, CHECK_TRUST } Trusted;
public:
@@ -44,7 +46,7 @@ class debReleaseIndex : public metaIndex {
virtual std::string ArchiveURI(std::string const &File) const {return URI + File;};
virtual bool GetIndexes(pkgAcquire *Owner, bool const &GetAll=false) const;
- std::vector <struct IndexTarget *>* ComputeIndexTargets() const;
+ std::vector <IndexTarget *>* ComputeIndexTargets() const;
std::string Info(const char *Type, std::string const &Section, std::string const &Arch="") const;
std::string MetaIndexInfo(const char *Type) const;
@@ -71,4 +73,27 @@ class debReleaseIndex : public metaIndex {
void PushSectionEntry(const debSectionEntry *Entry);
};
+class debDebFileMetaIndex : public metaIndex
+{
+ private:
+ std::string DebFile;
+ debDebPkgFileIndex *DebIndex;
+ public:
+ virtual std::string ArchiveURI(std::string const& /*File*/) const {
+ return DebFile;
+ }
+ virtual bool GetIndexes(pkgAcquire* /*Owner*/, const bool& /*GetAll=false*/) const {
+ return true;
+ }
+ virtual std::vector<pkgIndexFile *> *GetIndexFiles() {
+ return Indexes;
+ }
+ virtual bool IsTrusted() const {
+ return true;
+ }
+ debDebFileMetaIndex(std::string const &DebFile);
+ virtual ~debDebFileMetaIndex() {};
+
+};
+
#endif
diff --git a/apt-pkg/deb/debrecords.cc b/apt-pkg/deb/debrecords.cc
index 6063db5a8..b41aa5584 100644
--- a/apt-pkg/deb/debrecords.cc
+++ b/apt-pkg/deb/debrecords.cc
@@ -73,36 +73,17 @@ string debRecordParser::Homepage()
return Section.FindS("Homepage");
}
/*}}}*/
-// RecordParser::MD5Hash - Return the archive hash /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-string debRecordParser::MD5Hash()
+// RecordParser::Hashes - return the available archive hashes /*{{{*/
+HashStringList debRecordParser::Hashes() const
{
- return Section.FindS("MD5Sum");
-}
- /*}}}*/
-// RecordParser::SHA1Hash - Return the archive hash /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-string debRecordParser::SHA1Hash()
-{
- return Section.FindS("SHA1");
-}
- /*}}}*/
-// RecordParser::SHA256Hash - Return the archive hash /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-string debRecordParser::SHA256Hash()
-{
- return Section.FindS("SHA256");
-}
- /*}}}*/
-// RecordParser::SHA512Hash - Return the archive hash /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-string debRecordParser::SHA512Hash()
-{
- return Section.FindS("SHA512");
+ HashStringList hashes;
+ for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type)
+ {
+ std::string const hash = Section.FindS(*type);
+ if (hash.empty() == false)
+ hashes.push_back(HashString(*type, hash));
+ }
+ return hashes;
}
/*}}}*/
// RecordParser::Maintainer - Return the maintainer email /*{{{*/
@@ -125,10 +106,12 @@ string debRecordParser::RecordField(const char *fieldName)
// RecordParser::ShortDesc - Return a 1 line description /*{{{*/
// ---------------------------------------------------------------------
/* */
-string debRecordParser::ShortDesc()
+string debRecordParser::ShortDesc(std::string const &lang)
{
- string Res = LongDesc();
- string::size_type Pos = Res.find('\n');
+ string const Res = LongDesc(lang);
+ if (Res.empty() == true)
+ return "";
+ string::size_type const Pos = Res.find('\n');
if (Pos == string::npos)
return Res;
return string(Res,0,Pos);
@@ -137,26 +120,44 @@ string debRecordParser::ShortDesc()
// RecordParser::LongDesc - Return a longer description /*{{{*/
// ---------------------------------------------------------------------
/* */
-string debRecordParser::LongDesc()
-{
- string orig, dest;
+string debRecordParser::LongDesc(std::string const &lang)
+{
+ string orig;
+ if (lang.empty() == true)
+ {
+ std::vector<string> const lang = APT::Configuration::getLanguages();
+ for (std::vector<string>::const_iterator l = lang.begin();
+ l != lang.end(); ++l)
+ {
+ std::string const tagname = "Description-" + *l;
+ orig = Section.FindS(tagname.c_str());
+ if (orig.empty() == false)
+ break;
+ else if (*l == "en")
+ {
+ orig = Section.FindS("Description");
+ if (orig.empty() == false)
+ break;
+ }
+ }
+ if (orig.empty() == true)
+ orig = Section.FindS("Description");
+ }
+ else
+ {
+ std::string const tagname = "Description-" + lang;
+ orig = Section.FindS(tagname.c_str());
+ if (orig.empty() == true && lang == "en")
+ orig = Section.FindS("Description");
+ }
- if (!Section.FindS("Description").empty())
- orig = Section.FindS("Description").c_str();
- else
- {
- std::vector<string> const lang = APT::Configuration::getLanguages();
- for (std::vector<string>::const_iterator l = lang.begin();
- orig.empty() && l != lang.end(); ++l)
- orig = Section.FindS(string("Description-").append(*l).c_str());
- }
+ char const * const codeset = nl_langinfo(CODESET);
+ if (strcmp(codeset,"UTF-8") != 0) {
+ string dest;
+ UTF8ToCodeset(codeset, orig, &dest);
+ return dest;
+ }
- char const * const codeset = nl_langinfo(CODESET);
- if (strcmp(codeset,"UTF-8") != 0) {
- UTF8ToCodeset(codeset, orig, &dest);
- orig = dest;
- }
-
return orig;
}
/*}}}*/
@@ -206,3 +207,5 @@ void debRecordParser::GetRec(const char *&Start,const char *&Stop)
Section.GetSection(Start,Stop);
}
/*}}}*/
+
+debRecordParser::~debRecordParser() {}
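
The per-type accessors (MD5Hash, SHA1Hash, SHA256Hash, SHA512Hash) are folded into Hashes(), which returns every checksum field found in the record. A short, illustrative consumer of that list:

    #include <apt-pkg/hashes.h>
    #include <iostream>

    // Sketch: walk whatever hashes the record offered instead of asking
    // for one fixed type; toStr() yields the usual "type:value" form.
    static void PrintRecordHashes(HashStringList const &hashes)
    {
       for (HashStringList::const_iterator hs = hashes.begin(); hs != hashes.end(); ++hs)
          std::cout << hs->toStr() << std::endl;
    }
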
diff --git a/apt-pkg/deb/debrecords.h b/apt-pkg/deb/debrecords.h
index bdac6c90b..6b5f94334 100644
--- a/apt-pkg/deb/debrecords.h
+++ b/apt-pkg/deb/debrecords.h
@@ -29,31 +29,28 @@ class debRecordParser : public pkgRecords::Parser
{
/** \brief dpointer placeholder (for later in case we need it) */
void *d;
-
+
+ protected:
FileFd File;
pkgTagFile Tags;
pkgTagSection Section;
- protected:
-
virtual bool Jump(pkgCache::VerFileIterator const &Ver);
virtual bool Jump(pkgCache::DescFileIterator const &Desc);
- public:
+ public:
// These refer to the archive file for the Version
virtual std::string FileName();
- virtual std::string MD5Hash();
- virtual std::string SHA1Hash();
- virtual std::string SHA256Hash();
- virtual std::string SHA512Hash();
virtual std::string SourcePkg();
virtual std::string SourceVer();
-
+
+ virtual HashStringList Hashes() const;
+
// These are some general stats about the package
virtual std::string Maintainer();
- virtual std::string ShortDesc();
- virtual std::string LongDesc();
+ virtual std::string ShortDesc(std::string const &lang);
+ virtual std::string LongDesc(std::string const &lang);
virtual std::string Name();
virtual std::string Homepage();
@@ -63,7 +60,18 @@ class debRecordParser : public pkgRecords::Parser
virtual void GetRec(const char *&Start,const char *&Stop);
debRecordParser(std::string FileName,pkgCache &Cache);
- virtual ~debRecordParser() {};
+ virtual ~debRecordParser();
+};
+
+// custom record parser that reads deb files directly
+class debDebFileRecordParser : public debRecordParser
+{
+ public:
+ virtual std::string FileName() {
+ return File.Name();
+ }
+ debDebFileRecordParser(std::string FileName,pkgCache &Cache)
+ : debRecordParser(FileName, Cache) {};
};
#endif
diff --git a/apt-pkg/deb/debsrcrecords.cc b/apt-pkg/deb/debsrcrecords.cc
index a444cbe4d..97f43aca2 100644
--- a/apt-pkg/deb/debsrcrecords.cc
+++ b/apt-pkg/deb/debsrcrecords.cc
@@ -18,6 +18,8 @@
#include <apt-pkg/aptconfiguration.h>
#include <apt-pkg/srcrecords.h>
#include <apt-pkg/tagfile.h>
+#include <apt-pkg/hashes.h>
+#include <apt-pkg/gpgv.h>
#include <ctype.h>
#include <stdlib.h>
@@ -55,12 +57,13 @@ const char **debSrcRecordParser::Binaries()
char* binStartNext = strchrnul(bin, ',');
char* binEnd = binStartNext - 1;
for (; isspace(*binEnd) != 0; --binEnd)
- binEnd = '\0';
+ binEnd = 0;
StaticBinList.push_back(bin);
if (*binStartNext != ',')
break;
*binStartNext = '\0';
- for (bin = binStartNext + 1; isspace(*bin) != 0; ++bin);
+ for (bin = binStartNext + 1; isspace(*bin) != 0; ++bin)
+ ;
} while (*bin != '\0');
StaticBinList.push_back(NULL);
@@ -121,10 +124,6 @@ bool debSrcRecordParser::BuildDepends(std::vector<pkgSrcRecords::Parser::BuildDe
bool debSrcRecordParser::Files(std::vector<pkgSrcRecords::File> &List)
{
List.erase(List.begin(),List.end());
-
- string Files = Sect.FindS("Files");
- if (Files.empty() == true)
- return false;
// Stash the / terminated directory prefix
string Base = Sect.FindS("Directory");
@@ -133,51 +132,105 @@ bool debSrcRecordParser::Files(std::vector<pkgSrcRecords::File> &List)
std::vector<std::string> const compExts = APT::Configuration::getCompressorExtensions();
- // Iterate over the entire list grabbing each triplet
- const char *C = Files.c_str();
- while (*C != 0)
- {
- pkgSrcRecords::File F;
- string Size;
-
- // Parse each of the elements
- if (ParseQuoteWord(C,F.MD5Hash) == false ||
- ParseQuoteWord(C,Size) == false ||
- ParseQuoteWord(C,F.Path) == false)
- return _error->Error("Error parsing file record");
-
- // Parse the size and append the directory
- F.Size = atoi(Size.c_str());
- F.Path = Base + F.Path;
-
- // Try to guess what sort of file it is we are getting.
- string::size_type Pos = F.Path.length()-1;
- while (1)
+ for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type)
+ {
+ // derive field from checksum type
+ std::string checksumField("Checksums-");
+ if (strcmp(*type, "MD5Sum") == 0)
+ checksumField = "Files"; // historic name for MD5 checksums
+ else
+ checksumField.append(*type);
+
+ string const Files = Sect.FindS(checksumField.c_str());
+ if (Files.empty() == true)
+ continue;
+
+ // Iterate over the entire list grabbing each triplet
+ const char *C = Files.c_str();
+ while (*C != 0)
{
- string::size_type Tmp = F.Path.rfind('.',Pos);
- if (Tmp == string::npos)
- break;
- if (F.Type == "tar") {
- // source v3 has extension 'debian.tar.*' instead of 'diff.*'
- if (string(F.Path, Tmp+1, Pos-Tmp) == "debian")
- F.Type = "diff";
- break;
- }
- F.Type = string(F.Path,Tmp+1,Pos-Tmp);
-
- if (std::find(compExts.begin(), compExts.end(), std::string(".").append(F.Type)) != compExts.end() ||
- F.Type == "tar")
+ string hash, size, path;
+
+ // Parse each of the elements
+ if (ParseQuoteWord(C, hash) == false ||
+ ParseQuoteWord(C, size) == false ||
+ ParseQuoteWord(C, path) == false)
+ return _error->Error("Error parsing file record in %s of source package %s", checksumField.c_str(), Package().c_str());
+
+ HashString const hashString(*type, hash);
+ if (Base.empty() == false)
+ path = Base + path;
+
+ // look if we have a record for this file already
+ std::vector<pkgSrcRecords::File>::iterator file = List.begin();
+ for (; file != List.end(); ++file)
+ if (file->Path == path)
+ break;
+
+ // we have it already, store the new hash and be done
+ if (file != List.end())
{
- Pos = Tmp-1;
+#if __GNUC__ >= 4
+ // set for compatibility only, so warn users not us
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
+ if (checksumField == "Files")
+ file->MD5Hash = hash;
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
+ // an error here indicates that we have two different hashes for the same file
+ if (file->Hashes.push_back(hashString) == false)
+ return _error->Error("Error parsing checksum in %s of source package %s", checksumField.c_str(), Package().c_str());
continue;
}
-
- break;
+
+ // we haven't seen this file yet
+ pkgSrcRecords::File F;
+ F.Path = path;
+ F.Size = strtoull(size.c_str(), NULL, 10);
+ F.Hashes.push_back(hashString);
+
+#if __GNUC__ >= 4
+ // set for compatibility only, so warn users not us
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
+ if (checksumField == "Files")
+ F.MD5Hash = hash;
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
+
+ // Try to guess what sort of file it is we are getting.
+ string::size_type Pos = F.Path.length()-1;
+ while (1)
+ {
+ string::size_type Tmp = F.Path.rfind('.',Pos);
+ if (Tmp == string::npos)
+ break;
+ if (F.Type == "tar") {
+ // source v3 has extension 'debian.tar.*' instead of 'diff.*'
+ if (string(F.Path, Tmp+1, Pos-Tmp) == "debian")
+ F.Type = "diff";
+ break;
+ }
+ F.Type = string(F.Path,Tmp+1,Pos-Tmp);
+
+ if (std::find(compExts.begin(), compExts.end(), std::string(".").append(F.Type)) != compExts.end() ||
+ F.Type == "tar")
+ {
+ Pos = Tmp-1;
+ continue;
+ }
+
+ break;
+ }
+ List.push_back(F);
}
-
- List.push_back(F);
}
-
+
return true;
}
/*}}}*/
@@ -190,3 +243,21 @@ debSrcRecordParser::~debSrcRecordParser()
free(Buffer);
}
/*}}}*/
+
+
+debDscRecordParser::debDscRecordParser(std::string const &DscFile, pkgIndexFile const *Index)
+ : debSrcRecordParser(DscFile, Index)
+{
+ // support clear signed files
+ if (OpenMaybeClearSignedFile(DscFile, Fd) == false)
+ {
+ _error->Error("Failed to open %s", DscFile.c_str());
+ return;
+ }
+
+ // re-init to ensure the updated Fd is used
+ Tags.Init(&Fd);
+ // read the first (and only) record
+ Step();
+
+}
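
Files() now merges the historic "Files" stanza with the Checksums-* fields, so each pkgSrcRecords::File keeps one path, one size and a HashStringList covering all hash types seen. An illustrative consumer, not part of the patch:

    #include <apt-pkg/srcrecords.h>
    #include <apt-pkg/hashes.h>
    #include <iostream>
    #include <vector>

    // Sketch: every entry carries all hashes parsed for that path above.
    static void PrintSourceFiles(std::vector<pkgSrcRecords::File> const &List)
    {
       for (std::vector<pkgSrcRecords::File>::const_iterator F = List.begin(); F != List.end(); ++F)
       {
          std::cout << F->Path << " (" << F->Size << " bytes)" << std::endl;
          for (HashStringList::const_iterator hs = F->Hashes.begin(); hs != F->Hashes.end(); ++hs)
             std::cout << "   " << hs->toStr() << std::endl;
       }
    }
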
diff --git a/apt-pkg/deb/debsrcrecords.h b/apt-pkg/deb/debsrcrecords.h
index b65d1480b..a0a151875 100644
--- a/apt-pkg/deb/debsrcrecords.h
+++ b/apt-pkg/deb/debsrcrecords.h
@@ -26,6 +26,7 @@ class debSrcRecordParser : public pkgSrcRecords::Parser
/** \brief dpointer placeholder (for later in case we need it) */
void *d;
+ protected:
FileFd Fd;
pkgTagFile Tags;
pkgTagSection Sect;
@@ -60,4 +61,10 @@ class debSrcRecordParser : public pkgSrcRecords::Parser
virtual ~debSrcRecordParser();
};
+class debDscRecordParser : public debSrcRecordParser
+{
+ public:
+ debDscRecordParser(std::string const &DscFile, pkgIndexFile const *Index);
+};
+
#endif
diff --git a/apt-pkg/deb/debsystem.h b/apt-pkg/deb/debsystem.h
index a945f68fb..226cd60bf 100644
--- a/apt-pkg/deb/debsystem.h
+++ b/apt-pkg/deb/debsystem.h
@@ -29,7 +29,7 @@ class debSystem : public pkgSystem
{
// private d-pointer
debSystemPrivate *d;
- bool CheckUpdates();
+ APT_HIDDEN bool CheckUpdates();
public:
diff --git a/apt-pkg/deb/dpkgpm.cc b/apt-pkg/deb/dpkgpm.cc
index 04a13a86c..95fae9a28 100644
--- a/apt-pkg/deb/dpkgpm.cc
+++ b/apt-pkg/deb/dpkgpm.cc
@@ -517,7 +517,7 @@ bool pkgDPkgPM::RunScriptsWithPkgs(const char *Cnf)
void pkgDPkgPM::DoStdin(int master)
{
unsigned char input_buf[256] = {0,};
- ssize_t len = read(0, input_buf, sizeof(input_buf));
+ ssize_t len = read(STDIN_FILENO, input_buf, sizeof(input_buf));
if (len)
FileFd::Write(master, input_buf, len);
else
@@ -1034,7 +1034,6 @@ void pkgDPkgPM::BuildPackagesProgressMap()
}
}
/*}}}*/
-#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR < 13)
bool pkgDPkgPM::Go(int StatusFd)
{
APT::Progress::PackageManager *progress = NULL;
@@ -1043,9 +1042,8 @@ bool pkgDPkgPM::Go(int StatusFd)
else
progress = new APT::Progress::PackageManagerProgressFd(StatusFd);
- return GoNoABIBreak(progress);
+ return Go(progress);
}
-#endif
void pkgDPkgPM::StartPtyMagic()
{
@@ -1184,11 +1182,7 @@ void pkgDPkgPM::StopPtyMagic()
* through to human readable (and i10n-able)
* names and calculates a percentage for each step.
*/
-#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13)
bool pkgDPkgPM::Go(APT::Progress::PackageManager *progress)
-#else
-bool pkgDPkgPM::GoNoABIBreak(APT::Progress::PackageManager *progress)
-#endif
{
pkgPackageManager::SigINTStop = false;
d->progress = progress;
@@ -1760,11 +1754,6 @@ void pkgDPkgPM::WriteApportReport(const char *pkgpath, const char *errormsg)
if (Ver.end() == true)
return;
pkgver = Ver.VerStr() == NULL ? "unknown" : Ver.VerStr();
- pkgRecords Recs(Cache);
- pkgRecords::Parser &Parse = Recs.Lookup(Ver.FileList());
- srcpkgname = Parse.SourcePkg();
- if(srcpkgname.empty())
- srcpkgname = pkgname;
// if the file exists already, we check:
// - if it was reported already (touched by apport).
@@ -1815,7 +1804,7 @@ void pkgDPkgPM::WriteApportReport(const char *pkgpath, const char *errormsg)
time_t now = time(NULL);
fprintf(report, "Date: %s" , ctime(&now));
fprintf(report, "Package: %s %s\n", pkgname.c_str(), pkgver.c_str());
- fprintf(report, "SourcePackage: %s\n", srcpkgname.c_str());
+ fprintf(report, "SourcePackage: %s\n", Ver.SourcePkgName());
fprintf(report, "ErrorMessage:\n %s\n", errormsg);
// ensure that the log is flushed
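
The apport report writer now reads the source package name straight from the version structure via Ver.SourcePkgName() instead of opening the package records again. A trivial, illustrative wrapper around that accessor:

    #include <apt-pkg/pkgcache.h>
    #include <string>

    // Sketch: SourcePkgName() returns a C string stored in the cache.
    static std::string SourceOf(pkgCache::VerIterator const &Ver)
    {
       char const * const src = Ver.SourcePkgName();
       return src == NULL ? std::string() : std::string(src);
    }
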
diff --git a/apt-pkg/deb/dpkgpm.h b/apt-pkg/deb/dpkgpm.h
index 2c1805015..2a6e7e004 100644
--- a/apt-pkg/deb/dpkgpm.h
+++ b/apt-pkg/deb/dpkgpm.h
@@ -52,7 +52,7 @@ class pkgDPkgPM : public pkgPackageManager
needs to declare a Replaces on the disappeared package.
\param pkgname Name of the package that disappeared
*/
- void handleDisappearAction(std::string const &pkgname);
+ APT_HIDDEN void handleDisappearAction(std::string const &pkgname);
protected:
int pkgFailures;
@@ -118,27 +118,14 @@ class pkgDPkgPM : public pkgPackageManager
void DoTerminalPty(int master);
void DoDpkgStatusFd(int statusfd);
void ProcessDpkgStatusLine(char *line);
-#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR < 13)
- void DoDpkgStatusFd(int statusfd, int /*unused*/) {
- DoDpkgStatusFd(statusfd);
- }
- void ProcessDpkgStatusLine(int /*unused*/, char *line) {
- ProcessDpkgStatusLine(line);
- }
-#endif
-
   // The actual installation implementation
virtual bool Install(PkgIterator Pkg,std::string File);
virtual bool Configure(PkgIterator Pkg);
virtual bool Remove(PkgIterator Pkg,bool Purge = false);
-#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13)
virtual bool Go(APT::Progress::PackageManager *progress);
-#else
virtual bool Go(int StatusFd=-1);
- bool GoNoABIBreak(APT::Progress::PackageManager *progress);
-#endif
virtual void Reset();
diff --git a/apt-pkg/depcache.cc b/apt-pkg/depcache.cc
index 42e31396b..1332f3ee2 100644
--- a/apt-pkg/depcache.cc
+++ b/apt-pkg/depcache.cc
@@ -1226,7 +1226,7 @@ bool pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst,
continue;
}
// now check if we should consider it a automatic dependency or not
- if(InstPkg->CurrentVer == 0 && Pkg->Section != 0 && ConfigValueInSubTree("APT::Never-MarkAuto-Sections", Pkg.Section()))
+ if(InstPkg->CurrentVer == 0 && InstVer->Section != 0 && ConfigValueInSubTree("APT::Never-MarkAuto-Sections", InstVer.Section()))
{
if(DebugAutoInstall == true)
std::clog << OutputInDepth(Depth) << "Setting NOT as auto-installed (direct "
@@ -1961,3 +1961,17 @@ bool pkgDepCache::Sweep() /*{{{*/
return true;
}
/*}}}*/
+// DepCache::MarkAndSweep /*{{{*/
+bool pkgDepCache::MarkAndSweep(InRootSetFunc &rootFunc)
+{
+ return MarkRequired(rootFunc) && Sweep();
+}
+bool pkgDepCache::MarkAndSweep()
+{
+ std::auto_ptr<InRootSetFunc> f(GetRootSetFunc());
+ if(f.get() != NULL)
+ return MarkAndSweep(*f.get());
+ else
+ return false;
+}
+ /*}}}*/
diff --git a/apt-pkg/depcache.h b/apt-pkg/depcache.h
index bec651279..5554e1a69 100644
--- a/apt-pkg/depcache.h
+++ b/apt-pkg/depcache.h
@@ -91,7 +91,7 @@ class pkgDepCache : protected pkgCache::Namespace
* \param follow_suggests If \b true, suggestions of the package
* will be recursively marked.
*/
- void MarkPackage(const pkgCache::PkgIterator &pkg,
+ APT_HIDDEN void MarkPackage(const pkgCache::PkgIterator &pkg,
const pkgCache::VerIterator &ver,
bool const &follow_recommends,
bool const &follow_suggests);
@@ -109,7 +109,7 @@ class pkgDepCache : protected pkgCache::Namespace
*
* \return \b false if an error occurred.
*/
- bool MarkRequired(InRootSetFunc &rootFunc);
+ APT_HIDDEN bool MarkRequired(InRootSetFunc &rootFunc);
/** \brief Set the StateCache::Garbage flag on all packages that
* should be removed.
@@ -120,7 +120,7 @@ class pkgDepCache : protected pkgCache::Namespace
*
* \return \b false if an error occurred.
*/
- bool Sweep();
+ APT_HIDDEN bool Sweep();
public:
@@ -169,7 +169,7 @@ class pkgDepCache : protected pkgCache::Namespace
bool released;
/** Action groups are noncopyable. */
- ActionGroup(const ActionGroup &other);
+ APT_HIDDEN ActionGroup(const ActionGroup &other);
public:
/** \brief Create a new ActionGroup.
*
@@ -396,19 +396,8 @@ class pkgDepCache : protected pkgCache::Namespace
* \param rootFunc A predicate that returns \b true for packages
* that should be added to the root set.
*/
- bool MarkAndSweep(InRootSetFunc &rootFunc)
- {
- return MarkRequired(rootFunc) && Sweep();
- }
-
- bool MarkAndSweep()
- {
- std::auto_ptr<InRootSetFunc> f(GetRootSetFunc());
- if(f.get() != NULL)
- return MarkAndSweep(*f.get());
- else
- return false;
- }
+ bool MarkAndSweep(InRootSetFunc &rootFunc);
+ bool MarkAndSweep();
/** \name State Manipulators
*/
@@ -514,7 +503,7 @@ class pkgDepCache : protected pkgCache::Namespace
bool const rPurge, unsigned long const Depth, bool const FromUser);
private:
- bool IsModeChangeOk(ModeList const mode, PkgIterator const &Pkg,
+ APT_HIDDEN bool IsModeChangeOk(ModeList const mode, PkgIterator const &Pkg,
unsigned long const Depth, bool const FromUser);
};
diff --git a/apt-pkg/edsp.cc b/apt-pkg/edsp.cc
index 0d0418e06..2ba914b16 100644
--- a/apt-pkg/edsp.cc
+++ b/apt-pkg/edsp.cc
@@ -95,12 +95,8 @@ bool EDSP::WriteLimitedScenario(pkgDepCache &Cache, FILE* output,
void EDSP::WriteScenarioVersion(pkgDepCache &Cache, FILE* output, pkgCache::PkgIterator const &Pkg,
pkgCache::VerIterator const &Ver)
{
- pkgRecords Recs(Cache);
- pkgRecords::Parser &rec = Recs.Lookup(Ver.FileList());
- string srcpkg = rec.SourcePkg().empty() ? Pkg.Name() : rec.SourcePkg();
-
fprintf(output, "Package: %s\n", Pkg.Name());
- fprintf(output, "Source: %s\n", srcpkg.c_str());
+ fprintf(output, "Source: %s\n", Ver.SourcePkgName());
fprintf(output, "Architecture: %s\n", Ver.Arch());
fprintf(output, "Version: %s\n", Ver.VerStr());
if (Pkg.CurrentVer() == Ver)
diff --git a/apt-pkg/edsp/edspindexfile.cc b/apt-pkg/edsp/edspindexfile.cc
index 10313fd61..c38f24567 100644
--- a/apt-pkg/edsp/edspindexfile.cc
+++ b/apt-pkg/edsp/edspindexfile.cc
@@ -56,7 +56,7 @@ bool edspIndex::Merge(pkgCacheGenerator &Gen,OpProgress *Prog) const
pkgCache::PkgFileIterator CFile = Gen.GetCurFile();
CFile->Size = Pkg.FileSize();
CFile->mtime = Pkg.ModificationTime();
- map_ptrloc const storage = Gen.WriteUniqString("edsp::scenario");
+ map_stringitem_t const storage = Gen.StoreString(pkgCacheGenerator::MIXED, "edsp::scenario");
CFile->Archive = storage;
if (Gen.MergeList(Parser) == false)
diff --git a/apt-pkg/indexcopy.cc b/apt-pkg/indexcopy.cc
index 854ba1bd7..5fa57fd8b 100644
--- a/apt-pkg/indexcopy.cc
+++ b/apt-pkg/indexcopy.cc
@@ -516,7 +516,7 @@ bool SigVerify::Verify(string prefix, string file, indexRecords *MetaIndex)
return false;
}
- if (!Record->Hash.VerifyFile(prefix+file))
+ if (!Record->Hashes.VerifyFile(prefix+file))
{
_error->Warning(_("Hash mismatch for: %s"),file.c_str());
return false;
@@ -524,8 +524,10 @@ bool SigVerify::Verify(string prefix, string file, indexRecords *MetaIndex)
if(Debug == true)
{
- cout << "File: " << prefix+file << endl;
- cout << "Expected Hash " << Record->Hash.toStr() << endl;
+ cout << "File: " << prefix+file << endl
+ << "Expected Hash " << endl;
+ for (HashStringList::const_iterator hs = Record->Hashes.begin(); hs != Record->Hashes.end(); ++hs)
+ std::cout << "\t- " << hs->toStr() << std::endl;
}
return true;
@@ -791,3 +793,5 @@ bool TranslationsCopy::CopyTranslations(string CDROM,string Name, /*{{{*/
return true;
}
/*}}}*/
+
+APT_CONST IndexCopy::~IndexCopy() {}
diff --git a/apt-pkg/indexcopy.h b/apt-pkg/indexcopy.h
index 43cdb3f0a..701beb075 100644
--- a/apt-pkg/indexcopy.h
+++ b/apt-pkg/indexcopy.h
@@ -53,7 +53,7 @@ class IndexCopy /*{{{*/
bool CopyPackages(std::string CDROM,std::string Name,std::vector<std::string> &List,
pkgCdromStatus *log);
- virtual ~IndexCopy() {};
+ virtual ~IndexCopy();
};
/*}}}*/
class PackageCopy : public IndexCopy /*{{{*/
@@ -93,8 +93,8 @@ class SigVerify /*{{{*/
/** \brief dpointer placeholder (for later in case we need it) */
void *d;
- bool Verify(std::string prefix,std::string file, indexRecords *records);
- bool CopyMetaIndex(std::string CDROM, std::string CDName,
+ APT_HIDDEN bool Verify(std::string prefix,std::string file, indexRecords *records);
+ APT_HIDDEN bool CopyMetaIndex(std::string CDROM, std::string CDName,
std::string prefix, std::string file);
public:
diff --git a/apt-pkg/indexfile.h b/apt-pkg/indexfile.h
index b5c9ac77e..817165f08 100644
--- a/apt-pkg/indexfile.h
+++ b/apt-pkg/indexfile.h
@@ -59,6 +59,7 @@ class pkgIndexFile
const char *Label;
virtual pkgRecords::Parser *CreatePkgParser(pkgCache::PkgFileIterator /*File*/) const {return 0;};
+ virtual pkgSrcRecords::Parser *CreateSrcPkgParser(std::string /*File*/) const {return 0;};
Type();
virtual ~Type() {};
};
diff --git a/apt-pkg/indexrecords.cc b/apt-pkg/indexrecords.cc
index 5353d1098..bf1901e11 100644
--- a/apt-pkg/indexrecords.cc
+++ b/apt-pkg/indexrecords.cc
@@ -37,6 +37,11 @@ APT_PURE string indexRecords::GetSuite() const
return this->Suite;
}
+APT_PURE bool indexRecords::GetSupportsAcquireByHash() const
+{
+ return this->SupportsAcquireByHash;
+}
+
APT_PURE bool indexRecords::CheckDist(const string MaybeDist) const
{
return (this->Dist == MaybeDist
@@ -53,7 +58,7 @@ APT_PURE time_t indexRecords::GetValidUntil() const
return this->ValidUntil;
}
-APT_PURE const indexRecords::checkSum *indexRecords::Lookup(const string MetaKey)
+APT_PURE indexRecords::checkSum *indexRecords::Lookup(const string MetaKey)
{
std::map<std::string, indexRecords::checkSum* >::const_iterator sum = Entries.find(MetaKey);
if (sum == Entries.end())
@@ -86,12 +91,14 @@ bool indexRecords::Load(const string Filename) /*{{{*/
strprintf(ErrorText, _("No sections in Release file %s"), Filename.c_str());
return false;
}
+ // FIXME: find better tag name
+ SupportsAcquireByHash = Section.FindB("Acquire-By-Hash", false);
Suite = Section.FindS("Suite");
Dist = Section.FindS("Codename");
- int i;
- for (i=0;HashString::SupportedHashes()[i] != NULL; i++)
+ bool FoundHashSum = false;
+ for (int i=0;HashString::SupportedHashes()[i] != NULL; i++)
{
if (!Section.Find(HashString::SupportedHashes()[i], Start, End))
continue;
@@ -103,16 +110,28 @@ bool indexRecords::Load(const string Filename) /*{{{*/
{
if (!parseSumData(Start, End, Name, Hash, Size))
return false;
- indexRecords::checkSum *Sum = new indexRecords::checkSum;
- Sum->MetaKeyFilename = Name;
- Sum->Hash = HashString(HashString::SupportedHashes()[i],Hash);
- Sum->Size = Size;
- Entries[Name] = Sum;
+
+ if (Entries.find(Name) == Entries.end())
+ {
+ indexRecords::checkSum *Sum = new indexRecords::checkSum;
+ Sum->MetaKeyFilename = Name;
+ Sum->Size = Size;
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
+ Sum->Hash = HashString(HashString::SupportedHashes()[i],Hash);
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
+ Entries[Name] = Sum;
+ }
+ Entries[Name]->Hashes.push_back(HashString(HashString::SupportedHashes()[i],Hash));
+ FoundHashSum = true;
}
- break;
}
- if(HashString::SupportedHashes()[i] == NULL)
+ if(FoundHashSum == false)
{
strprintf(ErrorText, _("No Hash entry in Release file %s"), Filename.c_str());
return false;
@@ -234,11 +253,31 @@ bool indexRecords::parseSumData(const char *&Start, const char *End, /*{{{*/
return true;
}
/*}}}*/
-indexRecords::indexRecords()
+
+APT_PURE bool indexRecords::IsAlwaysTrusted() const
+{
+ if (Trusted == ALWAYS_TRUSTED)
+ return true;
+ return false;
+}
+APT_PURE bool indexRecords::IsNeverTrusted() const
+{
+ if (Trusted == NEVER_TRUSTED)
+ return true;
+ return false;
+}
+void indexRecords::SetTrusted(bool const Trusted)
{
+ if (Trusted == true)
+ this->Trusted = ALWAYS_TRUSTED;
+ else
+ this->Trusted = NEVER_TRUSTED;
}
-indexRecords::indexRecords(const string ExpectedDist) :
- ExpectedDist(ExpectedDist), ValidUntil(0)
+indexRecords::indexRecords(const string &ExpectedDist) :
+ Trusted(CHECK_TRUST), d(NULL), ExpectedDist(ExpectedDist), ValidUntil(0),
+ SupportsAcquireByHash(false)
{
}
+
+indexRecords::~indexRecords() {}
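
SetTrusted() pins the records to ALWAYS_TRUSTED or NEVER_TRUSTED; if it is never called, the constructor default of CHECK_TRUST stays in effect. A hedged sketch of the plumbing debReleaseIndex::GetIndexes() performs (helper name illustrative):

    #include <apt-pkg/indexrecords.h>
    #include <string>

    // Sketch: decide the trust level once and hand it to the records object.
    static indexRecords *MakeRecords(std::string const &Dist, bool const HaveExplicitTrust, bool const Trusted)
    {
       indexRecords * const iR = new indexRecords(Dist);
       if (HaveExplicitTrust == true)
          iR->SetTrusted(Trusted);   // otherwise it stays at CHECK_TRUST
       return iR;
    }
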
diff --git a/apt-pkg/indexrecords.h b/apt-pkg/indexrecords.h
index e31f889ad..88a06779c 100644
--- a/apt-pkg/indexrecords.h
+++ b/apt-pkg/indexrecords.h
@@ -21,45 +21,76 @@
class indexRecords
{
- bool parseSumData(const char *&Start, const char *End, std::string &Name,
+ APT_HIDDEN bool parseSumData(const char *&Start, const char *End, std::string &Name,
std::string &Hash, unsigned long long &Size);
public:
struct checkSum;
std::string ErrorText;
-
+
+ private:
+ enum APT_HIDDEN { ALWAYS_TRUSTED, NEVER_TRUSTED, CHECK_TRUST } Trusted;
+ // dpointer (for later)
+ void * d;
+
protected:
std::string Dist;
std::string Suite;
std::string ExpectedDist;
time_t ValidUntil;
+ bool SupportsAcquireByHash;
std::map<std::string,checkSum *> Entries;
public:
- indexRecords();
- indexRecords(const std::string ExpectedDist);
+ indexRecords(const std::string &ExpectedDist = "");
// Lookup function
- virtual const checkSum *Lookup(const std::string MetaKey);
+ virtual checkSum *Lookup(const std::string MetaKey);
/** \brief tests if a checksum for this file is available */
bool Exists(std::string const &MetaKey) const;
std::vector<std::string> MetaKeys();
virtual bool Load(std::string Filename);
+ virtual bool CheckDist(const std::string MaybeDist) const;
+
std::string GetDist() const;
std::string GetSuite() const;
+ bool GetSupportsAcquireByHash() const;
time_t GetValidUntil() const;
- virtual bool CheckDist(const std::string MaybeDist) const;
std::string GetExpectedDist() const;
- virtual ~indexRecords(){};
+
+ /** \brief check if source is marked as always trusted */
+ bool IsAlwaysTrusted() const;
+ /** \brief check if source is marked as never trusted */
+ bool IsNeverTrusted() const;
+
+ /** \brief sets an explicit trust value
+ *
+ * \b true means that the source should always be considered trusted,
+ * while \b false marks a source as always untrusted, even if we have
+ * a valid signature and everything.
+ */
+ void SetTrusted(bool const Trusted);
+
+ virtual ~indexRecords();
};
+#if __GNUC__ >= 4
+ // ensure that con- & de-structor don't trigger this warning
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
struct indexRecords::checkSum
{
std::string MetaKeyFilename;
- HashString Hash;
+ HashStringList Hashes;
unsigned long long Size;
+
+ APT_DEPRECATED HashString Hash;
};
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
#endif
diff --git a/apt-pkg/init.cc b/apt-pkg/init.cc
index 241628632..d04c51621 100644
--- a/apt-pkg/init.cc
+++ b/apt-pkg/init.cc
@@ -88,9 +88,19 @@ bool pkgInitConfig(Configuration &Cnf)
Cnf.Set("Dir::Ignore-Files-Silently::", "\\.orig$");
Cnf.Set("Dir::Ignore-Files-Silently::", "\\.distUpgrade$");
+ // Repository security
+   // FIXME: this is set to "true" for backward compatibility, once
+ // jessie is out we want to change this to "false" to
+ // improve security
+ Cnf.CndSet("Acquire::AllowInsecureRepositories", true);
+ Cnf.CndSet("Acquire::AllowDowngradeToInsecureRepositories", false);
+
// Default cdrom mount point
Cnf.CndSet("Acquire::cdrom::mount", "/media/cdrom/");
+ // The default user we drop to in the methods
+ Cnf.CndSet("APT::Sandbox::User", "_apt");
+
bool Res = true;
// Read an alternate config file
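
These defaults are plain configuration items and can be queried like any other option. A minimal sketch of how a method might consult them (function names are illustrative, not part of the patch):

    #include <apt-pkg/configuration.h>
    #include <string>

    // Sketch: read the security and sandbox defaults seeded by pkgInitConfig().
    static bool AllowInsecureRepositories()
    {
       return _config->FindB("Acquire::AllowInsecureRepositories", true);
    }

    static std::string SandboxUser()
    {
       return _config->Find("APT::Sandbox::User", "_apt");
    }
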
diff --git a/apt-pkg/install-progress.cc b/apt-pkg/install-progress.cc
index cf6c85912..5ea8bf4d0 100644
--- a/apt-pkg/install-progress.cc
+++ b/apt-pkg/install-progress.cc
@@ -21,6 +21,8 @@
namespace APT {
namespace Progress {
+PackageManager::PackageManager() : d(NULL), percentage(0.0), last_reported_progress(-1) {}
+PackageManager::~PackageManager() {}
/* Return a APT::Progress::PackageManager based on the global
* apt configuration (i.e. APT::Status-Fd and APT::Status-deb822-Fd)
diff --git a/apt-pkg/install-progress.h b/apt-pkg/install-progress.h
index 5d1a20e9b..d8b4a5c82 100644
--- a/apt-pkg/install-progress.h
+++ b/apt-pkg/install-progress.h
@@ -26,9 +26,8 @@ namespace Progress {
int last_reported_progress;
public:
- PackageManager()
- : percentage(0.0), last_reported_progress(-1) {};
- virtual ~PackageManager() {};
+ PackageManager();
+ virtual ~PackageManager();
/* Global Start/Stop */
virtual void Start(int /*child_pty*/=-1) {};
@@ -120,7 +119,7 @@ namespace Progress {
class PackageManagerFancy : public PackageManager
{
private:
- static void staticSIGWINCH(int);
+ APT_HIDDEN static void staticSIGWINCH(int);
static std::vector<PackageManagerFancy*> instances;
APT_HIDDEN bool DrawStatusLine();
diff --git a/apt-pkg/metaindex.h b/apt-pkg/metaindex.h
index ffabaadbf..7c4d0c1aa 100644
--- a/apt-pkg/metaindex.h
+++ b/apt-pkg/metaindex.h
@@ -53,7 +53,7 @@ class metaIndex
metaIndex(std::string const &URI, std::string const &Dist,
char const * const Type)
- : Indexes(NULL), Type(Type), URI(URI), Dist(Dist)
+ : Indexes(NULL), Type(Type), URI(URI), Dist(Dist), Trusted(false)
{
/* nothing */
}
diff --git a/apt-pkg/packagemanager.cc b/apt-pkg/packagemanager.cc
index 249542c68..101912f9d 100644
--- a/apt-pkg/packagemanager.cc
+++ b/apt-pkg/packagemanager.cc
@@ -28,6 +28,7 @@
#include <apt-pkg/pkgcache.h>
#include <apt-pkg/cacheiterators.h>
#include <apt-pkg/strutl.h>
+#include <apt-pkg/install-progress.h>
#include <stddef.h>
#include <list>
@@ -1079,7 +1080,7 @@ pkgPackageManager::DoInstallPostFork(APT::Progress::PackageManager *progress)
return Failed;
return Res;
-};
+}
#else
pkgPackageManager::OrderResult
pkgPackageManager::DoInstallPostFork(int statusFd)
diff --git a/apt-pkg/packagemanager.h b/apt-pkg/packagemanager.h
index d72790b6e..5bcd2045d 100644
--- a/apt-pkg/packagemanager.h
+++ b/apt-pkg/packagemanager.h
@@ -44,6 +44,11 @@ class pkgDepCache;
class pkgSourceList;
class pkgOrderList;
class pkgRecords;
+namespace APT {
+ namespace Progress {
+ class PackageManager;
+ }
+}
class pkgPackageManager : protected pkgCache::Namespace
@@ -93,6 +98,7 @@ class pkgPackageManager : protected pkgCache::Namespace
virtual bool Remove(PkgIterator /*Pkg*/,bool /*Purge*/=false) {return false;};
#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13)
virtual bool Go(APT::Progress::PackageManager * /*progress*/) {return true;};
+ virtual bool Go(int /*statusFd*/=-1) {return true;};
#else
virtual bool Go(int /*statusFd*/=-1) {return true;};
#endif
diff --git a/apt-pkg/pkgcache.cc b/apt-pkg/pkgcache.cc
index 58a63459f..572685ba5 100644
--- a/apt-pkg/pkgcache.cc
+++ b/apt-pkg/pkgcache.cc
@@ -54,12 +54,8 @@ pkgCache::Header::Header()
/* Whenever the structures change the major version should be bumped,
whenever the generator changes the minor version should be bumped. */
- MajorVersion = 8;
-#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13)
- MinorVersion = 2;
-#else
- MinorVersion = 1;
-#endif
+ MajorVersion = 10;
+ MinorVersion = 0;
Dirty = false;
HeaderSz = sizeof(pkgCache::Header);
@@ -86,11 +82,10 @@ pkgCache::Header::Header()
MaxDescFileSize = 0;
FileList = 0;
- StringList = 0;
VerSysName = 0;
Architecture = 0;
- memset(PkgHashTable,0,sizeof(PkgHashTable));
- memset(GrpHashTable,0,sizeof(GrpHashTable));
+ Architectures = 0;
+ HashTableSize = _config->FindI("APT::Cache-HashTableSize", 10 * 1048);
memset(Pools,0,sizeof(Pools));
CacheFileSize = 0;
@@ -145,7 +140,6 @@ bool pkgCache::ReMap(bool const &Errorchecks)
DescP = (Description *)Map.Data();
ProvideP = (Provides *)Map.Data();
DepP = (Dependency *)Map.Data();
- StringItemP = (StringItem *)Map.Data();
StrP = (char *)Map.Data();
if (Errorchecks == false)
@@ -168,15 +162,23 @@ bool pkgCache::ReMap(bool const &Errorchecks)
if (Map.Size() < HeaderP->CacheFileSize)
return _error->Error(_("The package cache file is corrupted, it is too small"));
+ if (HeaderP->VerSysName == 0 || HeaderP->Architecture == 0 || HeaderP->Architectures == 0)
+ return _error->Error(_("The package cache file is corrupted"));
+
// Locate our VS..
- if (HeaderP->VerSysName == 0 ||
- (VS = pkgVersioningSystem::GetVS(StrP + HeaderP->VerSysName)) == 0)
+ if ((VS = pkgVersioningSystem::GetVS(StrP + HeaderP->VerSysName)) == 0)
return _error->Error(_("This APT does not support the versioning system '%s'"),StrP + HeaderP->VerSysName);
- // Chcek the arhcitecture
- if (HeaderP->Architecture == 0 ||
- _config->Find("APT::Architecture") != StrP + HeaderP->Architecture)
- return _error->Error(_("The package cache was built for a different architecture"));
+ // Check the architecture
+ std::vector<std::string> archs = APT::Configuration::getArchitectures();
+ std::vector<std::string>::const_iterator a = archs.begin();
+ std::string list = *a;
+ for (++a; a != archs.end(); ++a)
+ list.append(",").append(*a);
+ if (_config->Find("APT::Architecture") != StrP + HeaderP->Architecture ||
+ list != StrP + HeaderP->Architectures)
+ return _error->Error(_("The package cache was built for different architectures: %s vs %s"), StrP + HeaderP->Architectures, list.c_str());
+
return true;
}
/*}}}*/
@@ -185,20 +187,20 @@ bool pkgCache::ReMap(bool const &Errorchecks)
/* This is used to generate the hash entries for the HashTable. With my
package list from bo this function gets 94% table usage on a 512 item
table (480 used items) */
-unsigned long pkgCache::sHash(const string &Str) const
+map_id_t pkgCache::sHash(const string &Str) const
{
unsigned long Hash = 0;
for (string::const_iterator I = Str.begin(); I != Str.end(); ++I)
Hash = 41 * Hash + tolower_ascii(*I);
- return Hash % _count(HeaderP->PkgHashTable);
+ return Hash % HeaderP->HashTableSize;
}
-unsigned long pkgCache::sHash(const char *Str) const
+map_id_t pkgCache::sHash(const char *Str) const
{
unsigned long Hash = tolower_ascii(*Str);
for (const char *I = Str + 1; *I != 0; ++I)
Hash = 41 * Hash + tolower_ascii(*I);
- return Hash % _count(HeaderP->PkgHashTable);
+ return Hash % HeaderP->HashTableSize;
}
/*}}}*/
// Cache::SingleArchFindPkg - Locate a package by name /*{{{*/
@@ -209,13 +211,10 @@ unsigned long pkgCache::sHash(const char *Str) const
pkgCache::PkgIterator pkgCache::SingleArchFindPkg(const string &Name)
{
// Look at the hash bucket
- Package *Pkg = PkgP + HeaderP->PkgHashTable[Hash(Name)];
- for (; Pkg != PkgP; Pkg = PkgP + Pkg->NextPackage)
+ Package *Pkg = PkgP + HeaderP->PkgHashTable()[Hash(Name)];
+ for (; Pkg != PkgP; Pkg = PkgP + Pkg->Next)
{
- if (unlikely(Pkg->Name == 0))
- continue;
-
- int const cmp = strcasecmp(Name.c_str(), StrP + Pkg->Name);
+ int const cmp = strcmp(Name.c_str(), StrP + (GrpP + Pkg->Group)->Name);
if (cmp == 0)
return PkgIterator(*this, Pkg);
else if (cmp < 0)
@@ -274,12 +273,9 @@ pkgCache::GrpIterator pkgCache::FindGrp(const string &Name) {
return GrpIterator(*this,0);
// Look at the hash bucket for the group
- Group *Grp = GrpP + HeaderP->GrpHashTable[sHash(Name)];
+ Group *Grp = GrpP + HeaderP->GrpHashTable()[sHash(Name)];
for (; Grp != GrpP; Grp = GrpP + Grp->Next) {
- if (unlikely(Grp->Name == 0))
- continue;
-
- int const cmp = strcasecmp(Name.c_str(), StrP + Grp->Name);
+ int const cmp = strcmp(Name.c_str(), StrP + Grp->Name);
if (cmp == 0)
return GrpIterator(*this, Grp);
else if (cmp < 0)
@@ -356,19 +352,15 @@ pkgCache::PkgIterator pkgCache::GrpIterator::FindPkg(string Arch) const {
last one we check, so we do it now. */
if (Arch == "native" || Arch == myArch || Arch == "all") {
pkgCache::Package *Pkg = Owner->PkgP + S->LastPackage;
- if (strcasecmp(myArch, Owner->StrP + Pkg->Arch) == 0)
+ if (strcmp(myArch, Owner->StrP + Pkg->Arch) == 0)
return PkgIterator(*Owner, Pkg);
Arch = myArch;
}
- /* Iterate over the list to find the matching arch
- unfortunately this list includes "package noise"
- (= different packages with same calculated hash),
- so we need to check the name also */
+ // Iterate over the list to find the matching arch
for (pkgCache::Package *Pkg = PackageList(); Pkg != Owner->PkgP;
- Pkg = Owner->PkgP + Pkg->NextPackage) {
- if (S->Name == Pkg->Name &&
- stringcasecmp(Arch, Owner->StrP + Pkg->Arch) == 0)
+ Pkg = Owner->PkgP + Pkg->Next) {
+ if (stringcmp(Arch, Owner->StrP + Pkg->Arch) == 0)
return PkgIterator(*Owner, Pkg);
if ((Owner->PkgP + S->LastPackage) == Pkg)
break;
@@ -415,7 +407,7 @@ pkgCache::PkgIterator pkgCache::GrpIterator::NextPkg(pkgCache::PkgIterator const
if (S->LastPackage == LastPkg.Index())
return PkgIterator(*Owner, 0);
- return PkgIterator(*Owner, Owner->PkgP + LastPkg->NextPackage);
+ return PkgIterator(*Owner, Owner->PkgP + LastPkg->Next);
}
/*}}}*/
// GrpIterator::operator ++ - Postfix incr /*{{{*/
@@ -428,10 +420,10 @@ void pkgCache::GrpIterator::operator ++(int)
S = Owner->GrpP + S->Next;
// Follow the hash table
- while (S == Owner->GrpP && (HashIndex+1) < (signed)_count(Owner->HeaderP->GrpHashTable))
+ while (S == Owner->GrpP && (HashIndex+1) < (signed)Owner->HeaderP->HashTableSize)
{
HashIndex++;
- S = Owner->GrpP + Owner->HeaderP->GrpHashTable[HashIndex];
+ S = Owner->GrpP + Owner->HeaderP->GrpHashTable()[HashIndex];
}
}
/*}}}*/
@@ -442,13 +434,13 @@ void pkgCache::PkgIterator::operator ++(int)
{
// Follow the current links
if (S != Owner->PkgP)
- S = Owner->PkgP + S->NextPackage;
+ S = Owner->PkgP + S->Next;
// Follow the hash table
- while (S == Owner->PkgP && (HashIndex+1) < (signed)_count(Owner->HeaderP->PkgHashTable))
+ while (S == Owner->PkgP && (HashIndex+1) < (signed)Owner->HeaderP->HashTableSize)
{
HashIndex++;
- S = Owner->PkgP + Owner->HeaderP->PkgHashTable[HashIndex];
+ S = Owner->PkgP + Owner->HeaderP->PkgHashTable()[HashIndex];
}
}
/*}}}*/
@@ -524,7 +516,10 @@ operator<<(std::ostream& out, pkgCache::PkgIterator Pkg)
out << " -> " << candidate;
if ( newest != "none" && candidate != newest)
out << " | " << newest;
- out << " > ( " << string(Pkg.Section()==0?"none":Pkg.Section()) << " )";
+ if (Pkg->VersionList == 0)
+ out << " > ( none )";
+ else
+ out << " > ( " << string(Pkg.VersionList().Section()==0?"unknown":Pkg.VersionList().Section()) << " )";
return out;
}
/*}}}*/
@@ -1031,8 +1026,14 @@ bool pkgCache::PrvIterator::IsMultiArchImplicit() const
{
pkgCache::PkgIterator const Owner = OwnerPkg();
pkgCache::PkgIterator const Parent = ParentPkg();
- if (strcmp(Owner.Arch(), Parent.Arch()) != 0 || Owner->Name == Parent->Name)
+ if (strcmp(Owner.Arch(), Parent.Arch()) != 0 || Owner.Group()->Name == Parent.Group()->Name)
return true;
return false;
}
/*}}}*/
+APT_DEPRECATED APT_PURE const char * pkgCache::PkgIterator::Section() const {/*{{{*/
+ if (S->VersionList == 0)
+ return 0;
+ return VersionList().Section();
+}
+ /*}}}*/
diff --git a/apt-pkg/pkgcache.h b/apt-pkg/pkgcache.h
index 5e8a9630a..4f8568205 100644
--- a/apt-pkg/pkgcache.h
+++ b/apt-pkg/pkgcache.h
@@ -79,11 +79,23 @@
#include <string>
#include <time.h>
+#include <stdint.h>
#ifndef APT_8_CLEANER_HEADERS
using std::string;
#endif
+// storing file sizes of indexes, which are way below 4 GB for now
+typedef uint32_t map_filesize_t;
+// each package/group/dependency gets an id
+typedef uint32_t map_id_t;
+// some files get an id, too, but in far less absolute numbers
+typedef uint16_t map_fileid_t;
+// relative pointer from cache start
+typedef uint32_t map_pointer_t;
+// same as the previous, but documented to be to a string item
+typedef map_pointer_t map_stringitem_t;
+
class pkgVersioningSystem;
class pkgCache /*{{{*/
{
@@ -97,7 +109,6 @@ class pkgCache /*{{{*/
struct Description;
struct Provides;
struct Dependency;
- struct StringItem;
struct VerFile;
struct DescFile;
@@ -138,7 +149,7 @@ class pkgCache /*{{{*/
/** \brief priority of a package version
Zero is used for unparsable or absent Priority fields. */
- enum VerPriority {Important=1,Required=2,Standard=3,Optional=4,Extra=5};
+ enum VerPriority {Required=1,Important=2,Standard=3,Optional=4,Extra=5};
enum PkgSelectedState {Unknown=0,Install=1,Hold=2,DeInstall=3,Purge=4};
enum PkgInstState {Ok=0,ReInstReq=1,HoldInst=2,HoldReInstReq=3};
enum PkgCurrentState {NotInstalled=0,UnPacked=1,HalfConfigured=2,
@@ -158,8 +169,8 @@ class pkgCache /*{{{*/
std::string CacheFile;
MMap &Map;
- unsigned long sHash(const std::string &S) const APT_PURE;
- unsigned long sHash(const char *S) const APT_PURE;
+ map_id_t sHash(const std::string &S) const APT_PURE;
+ map_id_t sHash(const char *S) const APT_PURE;
public:
@@ -174,7 +185,6 @@ class pkgCache /*{{{*/
Description *DescP;
Provides *ProvideP;
Dependency *DepP;
- StringItem *StringItemP;
char *StrP;
virtual bool ReMap(bool const &Errorchecks = true);
@@ -183,8 +193,8 @@ class pkgCache /*{{{*/
inline void *DataEnd() {return ((unsigned char *)Map.Data()) + Map.Size();}
// String hashing function (512 range)
- inline unsigned long Hash(const std::string &S) const {return sHash(S);}
- inline unsigned long Hash(const char *S) const {return sHash(S);}
+ inline map_id_t Hash(const std::string &S) const {return sHash(S);}
+ inline map_id_t Hash(const char *S) const {return sHash(S);}
// Useful transformation things
const char *Priority(unsigned char Priority);
@@ -218,7 +228,7 @@ class pkgCache /*{{{*/
private:
bool MultiArchEnabled;
- PkgIterator SingleArchFindPkg(const std::string &Name);
+ APT_HIDDEN PkgIterator SingleArchFindPkg(const std::string &Name);
};
/*}}}*/
// Header structure /*{{{*/
@@ -263,35 +273,31 @@ struct pkgCache::Header
These indicate the number of each structure contained in the cache.
PackageCount is especially useful for generating user state structures.
See Package::Id for more info. */
- unsigned long GroupCount;
- unsigned long PackageCount;
- unsigned long VersionCount;
- unsigned long DescriptionCount;
- unsigned long DependsCount;
- unsigned long PackageFileCount;
- unsigned long VerFileCount;
- unsigned long DescFileCount;
- unsigned long ProvidesCount;
+ map_id_t GroupCount;
+ map_id_t PackageCount;
+ map_id_t VersionCount;
+ map_id_t DescriptionCount;
+ map_id_t DependsCount;
+ map_fileid_t PackageFileCount;
+ map_fileid_t VerFileCount;
+ map_fileid_t DescFileCount;
+ map_id_t ProvidesCount;
/** \brief index of the first PackageFile structure
The PackageFile structures are singly linked lists that represent
all package files that have been merged into the cache. */
- map_ptrloc FileList;
- /** \brief index of the first StringItem structure
-
- The cache contains a list of all the unique strings (StringItems).
- The parser reads this list into memory so it can match strings
- against it.*/
- map_ptrloc StringList;
+ map_pointer_t FileList;
/** \brief String representing the version system used */
- map_ptrloc VerSysName;
- /** \brief Architecture(s) the cache was built against */
- map_ptrloc Architecture;
+ map_pointer_t VerSysName;
+ /** \brief native architecture the cache was built against */
+ map_pointer_t Architecture;
+ /** \brief all architectures the cache was built against */
+ map_pointer_t Architectures;
/** \brief The maximum size of a raw entry from the original Package file */
- unsigned long MaxVerFileSize;
+ map_filesize_t MaxVerFileSize;
/** \brief The maximum size of a raw entry from the original Translation file */
- unsigned long MaxDescFileSize;
+ map_filesize_t MaxDescFileSize;
/** \brief The Pool structures manage the allocation pools that the generator uses
@@ -302,23 +308,23 @@ struct pkgCache::Header
stores this information so future additions can make use of any unused pool
blocks. */
DynamicMMap::Pool Pools[9];
-
+
/** \brief hash tables providing rapid group/package name lookup
- Each group/package name is inserted into the hash table using pkgCache::Hash(const &string)
+ Each group/package name is inserted into a hash table using pkgCache::Hash(const &string)
By iterating over each entry in the hash table it is possible to iterate over
the entire list of packages. Hash Collisions are handled with a singly linked
list of packages based at the hash item. The linked list contains only
packages that match the hashing function.
      In the PkgHashTable it is possible that multiple packages have the same name -
these packages are stored as a sequence in the list.
-
- Beware: The Hashmethod assumes that the hash table sizes are equal */
- map_ptrloc PkgHashTable[2*1048];
- map_ptrloc GrpHashTable[2*1048];
+ The size of both tables is the same. */
+ unsigned int HashTableSize;
+ map_pointer_t * PkgHashTable() const { return (map_pointer_t*) (this + 1); }
+ map_pointer_t * GrpHashTable() const { return PkgHashTable() + HashTableSize; }
/** \brief Size of the complete cache file */
- unsigned long CacheFileSize;
+ unsigned long long CacheFileSize;
bool CheckSizes(Header &Against) const APT_PURE;
Header();
@@ -334,17 +340,17 @@ struct pkgCache::Header
struct pkgCache::Group
{
/** \brief Name of the group */
- map_ptrloc Name; // StringItem
+ map_stringitem_t Name;
// Linked List
/** \brief Link to the first package which belongs to the group */
- map_ptrloc FirstPackage; // Package
+ map_pointer_t FirstPackage; // Package
/** \brief Link to the last package which belongs to the group */
- map_ptrloc LastPackage; // Package
+ map_pointer_t LastPackage; // Package
/** \brief Link to the next Group */
- map_ptrloc Next; // Group
+ map_pointer_t Next; // Group
/** \brief unique sequel ID */
- unsigned int ID;
+ map_id_t ID;
};
/*}}}*/
@@ -362,10 +368,13 @@ struct pkgCache::Group
*/
struct pkgCache::Package
{
- /** \brief Name of the package */
- map_ptrloc Name; // StringItem
+ /** \brief Name of the package
+    * Note that the access method Name() will remain. It is just this data member
+    * that is deprecated, as this information is already stored and available via the
+ * associated Group – so it is wasting precious binary cache space */
+ APT_DEPRECATED map_stringitem_t Name;
/** \brief Architecture of the package */
- map_ptrloc Arch; // StringItem
+ map_stringitem_t Arch;
/** \brief Base of a singly linked list of versions
Each structure represents a unique version of the package.
@@ -375,24 +384,19 @@ struct pkgCache::Package
versions of a package can be cleanly handled by the system.
Furthermore, this linked list is guaranteed to be sorted
from Highest version to lowest version with no duplicate entries. */
- map_ptrloc VersionList; // Version
+ map_pointer_t VersionList; // Version
/** \brief index to the installed version */
- map_ptrloc CurrentVer; // Version
- /** \brief indicates the deduced section
-
- Should be the index to the string "Unknown" or to the section
- of the last parsed item. */
- map_ptrloc Section; // StringItem
+ map_pointer_t CurrentVer; // Version
/** \brief index of the group this package belongs to */
- map_ptrloc Group; // Group the Package belongs to
+ map_pointer_t Group; // Group the Package belongs to
// Linked list
/** \brief Link to the next package in the same bucket */
- map_ptrloc NextPackage; // Package
+ map_pointer_t Next; // Package
/** \brief List of all dependencies on this package */
- map_ptrloc RevDepends; // Dependency
+ map_pointer_t RevDepends; // Dependency
   /** \brief List of all "packages" this package provides */
- map_ptrloc ProvidesList; // Provides
+ map_pointer_t ProvidesList; // Provides
// Install/Remove/Purge etc
/** \brief state that the user wishes the package to be in */
@@ -412,7 +416,7 @@ struct pkgCache::Package
This allows clients to create an array of size PackageCount and use it to store
state information for the package map. For instance the status file emitter uses
this to track which packages have been emitted already. */
- unsigned int ID;
+ map_id_t ID;
/** \brief some useful indicators of the package's state */
unsigned long Flags;
};
@@ -426,30 +430,30 @@ struct pkgCache::Package
struct pkgCache::PackageFile
{
/** \brief physical disk file that this PackageFile represents */
- map_ptrloc FileName; // StringItem
+ map_stringitem_t FileName;
/** \brief the release information
Please see the files document for a description of what the
release information means. */
- map_ptrloc Archive; // StringItem
- map_ptrloc Codename; // StringItem
- map_ptrloc Component; // StringItem
- map_ptrloc Version; // StringItem
- map_ptrloc Origin; // StringItem
- map_ptrloc Label; // StringItem
- map_ptrloc Architecture; // StringItem
+ map_stringitem_t Archive;
+ map_stringitem_t Codename;
+ map_stringitem_t Component;
+ map_stringitem_t Version;
+ map_stringitem_t Origin;
+ map_stringitem_t Label;
+ map_stringitem_t Architecture;
/** \brief The site the index file was fetched from */
- map_ptrloc Site; // StringItem
+ map_stringitem_t Site;
/** \brief indicates what sort of index file this is
@TODO enumerate at least the possible indexes */
- map_ptrloc IndexType; // StringItem
+ map_stringitem_t IndexType;
/** \brief Size of the file
Used together with the modification time as a
simple check to ensure that the Packages
file has not been altered since Cache generation. */
- unsigned long Size;
+ map_filesize_t Size;
/** \brief Modification time for the file */
time_t mtime;
@@ -458,9 +462,9 @@ struct pkgCache::PackageFile
// Linked list
/** \brief Link to the next PackageFile in the Cache */
- map_ptrloc NextFile; // PackageFile
+ map_pointer_t NextFile; // PackageFile
   /** \brief unique sequential ID */
- unsigned int ID;
+ map_fileid_t ID;
};
/*}}}*/
// VerFile structure /*{{{*/
@@ -471,13 +475,13 @@ struct pkgCache::PackageFile
struct pkgCache::VerFile
{
/** \brief index of the package file that this version was found in */
- map_ptrloc File; // PackageFile
+ map_pointer_t File; // PackageFile
/** \brief next step in the linked list */
- map_ptrloc NextFile; // PkgVerFile
+ map_pointer_t NextFile; // PkgVerFile
/** \brief position in the package file */
- map_ptrloc Offset; // File offset
+ map_filesize_t Offset; // File offset
/** @TODO document pkgCache::VerFile::Size */
- unsigned long Size;
+ map_filesize_t Size;
};
/*}}}*/
// DescFile structure /*{{{*/
@@ -485,13 +489,13 @@ struct pkgCache::VerFile
struct pkgCache::DescFile
{
/** \brief index of the file that this description was found in */
- map_ptrloc File; // PackageFile
+ map_pointer_t File; // PackageFile
/** \brief next step in the linked list */
- map_ptrloc NextFile; // PkgVerFile
+ map_pointer_t NextFile; // PkgVerFile
/** \brief position in the file */
- map_ptrloc Offset; // File offset
+ map_filesize_t Offset; // File offset
/** @TODO document pkgCache::DescFile::Size */
- unsigned long Size;
+ map_filesize_t Size;
};
/*}}}*/
// Version structure /*{{{*/
@@ -503,9 +507,15 @@ struct pkgCache::DescFile
struct pkgCache::Version
{
/** \brief complete version string */
- map_ptrloc VerStr; // StringItem
+ map_stringitem_t VerStr;
/** \brief section this version is filled in */
- map_ptrloc Section; // StringItem
+ map_stringitem_t Section;
+ /** \brief source package name this version comes from
+ Always contains the name, even if it is the same as the binary name */
+ map_stringitem_t SourcePkgName;
+ /** \brief source version this version comes from
+ Always contains the version string, even if it is the same as the binary version */
+ map_stringitem_t SourceVerStr;
/** \brief Multi-Arch capabilities of a package version */
enum VerMultiArch { None = 0, /*!< is the default and doesn't trigger special behaviour */
@@ -527,33 +537,33 @@ struct pkgCache::Version
applies to. If FileList is 0 then this is a blank version.
The structure should also have a 0 in all other fields excluding
      pkgCache::Version::VerStr and possibly pkgCache::Version::NextVer. */
- map_ptrloc FileList; // VerFile
+ map_pointer_t FileList; // VerFile
/** \brief next (lower or equal) version in the linked list */
- map_ptrloc NextVer; // Version
+ map_pointer_t NextVer; // Version
/** \brief next description in the linked list */
- map_ptrloc DescriptionList; // Description
+ map_pointer_t DescriptionList; // Description
/** \brief base of the dependency list */
- map_ptrloc DependsList; // Dependency
+ map_pointer_t DependsList; // Dependency
/** \brief links to the owning package
This allows reverse dependencies to determine the package */
- map_ptrloc ParentPkg; // Package
+ map_pointer_t ParentPkg; // Package
/** \brief list of pkgCache::Provides */
- map_ptrloc ProvidesList; // Provides
+ map_pointer_t ProvidesList; // Provides
/** \brief archive size for this version
For Debian this is the size of the .deb file. */
- unsigned long long Size; // These are the .deb size
+ uint64_t Size; // These are the .deb size
/** \brief uncompressed size for this version */
- unsigned long long InstalledSize;
+ uint64_t InstalledSize;
/** \brief characteristic value representing this version
No two packages in existence should have the same VerStr
and Hash with different contents. */
unsigned short Hash;
   /** \brief unique sequential ID */
- unsigned int ID;
+ map_id_t ID;
/** \brief parsed priority value */
unsigned char Priority;
};
@@ -566,22 +576,22 @@ struct pkgCache::Description
If the value has a 0 length then this is read using the Package
file else the Translation-CODE file is used. */
- map_ptrloc language_code; // StringItem
+ map_stringitem_t language_code;
/** \brief MD5sum of the original description
Used to map Translations of a description to a version
and to check that the Translation is up-to-date. */
- map_ptrloc md5sum; // StringItem
+ map_stringitem_t md5sum;
/** @TODO document pkgCache::Description::FileList */
- map_ptrloc FileList; // DescFile
+ map_pointer_t FileList; // DescFile
/** \brief next translation for this description */
- map_ptrloc NextDesc; // Description
+ map_pointer_t NextDesc; // Description
/** \brief the text is a description of this package */
- map_ptrloc ParentPkg; // Package
+ map_pointer_t ParentPkg; // Package
   /** \brief unique sequential ID */
- unsigned int ID;
+ map_id_t ID;
};
/*}}}*/
// Dependency structure /*{{{*/
@@ -594,21 +604,21 @@ struct pkgCache::Description
struct pkgCache::Dependency
{
/** \brief string of the version the dependency is applied against */
- map_ptrloc Version; // StringItem
+ map_stringitem_t Version;
/** \brief index of the package this depends applies to
The generator will - if the package does not already exist -
create a blank (no version records) package. */
- map_ptrloc Package; // Package
+ map_pointer_t Package; // Package
/** \brief next dependency of this version */
- map_ptrloc NextDepends; // Dependency
+ map_pointer_t NextDepends; // Dependency
/** \brief next reverse dependency of this package */
- map_ptrloc NextRevDepends; // Dependency
+ map_pointer_t NextRevDepends; // Dependency
/** \brief version of the package which has the reverse depends */
- map_ptrloc ParentVer; // Version
+ map_pointer_t ParentVer; // Version
   /** \brief unique sequential ID */
- map_ptrloc ID;
+ map_id_t ID;
/** \brief Dependency type - Depends, Recommends, Conflicts, etc */
unsigned char Type;
/** \brief comparison operator specified on the depends line
@@ -629,39 +639,21 @@ struct pkgCache::Dependency
struct pkgCache::Provides
{
/** \brief index of the package providing this */
- map_ptrloc ParentPkg; // Package
+ map_pointer_t ParentPkg; // Package
/** \brief index of the version this provide line applies to */
- map_ptrloc Version; // Version
+ map_pointer_t Version; // Version
/** \brief version in the provides line (if any)
This version allows dependencies to depend on specific versions of a
Provides, as well as allowing Provides to override existing packages.
This is experimental. Note that Debian doesn't allow versioned provides */
- map_ptrloc ProvideVersion; // StringItem
+ map_stringitem_t ProvideVersion;
/** \brief next provides (based of package) */
- map_ptrloc NextProvides; // Provides
+ map_pointer_t NextProvides; // Provides
/** \brief next provides (based of version) */
- map_ptrloc NextPkgProv; // Provides
+ map_pointer_t NextPkgProv; // Provides
};
/*}}}*/
-// StringItem structure /*{{{*/
-/** \brief used for generating single instances of strings
-
- Some things like Section Name are are useful to have as unique tags.
- It is part of a linked list based at pkgCache::Header::StringList
-
- All strings are simply inlined any place in the file that is natural
- for the writer. The client should make no assumptions about the positioning
- of strings. All StringItems should be null-terminated. */
-struct pkgCache::StringItem
-{
- /** \brief string this refers to */
- map_ptrloc String; // StringItem
- /** \brief Next link in the chain */
- map_ptrloc NextItem; // StringItem
-};
- /*}}}*/
-
inline char const * pkgCache::NativeArch()
{ return StrP + HeaderP->Architecture; }
diff --git a/apt-pkg/pkgcachegen.cc b/apt-pkg/pkgcachegen.cc
index 810f0b022..9e6931fa6 100644
--- a/apt-pkg/pkgcachegen.cc
+++ b/apt-pkg/pkgcachegen.cc
@@ -57,8 +57,7 @@ pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) :
FoundFileDeps(0)
{
CurrentFile = 0;
- memset(UniqHash,0,sizeof(UniqHash));
-
+
if (_error->PendingError() == true)
return;
@@ -73,14 +72,35 @@ pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) :
// Starting header
*Cache.HeaderP = pkgCache::Header();
- map_ptrloc const idxVerSysName = WriteStringInMap(_system->VS->Label);
+
+ // make room for the hashtables for packages and groups
+ if (Map.RawAllocate(2 * (Cache.HeaderP->HashTableSize * sizeof(map_pointer_t))) == 0)
+ return;
+
+ map_stringitem_t const idxVerSysName = WriteStringInMap(_system->VS->Label);
+ if (unlikely(idxVerSysName == 0))
+ return;
Cache.HeaderP->VerSysName = idxVerSysName;
- // this pointer is set in ReMap, but we need it now for WriteUniqString
- Cache.StringItemP = (pkgCache::StringItem *)Map.Data();
- map_ptrloc const idxArchitecture = WriteUniqString(_config->Find("APT::Architecture"));
- Cache.HeaderP->Architecture = idxArchitecture;
- if (unlikely(idxVerSysName == 0 || idxArchitecture == 0))
+ map_stringitem_t const idxArchitecture = StoreString(MIXED, _config->Find("APT::Architecture"));
+ if (unlikely(idxArchitecture == 0))
return;
+ Cache.HeaderP->Architecture = idxArchitecture;
+
+ std::vector<std::string> archs = APT::Configuration::getArchitectures();
+ if (archs.size() > 1)
+ {
+ std::vector<std::string>::const_iterator a = archs.begin();
+ std::string list = *a;
+ for (++a; a != archs.end(); ++a)
+ list.append(",").append(*a);
+ map_stringitem_t const idxArchitectures = WriteStringInMap(list);
+ if (unlikely(idxArchitectures == 0))
+ return;
+ Cache.HeaderP->Architectures = idxArchitectures;
+ }
+ else
+ Cache.HeaderP->Architectures = idxArchitecture;
+
Cache.ReMap();
}
else
@@ -92,9 +112,9 @@ pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) :
{
_error->Error(_("Cache has an incompatible versioning system"));
return;
- }
+ }
}
-
+
Cache.HeaderP->Dirty = true;
Map.Sync(0,sizeof(pkgCache::Header));
}
@@ -126,10 +146,6 @@ void pkgCacheGenerator::ReMap(void const * const oldMap, void const * const newM
CurrentFile += (pkgCache::PackageFile const * const) newMap - (pkgCache::PackageFile const * const) oldMap;
- for (size_t i = 0; i < _count(UniqHash); ++i)
- if (UniqHash[i] != 0)
- UniqHash[i] += (pkgCache::StringItem const * const) newMap - (pkgCache::StringItem const * const) oldMap;
-
for (std::vector<pkgCache::GrpIterator*>::const_iterator i = Dynamic<pkgCache::GrpIterator>::toReMap.begin();
i != Dynamic<pkgCache::GrpIterator>::toReMap.end(); ++i)
(*i)->ReMap(oldMap, newMap);
@@ -153,27 +169,27 @@ void pkgCacheGenerator::ReMap(void const * const oldMap, void const * const newM
(*i)->ReMap(oldMap, newMap);
} /*}}}*/
// CacheGenerator::WriteStringInMap /*{{{*/
-map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String,
+map_stringitem_t pkgCacheGenerator::WriteStringInMap(const char *String,
const unsigned long &Len) {
void const * const oldMap = Map.Data();
- map_ptrloc const index = Map.WriteString(String, Len);
+ map_stringitem_t const index = Map.WriteString(String, Len);
if (index != 0)
ReMap(oldMap, Map.Data());
return index;
}
/*}}}*/
// CacheGenerator::WriteStringInMap /*{{{*/
-map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String) {
+map_stringitem_t pkgCacheGenerator::WriteStringInMap(const char *String) {
void const * const oldMap = Map.Data();
- map_ptrloc const index = Map.WriteString(String);
+ map_stringitem_t const index = Map.WriteString(String);
if (index != 0)
ReMap(oldMap, Map.Data());
return index;
}
/*}}}*/
-map_ptrloc pkgCacheGenerator::AllocateInMap(const unsigned long &size) {/*{{{*/
+map_pointer_t pkgCacheGenerator::AllocateInMap(const unsigned long &size) {/*{{{*/
void const * const oldMap = Map.Data();
- map_ptrloc const index = Map.Allocate(size);
+ map_pointer_t const index = Map.Allocate(size);
if (index != 0)
ReMap(oldMap, Map.Data());
return index;
@@ -253,16 +269,16 @@ bool pkgCacheGenerator::MergeList(ListParser &List,
}
}
- if (Cache.HeaderP->PackageCount >= (1ULL<<sizeof(Cache.PkgP->ID)*8)-1)
+ if (Cache.HeaderP->PackageCount >= std::numeric_limits<map_id_t>::max())
return _error->Error(_("Wow, you exceeded the number of package "
"names this APT is capable of."));
- if (Cache.HeaderP->VersionCount >= (1ULL<<(sizeof(Cache.VerP->ID)*8))-1)
+ if (Cache.HeaderP->VersionCount >= std::numeric_limits<map_id_t>::max())
return _error->Error(_("Wow, you exceeded the number of versions "
"this APT is capable of."));
- if (Cache.HeaderP->DescriptionCount >= (1ULL<<(sizeof(Cache.DescP->ID)*8))-1)
+ if (Cache.HeaderP->DescriptionCount >= std::numeric_limits<map_id_t>::max())
return _error->Error(_("Wow, you exceeded the number of descriptions "
"this APT is capable of."));
- if (Cache.HeaderP->DependsCount >= (1ULL<<(sizeof(Cache.DepP->ID)*8))-1ULL)
+ if (Cache.HeaderP->DependsCount >= std::numeric_limits<map_id_t>::max())
return _error->Error(_("Wow, you exceeded the number of dependencies "
"this APT is capable of."));
@@ -302,10 +318,9 @@ bool pkgCacheGenerator::MergeListPackage(ListParser &List, pkgCache::PkgIterator
// Find the right version to write the description
MD5SumValue CurMd5 = List.Description_md5();
- if (CurMd5.Value().empty() == true || List.Description().empty() == true)
+ if (CurMd5.Value().empty() == true && List.Description("").empty() == true)
return true;
- std::string CurLang = List.DescriptionLanguage();
-
+ std::vector<std::string> availDesc = List.AvailableDescriptionLanguages();
for (Ver = Pkg.VersionList(); Ver.end() == false; ++Ver)
{
pkgCache::DescIterator VerDesc = Ver.DescriptionList();
@@ -314,31 +329,16 @@ bool pkgCacheGenerator::MergeListPackage(ListParser &List, pkgCache::PkgIterator
if (VerDesc.end() == true || MD5SumValue(VerDesc.md5()) != CurMd5)
continue;
- // don't add a new description if we have one for the given
- // md5 && language
- if (IsDuplicateDescription(VerDesc, CurMd5, CurLang) == true)
- continue;
-
- pkgCache::DescIterator Desc;
- Dynamic<pkgCache::DescIterator> DynDesc(Desc);
-
- map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, VerDesc->md5sum);
- if (unlikely(descindex == 0 && _error->PendingError()))
- return _error->Error(_("Error occurred while processing %s (%s%d)"),
- Pkg.Name(), "NewDescription", 1);
-
- Desc->ParentPkg = Pkg.Index();
-
- // we add at the end, so that the start is constant as we need
- // that to be able to efficiently share these lists
- VerDesc = Ver.DescriptionList(); // old value might be invalid after ReMap
- for (;VerDesc.end() == false && VerDesc->NextDesc != 0; ++VerDesc);
- map_ptrloc * const LastNextDesc = (VerDesc.end() == true) ? &Ver->DescriptionList : &VerDesc->NextDesc;
- *LastNextDesc = descindex;
+ map_stringitem_t md5idx = VerDesc->md5sum;
+ for (std::vector<std::string>::const_iterator CurLang = availDesc.begin(); CurLang != availDesc.end(); ++CurLang)
+ {
+ // don't add a new description if we have one for the given
+ // md5 && language
+ if (IsDuplicateDescription(VerDesc, CurMd5, *CurLang) == true)
+ continue;
- if (NewFileDesc(Desc,List) == false)
- return _error->Error(_("Error occurred while processing %s (%s%d)"),
- Pkg.Name(), "NewFileDesc", 1);
+ AddNewDescription(List, Ver, *CurLang, CurMd5, md5idx);
+ }
// we can stop here as all "same" versions will share the description
break;
@@ -353,7 +353,7 @@ bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator
{
pkgCache::VerIterator Ver = Pkg.VersionList();
Dynamic<pkgCache::VerIterator> DynVer(Ver);
- map_ptrloc *LastVer = &Pkg->VersionList;
+ map_pointer_t *LastVer = &Pkg->VersionList;
void const * oldMap = Map.Data();
unsigned short const Hash = List.VersionHash();
@@ -362,7 +362,7 @@ bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator
/* We know the list is sorted so we use that fact in the search.
Insertion of new versions is done with correct sorting */
int Res = 1;
- for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++)
+ for (; Ver.end() == false; LastVer = &Ver->NextVer, ++Ver)
{
Res = Cache.VS->CmpVersion(Version,Ver.VerStr());
// Version is higher as current version - insert here
@@ -398,13 +398,13 @@ bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator
}
// Add a new version
- map_ptrloc const verindex = NewVersion(Ver, Version, Pkg.Index(), Hash, *LastVer);
+ map_pointer_t const verindex = NewVersion(Ver, Version, Pkg.Index(), Hash, *LastVer);
if (verindex == 0 && _error->PendingError())
return _error->Error(_("Error occurred while processing %s (%s%d)"),
Pkg.Name(), "NewVersion", 1);
if (oldMap != Map.Data())
- LastVer += (map_ptrloc const * const) Map.Data() - (map_ptrloc const * const) oldMap;
+ LastVer += (map_pointer_t const * const) Map.Data() - (map_pointer_t const * const) oldMap;
*LastVer = verindex;
if (unlikely(List.NewVersion(Ver) == false))
@@ -465,7 +465,7 @@ bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator
D.ParentPkg().Group() == Grp)
continue;
- map_ptrloc *OldDepLast = NULL;
+ map_pointer_t *OldDepLast = NULL;
pkgCache::VerIterator ConVersion = D.ParentVer();
Dynamic<pkgCache::VerIterator> DynV(ConVersion);
// duplicate the Conflicts/Breaks/Replaces for :none arch
@@ -486,11 +486,10 @@ bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator
return true;
}
- /* Record the Description (it is not translated) */
+ /* Record the Description(s) based on their master md5sum */
MD5SumValue CurMd5 = List.Description_md5();
- if (CurMd5.Value().empty() == true || List.Description().empty() == true)
+ if (CurMd5.Value().empty() == true && List.Description("").empty() == true)
return true;
- std::string CurLang = List.DescriptionLanguage();
/* Before we add a new description we first search in the group for
a version with a description of the same MD5 - if so we reuse this
@@ -501,28 +500,44 @@ bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator
for (pkgCache::VerIterator V = P.VersionList();
V.end() == false; ++V)
{
- if (IsDuplicateDescription(V.DescriptionList(), CurMd5, "") == false)
+ if (V->DescriptionList == 0 || MD5SumValue(V.DescriptionList().md5()) != CurMd5)
continue;
Ver->DescriptionList = V->DescriptionList;
- return true;
}
}
- // We haven't found reusable descriptions, so add the first description
- pkgCache::DescIterator Desc = Ver.DescriptionList();
+ // We haven't found reusable descriptions, so add the first description(s)
+ map_stringitem_t md5idx = Ver->DescriptionList == 0 ? 0 : Ver.DescriptionList()->md5sum;
+ std::vector<std::string> availDesc = List.AvailableDescriptionLanguages();
+ for (std::vector<std::string>::const_iterator CurLang = availDesc.begin(); CurLang != availDesc.end(); ++CurLang)
+ if (AddNewDescription(List, Ver, *CurLang, CurMd5, md5idx) == false)
+ return false;
+ return true;
+}
+ /*}}}*/
+bool pkgCacheGenerator::AddNewDescription(ListParser &List, pkgCache::VerIterator &Ver, std::string const &lang, MD5SumValue const &CurMd5, map_stringitem_t &md5idx) /*{{{*/
+{
+ pkgCache::DescIterator Desc;
Dynamic<pkgCache::DescIterator> DynDesc(Desc);
- map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, 0);
+ map_pointer_t const descindex = NewDescription(Desc, lang, CurMd5, md5idx);
if (unlikely(descindex == 0 && _error->PendingError()))
return _error->Error(_("Error occurred while processing %s (%s%d)"),
- Pkg.Name(), "NewDescription", 2);
+ Ver.ParentPkg().Name(), "NewDescription", 1);
+
+ md5idx = Desc->md5sum;
+ Desc->ParentPkg = Ver.ParentPkg().Index();
- Desc->ParentPkg = Pkg.Index();
- Ver->DescriptionList = descindex;
+ // we add at the end, so that the start is constant as we need
+ // that to be able to efficiently share these lists
+ pkgCache::DescIterator VerDesc = Ver.DescriptionList(); // old value might be invalid after ReMap
+ for (;VerDesc.end() == false && VerDesc->NextDesc != 0; ++VerDesc);
+ map_pointer_t * const LastNextDesc = (VerDesc.end() == true) ? &Ver->DescriptionList : &VerDesc->NextDesc;
+ *LastNextDesc = descindex;
if (NewFileDesc(Desc,List) == false)
return _error->Error(_("Error occurred while processing %s (%s%d)"),
- Pkg.Name(), "NewFileDesc", 2);
+ Ver.ParentPkg().Name(), "NewFileDesc", 1);
return true;
}
@@ -589,19 +604,19 @@ bool pkgCacheGenerator::NewGroup(pkgCache::GrpIterator &Grp, const string &Name)
return true;
// Get a structure
- map_ptrloc const Group = AllocateInMap(sizeof(pkgCache::Group));
+ map_pointer_t const Group = AllocateInMap(sizeof(pkgCache::Group));
if (unlikely(Group == 0))
return false;
Grp = pkgCache::GrpIterator(Cache, Cache.GrpP + Group);
- map_ptrloc const idxName = WriteStringInMap(Name);
+ map_stringitem_t const idxName = StoreString(PKGNAME, Name);
if (unlikely(idxName == 0))
return false;
Grp->Name = idxName;
// Insert it into the hash table
unsigned long const Hash = Cache.Hash(Name);
- map_ptrloc *insertAt = &Cache.HeaderP->GrpHashTable[Hash];
+ map_pointer_t *insertAt = &Cache.HeaderP->GrpHashTable()[Hash];
while (*insertAt != 0 && strcasecmp(Name.c_str(), Cache.StrP + (Cache.GrpP + *insertAt)->Name) > 0)
insertAt = &(Cache.GrpP + *insertAt)->Next;
Grp->Next = *insertAt;
@@ -626,7 +641,7 @@ bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name
return true;
// Get a structure
- map_ptrloc const Package = AllocateInMap(sizeof(pkgCache::Package));
+ map_pointer_t const Package = AllocateInMap(sizeof(pkgCache::Package));
if (unlikely(Package == 0))
return false;
Pkg = pkgCache::PkgIterator(Cache,Cache.PkgP + Package);
@@ -636,27 +651,34 @@ bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name
{
Grp->FirstPackage = Package;
// Insert it into the hash table
- unsigned long const Hash = Cache.Hash(Name);
- map_ptrloc *insertAt = &Cache.HeaderP->PkgHashTable[Hash];
- while (*insertAt != 0 && strcasecmp(Name.c_str(), Cache.StrP + (Cache.PkgP + *insertAt)->Name) > 0)
- insertAt = &(Cache.PkgP + *insertAt)->NextPackage;
- Pkg->NextPackage = *insertAt;
+ map_id_t const Hash = Cache.Hash(Name);
+ map_pointer_t *insertAt = &Cache.HeaderP->PkgHashTable()[Hash];
+ while (*insertAt != 0 && strcasecmp(Name.c_str(), Cache.StrP + (Cache.GrpP + (Cache.PkgP + *insertAt)->Group)->Name) > 0)
+ insertAt = &(Cache.PkgP + *insertAt)->Next;
+ Pkg->Next = *insertAt;
*insertAt = Package;
}
else // Group the Packages together
{
// this package is the new last package
pkgCache::PkgIterator LastPkg(Cache, Cache.PkgP + Grp->LastPackage);
- Pkg->NextPackage = LastPkg->NextPackage;
- LastPkg->NextPackage = Package;
+ Pkg->Next = LastPkg->Next;
+ LastPkg->Next = Package;
}
Grp->LastPackage = Package;
// Set the name, arch and the ID
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
Pkg->Name = Grp->Name;
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
Pkg->Group = Grp.Index();
// all is mapped to the native architecture
- map_ptrloc const idxArch = (Arch == "all") ? Cache.HeaderP->Architecture : WriteUniqString(Arch.c_str());
+ map_stringitem_t const idxArch = (Arch == "all") ? Cache.HeaderP->Architecture : StoreString(MIXED, Arch);
if (unlikely(idxArch == 0))
return false;
Pkg->Arch = idxArch;
@@ -673,14 +695,14 @@ bool pkgCacheGenerator::AddImplicitDepends(pkgCache::GrpIterator &G,
// copy P.Arch() into a string here as a cache remap
// in NewDepends() later may alter the pointer location
string Arch = P.Arch() == NULL ? "" : P.Arch();
- map_ptrloc *OldDepLast = NULL;
+ map_pointer_t *OldDepLast = NULL;
/* MultiArch handling introduces a lot of implicit Dependencies:
- MultiArch: same → Co-Installable if they have the same version
- All others conflict with all other group members */
bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
pkgCache::PkgIterator D = G.PackageList();
Dynamic<pkgCache::PkgIterator> DynD(D);
- map_ptrloc const VerStrIdx = V->VerStr;
+ map_stringitem_t const VerStrIdx = V->VerStr;
for (; D.end() != true; D = G.NextPkg(D))
{
if (Arch == D.Arch() || D->VersionList == 0)
@@ -713,11 +735,11 @@ bool pkgCacheGenerator::AddImplicitDepends(pkgCache::VerIterator &V,
/* MultiArch handling introduces a lot of implicit Dependencies:
- MultiArch: same → Co-Installable if they have the same version
- All others conflict with all other group members */
- map_ptrloc *OldDepLast = NULL;
+ map_pointer_t *OldDepLast = NULL;
bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
if (coInstall == true)
{
- map_ptrloc const VerStrIdx = V->VerStr;
+ map_stringitem_t const VerStrIdx = V->VerStr;
// Replaces: ${self}:other ( << ${binary:Version})
NewDepends(D, V, VerStrIdx,
pkgCache::Dep::Less, pkgCache::Dep::Replaces,
@@ -746,15 +768,15 @@ bool pkgCacheGenerator::NewFileVer(pkgCache::VerIterator &Ver,
return true;
// Get a structure
- map_ptrloc const VerFile = AllocateInMap(sizeof(pkgCache::VerFile));
+ map_pointer_t const VerFile = AllocateInMap(sizeof(pkgCache::VerFile));
if (VerFile == 0)
- return 0;
+ return false;
pkgCache::VerFileIterator VF(Cache,Cache.VerFileP + VerFile);
VF->File = CurrentFile - Cache.PkgFileP;
// Link it to the end of the list
- map_ptrloc *Last = &Ver->FileList;
+ map_pointer_t *Last = &Ver->FileList;
for (pkgCache::VerFileIterator V = Ver.FileList(); V.end() == false; ++V)
Last = &V->NextFile;
VF->NextFile = *Last;
@@ -772,14 +794,14 @@ bool pkgCacheGenerator::NewFileVer(pkgCache::VerIterator &Ver,
// CacheGenerator::NewVersion - Create a new Version /*{{{*/
// ---------------------------------------------------------------------
/* This puts a version structure in the linked list */
-unsigned long pkgCacheGenerator::NewVersion(pkgCache::VerIterator &Ver,
+map_pointer_t pkgCacheGenerator::NewVersion(pkgCache::VerIterator &Ver,
const string &VerStr,
- map_ptrloc const ParentPkg,
- unsigned long const Hash,
- unsigned long Next)
+ map_pointer_t const ParentPkg,
+ unsigned short const Hash,
+ map_pointer_t const Next)
{
// Get a structure
- map_ptrloc const Version = AllocateInMap(sizeof(pkgCache::Version));
+ map_pointer_t const Version = AllocateInMap(sizeof(pkgCache::Version));
if (Version == 0)
return 0;
@@ -814,7 +836,7 @@ unsigned long pkgCacheGenerator::NewVersion(pkgCache::VerIterator &Ver,
}
}
// haven't found the version string, so create
- map_ptrloc const idxVerStr = WriteStringInMap(VerStr);
+ map_stringitem_t const idxVerStr = StoreString(VERSIONNUMBER, VerStr);
if (unlikely(idxVerStr == 0))
return 0;
Ver->VerStr = idxVerStr;
@@ -831,7 +853,7 @@ bool pkgCacheGenerator::NewFileDesc(pkgCache::DescIterator &Desc,
return true;
// Get a structure
- map_ptrloc const DescFile = AllocateInMap(sizeof(pkgCache::DescFile));
+ map_pointer_t const DescFile = AllocateInMap(sizeof(pkgCache::DescFile));
if (DescFile == 0)
return false;
@@ -839,7 +861,7 @@ bool pkgCacheGenerator::NewFileDesc(pkgCache::DescIterator &Desc,
DF->File = CurrentFile - Cache.PkgFileP;
// Link it to the end of the list
- map_ptrloc *Last = &Desc->FileList;
+ map_pointer_t *Last = &Desc->FileList;
for (pkgCache::DescFileIterator D = Desc.FileList(); D.end() == false; ++D)
Last = &D->NextFile;
@@ -858,20 +880,20 @@ bool pkgCacheGenerator::NewFileDesc(pkgCache::DescIterator &Desc,
// CacheGenerator::NewDescription - Create a new Description /*{{{*/
// ---------------------------------------------------------------------
/* This puts a description structure in the linked list */
-map_ptrloc pkgCacheGenerator::NewDescription(pkgCache::DescIterator &Desc,
+map_pointer_t pkgCacheGenerator::NewDescription(pkgCache::DescIterator &Desc,
const string &Lang,
const MD5SumValue &md5sum,
- map_ptrloc idxmd5str)
+ map_stringitem_t const idxmd5str)
{
// Get a structure
- map_ptrloc const Description = AllocateInMap(sizeof(pkgCache::Description));
+ map_pointer_t const Description = AllocateInMap(sizeof(pkgCache::Description));
if (Description == 0)
return 0;
// Fill it in
Desc = pkgCache::DescIterator(Cache,Cache.DescP + Description);
Desc->ID = Cache.HeaderP->DescriptionCount++;
- map_ptrloc const idxlanguage_code = WriteUniqString(Lang);
+ map_stringitem_t const idxlanguage_code = StoreString(MIXED, Lang);
if (unlikely(idxlanguage_code == 0))
return 0;
Desc->language_code = idxlanguage_code;
@@ -880,7 +902,7 @@ map_ptrloc pkgCacheGenerator::NewDescription(pkgCache::DescIterator &Desc,
Desc->md5sum = idxmd5str;
else
{
- map_ptrloc const idxmd5sum = WriteStringInMap(md5sum.Value());
+ map_stringitem_t const idxmd5sum = WriteStringInMap(md5sum.Value());
if (unlikely(idxmd5sum == 0))
return 0;
Desc->md5sum = idxmd5sum;
@@ -898,9 +920,9 @@ bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
string const &Version,
unsigned int const &Op,
unsigned int const &Type,
- map_ptrloc* &OldDepLast)
+ map_stringitem_t* &OldDepLast)
{
- map_ptrloc index = 0;
+ map_stringitem_t index = 0;
if (Version.empty() == false)
{
int const CmpOp = Op & 0x0F;
@@ -911,25 +933,25 @@ bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
if (index == 0)
{
void const * const oldMap = Map.Data();
- index = WriteStringInMap(Version);
+ index = StoreString(VERSIONNUMBER, Version);
if (unlikely(index == 0))
return false;
if (OldDepLast != 0 && oldMap != Map.Data())
- OldDepLast += (map_ptrloc const * const) Map.Data() - (map_ptrloc const * const) oldMap;
+ OldDepLast += (map_pointer_t const * const) Map.Data() - (map_pointer_t const * const) oldMap;
}
}
return NewDepends(Pkg, Ver, index, Op, Type, OldDepLast);
}
bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
pkgCache::VerIterator &Ver,
- map_ptrloc const Version,
+ map_pointer_t const Version,
unsigned int const &Op,
unsigned int const &Type,
- map_ptrloc* &OldDepLast)
+ map_pointer_t* &OldDepLast)
{
void const * const oldMap = Map.Data();
// Get a structure
- map_ptrloc const Dependency = AllocateInMap(sizeof(pkgCache::Dependency));
+ map_pointer_t const Dependency = AllocateInMap(sizeof(pkgCache::Dependency));
if (unlikely(Dependency == 0))
return false;
@@ -954,7 +976,7 @@ bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
for (pkgCache::DepIterator D = Ver.DependsList(); D.end() == false; ++D)
OldDepLast = &D->NextDepends;
} else if (oldMap != Map.Data())
- OldDepLast += (map_ptrloc const * const) Map.Data() - (map_ptrloc const * const) oldMap;
+ OldDepLast += (map_pointer_t const * const) Map.Data() - (map_pointer_t const * const) oldMap;
Dep->NextDepends = *OldDepLast;
*OldDepLast = Dep.Index();
@@ -1019,7 +1041,7 @@ bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator &Ver,
return true;
// Get a structure
- map_ptrloc const Provides = Owner->AllocateInMap(sizeof(pkgCache::Provides));
+ map_pointer_t const Provides = Owner->AllocateInMap(sizeof(pkgCache::Provides));
if (unlikely(Provides == 0))
return false;
Cache.HeaderP->ProvidesCount++;
@@ -1031,7 +1053,7 @@ bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator &Ver,
Prv->NextPkgProv = Ver->ProvidesList;
Ver->ProvidesList = Prv.Index();
if (Version.empty() == false) {
- map_ptrloc const idxProvideVersion = WriteString(Version);
+ map_stringitem_t const idxProvideVersion = WriteString(Version);
Prv->ProvideVersion = idxProvideVersion;
if (unlikely(idxProvideVersion == 0))
return false;
@@ -1066,14 +1088,14 @@ bool pkgCacheGenerator::SelectFile(const string &File,const string &Site,
unsigned long Flags)
{
// Get some space for the structure
- map_ptrloc const idxFile = AllocateInMap(sizeof(*CurrentFile));
+ map_pointer_t const idxFile = AllocateInMap(sizeof(*CurrentFile));
if (unlikely(idxFile == 0))
return false;
CurrentFile = Cache.PkgFileP + idxFile;
// Fill it in
- map_ptrloc const idxFileName = WriteStringInMap(File);
- map_ptrloc const idxSite = WriteUniqString(Site);
+ map_stringitem_t const idxFileName = WriteStringInMap(File);
+ map_stringitem_t const idxSite = StoreString(MIXED, Site);
if (unlikely(idxFileName == 0 || idxSite == 0))
return false;
CurrentFile->FileName = idxFileName;
@@ -1081,7 +1103,7 @@ bool pkgCacheGenerator::SelectFile(const string &File,const string &Site,
CurrentFile->NextFile = Cache.HeaderP->FileList;
CurrentFile->Flags = Flags;
CurrentFile->ID = Cache.HeaderP->PackageFileCount;
- map_ptrloc const idxIndexType = WriteUniqString(Index.GetType()->Label);
+ map_stringitem_t const idxIndexType = StoreString(MIXED, Index.GetType()->Label);
if (unlikely(idxIndexType == 0))
return false;
CurrentFile->IndexType = idxIndexType;
@@ -1098,57 +1120,27 @@ bool pkgCacheGenerator::SelectFile(const string &File,const string &Site,
// ---------------------------------------------------------------------
/* This is used to create handles to strings. Given the same text it
always returns the same number */
-unsigned long pkgCacheGenerator::WriteUniqString(const char *S,
+map_stringitem_t pkgCacheGenerator::StoreString(enum StringType const type, const char *S,
unsigned int Size)
{
- /* We use a very small transient hash table here, this speeds up generation
- by a fair amount on slower machines */
- pkgCache::StringItem *&Bucket = UniqHash[(S[0]*5 + S[1]) % _count(UniqHash)];
- if (Bucket != 0 &&
- stringcmp(S,S+Size,Cache.StrP + Bucket->String) == 0)
- return Bucket->String;
-
- // Search for an insertion point
- pkgCache::StringItem *I = Cache.StringItemP + Cache.HeaderP->StringList;
- int Res = 1;
- map_ptrloc *Last = &Cache.HeaderP->StringList;
- for (; I != Cache.StringItemP; Last = &I->NextItem,
- I = Cache.StringItemP + I->NextItem)
- {
- Res = stringcmp(S,S+Size,Cache.StrP + I->String);
- if (Res >= 0)
- break;
- }
-
- // Match
- if (Res == 0)
- {
- Bucket = I;
- return I->String;
- }
-
- // Get a structure
- void const * const oldMap = Map.Data();
- map_ptrloc const Item = AllocateInMap(sizeof(pkgCache::StringItem));
- if (Item == 0)
- return 0;
-
- map_ptrloc const idxString = WriteStringInMap(S,Size);
- if (unlikely(idxString == 0))
- return 0;
- if (oldMap != Map.Data()) {
- Last += (map_ptrloc const * const) Map.Data() - (map_ptrloc const * const) oldMap;
- I += (pkgCache::StringItem const * const) Map.Data() - (pkgCache::StringItem const * const) oldMap;
+ std::string const key(S, Size);
+
+ std::map<std::string,map_stringitem_t> * strings;
+ switch(type) {
+ case MIXED: strings = &strMixed; break;
+ case PKGNAME: strings = &strPkgNames; break;
+ case VERSIONNUMBER: strings = &strVersions; break;
+ case SECTION: strings = &strSections; break;
+ default: _error->Fatal("Unknown enum type used for string storage of '%s'", key.c_str()); return 0;
}
- *Last = Item;
- // Fill in the structure
- pkgCache::StringItem *ItemP = Cache.StringItemP + Item;
- ItemP->NextItem = I - Cache.StringItemP;
- ItemP->String = idxString;
+ std::map<std::string,map_stringitem_t>::const_iterator const item = strings->find(key);
+ if (item != strings->end())
+ return item->second;
- Bucket = ItemP;
- return ItemP->String;
+ map_stringitem_t const idxString = WriteStringInMap(S,Size);
+ strings->insert(std::make_pair(key, idxString));
+ return idxString;
}
/*}}}*/
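
   The on-disk StringItem list is gone entirely; string deduplication now happens only at
   generation time through the per-type std::map lookaside tables, so the finished cache
   carries just the strings themselves. A self-contained toy model of the idea (not APT
   code; the byte buffer stands in for the mmap):

    #include <map>
    #include <string>
    #include <vector>

    typedef unsigned int map_stringitem_t;

    struct StringPool
    {
       std::vector<char> file;                          // stand-in for the mapped cache
       std::map<std::string, map_stringitem_t> seen;    // e.g. strVersions

       map_stringitem_t Store(std::string const &s)
       {
          std::map<std::string, map_stringitem_t>::const_iterator const hit = seen.find(s);
          if (hit != seen.end())
             return hit->second;                        // reuse the earlier offset
          map_stringitem_t const idx = file.size();     // append, NUL-terminated
          file.insert(file.end(), s.c_str(), s.c_str() + s.size() + 1);
          seen.insert(std::make_pair(s, idx));
          return idx;
       }
    };
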
// CheckValidity - Check that a cache is up-to-date /*{{{*/
@@ -1258,9 +1250,9 @@ static bool CheckValidity(const string &CacheFile,
// ---------------------------------------------------------------------
/* Size is kind of an abstract notion that is only used for the progress
meter */
-static unsigned long ComputeSize(FileIterator Start,FileIterator End)
+static map_filesize_t ComputeSize(FileIterator Start,FileIterator End)
{
- unsigned long TotalSize = 0;
+ map_filesize_t TotalSize = 0;
for (; Start < End; ++Start)
{
if ((*Start)->HasPackages() == false)
@@ -1275,7 +1267,7 @@ static unsigned long ComputeSize(FileIterator Start,FileIterator End)
/* */
static bool BuildCache(pkgCacheGenerator &Gen,
OpProgress *Progress,
- unsigned long &CurrentSize,unsigned long TotalSize,
+ map_filesize_t &CurrentSize,map_filesize_t TotalSize,
FileIterator Start, FileIterator End)
{
FileIterator I;
@@ -1294,7 +1286,7 @@ static bool BuildCache(pkgCacheGenerator &Gen,
continue;
}
- unsigned long Size = (*I)->Size();
+ map_filesize_t Size = (*I)->Size();
if (Progress != NULL)
Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Reading package lists"));
CurrentSize += Size;
@@ -1311,7 +1303,7 @@ static bool BuildCache(pkgCacheGenerator &Gen,
CurrentSize = 0;
for (I = Start; I != End; ++I)
{
- unsigned long Size = (*I)->Size();
+ map_filesize_t Size = (*I)->Size();
if (Progress != NULL)
Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Collecting File Provides"));
CurrentSize += Size;
@@ -1325,9 +1317,9 @@ static bool BuildCache(pkgCacheGenerator &Gen,
/*}}}*/
// CacheGenerator::CreateDynamicMMap - load an mmap with configuration options /*{{{*/
DynamicMMap* pkgCacheGenerator::CreateDynamicMMap(FileFd *CacheF, unsigned long Flags) {
- unsigned long const MapStart = _config->FindI("APT::Cache-Start", 24*1024*1024);
- unsigned long const MapGrow = _config->FindI("APT::Cache-Grow", 1*1024*1024);
- unsigned long const MapLimit = _config->FindI("APT::Cache-Limit", 0);
+ map_filesize_t const MapStart = _config->FindI("APT::Cache-Start", 24*1024*1024);
+ map_filesize_t const MapGrow = _config->FindI("APT::Cache-Grow", 1*1024*1024);
+ map_filesize_t const MapLimit = _config->FindI("APT::Cache-Limit", 0);
Flags |= MMap::Moveable;
if (_config->FindB("APT::Cache-Fallback", false) == true)
Flags |= MMap::Fallback;
@@ -1365,7 +1357,7 @@ bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress
Files.push_back (*j);
}
- unsigned long const EndOfSource = Files.size();
+ map_filesize_t const EndOfSource = Files.size();
if (_system->AddStatusFiles(Files) == false)
return false;
@@ -1455,8 +1447,8 @@ bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress
}
// Lets try the source cache.
- unsigned long CurrentSize = 0;
- unsigned long TotalSize = 0;
+ map_filesize_t CurrentSize = 0;
+ map_filesize_t TotalSize = 0;
if (CheckValidity(SrcCacheFile, List, Files.begin(),
Files.begin()+EndOfSource) == true)
{
@@ -1464,7 +1456,7 @@ bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress
std::clog << "srcpkgcache.bin is valid - populate MMap with it." << std::endl;
// Preload the map with the source cache
FileFd SCacheF(SrcCacheFile,FileFd::ReadOnly);
- unsigned long const alloc = Map->RawAllocate(SCacheF.Size());
+ map_pointer_t const alloc = Map->RawAllocate(SCacheF.Size());
if ((alloc == 0 && _error->PendingError())
|| SCacheF.Read((unsigned char *)Map->Data() + alloc,
SCacheF.Size()) == false)
@@ -1551,13 +1543,13 @@ APT_DEPRECATED bool pkgMakeOnlyStatusCache(OpProgress &Progress,DynamicMMap **Ou
bool pkgCacheGenerator::MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **OutMap)
{
std::vector<pkgIndexFile *> Files;
- unsigned long EndOfSource = Files.size();
+ map_filesize_t EndOfSource = Files.size();
if (_system->AddStatusFiles(Files) == false)
return false;
SPtr<DynamicMMap> Map = CreateDynamicMMap(NULL);
- unsigned long CurrentSize = 0;
- unsigned long TotalSize = 0;
+ map_filesize_t CurrentSize = 0;
+ map_filesize_t TotalSize = 0;
TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());
diff --git a/apt-pkg/pkgcachegen.h b/apt-pkg/pkgcachegen.h
index 1e1a71026..54c4f9e7a 100644
--- a/apt-pkg/pkgcachegen.h
+++ b/apt-pkg/pkgcachegen.h
@@ -27,6 +27,7 @@
#include <vector>
#include <string>
+#include <map>
class FileFd;
class pkgSourceList;
@@ -36,12 +37,15 @@ class pkgIndexFile;
class pkgCacheGenerator /*{{{*/
{
private:
+ APT_HIDDEN map_stringitem_t WriteStringInMap(std::string const &String) { return WriteStringInMap(String.c_str()); };
+ APT_HIDDEN map_stringitem_t WriteStringInMap(const char *String);
+ APT_HIDDEN map_stringitem_t WriteStringInMap(const char *String, const unsigned long &Len);
+ APT_HIDDEN map_pointer_t AllocateInMap(const unsigned long &size);
- pkgCache::StringItem *UniqHash[26];
- APT_HIDDEN map_ptrloc WriteStringInMap(std::string const &String) { return WriteStringInMap(String.c_str()); };
- APT_HIDDEN map_ptrloc WriteStringInMap(const char *String);
- APT_HIDDEN map_ptrloc WriteStringInMap(const char *String, const unsigned long &Len);
- APT_HIDDEN map_ptrloc AllocateInMap(const unsigned long &size);
+ std::map<std::string,map_stringitem_t> strMixed;
+ std::map<std::string,map_stringitem_t> strSections;
+ std::map<std::string,map_stringitem_t> strPkgNames;
+ std::map<std::string,map_stringitem_t> strVersions;
public:
@@ -78,21 +82,22 @@ class pkgCacheGenerator /*{{{*/
bool NewFileDesc(pkgCache::DescIterator &Desc,ListParser &List);
bool NewDepends(pkgCache::PkgIterator &Pkg, pkgCache::VerIterator &Ver,
std::string const &Version, unsigned int const &Op,
- unsigned int const &Type, map_ptrloc* &OldDepLast);
+ unsigned int const &Type, map_pointer_t* &OldDepLast);
bool NewDepends(pkgCache::PkgIterator &Pkg, pkgCache::VerIterator &Ver,
- map_ptrloc const Version, unsigned int const &Op,
- unsigned int const &Type, map_ptrloc* &OldDepLast);
- unsigned long NewVersion(pkgCache::VerIterator &Ver,const std::string &VerStr,unsigned long Next) APT_DEPRECATED
+ map_pointer_t const Version, unsigned int const &Op,
+ unsigned int const &Type, map_pointer_t* &OldDepLast);
+ map_pointer_t NewVersion(pkgCache::VerIterator &Ver,const std::string &VerStr,map_pointer_t const Next) APT_DEPRECATED
{ return NewVersion(Ver, VerStr, 0, 0, Next); }
- unsigned long NewVersion(pkgCache::VerIterator &Ver,const std::string &VerStr,
- map_ptrloc const ParentPkg, unsigned long const Hash,
- unsigned long Next);
- map_ptrloc NewDescription(pkgCache::DescIterator &Desc,const std::string &Lang,const MD5SumValue &md5sum,map_ptrloc Next);
+ map_pointer_t NewVersion(pkgCache::VerIterator &Ver,const std::string &VerStr,
+ map_pointer_t const ParentPkg, unsigned short const Hash,
+ map_pointer_t const Next);
+ map_pointer_t NewDescription(pkgCache::DescIterator &Desc,const std::string &Lang,const MD5SumValue &md5sum,map_stringitem_t const idxmd5str);
public:
- unsigned long WriteUniqString(const char *S,unsigned int Size);
- inline unsigned long WriteUniqString(const std::string &S) {return WriteUniqString(S.c_str(),S.length());};
+ enum StringType { MIXED, PKGNAME, VERSIONNUMBER, SECTION };
+ map_stringitem_t StoreString(StringType const type, const char * S, unsigned int const Size);
+ inline map_stringitem_t StoreString(enum StringType const type, const std::string &S) {return StoreString(type, S.c_str(),S.length());};
void DropProgress() {Progress = 0;};
bool SelectFile(const std::string &File,const std::string &Site,pkgIndexFile const &Index,
@@ -125,6 +130,9 @@ class pkgCacheGenerator /*{{{*/
APT_HIDDEN bool AddImplicitDepends(pkgCache::GrpIterator &G, pkgCache::PkgIterator &P,
pkgCache::VerIterator &V);
APT_HIDDEN bool AddImplicitDepends(pkgCache::VerIterator &V, pkgCache::PkgIterator &D);
+
+ APT_HIDDEN bool AddNewDescription(ListParser &List, pkgCache::VerIterator &Ver,
+ std::string const &lang, MD5SumValue const &CurMd5, map_stringitem_t &md5idx);
};
/*}}}*/
// This is the abstract package list parser class. /*{{{*/
@@ -135,17 +143,18 @@ class pkgCacheGenerator::ListParser
// Some cache items
pkgCache::VerIterator OldDepVer;
- map_ptrloc *OldDepLast;
+ map_pointer_t *OldDepLast;
// Flag file dependencies
bool FoundFileDeps;
protected:
- inline unsigned long WriteUniqString(std::string S) {return Owner->WriteUniqString(S);};
- inline unsigned long WriteUniqString(const char *S,unsigned int Size) {return Owner->WriteUniqString(S,Size);};
- inline unsigned long WriteString(const std::string &S) {return Owner->WriteStringInMap(S);};
- inline unsigned long WriteString(const char *S,unsigned int Size) {return Owner->WriteStringInMap(S,Size);};
+ inline map_stringitem_t StoreString(pkgCacheGenerator::StringType const type, std::string const &S) {return Owner->StoreString(type, S);};
+ inline map_stringitem_t StoreString(pkgCacheGenerator::StringType const type, const char *S,unsigned int Size) {return Owner->StoreString(type, S, Size);};
+
+ inline map_stringitem_t WriteString(const std::string &S) {return Owner->WriteStringInMap(S);};
+ inline map_stringitem_t WriteString(const char *S,unsigned int Size) {return Owner->WriteStringInMap(S,Size);};
bool NewDepends(pkgCache::VerIterator &Ver,const std::string &Package, const std::string &Arch,
const std::string &Version,unsigned int Op,
unsigned int Type);
@@ -160,8 +169,8 @@ class pkgCacheGenerator::ListParser
virtual bool ArchitectureAll() = 0;
virtual std::string Version() = 0;
virtual bool NewVersion(pkgCache::VerIterator &Ver) = 0;
- virtual std::string Description() = 0;
- virtual std::string DescriptionLanguage() = 0;
+ virtual std::string Description(std::string const &lang) = 0;
+ virtual std::vector<std::string> AvailableDescriptionLanguages() = 0;
virtual MD5SumValue Description_md5() = 0;
virtual unsigned short VersionHash() = 0;
/** compare currently parsed version with given version
@@ -175,8 +184,8 @@ class pkgCacheGenerator::ListParser
APT_PURE bool SameVersion(unsigned short const Hash, pkgCache::VerIterator const &Ver);
virtual bool UsePackage(pkgCache::PkgIterator &Pkg,
pkgCache::VerIterator &Ver) = 0;
- virtual unsigned long Offset() = 0;
- virtual unsigned long Size() = 0;
+ virtual map_filesize_t Offset() = 0;
+ virtual map_filesize_t Size() = 0;
virtual bool Step() = 0;
@@ -184,7 +193,7 @@ class pkgCacheGenerator::ListParser
virtual bool CollectFileProvides(pkgCache &/*Cache*/,
pkgCache::VerIterator &/*Ver*/) {return true;};
- ListParser() : FoundFileDeps(false) {};
+ ListParser() : Owner(NULL), OldDepLast(NULL), FoundFileDeps(false) {};
virtual ~ListParser() {};
};
/*}}}*/
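
A list parser now reports which description translations it carries and hands each one out
on request, instead of exposing a single language/text pair. A hypothetical parser fragment
showing the shape of the two new virtuals (class name, the Section member and the hard-coded
languages are illustrative only, not from the patch):

    // Hypothetical subclass fragment built on a pkgTagSection named Section.
    std::vector<std::string> MyListParser::AvailableDescriptionLanguages()
    {
       std::vector<std::string> langs;
       if (Section.Exists("Description"))            // untranslated master field
          langs.push_back("");
       if (Section.Exists("Description-de"))         // e.g. an inline translation
          langs.push_back("de");
       return langs;
    }

    std::string MyListParser::Description(std::string const &lang)
    {
       if (lang.empty())
          return Section.FindS("Description");
       return Section.FindS(std::string("Description-").append(lang).c_str());
    }
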
diff --git a/apt-pkg/pkgrecords.cc b/apt-pkg/pkgrecords.cc
index c403e4dc3..859af3a09 100644
--- a/apt-pkg/pkgrecords.cc
+++ b/apt-pkg/pkgrecords.cc
@@ -26,7 +26,7 @@
// Records::pkgRecords - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* This will create the necessary structures to access the status files */
-pkgRecords::pkgRecords(pkgCache &Cache) : d(NULL), Cache(Cache),
+pkgRecords::pkgRecords(pkgCache &aCache) : d(NULL), Cache(aCache),
Files(Cache.HeaderP->PackageFileCount)
{
for (pkgCache::PkgFileIterator I = Cache.FileBegin();
diff --git a/apt-pkg/pkgrecords.h b/apt-pkg/pkgrecords.h
index b5237b3a0..a902da8b8 100644
--- a/apt-pkg/pkgrecords.h
+++ b/apt-pkg/pkgrecords.h
@@ -18,6 +18,8 @@
#define PKGLIB_PKGRECORDS_H
#include <apt-pkg/pkgcache.h>
+#include <apt-pkg/hashes.h>
+#include <apt-pkg/macros.h>
#include <string>
#include <vector>
@@ -56,17 +58,39 @@ class pkgRecords::Parser /*{{{*/
// These refer to the archive file for the Version
virtual std::string FileName() {return std::string();};
- virtual std::string MD5Hash() {return std::string();};
- virtual std::string SHA1Hash() {return std::string();};
- virtual std::string SHA256Hash() {return std::string();};
- virtual std::string SHA512Hash() {return std::string();};
virtual std::string SourcePkg() {return std::string();};
virtual std::string SourceVer() {return std::string();};
+ /** return all known hashes in this record.
+ *
+    * For authentication purposes packages come with hashsums which
+ * this method is supposed to parse and return so that clients can
+ * choose the hash to be used.
+ */
+ virtual HashStringList Hashes() const { return HashStringList(); };
+ APT_DEPRECATED std::string MD5Hash() const { return GetHashFromHashes("MD5Sum"); };
+ APT_DEPRECATED std::string SHA1Hash() const { return GetHashFromHashes("SHA1"); };
+ APT_DEPRECATED std::string SHA256Hash() const { return GetHashFromHashes("SHA256"); };
+ APT_DEPRECATED std::string SHA512Hash() const { return GetHashFromHashes("SHA512"); };
+
// These are some general stats about the package
virtual std::string Maintainer() {return std::string();};
- virtual std::string ShortDesc() {return std::string();};
- virtual std::string LongDesc() {return std::string();};
+ /** return short description in language from record.
+ *
+ * @see #LongDesc
+ */
+ virtual std::string ShortDesc(std::string const &/*lang*/) {return std::string();};
+ /** return long description in language from record.
+ *
+ * If \b lang is empty the "best" available language will be
+ * returned as determined by the APT::Languages configuration.
+ * If a (requested) language can't be found in this record an empty
+ * string will be returned.
+ */
+ virtual std::string LongDesc(std::string const &/*lang*/) {return std::string();};
+ std::string ShortDesc() {return ShortDesc("");};
+ std::string LongDesc() {return LongDesc("");};
+
virtual std::string Name() {return std::string();};
virtual std::string Homepage() {return std::string();}
@@ -77,6 +101,14 @@ class pkgRecords::Parser /*{{{*/
virtual void GetRec(const char *&Start,const char *&Stop) {Start = Stop = 0;};
virtual ~Parser() {};
+
+ private:
+ APT_HIDDEN std::string GetHashFromHashes(char const * const type) const
+ {
+ HashStringList const hashes = Hashes();
+ HashString const * const hs = hashes.find(type);
+ return hs != NULL ? hs->HashValue() : "";
+ };
};
/*}}}*/
#endif
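
Callers are expected to fetch the whole HashStringList and pick a digest from it; the
deprecated per-algorithm getters are now just filters over that list. A hedged usage
sketch, assuming a valid pkgRecords/VerIterator pair is already at hand:

    #include <iostream>
    #include <apt-pkg/pkgrecords.h>
    #include <apt-pkg/cacheiterators.h>

    // Print the SHA256 of a version's archive, if the record carries one.
    static void PrintSha256(pkgRecords &Recs, pkgCache::VerIterator const &Ver)
    {
       pkgRecords::Parser &Parse = Recs.Lookup(Ver.FileList());
       HashStringList const hashes = Parse.Hashes();
       if (HashString const * const sha256 = hashes.find("SHA256"))
          std::cout << "SHA256: " << sha256->HashValue() << std::endl;
    }
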
diff --git a/apt-pkg/pkgsystem.h b/apt-pkg/pkgsystem.h
index 6e33c67ed..f88ffa7c8 100644
--- a/apt-pkg/pkgsystem.h
+++ b/apt-pkg/pkgsystem.h
@@ -85,10 +85,12 @@ class pkgSystem
virtual bool AddStatusFiles(std::vector<pkgIndexFile *> &List) = 0;
virtual bool FindIndex(pkgCache::PkgFileIterator File,
pkgIndexFile *&Found) const = 0;
-
+
   /* Evaluate how 'right' we are for this system based on the filesystem
etc.. */
- virtual signed Score(Configuration const &/*Cnf*/) {return 0;};
+ virtual signed Score(Configuration const &/*Cnf*/) {
+ return 0;
+ };
pkgSystem();
virtual ~pkgSystem() {};
diff --git a/apt-pkg/sourcelist.cc b/apt-pkg/sourcelist.cc
index e37899ec6..7170e8b5b 100644
--- a/apt-pkg/sourcelist.cc
+++ b/apt-pkg/sourcelist.cc
@@ -119,7 +119,7 @@ bool pkgSourceList::Type::ParseStanza(vector<metaIndex *> &List,
std::vector<std::string> list_section = StringSplit(Section, " ");
for (std::vector<std::string>::const_iterator U = list_uris.begin();
- U != list_uris.end(); U++)
+ U != list_uris.end(); ++U)
{
std::string URI = (*U);
if (!FixupURI(URI))
@@ -129,10 +129,10 @@ bool pkgSourceList::Type::ParseStanza(vector<metaIndex *> &List,
}
for (std::vector<std::string>::const_iterator I = list_dist.begin();
- I != list_dist.end(); I++)
+ I != list_dist.end(); ++I)
{
for (std::vector<std::string>::const_iterator J = list_section.begin();
- J != list_section.end(); J++)
+ J != list_section.end(); ++J)
{
if (CreateItem(List, URI, (*I), (*J), Options) == false)
{
@@ -408,7 +408,7 @@ int pkgSourceList::ParseFileDeb822(string File)
string const types = Tags.FindS("Types");
std::vector<std::string> list_types = StringSplit(types, " ");
for (std::vector<std::string>::const_iterator I = list_types.begin();
- I != list_types.end(); I++)
+ I != list_types.end(); ++I)
{
Type *Parse = Type::GetType((*I).c_str());
if (Parse == 0)
diff --git a/apt-pkg/sourcelist.h b/apt-pkg/sourcelist.h
index 9df0c1d74..35b786e1a 100644
--- a/apt-pkg/sourcelist.h
+++ b/apt-pkg/sourcelist.h
@@ -52,7 +52,15 @@ class pkgAcquire;
class pkgIndexFile;
class metaIndex;
-class pkgSourceList
+class pkgSource
+{
+ protected:
+
+ std::vector<metaIndex *> SrcList;
+
+};
+
+class pkgSourceList : public pkgSource
{
public:
@@ -86,15 +94,11 @@ class pkgSourceList
typedef std::vector<metaIndex *>::const_iterator const_iterator;
- protected:
-
- std::vector<metaIndex *> SrcList;
+ public:
int ParseFileDeb822(std::string File);
bool ParseFileOldStyle(std::string File);
- public:
-
bool ReadMainList();
bool Read(std::string File);
diff --git a/apt-pkg/srcrecords.h b/apt-pkg/srcrecords.h
index e000e176a..c68d374bb 100644
--- a/apt-pkg/srcrecords.h
+++ b/apt-pkg/srcrecords.h
@@ -14,6 +14,7 @@
#define PKGLIB_SRCRECORDS_H
#include <apt-pkg/macros.h>
+#include <apt-pkg/hashes.h>
#include <string>
#include <vector>
@@ -29,15 +30,24 @@ class pkgSrcRecords
{
public:
+#if __GNUC__ >= 4
+ // ensure that con- & de-structor don't trigger this warning
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
// Describes a single file
struct File
{
- std::string MD5Hash;
- unsigned long Size;
std::string Path;
std::string Type;
+ unsigned long long Size;
+ HashStringList Hashes;
+ APT_DEPRECATED std::string MD5Hash;
};
-
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
+
// Abstract parser for each source record
class Parser
{
diff --git a/apt-pkg/tagfile.cc b/apt-pkg/tagfile.cc
index bf865bdc4..b263baf66 100644
--- a/apt-pkg/tagfile.cc
+++ b/apt-pkg/tagfile.cc
@@ -47,16 +47,43 @@ public:
unsigned long long Size;
};
+static unsigned long AlphaHash(const char *Text, size_t Length) /*{{{*/
+{
+   /* This very simple hash function for the last 8 letters gives
+      very good performance on the Debian package files */
+ if (Length > 8)
+ {
+ Text += (Length - 8);
+ Length = 8;
+ }
+ unsigned long Res = 0;
+ for (size_t i = 0; i < Length; ++i)
+ Res = ((unsigned long)(Text[i]) & 0xDF) ^ (Res << 1);
+ return Res & 0xFF;
+}
+ /*}}}*/
+
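
AlphaHash keys on at most the last eight characters of a field name, which keeps the table
at 256 buckets while still separating the common Debian control fields well. An illustration
of how Scan() uses it (only meaningful inside tagfile.cc, where the static AlphaHash is
visible; the tag name is just an example):

    #include <cstring>

    char const * const tag = "Package";
    unsigned long const bucket = AlphaHash(tag, strlen(tag));   // always in [0, 255]
    // LookupTable[bucket] then leads to the Tags entry for this field, with
    // NextInBucket chaining any field names that hash to the same bucket.
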
// TagFile::pkgTagFile - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
pkgTagFile::pkgTagFile(FileFd *pFd,unsigned long long Size)
+ : d(NULL)
+{
+ Init(pFd, Size);
+}
+
+void pkgTagFile::Init(FileFd *pFd,unsigned long long Size)
{
/* The size is increased by 4 because if we start with the Size of the
      file we need to try to read 1 char more to see an EOF faster, 1
char the end-pointer can be on and maybe 2 newlines need to be added
to the end of the file -> 4 extra chars */
Size += 4;
+ if(d != NULL)
+ {
+ free(d->Buffer);
+ delete d;
+ }
d = new pkgTagFilePrivate(pFd, Size);
if (d->Fd.IsOpen() == false)
@@ -128,18 +155,23 @@ bool pkgTagFile::Resize(unsigned long long const newSize)
*/
bool pkgTagFile::Step(pkgTagSection &Tag)
{
- while (Tag.Scan(d->Start,d->End - d->Start) == false)
+ if(Tag.Scan(d->Start,d->End - d->Start) == false)
{
- if (Fill() == false)
- return false;
-
- if(Tag.Scan(d->Start,d->End - d->Start))
- break;
+ do
+ {
+ if (Fill() == false)
+ return false;
- if (Resize() == false)
- return _error->Error(_("Unable to parse package file %s (1)"),
- d->Fd.Name().c_str());
+ if(Tag.Scan(d->Start,d->End - d->Start, false))
+ break;
+
+ if (Resize() == false)
+ return _error->Error(_("Unable to parse package file %s (1)"),
+ d->Fd.Name().c_str());
+
+ } while (Tag.Scan(d->Start,d->End - d->Start, false) == false);
}
+
d->Start += Tag.size();
d->iOffset += Tag.size();
@@ -233,7 +265,7 @@ bool pkgTagFile::Jump(pkgTagSection &Tag,unsigned long long Offset)
if (Fill() == false)
return false;
- if (Tag.Scan(d->Start, d->End - d->Start) == false)
+ if (Tag.Scan(d->Start, d->End - d->Start, false) == false)
return _error->Error(_("Unable to parse package file %s (2)"),d->Fd.Name().c_str());
return true;
@@ -243,27 +275,46 @@ bool pkgTagFile::Jump(pkgTagSection &Tag,unsigned long long Offset)
// ---------------------------------------------------------------------
/* */
pkgTagSection::pkgTagSection()
- : Section(0), TagCount(0), d(NULL), Stop(0)
+ : Section(0), d(NULL), Stop(0)
{
- memset(&Indexes, 0, sizeof(Indexes));
- memset(&AlphaIndexes, 0, sizeof(AlphaIndexes));
+ memset(&LookupTable, 0, sizeof(LookupTable));
}
/*}}}*/
// TagSection::Scan - Scan for the end of the header information /*{{{*/
-// ---------------------------------------------------------------------
-/* This looks for the first double new line in the data stream.
- It also indexes the tags in the section. */
-bool pkgTagSection::Scan(const char *Start,unsigned long MaxLength)
+bool pkgTagSection::Scan(const char *Start,unsigned long MaxLength, bool const Restart)
{
+ Section = Start;
const char *End = Start + MaxLength;
- Stop = Section = Start;
- memset(AlphaIndexes,0,sizeof(AlphaIndexes));
+
+ if (Restart == false && Tags.empty() == false)
+ {
+ Stop = Section + Tags.back().StartTag;
+ if (End <= Stop)
+ return false;
+ Stop = (const char *)memchr(Stop,'\n',End - Stop);
+ if (Stop == NULL)
+ return false;
+ ++Stop;
+ }
+ else
+ {
+ Stop = Section;
+ if (Tags.empty() == false)
+ {
+ memset(&LookupTable, 0, sizeof(LookupTable));
+ Tags.clear();
+ }
+ Tags.reserve(0x100);
+ }
+ size_t TagCount = Tags.size();
if (Stop == 0)
return false;
- TagCount = 0;
- while (TagCount+1 < sizeof(Indexes)/sizeof(Indexes[0]) && Stop < End)
+ TagData lastTagData(0);
+ lastTagData.EndTag = 0;
+ unsigned long lastTagHash = 0;
+ while (Stop < End)
{
TrimRecord(true,End);
@@ -275,12 +326,39 @@ bool pkgTagSection::Scan(const char *Start,unsigned long MaxLength)
// Start a new index and add it to the hash
if (isspace(Stop[0]) == 0)
{
- Indexes[TagCount++] = Stop - Section;
- AlphaIndexes[AlphaHash(Stop,End)] = TagCount;
+ // store the last found tag
+ if (lastTagData.EndTag != 0)
+ {
+ if (LookupTable[lastTagHash] != 0)
+ lastTagData.NextInBucket = LookupTable[lastTagHash];
+ LookupTable[lastTagHash] = TagCount;
+ Tags.push_back(lastTagData);
+ }
+
+ ++TagCount;
+ lastTagData = TagData(Stop - Section);
+ // find the colon separating tag and value
+ char const * Colon = (char const *) memchr(Stop, ':', End - Stop);
+ if (Colon == NULL)
+ return false;
+ // find the end of the tag (which might or might not be the colon)
+ char const * EndTag = Colon;
+ --EndTag;
+ for (; EndTag > Stop && isspace(*EndTag) != 0; --EndTag)
+ ;
+ ++EndTag;
+ lastTagData.EndTag = EndTag - Section;
+ lastTagHash = AlphaHash(Stop, EndTag - Stop);
+ // find the beginning of the value
+ Stop = Colon + 1;
+ for (; isspace(*Stop) != 0; ++Stop);
+ if (Stop >= End)
+ return false;
+ lastTagData.StartValue = Stop - Section;
}
Stop = (const char *)memchr(Stop,'\n',End - Stop);
-
+
if (Stop == 0)
return false;
@@ -291,7 +369,16 @@ bool pkgTagSection::Scan(const char *Start,unsigned long MaxLength)
// Double newline marks the end of the record
if (Stop+1 < End && Stop[1] == '\n')
{
- Indexes[TagCount] = Stop - Section;
+ if (lastTagData.EndTag != 0)
+ {
+ if (LookupTable[lastTagHash] != 0)
+ lastTagData.NextInBucket = LookupTable[lastTagHash];
+ LookupTable[lastTagHash] = TagCount;
+ Tags.push_back(lastTagData);
+ }
+
+ TagData const td(Stop - Section);
+ Tags.push_back(td);
TrimRecord(false,End);
return true;
}
@@ -320,8 +407,8 @@ void pkgTagSection::Trim()
for (; Stop > Section + 2 && (Stop[-2] == '\n' || Stop[-2] == '\r'); Stop--);
}
/*}}}*/
-// TagSection::Exists - return True if a tag exists /*{{{*/
-bool pkgTagSection::Exists(const char* const Tag)
+// TagSection::Exists - return True if a tag exists /*{{{*/
+bool pkgTagSection::Exists(const char* const Tag) const
{
unsigned int tmp;
return Find(Tag, tmp);
@@ -332,73 +419,43 @@ bool pkgTagSection::Exists(const char* const Tag)
/* This searches the section for a tag that matches the given string. */
bool pkgTagSection::Find(const char *Tag,unsigned int &Pos) const
{
- unsigned int Length = strlen(Tag);
- unsigned int I = AlphaIndexes[AlphaHash(Tag)];
- if (I == 0)
+ size_t const Length = strlen(Tag);
+ unsigned int Bucket = LookupTable[AlphaHash(Tag, Length)];
+ if (Bucket == 0)
return false;
- I--;
-
- for (unsigned int Counter = 0; Counter != TagCount; Counter++,
- I = (I+1)%TagCount)
+
+ for (; Bucket != 0; Bucket = Tags[Bucket - 1].NextInBucket)
{
- const char *St;
- St = Section + Indexes[I];
- if (strncasecmp(Tag,St,Length) != 0)
+ if ((Tags[Bucket - 1].EndTag - Tags[Bucket - 1].StartTag) != Length)
continue;
- // Make sure the colon is in the right place
- const char *C = St + Length;
- for (; isspace(*C) != 0; C++);
- if (*C != ':')
+ char const * const St = Section + Tags[Bucket - 1].StartTag;
+ if (strncasecmp(Tag,St,Length) != 0)
continue;
- Pos = I;
+
+ Pos = Bucket - 1;
return true;
}
Pos = 0;
return false;
}
- /*}}}*/
-// TagSection::Find - Locate a tag /*{{{*/
-// ---------------------------------------------------------------------
-/* This searches the section for a tag that matches the given string. */
bool pkgTagSection::Find(const char *Tag,const char *&Start,
const char *&End) const
{
- unsigned int Length = strlen(Tag);
- unsigned int I = AlphaIndexes[AlphaHash(Tag)];
- if (I == 0)
+ unsigned int Pos;
+ if (Find(Tag, Pos) == false)
return false;
- I--;
-
- for (unsigned int Counter = 0; Counter != TagCount; Counter++,
- I = (I+1)%TagCount)
- {
- const char *St;
- St = Section + Indexes[I];
- if (strncasecmp(Tag,St,Length) != 0)
- continue;
-
- // Make sure the colon is in the right place
- const char *C = St + Length;
- for (; isspace(*C) != 0; C++);
- if (*C != ':')
- continue;
- // Strip off the gunk from the start end
- Start = C;
- End = Section + Indexes[I+1];
- if (Start >= End)
- return _error->Error("Internal parsing error");
-
- for (; (isspace(*Start) != 0 || *Start == ':') && Start < End; Start++);
- for (; isspace(End[-1]) != 0 && End > Start; End--);
-
- return true;
- }
-
- Start = End = 0;
- return false;
+ Start = Section + Tags[Pos].StartValue;
+ // Strip off the gunk from the end
+ End = Section + Tags[Pos + 1].StartTag;
+ if (unlikely(Start > End))
+ return _error->Error("Internal parsing error");
+
+ for (; isspace(End[-1]) != 0 && End > Start; --End);
+
+ return true;
}
/*}}}*/
// TagSection::FindS - Find a string /*{{{*/
@@ -461,6 +518,17 @@ unsigned long long pkgTagSection::FindULL(const char *Tag, unsigned long long co
return Result;
}
/*}}}*/
+// TagSection::FindB - Find boolean value /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool pkgTagSection::FindB(const char *Tag, bool const &Default) const
+{
+ const char *Start, *Stop;
+ if (Find(Tag, Start, Stop) == false)
+ return Default;
+ return StringToBool(string(Start, Stop));
+}
+ /*}}}*/
// TagSection::FindFlag - Locate a yes/no type flag /*{{{*/
// ---------------------------------------------------------------------
/* The bits marked in Flag are masked on/off in Flags */
@@ -493,6 +561,13 @@ bool pkgTagSection::FindFlag(unsigned long &Flags, unsigned long Flag,
return true;
}
/*}}}*/
+APT_PURE unsigned int pkgTagSection::Count() const { /*{{{*/
+ if (Tags.empty() == true)
+ return 0;
+ // the last element is just marking the end and isn't a real one
+ return Tags.size() - 1;
+}
+ /*}}}*/
// TFRewrite - Rewrite a control record /*{{{*/
// ---------------------------------------------------------------------
/* This writes the control record to stdout rewriting it as necessary. The
@@ -682,3 +757,5 @@ bool TFRewrite(FILE *Output,pkgTagSection const &Tags,const char *Order[],
return true;
}
/*}}}*/
+
+pkgTagSection::~pkgTagSection() {}
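
The rewritten Find above replaces the old linear probe over Indexes with a chained-bucket lookup. A standalone sketch of that idea, not part of the patch, using the same 1-based index convention (0 means an empty bucket) and the same case-folding AlphaHash:

#include <strings.h>
#include <cstring>
#include <string>
#include <vector>

static unsigned long AlphaHash(const char *Text, size_t Length)
{
   if (Length > 8) { Text += Length - 8; Length = 8; }
   unsigned long Res = 0;
   for (size_t i = 0; i < Length; ++i)
      Res = ((unsigned long)(Text[i]) & 0xDF) ^ (Res << 1);
   return Res & 0xFF;
}

struct Entry { std::string Tag; unsigned int NextInBucket; };

class TagTable
{
   std::vector<Entry> Entries;
   unsigned int LookupTable[0x100];
 public:
   TagTable() { std::memset(LookupTable, 0, sizeof(LookupTable)); }
   void Add(std::string const &Tag)
   {
      unsigned long const Hash = AlphaHash(Tag.c_str(), Tag.length());
      Entry E; E.Tag = Tag; E.NextInBucket = LookupTable[Hash];   // chain the previous head
      Entries.push_back(E);
      LookupTable[Hash] = Entries.size();                         // store a 1-based index
   }
   bool Find(char const *Tag) const
   {
      size_t const Length = std::strlen(Tag);
      for (unsigned int B = LookupTable[AlphaHash(Tag, Length)]; B != 0; B = Entries[B - 1].NextInBucket)
         if (Entries[B - 1].Tag.length() == Length &&
             strncasecmp(Tag, Entries[B - 1].Tag.c_str(), Length) == 0)
            return true;
      return false;
   }
};
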
diff --git a/apt-pkg/tagfile.h b/apt-pkg/tagfile.h
index d5b62e76d..4cd99b2fc 100644
--- a/apt-pkg/tagfile.h
+++ b/apt-pkg/tagfile.h
@@ -25,6 +25,8 @@
#include <stdio.h>
#include <string>
+#include <vector>
+#include <list>
#ifndef APT_8_CLEANER_HEADERS
#include <apt-pkg/fileutl.h>
@@ -35,23 +37,20 @@ class FileFd;
class pkgTagSection
{
const char *Section;
- // We have a limit of 256 tags per section.
- unsigned int Indexes[256];
- unsigned int AlphaIndexes[0x100];
- unsigned int TagCount;
+ struct TagData {
+ unsigned int StartTag;
+ unsigned int EndTag;
+ unsigned int StartValue;
+ unsigned int NextInBucket;
+
+ TagData(unsigned int const StartTag) : StartTag(StartTag), EndTag(0), StartValue(0), NextInBucket(0) {}
+ };
+ std::vector<TagData> Tags;
+ unsigned int LookupTable[0x100];
+
// dpointer placeholder (for later in case we need it)
void *d;
- /* This very simple hash function for the last 8 letters gives
- very good performance on the debian package files */
- inline static unsigned long AlphaHash(const char *Text, const char *End = 0)
- {
- unsigned long Res = 0;
- for (; Text != End && *Text != ':' && *Text != 0; Text++)
- Res = ((unsigned long)(*Text) & 0xDF) ^ (Res << 1);
- return Res & 0xFF;
- }
-
protected:
const char *Stop;
@@ -63,23 +62,46 @@ class pkgTagSection
bool Find(const char *Tag,const char *&Start, const char *&End) const;
bool Find(const char *Tag,unsigned int &Pos) const;
std::string FindS(const char *Tag) const;
- signed int FindI(const char *Tag,signed long Default = 0) const ;
+ signed int FindI(const char *Tag,signed long Default = 0) const;
+ bool FindB(const char *Tag, bool const &Default = false) const;
unsigned long long FindULL(const char *Tag, unsigned long long const &Default = 0) const;
bool FindFlag(const char *Tag,unsigned long &Flags,
unsigned long Flag) const;
bool static FindFlag(unsigned long &Flags, unsigned long Flag,
const char* Start, const char* Stop);
- bool Scan(const char *Start,unsigned long MaxLength);
+
+ /** \brief searches the boundaries of the current section
+ *
+ * While parameter Start marks the beginning of the section, this method
+ * will search for the first double newline in the data stream which marks
+ * the end of the section. It also does a first pass over the content of
+ * the section parsing it as encountered for processing later on by Find
+ * the section, parsing it as encountered so that Find can process it later on.
+ * @param Start is the beginning of the section
+ * @param MaxLength is the size of valid data in the stream pointed to by Start
+ * @param Restart if enabled internal state will be cleared, otherwise it is
+ * assumed that now more data is available in the stream and the parsing will
+ * start where it encountered insufficient data the last time.
+ *
+ * @return \b true if section end was found, \b false otherwise.
+ * Beware that internal state will be inconsistent if \b false is returned!
+ */
+ APT_MUSTCHECK bool Scan(const char *Start, unsigned long MaxLength, bool const Restart = true);
inline unsigned long size() const {return Stop - Section;};
void Trim();
virtual void TrimRecord(bool BeforeRecord, const char* &End);
-
- inline unsigned int Count() const {return TagCount;};
- bool Exists(const char* const Tag);
-
+
+ /** \brief amount of Tags in the current section
+ *
+ * Note: if a Tag is mentioned repeatedly it will be counted multiple
+ * times, but only the last occurrence is available via Find methods.
+ */
+ unsigned int Count() const;
+ bool Exists(const char* const Tag) const;
+
inline void Get(const char *&Start,const char *&Stop,unsigned int I) const
- {Start = Section + Indexes[I]; Stop = Section + Indexes[I+1];}
-
+ {Start = Section + Tags[I].StartTag; Stop = Section + Tags[I+1].StartTag;}
+
inline void GetSection(const char *&Start,const char *&Stop) const
{
Start = Section;
@@ -87,7 +109,7 @@ class pkgTagSection
};
pkgTagSection();
- virtual ~pkgTagSection() {};
+ virtual ~pkgTagSection();
};
class pkgTagFilePrivate;
@@ -105,6 +127,8 @@ class pkgTagFile
unsigned long Offset();
bool Jump(pkgTagSection &Tag,unsigned long long Offset);
+ void Init(FileFd *F,unsigned long long Size = 32*1024);
+
pkgTagFile(FileFd *F,unsigned long long Size = 32*1024);
virtual ~pkgTagFile();
};
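
For orientation, a hedged sketch of the unchanged public reading loop that sits on top of the reworked Scan/Find internals; the file path is illustrative only:

#include <apt-pkg/fileutl.h>
#include <apt-pkg/tagfile.h>
#include <iostream>

int main()
{
   FileFd Fd("/var/lib/dpkg/status", FileFd::ReadOnly);   // any deb822-style file works
   pkgTagFile TagFile(&Fd);
   pkgTagSection Section;
   while (TagFile.Step(Section) == true)                  // Step drives Fill/Scan internally
      std::cout << Section.FindS("Package") << ": " << Section.Count() << " tags\n";
   return 0;
}
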
diff --git a/apt-pkg/update.cc b/apt-pkg/update.cc
index 5d5b19626..2908a4820 100644
--- a/apt-pkg/update.cc
+++ b/apt-pkg/update.cc
@@ -27,8 +27,8 @@ bool ListUpdate(pkgAcquireStatus &Stat,
pkgSourceList &List,
int PulseInterval)
{
- pkgAcquire Fetcher;
- if (Fetcher.Setup(&Stat, _config->FindDir("Dir::State::Lists")) == false)
+ pkgAcquire Fetcher(&Stat);
+ if (Fetcher.GetLock(_config->FindDir("Dir::State::Lists")) == false)
return false;
// Populate it with the source selection
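
On the caller side only the construction pattern changes; a hedged sketch using just the interfaces declared in this diff (the pkgAcquireStatus implementation is whatever the caller already owns):

#include <apt-pkg/sourcelist.h>
#include <apt-pkg/update.h>

bool RefreshIndexes(pkgAcquireStatus &Stat)
{
   pkgSourceList List;
   if (List.ReadMainList() == false)   // reads sources.list / sources.list.d
      return false;
   return ListUpdate(Stat, List);      // constructs its own pkgAcquire(&Stat) internally
}
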
diff --git a/apt-pkg/update.h b/apt-pkg/update.h
index 3835644de..e35cd14f6 100644
--- a/apt-pkg/update.h
+++ b/apt-pkg/update.h
@@ -11,7 +11,8 @@
#define PKGLIB_UPDATE_H
class pkgAcquireStatus;
-
+class pkgSourceList;
+class pkgAcquire;
bool ListUpdate(pkgAcquireStatus &progress, pkgSourceList &List, int PulseInterval=0);
bool AcquireUpdate(pkgAcquire &Fetcher, int const PulseInterval = 0,
diff --git a/apt-pkg/upgrade.cc b/apt-pkg/upgrade.cc
index 29b11937b..ca670bdf5 100644
--- a/apt-pkg/upgrade.cc
+++ b/apt-pkg/upgrade.cc
@@ -24,13 +24,14 @@
The problem resolver is used to resolve the problems.
*/
-bool pkgDistUpgrade(pkgDepCache &Cache)
+bool pkgDistUpgrade(pkgDepCache &Cache, OpProgress * const Progress)
{
std::string const solver = _config->Find("APT::Solver", "internal");
- if (solver != "internal") {
- OpTextProgress Prog(*_config);
- return EDSP::ResolveExternal(solver.c_str(), Cache, false, true, false, &Prog);
- }
+ if (solver != "internal")
+ return EDSP::ResolveExternal(solver.c_str(), Cache, false, true, false, Progress);
+
+ if (Progress != NULL)
+ Progress->OverallProgress(0, 100, 1, _("Calculating upgrade"));
pkgDepCache::ActionGroup group(Cache);
@@ -41,12 +42,18 @@ bool pkgDistUpgrade(pkgDepCache &Cache)
if (I->CurrentVer != 0)
Cache.MarkInstall(I, false, 0, false);
+ if (Progress != NULL)
+ Progress->Progress(10);
+
/* Auto upgrade all installed packages, this provides the basis
for the installation */
for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I)
if (I->CurrentVer != 0)
Cache.MarkInstall(I, true, 0, false);
+ if (Progress != NULL)
+ Progress->Progress(50);
+
/* Now, install each essential package which is not installed
(and not provided by another package in the same name group) */
std::string essential = _config->Find("pkgCacheGen::Essential", "all");
@@ -77,15 +84,24 @@ bool pkgDistUpgrade(pkgDepCache &Cache)
for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I)
if ((I->Flags & pkgCache::Flag::Essential) == pkgCache::Flag::Essential)
Cache.MarkInstall(I, true, 0, false);
-
+
+ if (Progress != NULL)
+ Progress->Progress(55);
+
/* We do it again over all previously installed packages to force
conflict resolution on them all. */
for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I)
if (I->CurrentVer != 0)
Cache.MarkInstall(I, false, 0, false);
+ if (Progress != NULL)
+ Progress->Progress(65);
+
pkgProblemResolver Fix(&Cache);
+ if (Progress != NULL)
+ Progress->Progress(95);
+
// Hold back held packages.
if (_config->FindB("APT::Ignore-Hold",false) == false)
{
@@ -98,18 +114,22 @@ bool pkgDistUpgrade(pkgDepCache &Cache)
}
}
}
-
- return Fix.Resolve();
+
+ bool const success = Fix.Resolve(false, Progress);
+ if (Progress != NULL)
+ Progress->Done();
+ return success;
}
/*}}}*/
// AllUpgradeNoNewPackages - Upgrade but no removals or new pkgs /*{{{*/
-static bool pkgAllUpgradeNoNewPackages(pkgDepCache &Cache)
+static bool pkgAllUpgradeNoNewPackages(pkgDepCache &Cache, OpProgress * const Progress)
{
std::string const solver = _config->Find("APT::Solver", "internal");
- if (solver != "internal") {
- OpTextProgress Prog(*_config);
- return EDSP::ResolveExternal(solver.c_str(), Cache, true, false, false, &Prog);
- }
+ if (solver != "internal")
+ return EDSP::ResolveExternal(solver.c_str(), Cache, true, false, false, Progress);
+
+ if (Progress != NULL)
+ Progress->OverallProgress(0, 100, 1, _("Calculating upgrade"));
pkgDepCache::ActionGroup group(Cache);
@@ -131,8 +151,15 @@ static bool pkgAllUpgradeNoNewPackages(pkgDepCache &Cache)
if (I->CurrentVer != 0 && Cache[I].InstallVer != 0)
Cache.MarkInstall(I, false, 0, false);
}
-
- return Fix.ResolveByKeep();
+
+ if (Progress != NULL)
+ Progress->Progress(50);
+
+ // resolve remaining issues via keep
+ bool const success = Fix.ResolveByKeep(Progress);
+ if (Progress != NULL)
+ Progress->Done();
+ return success;
}
/*}}}*/
// AllUpgradeWithNewInstalls - Upgrade + install new packages as needed /*{{{*/
@@ -141,13 +168,14 @@ static bool pkgAllUpgradeNoNewPackages(pkgDepCache &Cache)
* Upgrade as much as possible without deleting anything (useful for
* stable systems)
*/
-static bool pkgAllUpgradeWithNewPackages(pkgDepCache &Cache)
+static bool pkgAllUpgradeWithNewPackages(pkgDepCache &Cache, OpProgress * const Progress)
{
std::string const solver = _config->Find("APT::Solver", "internal");
- if (solver != "internal") {
- OpTextProgress Prog(*_config);
- return EDSP::ResolveExternal(solver.c_str(), Cache, true, false, false, &Prog);
- }
+ if (solver != "internal")
+ return EDSP::ResolveExternal(solver.c_str(), Cache, true, false, false, Progress);
+
+ if (Progress != NULL)
+ Progress->OverallProgress(0, 100, 1, _("Calculating upgrade"));
pkgDepCache::ActionGroup group(Cache);
@@ -170,18 +198,30 @@ static bool pkgAllUpgradeWithNewPackages(pkgDepCache &Cache)
}
}
+ if (Progress != NULL)
+ Progress->Progress(10);
+
// then let auto-install loose
for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I)
if (Cache[I].Install())
Cache.MarkInstall(I, true, 0, false);
+ if (Progress != NULL)
+ Progress->Progress(50);
+
// ... but it may remove stuff, so we need to clean up afterwards again
for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I)
if (Cache[I].Delete() == true)
Cache.MarkKeep(I, false, false);
+ if (Progress != NULL)
+ Progress->Progress(60);
+
// resolve remaining issues via keep
- return Fix.ResolveByKeep();
+ bool const success = Fix.ResolveByKeep(Progress);
+ if (Progress != NULL)
+ Progress->Done();
+ return success;
}
/*}}}*/
// AllUpgrade - Upgrade as many packages as possible /*{{{*/
@@ -189,9 +229,9 @@ static bool pkgAllUpgradeWithNewPackages(pkgDepCache &Cache)
/* Right now the system must be consistent before this can be called.
It also will not change packages marked for install, it only tries
to install packages not marked for install */
-bool pkgAllUpgrade(pkgDepCache &Cache)
+bool pkgAllUpgrade(pkgDepCache &Cache, OpProgress * const Progress)
{
- return pkgAllUpgradeNoNewPackages(Cache);
+ return pkgAllUpgradeNoNewPackages(Cache, Progress);
}
/*}}}*/
// MinimizeUpgrade - Minimizes the set of packages to be upgraded /*{{{*/
@@ -239,24 +279,25 @@ bool pkgMinimizeUpgrade(pkgDepCache &Cache)
return true;
}
/*}}}*/
-// APT::Upgrade::Upgrade - Upgrade using a specific strategy /*{{{*/
-bool APT::Upgrade::Upgrade(pkgDepCache &Cache, int mode)
+// APT::Upgrade::Upgrade - Upgrade using a specific strategy /*{{{*/
+bool APT::Upgrade::Upgrade(pkgDepCache &Cache, int mode, OpProgress * const Progress)
{
- if (mode == 0)
- {
- return pkgDistUpgrade(Cache);
- }
+#if __GNUC__ >= 4
+ // the deprecated methods will become our privates, so that is fine
+ #pragma GCC diagnostic push
+ #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+#endif
+ if (mode == ALLOW_EVERYTHING)
+ return pkgDistUpgrade(Cache, Progress);
else if ((mode & ~FORBID_REMOVE_PACKAGES) == 0)
- {
- return pkgAllUpgradeWithNewPackages(Cache);
- }
+ return pkgAllUpgradeWithNewPackages(Cache, Progress);
else if ((mode & ~(FORBID_REMOVE_PACKAGES|FORBID_INSTALL_NEW_PACKAGES)) == 0)
- {
- return pkgAllUpgradeNoNewPackages(Cache);
- }
+ return pkgAllUpgradeNoNewPackages(Cache, Progress);
else
_error->Error("pkgAllUpgrade called with unsupported mode %i", mode);
-
+#if __GNUC__ >= 4
+ #pragma GCC diagnostic pop
+#endif
return false;
}
/*}}}*/
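
With pkgDistUpgrade and pkgAllUpgrade deprecated in upgrade.h below, callers are expected to go through APT::Upgrade::Upgrade with a mode and the new optional progress object. A hedged sketch of that call pattern, assuming OpTextProgress is declared in apt-pkg/progress.h as used elsewhere in this patch:

#include <apt-pkg/configuration.h>
#include <apt-pkg/depcache.h>
#include <apt-pkg/progress.h>
#include <apt-pkg/upgrade.h>

bool SafeUpgrade(pkgDepCache &Cache)
{
   OpTextProgress Progress(*_config);
   // classic "upgrade" semantics: no removals, no new packages
   return APT::Upgrade::Upgrade(Cache,
         APT::Upgrade::FORBID_REMOVE_PACKAGES | APT::Upgrade::FORBID_INSTALL_NEW_PACKAGES,
         &Progress);
}
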
diff --git a/apt-pkg/upgrade.h b/apt-pkg/upgrade.h
index aa883df10..a3f693d86 100644
--- a/apt-pkg/upgrade.h
+++ b/apt-pkg/upgrade.h
@@ -10,23 +10,27 @@
#ifndef PKGLIB_UPGRADE_H
#define PKGLIB_UPGRADE_H
+#include <stddef.h>
+#include <apt-pkg/macros.h>
+
class pkgDepCache;
+class OpProgress;
namespace APT {
namespace Upgrade {
// FIXME: make this "enum class UpgradeMode {" once we enable c++11
enum UpgradeMode {
+ ALLOW_EVERYTHING = 0,
FORBID_REMOVE_PACKAGES = 1,
FORBID_INSTALL_NEW_PACKAGES = 2
};
- bool Upgrade(pkgDepCache &Cache, int UpgradeMode);
+ bool Upgrade(pkgDepCache &Cache, int UpgradeMode, OpProgress * const Progress = NULL);
}
}
// please use APT::Upgrade::Upgrade() instead
-bool pkgDistUpgrade(pkgDepCache &Cache);
-bool pkgAllUpgrade(pkgDepCache &Cache);
-bool pkgMinimizeUpgrade(pkgDepCache &Cache);
-
+APT_DEPRECATED bool pkgDistUpgrade(pkgDepCache &Cache, OpProgress * const Progress = NULL);
+APT_DEPRECATED bool pkgAllUpgrade(pkgDepCache &Cache, OpProgress * const Progress = NULL);
+bool pkgMinimizeUpgrade(pkgDepCache &Cache);
#endif
diff --git a/apt-private/acqprogress.cc b/apt-private/acqprogress.cc
index 0f5b53e50..14a53eacb 100644
--- a/apt-private/acqprogress.cc
+++ b/apt-private/acqprogress.cc
@@ -64,8 +64,6 @@ void AcqTextStatus::IMSHit(pkgAcquire::ItemDesc &Itm)
cout << '\r' << BlankLine << '\r';
cout << _("Hit ") << Itm.Description;
- if (Itm.Owner->FileSize != 0)
- cout << " [" << SizeToStr(Itm.Owner->FileSize) << "B]";
cout << endl;
Update = true;
}
@@ -119,6 +117,9 @@ void AcqTextStatus::Fail(pkgAcquire::ItemDesc &Itm)
if (Itm.Owner->Status == pkgAcquire::Item::StatDone)
{
cout << _("Ign ") << Itm.Description << endl;
+ if (Itm.Owner->ErrorText.empty() == false &&
+ _config->FindB("Acquire::Progress::Ignore::ShowErrorText", false) == true)
+ cout << " " << Itm.Owner->ErrorText << endl;
}
else
{
@@ -142,6 +143,9 @@ void AcqTextStatus::Stop()
if (Quiet <= 0)
cout << '\r' << BlankLine << '\r' << flush;
+ if (_config->FindB("quiet::NoStatistic", false) == true)
+ return;
+
if (FetchedBytes != 0 && _error->PendingError() == false)
ioprintf(cout,_("Fetched %sB in %s (%sB/s)\n"),
SizeToStr(FetchedBytes).c_str(),
@@ -170,7 +174,7 @@ bool AcqTextStatus::Pulse(pkgAcquire *Owner)
ScreenWidth = sizeof(Buffer)-1;
// Put in the percent done
- sprintf(S,"%.0f%%",((CurrentBytes + CurrentItems)*100.0)/(TotalBytes+TotalItems));
+ sprintf(S,"%.0f%%", Percent);
bool Shown = false;
for (pkgAcquire::Worker *I = Owner->WorkersBegin(); I != 0;
@@ -201,9 +205,9 @@ bool AcqTextStatus::Pulse(pkgAcquire *Owner)
S += strlen(S);
// Show the short mode string
- if (I->CurrentItem->Owner->Mode != 0)
+ if (I->CurrentItem->Owner->ActiveSubprocess.empty() == false)
{
- snprintf(S,End-S," %s",I->CurrentItem->Owner->Mode);
+ snprintf(S,End-S, " %s", I->CurrentItem->Owner->ActiveSubprocess.c_str());
S += strlen(S);
}
diff --git a/apt-private/private-cachefile.cc b/apt-private/private-cachefile.cc
index 5e955ac39..29e665245 100644
--- a/apt-private/private-cachefile.cc
+++ b/apt-private/private-cachefile.cc
@@ -32,8 +32,10 @@ int CacheFile::NameComp(const void *a,const void *b)
const pkgCache::Package &A = **(pkgCache::Package **)a;
const pkgCache::Package &B = **(pkgCache::Package **)b;
+ const pkgCache::Group * const GA = SortCache->GrpP + A.Group;
+ const pkgCache::Group * const GB = SortCache->GrpP + B.Group;
- return strcmp(SortCache->StrP + A.Name,SortCache->StrP + B.Name);
+ return strcmp(SortCache->StrP + GA->Name,SortCache->StrP + GB->Name);
}
/*}}}*/
// CacheFile::Sort - Sort by name /*{{{*/
diff --git a/apt-private/private-cachefile.h b/apt-private/private-cachefile.h
index dce7e0a3a..1fddabfbd 100644
--- a/apt-private/private-cachefile.h
+++ b/apt-private/private-cachefile.h
@@ -6,7 +6,20 @@
#include <apt-pkg/configuration.h>
#include <apt-pkg/pkgcache.h>
#include <apt-pkg/macros.h>
+#include <apt-pkg/sourcelist.h>
+#include <apti18n.h>
+// FIXME: we need to find a way to export this
+class APT_PUBLIC SourceList : public pkgSourceList
+{
+
+ public:
+ // Add custom metaIndex (e.g. local files)
+ void AddMetaIndex(metaIndex *mi) {
+ SrcList.push_back(mi);
+ }
+
+};
// class CacheFile - Cover class for some dependency cache functions /*{{{*/
// ---------------------------------------------------------------------
@@ -28,6 +41,16 @@ class APT_PUBLIC CacheFile : public pkgCacheFile
return false;
return true;
}
+ // FIXME: this can go once the "libapt-pkg" pkgSourceList has a way
+ // to add custom metaIndexes (or custom local files or so)
+ bool BuildSourceList(OpProgress */*Progress*/ = NULL) {
+ if (SrcList != NULL)
+ return true;
+ SrcList = new SourceList();
+ if (SrcList->ReadMainList() == false)
+ return _error->Error(_("The list of sources could not be read."));
+ return true;
+ }
bool Open(bool WithLock = true)
{
OpTextProgress Prog(*_config);
diff --git a/apt-private/private-cacheset.cc b/apt-private/private-cacheset.cc
index eb77be274..cb68024db 100644
--- a/apt-private/private-cacheset.cc
+++ b/apt-private/private-cacheset.cc
@@ -60,22 +60,22 @@ bool GetLocalitySortedVersionSet(pkgCacheFile &CacheFile,
if (insertCurrentVer == true)
{
if (P->CurrentVer != 0)
- vci->FromPackage(vci, CacheFile, P, APT::VersionContainerInterface::INSTALLED, helper);
+ vci->FromPackage(vci, CacheFile, P, APT::CacheSetHelper::INSTALLED, helper);
}
else if (insertUpgradable == true)
{
if(P.CurrentVer() && state.Upgradable())
- vci->FromPackage(vci, CacheFile, P, APT::VersionContainerInterface::CANDIDATE, helper);
+ vci->FromPackage(vci, CacheFile, P, APT::CacheSetHelper::CANDIDATE, helper);
}
else if (insertManualInstalled == true)
{
if (P.CurrentVer() &&
((*DepCache)[P].Flags & pkgCache::Flag::Auto) == false)
- vci->FromPackage(vci, CacheFile, P, APT::VersionContainerInterface::CANDIDATE, helper);
+ vci->FromPackage(vci, CacheFile, P, APT::CacheSetHelper::CANDIDATE, helper);
}
else
{
- if (vci->FromPackage(vci, CacheFile, P, APT::VersionContainerInterface::CANDIDATE, helper) == false)
+ if (vci->FromPackage(vci, CacheFile, P, APT::CacheSetHelper::CANDIDATE, helper) == false)
{
// no candidate, this may happen for packages in
// dpkg "deinstall ok config-file" state - we pick the first ver
diff --git a/apt-private/private-cacheset.h b/apt-private/private-cacheset.h
index ca8f4be5d..059c7637e 100644
--- a/apt-private/private-cacheset.h
+++ b/apt-private/private-cacheset.h
@@ -76,19 +76,16 @@ class CacheSetHelperVirtuals: public APT::CacheSetHelper {
public:
APT::PackageSet virtualPkgs;
- virtual pkgCache::VerIterator canNotFindCandidateVer(pkgCacheFile &Cache, pkgCache::PkgIterator const &Pkg) {
- virtualPkgs.insert(Pkg);
- return CacheSetHelper::canNotFindCandidateVer(Cache, Pkg);
+ virtual pkgCache::VerIterator canNotGetVersion(enum CacheSetHelper::VerSelector const select, pkgCacheFile &Cache, pkgCache::PkgIterator const &Pkg) {
+ if (select == NEWEST || select == CANDIDATE || select == ALL)
+ virtualPkgs.insert(Pkg);
+ return CacheSetHelper::canNotGetVersion(select, Cache, Pkg);
}
- virtual pkgCache::VerIterator canNotFindNewestVer(pkgCacheFile &Cache, pkgCache::PkgIterator const &Pkg) {
- virtualPkgs.insert(Pkg);
- return CacheSetHelper::canNotFindNewestVer(Cache, Pkg);
- }
-
- virtual void canNotFindAllVer(APT::VersionContainerInterface * vci, pkgCacheFile &Cache, pkgCache::PkgIterator const &Pkg) {
- virtualPkgs.insert(Pkg);
- CacheSetHelper::canNotFindAllVer(vci, Cache, Pkg);
+ virtual void canNotFindVersion(enum CacheSetHelper::VerSelector const select, APT::VersionContainerInterface * vci, pkgCacheFile &Cache, pkgCache::PkgIterator const &Pkg) {
+ if (select == NEWEST || select == CANDIDATE || select == ALL)
+ virtualPkgs.insert(Pkg);
+ return CacheSetHelper::canNotFindVersion(select, vci, Cache, Pkg);
}
CacheSetHelperVirtuals(bool const ShowErrors = true, GlobalError::MsgType const &ErrorType = GlobalError::NOTICE) : CacheSetHelper(ShowErrors, ErrorType) {}
@@ -190,7 +187,7 @@ public:
}
virtual pkgCache::VerIterator canNotFindCandidateVer(pkgCacheFile &Cache, pkgCache::PkgIterator const &Pkg) {
- APT::VersionSet const verset = tryVirtualPackage(Cache, Pkg, APT::VersionSet::CANDIDATE);
+ APT::VersionSet const verset = tryVirtualPackage(Cache, Pkg, CacheSetHelper::CANDIDATE);
if (verset.empty() == false)
return *(verset.begin());
else if (ShowError == true) {
@@ -203,7 +200,7 @@ public:
virtual pkgCache::VerIterator canNotFindNewestVer(pkgCacheFile &Cache, pkgCache::PkgIterator const &Pkg) {
if (Pkg->ProvidesList != 0)
{
- APT::VersionSet const verset = tryVirtualPackage(Cache, Pkg, APT::VersionSet::NEWEST);
+ APT::VersionSet const verset = tryVirtualPackage(Cache, Pkg, CacheSetHelper::NEWEST);
if (verset.empty() == false)
return *(verset.begin());
if (ShowError == true)
@@ -231,7 +228,7 @@ public:
}
APT::VersionSet tryVirtualPackage(pkgCacheFile &Cache, pkgCache::PkgIterator const &Pkg,
- APT::VersionSet::Version const &select) {
+ CacheSetHelper::VerSelector const select) {
/* This is a pure virtual package and there is a single available
candidate providing it. */
if (unlikely(Cache[Pkg].CandidateVer != 0) || Pkg->ProvidesList == 0)
diff --git a/apt-private/private-cmndline.cc b/apt-private/private-cmndline.cc
index a4490f5b4..079f81ee3 100644
--- a/apt-private/private-cmndline.cc
+++ b/apt-private/private-cmndline.cc
@@ -198,6 +198,7 @@ static bool addArgumentsAPTGet(std::vector<CommandLine::Args> &Args, char const
addArg(0,"only-source","APT::Get::Only-Source",0);
addArg(0,"arch-only","APT::Get::Arch-Only",0);
addArg(0,"allow-unauthenticated","APT::Get::AllowUnauthenticated",0);
+ addArg(0,"allow-insecure-repositories","Acquire::AllowInsecureRepositories",0);
addArg(0,"install-recommends","APT::Install-Recommends",CommandLine::Boolean);
addArg(0,"install-suggests","APT::Install-Suggests",CommandLine::Boolean);
addArg(0,"fix-policy","APT::Get::Fix-Policy-Broken",0);
diff --git a/apt-private/private-download.cc b/apt-private/private-download.cc
index be7d23c31..8cabf14b5 100644
--- a/apt-private/private-download.cc
+++ b/apt-private/private-download.cc
@@ -5,6 +5,7 @@
#include <apt-pkg/acquire-item.h>
#include <apt-pkg/configuration.h>
#include <apt-pkg/error.h>
+#include <apt-pkg/fileutl.h>
#include <apt-pkg/strutl.h>
#include <apt-private/private-output.h>
@@ -14,9 +15,59 @@
#include <string>
#include <vector>
+#include <unistd.h>
+#include <sys/types.h>
+#include <pwd.h>
+#include <fcntl.h>
+#include <sys/vfs.h>
+#include <sys/statvfs.h>
+#include <errno.h>
+
#include <apti18n.h>
/*}}}*/
+bool CheckDropPrivsMustBeDisabled(pkgAcquire &Fetcher) /*{{{*/
+{
+ // no need/possibility to drop privs
+ if(getuid() != 0)
+ return true;
+
+ // the user does not want to drop privs
+ std::string SandboxUser = _config->Find("APT::Sandbox::User");
+ if (SandboxUser.empty())
+ return true;
+
+ struct passwd const * const pw = getpwnam(SandboxUser.c_str());
+ if (pw == NULL)
+ return true;
+
+ if (seteuid(pw->pw_uid) != 0)
+ return _error->Errno("seteuid", "seteuid %u failed", pw->pw_uid);
+
+ bool res = true;
+ // check if we can write to destfile
+ for (pkgAcquire::ItemIterator I = Fetcher.ItemsBegin();
+ I != Fetcher.ItemsEnd() && res == true; ++I)
+ {
+ int fd = open((*I)->DestFile.c_str(), O_CREAT | O_RDWR, 0600);
+ if (fd < 0)
+ {
+ res = false;
+ std::string msg;
+ strprintf(msg, _("Can't drop privileges for downloading as file '%s' couldn't be accessed by user '%s'."),
+ (*I)->DestFile.c_str(), SandboxUser.c_str());
+ c0out << msg << std::endl;
+ _config->Set("APT::Sandbox::User", "");
+ }
+ close(fd);
+ }
+
+ if (seteuid(0) != 0)
+ return _error->Errno("seteuid", "seteuid %u failed", 0);
+
+ return res;
+}
+ /*}}}*/
// CheckAuth - check if each download comes from a trusted source	/*{{{*/
bool CheckAuth(pkgAcquire& Fetcher, bool const PromptUser)
{
@@ -31,7 +82,7 @@ bool CheckAuth(pkgAcquire& Fetcher, bool const PromptUser)
return AuthPrompt(UntrustedList, PromptUser);
}
-bool AuthPrompt(std::string UntrustedList, bool const PromptUser)
+bool AuthPrompt(std::string const &UntrustedList, bool const PromptUser)
{
ShowList(c2out,_("WARNING: The following packages cannot be authenticated!"),UntrustedList,"");
@@ -98,3 +149,39 @@ bool AcquireRun(pkgAcquire &Fetcher, int const PulseInterval, bool * const Failu
return true;
}
/*}}}*/
+bool CheckFreeSpaceBeforeDownload(std::string const &Dir, unsigned long long FetchBytes)/*{{{*/
+{
+ uint32_t const RAMFS_MAGIC = 0x858458f6;
+ /* Check for enough free space, but only if we are actually going to
+ download */
+ if (_config->FindB("APT::Get::Print-URIs", false) == true ||
+ _config->FindB("APT::Get::Download", true) == false)
+ return true;
+
+ struct statvfs Buf;
+ if (statvfs(Dir.c_str(),&Buf) != 0) {
+ if (errno == EOVERFLOW)
+ return _error->WarningE("statvfs",_("Couldn't determine free space in %s"),
+ Dir.c_str());
+ else
+ return _error->Errno("statvfs",_("Couldn't determine free space in %s"),
+ Dir.c_str());
+ }
+ else
+ {
+ unsigned long long const FreeBlocks = _config->Find("APT::Sandbox::User").empty() ? Buf.f_bfree : Buf.f_bavail;
+ if (FreeBlocks < (FetchBytes / Buf.f_bsize))
+ {
+ struct statfs Stat;
+ if (statfs(Dir.c_str(),&Stat) != 0
+#if HAVE_STRUCT_STATFS_F_TYPE
+ || Stat.f_type != RAMFS_MAGIC
+#endif
+ )
+ return _error->Error(_("You don't have enough free space in %s."),
+ Dir.c_str());
+ }
+ }
+ return true;
+}
+ /*}}}*/
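
A minimal standalone sketch of the filesystem check that CheckFreeSpaceBeforeDownload factors out, assuming POSIX statvfs and leaving out the RAMFS escape hatch and APT's error reporting:

#include <string>
#include <sys/statvfs.h>

static bool EnoughFreeSpace(std::string const &Dir, unsigned long long NeededBytes)
{
   struct statvfs Buf;
   if (statvfs(Dir.c_str(), &Buf) != 0)
      return false;   // caller decides how to report the failure
   unsigned long long const FreeBytes =
      static_cast<unsigned long long>(Buf.f_bavail) * Buf.f_bsize;
   return FreeBytes >= NeededBytes;
}
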
diff --git a/apt-private/private-download.h b/apt-private/private-download.h
index a90ac7eaa..0a0ac6b95 100644
--- a/apt-private/private-download.h
+++ b/apt-private/private-download.h
@@ -3,15 +3,21 @@
#include <apt-pkg/macros.h>
+#include <string>
+
class pkgAcquire;
+APT_PUBLIC bool CheckDropPrivsMustBeDisabled(pkgAcquire &Fetcher);
+
// Check if all files in the fetcher are authenticated
APT_PUBLIC bool CheckAuth(pkgAcquire& Fetcher, bool const PromptUser);
// show an authentication warning prompt and return true if the system
// should continue
-APT_PUBLIC bool AuthPrompt(std::string UntrustedList, bool const PromptUser);
+APT_PUBLIC bool AuthPrompt(std::string const &UntrustedList, bool const PromptUser);
APT_PUBLIC bool AcquireRun(pkgAcquire &Fetcher, int const PulseInterval, bool * const Failure, bool * const TransientNetworkFailure);
+APT_PUBLIC bool CheckFreeSpaceBeforeDownload(std::string const &Dir, unsigned long long FetchBytes);
+
#endif
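
A hedged sketch of how these helpers are meant to compose, mirroring the call order InstallPackages uses later in this patch; the function name and the zero pulse interval are illustrative:

#include <string>
#include <apt-pkg/acquire.h>
#include <apt-private/private-download.h>

bool FetchArchives(pkgAcquire &Fetcher, std::string const &Dir, unsigned long long FetchBytes)
{
   if (CheckFreeSpaceBeforeDownload(Dir, FetchBytes) == false)
      return false;
   if (CheckAuth(Fetcher, true) == false)               // may prompt the user
      return false;
   bool Failed = false, Transient = false;
   if (AcquireRun(Fetcher, 0, &Failed, &Transient) == false)
      return false;
   return Failed == false && Transient == false;
}
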
diff --git a/apt-private/private-install.cc b/apt-private/private-install.cc
index e08cd8057..2a4c3eea5 100644
--- a/apt-private/private-install.cc
+++ b/apt-private/private-install.cc
@@ -20,16 +20,15 @@
#include <apt-pkg/packagemanager.h>
#include <apt-pkg/pkgcache.h>
#include <apt-pkg/upgrade.h>
+#include <apt-pkg/install-progress.h>
-#include <errno.h>
#include <stdlib.h>
#include <string.h>
-#include <sys/statfs.h>
-#include <sys/statvfs.h>
#include <algorithm>
#include <iostream>
#include <set>
#include <vector>
+#include <map>
#include <apt-private/acqprogress.h>
#include <apt-private/private-install.h>
@@ -117,14 +116,14 @@ bool InstallPackages(CacheFile &Cache,bool ShwKept,bool Ask, bool Safety)
return false;
// Create the download object
- pkgAcquire Fetcher;
- AcqTextStatus Stat(ScreenWidth,_config->FindI("quiet",0));
+ AcqTextStatus Stat(ScreenWidth,_config->FindI("quiet",0));
+ pkgAcquire Fetcher(&Stat);
if (_config->FindB("APT::Get::Print-URIs", false) == true)
{
// force a hashsum for compatibility reasons
_config->CndSet("Acquire::ForceHash", "md5sum");
}
- else if (Fetcher.Setup(&Stat, _config->FindDir("Dir::Cache::Archives")) == false)
+ else if (Fetcher.GetLock(_config->FindDir("Dir::Cache::Archives")) == false)
return false;
// Read the source list
@@ -175,33 +174,9 @@ bool InstallPackages(CacheFile &Cache,bool ShwKept,bool Ask, bool Safety)
if (_error->PendingError() == true)
return false;
- /* Check for enough free space, but only if we are actually going to
- download */
- if (_config->FindB("APT::Get::Print-URIs") == false &&
- _config->FindB("APT::Get::Download",true) == true)
- {
- struct statvfs Buf;
- std::string OutputDir = _config->FindDir("Dir::Cache::Archives");
- if (statvfs(OutputDir.c_str(),&Buf) != 0) {
- if (errno == EOVERFLOW)
- return _error->WarningE("statvfs",_("Couldn't determine free space in %s"),
- OutputDir.c_str());
- else
- return _error->Errno("statvfs",_("Couldn't determine free space in %s"),
- OutputDir.c_str());
- } else if (unsigned(Buf.f_bfree) < (FetchBytes - FetchPBytes)/Buf.f_bsize)
- {
- struct statfs Stat;
- if (statfs(OutputDir.c_str(),&Stat) != 0
-#if HAVE_STRUCT_STATFS_F_TYPE
- || unsigned(Stat.f_type) != RAMFS_MAGIC
-#endif
- )
- return _error->Error(_("You don't have enough free space in %s."),
- OutputDir.c_str());
- }
- }
-
+ if (CheckFreeSpaceBeforeDownload(_config->FindDir("Dir::Cache::Archives"), (FetchBytes - FetchPBytes)) == false)
+ return false;
+
// Fail safe check
if (_config->FindI("quiet",0) >= 2 ||
_config->FindB("APT::Get::Assume-Yes",false) == true)
@@ -558,9 +533,9 @@ bool DoCacheManipulationFromCommandLine(CommandLine &CmdL, CacheFile &Cache,
std::list<APT::VersionSet::Modifier> mods;
mods.push_back(APT::VersionSet::Modifier(MOD_INSTALL, "+",
- APT::VersionSet::Modifier::POSTFIX, APT::VersionSet::CANDIDATE));
+ APT::VersionSet::Modifier::POSTFIX, APT::CacheSetHelper::CANDIDATE));
mods.push_back(APT::VersionSet::Modifier(MOD_REMOVE, "-",
- APT::VersionSet::Modifier::POSTFIX, APT::VersionSet::NEWEST));
+ APT::VersionSet::Modifier::POSTFIX, APT::CacheSetHelper::NEWEST));
CacheSetHelperAPTGet helper(c0out);
verset = APT::VersionSet::GroupedFromCommandLine(Cache,
CmdL.FileList + 1, mods, fallback, helper);
@@ -617,15 +592,14 @@ bool DoCacheManipulationFromCommandLine(CommandLine &CmdL, CacheFile &Cache,
if (Fix != NULL)
{
// Call the scored problem resolver
+ OpTextProgress Progress(*_config);
+ bool const distUpgradeMode = strcmp(CmdL.FileList[0], "dist-upgrade") == 0 || strcmp(CmdL.FileList[0], "full-upgrade") == 0;
+
bool resolver_fail = false;
- if (UpgradeMode == 0)
- {
- if (strcmp(CmdL.FileList[0], "dist-upgrade") == 0 || strcmp(CmdL.FileList[0], "full-upgrade") == 0)
- resolver_fail = APT::Upgrade::Upgrade(Cache, 0);
- else
- resolver_fail = Fix->Resolve(true);
- } else
- resolver_fail = APT::Upgrade::Upgrade(Cache, UpgradeMode);
+ if (distUpgradeMode == true || UpgradeMode != APT::Upgrade::ALLOW_EVERYTHING)
+ resolver_fail = APT::Upgrade::Upgrade(Cache, UpgradeMode, &Progress);
+ else
+ resolver_fail = Fix->Resolve(true, &Progress);
if (resolver_fail == false && Cache->BrokenCount() == 0)
return false;
@@ -680,10 +654,34 @@ bool DoCacheManipulationFromCommandLine(CommandLine &CmdL, CacheFile &Cache,
bool DoInstall(CommandLine &CmdL)
{
CacheFile Cache;
+ // first check for local pkgs and add them to the cache
+ for (const char **I = CmdL.FileList; *I != 0; I++)
+ {
+ if(FileExists(*I))
+ {
+ // FIXME: make this more elegant
+ std::string TypeStr = flExtension(*I) + "-file";
+ pkgSourceList::Type *Type = pkgSourceList::Type::GetType(TypeStr.c_str());
+ if(Type != 0)
+ {
+ std::vector<metaIndex *> List;
+ std::map<std::string, std::string> Options;
+ if(Type->CreateItem(List, *I, "", "", Options))
+ {
+ // we have our own CacheFile that gives us a SourceList
+ // with superpowerz
+ SourceList *sources = (SourceList*)Cache.GetSourceList();
+ sources->AddMetaIndex(List[0]);
+ }
+ }
+ }
+ }
+
+ // then open the cache
if (Cache.OpenForInstall() == false ||
Cache.CheckDeps(CmdL.FileSize() != 1) == false)
return false;
-
+
std::map<unsigned short, APT::VersionSet> verset;
if(!DoCacheManipulationFromCommandLine(CmdL, Cache, verset, 0))
diff --git a/apt-private/private-install.h b/apt-private/private-install.h
index 8daa4a776..62276fbff 100644
--- a/apt-private/private-install.h
+++ b/apt-private/private-install.h
@@ -16,8 +16,6 @@ class CacheFile;
class CommandLine;
class pkgProblemResolver;
-#define RAMFS_MAGIC 0x858458f6
-
APT_PUBLIC bool DoInstall(CommandLine &Cmd);
bool DoCacheManipulationFromCommandLine(CommandLine &CmdL, CacheFile &Cache,
diff --git a/apt-private/private-list.cc b/apt-private/private-list.cc
index 536348640..aa3a2c24b 100644
--- a/apt-private/private-list.cc
+++ b/apt-private/private-list.cc
@@ -37,28 +37,20 @@ struct PackageSortAlphabetic /*{{{*/
return (l_name < r_name);
}
};
- /*}}}*/
-class PackageNameMatcher : public Matcher /*{{{*/
+
+class PackageNameMatcher : public Matcher
{
-#ifdef PACKAGE_MATCHER_ABI_COMPAT
-#define PackageMatcher PackageNameMatchesFnmatch
-#endif
public:
PackageNameMatcher(const char **patterns)
{
for(int i=0; patterns[i] != NULL; ++i)
{
std::string pattern = patterns[i];
-#ifdef PACKAGE_MATCHER_ABI_COMPAT
- APT::CacheFilter::PackageNameMatchesFnmatch *cachefilter = NULL;
- cachefilter = new APT::CacheFilter::PackageNameMatchesFnmatch(pattern);
-#else
APT::CacheFilter::PackageMatcher *cachefilter = NULL;
if(_config->FindB("APT::Cmd::Use-Regexp", false) == true)
cachefilter = new APT::CacheFilter::PackageNameMatchesRegEx(pattern);
else
cachefilter = new APT::CacheFilter::PackageNameMatchesFnmatch(pattern);
-#endif
filters.push_back(cachefilter);
}
}
diff --git a/apt-private/private-show.cc b/apt-private/private-show.cc
index 8ae6a6dac..289f035a6 100644
--- a/apt-private/private-show.cc
+++ b/apt-private/private-show.cc
@@ -141,16 +141,16 @@ bool ShowPackage(CommandLine &CmdL) /*{{{*/
{
pkgCacheFile CacheFile;
CacheSetHelperVirtuals helper(true, GlobalError::NOTICE);
- APT::VersionList::Version const select = _config->FindB("APT::Cache::AllVersions", false) ?
- APT::VersionList::ALL : APT::VersionList::CANDIDATE;
+ APT::CacheSetHelper::VerSelector const select = _config->FindB("APT::Cache::AllVersions", false) ?
+ APT::CacheSetHelper::ALL : APT::CacheSetHelper::CANDIDATE;
APT::VersionList const verset = APT::VersionList::FromCommandLine(CacheFile, CmdL.FileList + 1, select, helper);
for (APT::VersionList::const_iterator Ver = verset.begin(); Ver != verset.end(); ++Ver)
if (DisplayRecord(CacheFile, Ver, c1out) == false)
return false;
- if (select == APT::VersionList::CANDIDATE)
+ if (select == APT::CacheSetHelper::CANDIDATE)
{
- APT::VersionList const verset_all = APT::VersionList::FromCommandLine(CacheFile, CmdL.FileList + 1, APT::VersionList::ALL, helper);
+ APT::VersionList const verset_all = APT::VersionList::FromCommandLine(CacheFile, CmdL.FileList + 1, APT::CacheSetHelper::ALL, helper);
int const records = verset_all.size() - verset.size();
if (records > 0)
_error->Notice(P_("There is %i additional record. Please use the '-a' switch to see it", "There are %i additional records. Please use the '-a' switch to see them.", records), records);
diff --git a/apt-private/private-update.cc b/apt-private/private-update.cc
index 1cf3012ed..df77ac33a 100644
--- a/apt-private/private-update.cc
+++ b/apt-private/private-update.cc
@@ -47,9 +47,7 @@ bool DoUpdate(CommandLine &CmdL)
_config->CndSet("Acquire::ForceHash", "md5sum");
// get a fetcher
- pkgAcquire Fetcher;
- if (Fetcher.Setup(&Stat) == false)
- return false;
+ pkgAcquire Fetcher(&Stat);
// Populate it with the source selection and get all Indexes
// (GetAll=true)
diff --git a/apt-private/private-upgrade.cc b/apt-private/private-upgrade.cc
index 31f067576..d13a6af49 100644
--- a/apt-private/private-upgrade.cc
+++ b/apt-private/private-upgrade.cc
@@ -22,10 +22,8 @@ static bool UpgradeHelper(CommandLine &CmdL, int UpgradeFlags)
if (Cache.OpenForInstall() == false || Cache.CheckDeps() == false)
return false;
- c0out << _("Calculating upgrade... ") << std::flush;
if(!DoCacheManipulationFromCommandLine(CmdL, Cache, UpgradeFlags))
return false;
- c0out << _("Done") << std::endl;
return InstallPackages(Cache,true);
}
@@ -35,7 +33,7 @@ static bool UpgradeHelper(CommandLine &CmdL, int UpgradeFlags)
/* Intelligent upgrader that will install and remove packages at will */
bool DoDistUpgrade(CommandLine &CmdL)
{
- return UpgradeHelper(CmdL, 0);
+ return UpgradeHelper(CmdL, APT::Upgrade::ALLOW_EVERYTHING);
}
/*}}}*/
bool DoUpgrade(CommandLine &CmdL) /*{{{*/
diff --git a/buildlib/config.h.in b/buildlib/config.h.in
index 6b72fb393..c0fd2e8c6 100644
--- a/buildlib/config.h.in
+++ b/buildlib/config.h.in
@@ -28,6 +28,10 @@
/* If there is no socklen_t, define this for the netdb shim */
#undef NEED_SOCKLEN_T_DEFINE
+/* We need the getresuid() function */
+#undef HAVE_GETRESUID
+#undef HAVE_GETRESGID
+
/* Define to the size of the filesize containing structures */
#undef _FILE_OFFSET_BITS
diff --git a/cmdline/apt-cache.cc b/cmdline/apt-cache.cc
index ac0d48a36..0f4f7e1ce 100644
--- a/cmdline/apt-cache.cc
+++ b/cmdline/apt-cache.cc
@@ -191,7 +191,7 @@ static bool UnMet(CommandLine &CmdL)
{
CacheSetHelperVirtuals helper(true, GlobalError::NOTICE);
APT::VersionList verset = APT::VersionList::FromCommandLine(CacheFile, CmdL.FileList + 1,
- APT::VersionList::CANDIDATE, helper);
+ APT::CacheSetHelper::CANDIDATE, helper);
for (APT::VersionList::iterator V = verset.begin(); V != verset.end(); ++V)
if (ShowUnMet(V, Important) == false)
return false;
@@ -264,6 +264,46 @@ static bool DumpPackage(CommandLine &CmdL)
return true;
}
/*}}}*/
+// ShowHashTableStats - Show stats about a hashtable /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+template<class T>
+static void ShowHashTableStats(std::string Type,
+ T *StartP,
+ map_pointer_t *Hashtable,
+ unsigned long Size)
+{
+ // hashtable stats for the HashTable
+ unsigned long NumBuckets = Size;
+ unsigned long UsedBuckets = 0;
+ unsigned long UnusedBuckets = 0;
+ unsigned long LongestBucket = 0;
+ unsigned long ShortestBucket = NumBuckets;
+ unsigned long Entries = 0;
+ for (unsigned int i=0; i < NumBuckets; ++i)
+ {
+ T *P = StartP + Hashtable[i];
+ if(P == 0 || P == StartP)
+ {
+ ++UnusedBuckets;
+ continue;
+ }
+ ++UsedBuckets;
+ unsigned long ThisBucketSize = 0;
+ for (; P != StartP; P = StartP + P->Next)
+ ++ThisBucketSize;
+ Entries += ThisBucketSize;
+ LongestBucket = std::max(ThisBucketSize, LongestBucket);
+ ShortestBucket = std::min(ThisBucketSize, ShortestBucket);
+ }
+ cout << "Total buckets in " << Type << ": " << NumBuckets << std::endl;
+ cout << " Unused: " << UnusedBuckets << std::endl;
+ cout << " Used: " << UsedBuckets << std::endl;
+ cout << " Average entries: " << Entries/(double)NumBuckets << std::endl;
+ cout << " Longest: " << LongestBucket << std::endl;
+ cout << " Shortest: " << ShortestBucket << std::endl;
+}
+ /*}}}*/
// Stats - Dump some nice statistics /*{{{*/
// ---------------------------------------------------------------------
/* */
@@ -335,45 +375,84 @@ static bool Stats(CommandLine &)
SizeToStr(Cache->Head().DescFileCount*Cache->Head().DescFileSz) << ')' << endl;
cout << _("Total Provides mappings: ") << Cache->Head().ProvidesCount << " (" <<
SizeToStr(Cache->Head().ProvidesCount*Cache->Head().ProvidesSz) << ')' << endl;
-
- // String list stats
- unsigned long Size = 0;
- unsigned long Count = 0;
- for (pkgCache::StringItem *I = Cache->StringItemP + Cache->Head().StringList;
- I!= Cache->StringItemP; I = Cache->StringItemP + I->NextItem)
- {
- Count++;
- Size += strlen(Cache->StrP + I->String) + 1;
- }
- cout << _("Total globbed strings: ") << Count << " (" << SizeToStr(Size) << ')' << endl;
- unsigned long DepVerSize = 0;
+ // String list stats
+ std::set<map_stringitem_t> stritems;
+ for (pkgCache::GrpIterator G = Cache->GrpBegin(); G.end() == false; ++G)
+ stritems.insert(G->Name);
for (pkgCache::PkgIterator P = Cache->PkgBegin(); P.end() == false; ++P)
{
+ stritems.insert(P->Arch);
for (pkgCache::VerIterator V = P.VersionList(); V.end() == false; ++V)
{
+ if (V->VerStr != 0)
+ stritems.insert(V->VerStr);
+ if (V->Section != 0)
+ stritems.insert(V->Section);
+ stritems.insert(V->SourcePkgName);
+ stritems.insert(V->SourceVerStr);
for (pkgCache::DepIterator D = V.DependsList(); D.end() == false; ++D)
{
if (D->Version != 0)
- DepVerSize += strlen(D.TargetVer()) + 1;
+ stritems.insert(D->Version);
}
+ for (pkgCache::DescIterator D = V.DescriptionList(); D.end() == false; ++D)
+ {
+ stritems.insert(D->md5sum);
+ stritems.insert(D->language_code);
+ }
+ }
+ for (pkgCache::PrvIterator Prv = P.ProvidesList(); Prv.end() == false; ++Prv)
+ {
+ if (Prv->ProvideVersion != 0)
+ stritems.insert(Prv->ProvideVersion);
}
}
- cout << _("Total dependency version space: ") << SizeToStr(DepVerSize) << endl;
-
+ for (pkgCache::PkgFileIterator F = Cache->FileBegin(); F != Cache->FileEnd(); ++F)
+ {
+ stritems.insert(F->FileName);
+ stritems.insert(F->Archive);
+ stritems.insert(F->Codename);
+ stritems.insert(F->Component);
+ stritems.insert(F->Version);
+ stritems.insert(F->Origin);
+ stritems.insert(F->Label);
+ stritems.insert(F->Architecture);
+ stritems.insert(F->Site);
+ stritems.insert(F->IndexType);
+ }
+ unsigned long Size = 0;
+ for (std::set<map_stringitem_t>::const_iterator i = stritems.begin(); i != stritems.end(); ++i)
+ Size += strlen(Cache->StrP + *i) + 1;
+
+ cout << _("Total globbed strings: ") << stritems.size() << " (" << SizeToStr(Size) << ')' << endl;
+ stritems.clear();
+
unsigned long Slack = 0;
for (int I = 0; I != 7; I++)
Slack += Cache->Head().Pools[I].ItemSize*Cache->Head().Pools[I].Count;
cout << _("Total slack space: ") << SizeToStr(Slack) << endl;
-
+
unsigned long Total = 0;
- Total = Slack + Size + Cache->Head().DependsCount*Cache->Head().DependencySz +
- Cache->Head().VersionCount*Cache->Head().VersionSz +
- Cache->Head().PackageCount*Cache->Head().PackageSz +
- Cache->Head().VerFileCount*Cache->Head().VerFileSz +
- Cache->Head().ProvidesCount*Cache->Head().ProvidesSz;
+#define APT_CACHESIZE(X,Y) (Cache->Head().X * Cache->Head().Y)
+ Total = Slack + Size +
+ APT_CACHESIZE(GroupCount, GroupSz) +
+ APT_CACHESIZE(PackageCount, PackageSz) +
+ APT_CACHESIZE(VersionCount, VersionSz) +
+ APT_CACHESIZE(DescriptionCount, DescriptionSz) +
+ APT_CACHESIZE(DependsCount, DependencySz) +
+ APT_CACHESIZE(PackageFileCount, PackageFileSz) +
+ APT_CACHESIZE(VerFileCount, VerFileSz) +
+ APT_CACHESIZE(DescFileCount, DescFileSz) +
+ APT_CACHESIZE(ProvidesCount, ProvidesSz) +
+ (2 * Cache->Head().HashTableSize * sizeof(map_id_t));
cout << _("Total space accounted for: ") << SizeToStr(Total) << endl;
-
+#undef APT_CACHESIZE
+
+ // hashtable stats
+ ShowHashTableStats<pkgCache::Package>("PkgHashTable", Cache->PkgP, Cache->Head().PkgHashTable(), Cache->Head().HashTableSize);
+ ShowHashTableStats<pkgCache::Group>("GrpHashTable", Cache->GrpP, Cache->Head().GrpHashTable(), Cache->Head().HashTableSize);
+
return true;
}
/*}}}*/
@@ -579,7 +658,7 @@ static bool ShowDepends(CommandLine &CmdL, bool const RevDepends)
return false;
CacheSetHelperVirtuals helper(false);
- APT::VersionList verset = APT::VersionList::FromCommandLine(CacheFile, CmdL.FileList + 1, APT::VersionList::CANDIDATE, helper);
+ APT::VersionList verset = APT::VersionList::FromCommandLine(CacheFile, CmdL.FileList + 1, APT::CacheSetHelper::CANDIDATE, helper);
if (verset.empty() == true && helper.virtualPkgs.empty() == true)
return _error->Error(_("No packages found"));
std::vector<bool> Shown(Cache->Head().PackageCount);
@@ -647,7 +726,7 @@ static bool ShowDepends(CommandLine &CmdL, bool const RevDepends)
if (Recurse == true && Shown[Trg->ID] == false)
{
Shown[Trg->ID] = true;
- verset.insert(APT::VersionSet::FromPackage(CacheFile, Trg, APT::VersionSet::CANDIDATE, helper));
+ verset.insert(APT::VersionSet::FromPackage(CacheFile, Trg, APT::CacheSetHelper::CANDIDATE, helper));
}
}
@@ -666,7 +745,7 @@ static bool ShowDepends(CommandLine &CmdL, bool const RevDepends)
if (Recurse == true && Shown[V.ParentPkg()->ID] == false)
{
Shown[V.ParentPkg()->ID] = true;
- verset.insert(APT::VersionSet::FromPackage(CacheFile, V.ParentPkg(), APT::VersionSet::CANDIDATE, helper));
+ verset.insert(APT::VersionSet::FromPackage(CacheFile, V.ParentPkg(), APT::CacheSetHelper::CANDIDATE, helper));
}
}
@@ -761,9 +840,9 @@ static bool XVcg(CommandLine &CmdL)
// Load the list of packages from the command line into the show list
APT::CacheSetHelper helper(true, GlobalError::NOTICE);
- std::list<APT::PackageSet::Modifier> mods;
- mods.push_back(APT::PackageSet::Modifier(0, ",", APT::PackageSet::Modifier::POSTFIX));
- mods.push_back(APT::PackageSet::Modifier(1, "^", APT::PackageSet::Modifier::POSTFIX));
+ std::list<APT::CacheSetHelper::PkgModifier> mods;
+ mods.push_back(APT::CacheSetHelper::PkgModifier(0, ",", APT::PackageSet::Modifier::POSTFIX));
+ mods.push_back(APT::CacheSetHelper::PkgModifier(1, "^", APT::PackageSet::Modifier::POSTFIX));
std::map<unsigned short, APT::PackageSet> pkgsets =
APT::PackageSet::GroupedFromCommandLine(CacheFile, CmdL.FileList + 1, mods, 0, helper);
@@ -973,9 +1052,9 @@ static bool Dotty(CommandLine &CmdL)
// Load the list of packages from the command line into the show list
APT::CacheSetHelper helper(true, GlobalError::NOTICE);
- std::list<APT::PackageSet::Modifier> mods;
- mods.push_back(APT::PackageSet::Modifier(0, ",", APT::PackageSet::Modifier::POSTFIX));
- mods.push_back(APT::PackageSet::Modifier(1, "^", APT::PackageSet::Modifier::POSTFIX));
+ std::list<APT::CacheSetHelper::PkgModifier> mods;
+ mods.push_back(APT::CacheSetHelper::PkgModifier(0, ",", APT::PackageSet::Modifier::POSTFIX));
+ mods.push_back(APT::CacheSetHelper::PkgModifier(1, "^", APT::PackageSet::Modifier::POSTFIX));
std::map<unsigned short, APT::PackageSet> pkgsets =
APT::PackageSet::GroupedFromCommandLine(CacheFile, CmdL.FileList + 1, mods, 0, helper);
@@ -1231,7 +1310,7 @@ static bool DisplayRecord(pkgCacheFile &CacheFile, pkgCache::VerIterator V)
struct ExDescFile
{
pkgCache::DescFile *Df;
- map_ptrloc ID;
+ map_id_t ID;
};
// Search - Perform a search /*{{{*/
@@ -1429,8 +1508,8 @@ static bool ShowPackage(CommandLine &CmdL)
{
pkgCacheFile CacheFile;
CacheSetHelperVirtuals helper(true, GlobalError::NOTICE);
- APT::VersionList::Version const select = _config->FindB("APT::Cache::AllVersions", true) ?
- APT::VersionList::ALL : APT::VersionList::CANDIDATE;
+ APT::CacheSetHelper::VerSelector const select = _config->FindB("APT::Cache::AllVersions", true) ?
+ APT::CacheSetHelper::ALL : APT::CacheSetHelper::CANDIDATE;
APT::VersionList const verset = APT::VersionList::FromCommandLine(CacheFile, CmdL.FileList + 1, select, helper);
for (APT::VersionList::const_iterator Ver = verset.begin(); Ver != verset.end(); ++Ver)
if (DisplayRecord(CacheFile, Ver) == false)
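
ShowHashTableStats above walks buckets whose links are stored as offsets from the array base, with the base itself acting as the end marker. A self-contained toy of just that traversal, with the pkgCache types replaced by a trivial Node:

#include <iostream>

struct Node { unsigned int Next; };

static unsigned long BucketLength(Node const *StartP, unsigned int Head)
{
   unsigned long Length = 0;
   for (Node const *P = StartP + Head; P != StartP; P = StartP + P->Next)
      ++Length;
   return Length;
}

int main()
{
   Node Pool[4] = { {0}, {0}, {0}, {1} };             // index 0 is the sentinel; chain: 3 -> 1 -> end
   std::cout << BucketLength(Pool, 3) << std::endl;   // prints 2
   return 0;
}
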
diff --git a/cmdline/apt-dump-solver.cc b/cmdline/apt-dump-solver.cc
index 04e13bde9..f765234c5 100644
--- a/cmdline/apt-dump-solver.cc
+++ b/cmdline/apt-dump-solver.cc
@@ -40,6 +40,8 @@ int main(int argc,const char *argv[]) /*{{{*/
ShowHelp();
return 0;
}
+ // we really don't need anything
+ DropPrivileges();
FILE* input = fdopen(STDIN_FILENO, "r");
FILE* output = fopen("/tmp/dump.edsp", "w");
diff --git a/cmdline/apt-extracttemplates.cc b/cmdline/apt-extracttemplates.cc
index e4428e051..f95b9e5ba 100644
--- a/cmdline/apt-extracttemplates.cc
+++ b/cmdline/apt-extracttemplates.cc
@@ -52,7 +52,7 @@ pkgCache *DebFile::Cache = 0;
// ---------------------------------------------------------------------
/* */
DebFile::DebFile(const char *debfile)
- : File(debfile, FileFd::ReadOnly), Size(0), Control(NULL), ControlLen(0),
+ : File(debfile, FileFd::ReadOnly), Control(NULL), ControlLen(0),
DepOp(0), PreDepOp(0), Config(0), Template(0), Which(None)
{
}
@@ -103,10 +103,12 @@ bool DebFile::DoItem(Item &I, int &Fd)
if (strcmp(I.Name, "control") == 0)
{
delete [] Control;
- Control = new char[I.Size+1];
- Control[I.Size] = 0;
+ Control = new char[I.Size+3];
+ Control[I.Size] = '\n';
+ Control[I.Size + 1] = '\n';
+ Control[I.Size + 2] = '\0';
Which = IsControl;
- ControlLen = I.Size;
+ ControlLen = I.Size + 3;
// make it call the Process method below. this is so evil
Fd = -2;
}
@@ -138,7 +140,7 @@ bool DebFile::DoItem(Item &I, int &Fd)
// ---------------------------------------------------------------------
/* */
bool DebFile::Process(Item &/*I*/, const unsigned char *data,
- unsigned long size, unsigned long pos)
+ unsigned long long size, unsigned long long pos)
{
switch (Which)
{
@@ -162,9 +164,10 @@ bool DebFile::Process(Item &/*I*/, const unsigned char *data,
bool DebFile::ParseInfo()
{
if (Control == NULL) return false;
-
+
pkgTagSection Section;
- Section.Scan(Control, ControlLen);
+ if (Section.Scan(Control, ControlLen) == false)
+ return false;
Package = Section.FindS("Package");
Version = GetInstalledVer(Package);
diff --git a/cmdline/apt-extracttemplates.h b/cmdline/apt-extracttemplates.h
index 9cc3f5f25..829cdae75 100644
--- a/cmdline/apt-extracttemplates.h
+++ b/cmdline/apt-extracttemplates.h
@@ -20,7 +20,6 @@ class pkgCache;
class DebFile : public pkgDirStream
{
FileFd File;
- unsigned long Size;
char *Control;
unsigned long ControlLen;
@@ -29,7 +28,7 @@ public:
~DebFile();
bool DoItem(Item &I, int &fd);
bool Process(pkgDirStream::Item &I, const unsigned char *data,
- unsigned long size, unsigned long pos);
+ unsigned long long size, unsigned long long pos);
bool Go();
bool ParseInfo();
diff --git a/cmdline/apt-get.cc b/cmdline/apt-get.cc
index cfa79339b..e176a3350 100644
--- a/cmdline/apt-get.cc
+++ b/cmdline/apt-get.cc
@@ -78,8 +78,6 @@
#include <string.h>
#include <sys/ioctl.h>
#include <sys/stat.h>
-#include <sys/statfs.h>
-#include <sys/statvfs.h>
#include <sys/wait.h>
#include <unistd.h>
#include <algorithm>
@@ -195,7 +193,7 @@ static std::string GetReleaseForSourceRecord(pkgSourceList *SrcList,
// FindSrc - Find a source record /*{{{*/
// ---------------------------------------------------------------------
/* */
-static pkgSrcRecords::Parser *FindSrc(const char *Name,pkgRecords &Recs,
+static pkgSrcRecords::Parser *FindSrc(const char *Name,
pkgSrcRecords &SrcRecs,string &Src,
CacheFile &CacheFile)
{
@@ -303,16 +301,10 @@ static pkgSrcRecords::Parser *FindSrc(const char *Name,pkgRecords &Recs,
(VF.File().Archive() != 0 && VF.File().Archive() == RelTag) ||
(VF.File().Codename() != 0 && VF.File().Codename() == RelTag))
{
- pkgRecords::Parser &Parse = Recs.Lookup(VF);
- Src = Parse.SourcePkg();
- // no SourcePkg name, so it is the "binary" name
- if (Src.empty() == true)
- Src = TmpSrc;
+ Src = Ver.SourcePkgName();
// the Version we have is possibly fuzzy or includes binUploads,
- // so we use the Version of the SourcePkg (empty if same as package)
- VerTag = Parse.SourceVer();
- if (VerTag.empty() == true)
- VerTag = Ver.VerStr();
+ // so we use the Version of the SourcePkg
+ VerTag = Ver.SourceVerStr();
break;
}
}
@@ -343,10 +335,10 @@ static pkgSrcRecords::Parser *FindSrc(const char *Name,pkgRecords &Recs,
pkgCache::VerIterator Ver = Cache->GetCandidateVer(Pkg);
if (Ver.end() == false)
{
- pkgRecords::Parser &Parse = Recs.Lookup(Ver.FileList());
- Src = Parse.SourcePkg();
- if (VerTag.empty() == true)
- VerTag = Parse.SourceVer();
+ if (strcmp(Ver.SourcePkgName(),Ver.ParentPkg().Name()) != 0)
+ Src = Ver.SourcePkgName();
+ if (VerTag.empty() == true && strcmp(Ver.SourceVerStr(),Ver.VerStr()) != 0)
+ VerTag = Ver.SourceVerStr();
}
}
}
@@ -540,7 +532,7 @@ static bool DoDSelectUpgrade(CommandLine &)
}
// Now upgrade everything
- if (pkgAllUpgrade(Cache) == false)
+ if (APT::Upgrade::Upgrade(Cache, APT::Upgrade::FORBID_REMOVE_PACKAGES | APT::Upgrade::FORBID_INSTALL_NEW_PACKAGES) == false)
{
ShowBroken(c1out,Cache,false);
return _error->Error(_("Internal error, problem resolver broke stuff"));
@@ -555,30 +547,44 @@ static bool DoDSelectUpgrade(CommandLine &)
static bool DoClean(CommandLine &)
{
std::string const archivedir = _config->FindDir("Dir::Cache::archives");
- std::string const pkgcache = _config->FindFile("Dir::cache::pkgcache");
- std::string const srcpkgcache = _config->FindFile("Dir::cache::srcpkgcache");
+ std::string const listsdir = _config->FindDir("Dir::state::lists");
if (_config->FindB("APT::Get::Simulate") == true)
{
+ std::string const pkgcache = _config->FindFile("Dir::cache::pkgcache");
+ std::string const srcpkgcache = _config->FindFile("Dir::cache::srcpkgcache");
cout << "Del " << archivedir << "* " << archivedir << "partial/*"<< endl
+ << "Del " << listsdir << "partial/*" << endl
<< "Del " << pkgcache << " " << srcpkgcache << endl;
return true;
}
-
+
+ bool const NoLocking = _config->FindB("Debug::NoLocking",false);
// Lock the archive directory
FileFd Lock;
- if (_config->FindB("Debug::NoLocking",false) == false)
+ if (NoLocking == false)
{
int lock_fd = GetLock(archivedir + "lock");
if (lock_fd < 0)
- return _error->Error(_("Unable to lock the download directory"));
+ return _error->Error(_("Unable to lock directory %s"), archivedir.c_str());
Lock.Fd(lock_fd);
}
-
+
pkgAcquire Fetcher;
Fetcher.Clean(archivedir);
Fetcher.Clean(archivedir + "partial/");
+ if (NoLocking == false)
+ {
+ Lock.Close();
+ int lock_fd = GetLock(listsdir + "lock");
+ if (lock_fd < 0)
+ return _error->Error(_("Unable to lock directory %s"), listsdir.c_str());
+ Lock.Fd(lock_fd);
+ }
+
+ Fetcher.Clean(listsdir + "partial/");
+
pkgCacheFile::RemoveCaches();
return true;
@@ -632,15 +638,13 @@ static bool DoDownload(CommandLine &CmdL)
APT::CacheSetHelper helper(c0out);
APT::VersionSet verset = APT::VersionSet::FromCommandLine(Cache,
- CmdL.FileList + 1, APT::VersionSet::CANDIDATE, helper);
+ CmdL.FileList + 1, APT::CacheSetHelper::CANDIDATE, helper);
if (verset.empty() == true)
return false;
AcqTextStatus Stat(ScreenWidth, _config->FindI("quiet", 0));
- pkgAcquire Fetcher;
- if (Fetcher.Setup(&Stat) == false)
- return false;
+ pkgAcquire Fetcher(&Stat);
pkgRecords Recs(Cache);
pkgSourceList *SrcList = Cache.GetSourceList();
@@ -670,6 +674,9 @@ static bool DoDownload(CommandLine &CmdL)
return true;
}
+ // Disable drop-privs if "_apt" can not write to the target dir
+ CheckDropPrivsMustBeDisabled(Fetcher);
+
if (_error->PendingError() == true || CheckAuth(Fetcher, false) == false)
return false;
@@ -731,15 +738,13 @@ static bool DoSource(CommandLine &CmdL)
pkgSourceList *List = Cache.GetSourceList();
// Create the text record parsers
- pkgRecords Recs(Cache);
pkgSrcRecords SrcRecs(*List);
if (_error->PendingError() == true)
return false;
// Create the download object
- AcqTextStatus Stat(ScreenWidth,_config->FindI("quiet",0));
- pkgAcquire Fetcher;
- Fetcher.SetLog(&Stat);
+ AcqTextStatus Stat(ScreenWidth,_config->FindI("quiet",0));
+ pkgAcquire Fetcher(&Stat);
SPtrArray<DscFile> Dsc = new DscFile[CmdL.FileSize()];
@@ -760,7 +765,7 @@ static bool DoSource(CommandLine &CmdL)
for (const char **I = CmdL.FileList + 1; *I != 0; I++, J++)
{
string Src;
- pkgSrcRecords::Parser *Last = FindSrc(*I,Recs,SrcRecs,Src,Cache);
+ pkgSrcRecords::Parser *Last = FindSrc(*I,SrcRecs,Src,Cache);
if (Last == 0) {
return _error->Error(_("Unable to find a source package for %s"),Src.c_str());
@@ -832,58 +837,36 @@ static bool DoSource(CommandLine &CmdL)
queued.insert(Last->Index().ArchiveURI(I->Path));
// check if we have a file with that md5 sum already localy
- if(!I->MD5Hash.empty() && FileExists(flNotDir(I->Path)))
- {
- FileFd Fd(flNotDir(I->Path), FileFd::ReadOnly);
- MD5Summation sum;
- sum.AddFD(Fd.Fd(), Fd.Size());
- Fd.Close();
- if((string)sum.Result() == I->MD5Hash)
+ std::string localFile = flNotDir(I->Path);
+ if (FileExists(localFile) == true)
+ if(I->Hashes.VerifyFile(localFile) == true)
{
ioprintf(c1out,_("Skipping already downloaded file '%s'\n"),
- flNotDir(I->Path).c_str());
+ localFile.c_str());
continue;
}
+
+ // see if we have a hash (Acquire::ForceHash is the only way to have none)
+ if (I->Hashes.usable() == false && _config->FindB("APT::Get::AllowUnauthenticated",false) == false)
+ {
+ ioprintf(c1out, "Skipping download of file '%s' as requested hashsum is not available for authentication\n",
+ localFile.c_str());
+ continue;
}
new pkgAcqFile(&Fetcher,Last->Index().ArchiveURI(I->Path),
- I->MD5Hash,I->Size,
- Last->Index().SourceInfo(*Last,*I),Src);
+ I->Hashes, I->Size, Last->Index().SourceInfo(*Last,*I), Src);
}
}
- // check authentication status of the source as well
- if (UntrustedList != "" && !AuthPrompt(UntrustedList, false))
- return false;
-
// Display statistics
unsigned long long FetchBytes = Fetcher.FetchNeeded();
unsigned long long FetchPBytes = Fetcher.PartialPresent();
unsigned long long DebBytes = Fetcher.TotalNeeded();
- // Check for enough free space
- struct statvfs Buf;
- string OutputDir = ".";
- if (statvfs(OutputDir.c_str(),&Buf) != 0) {
- if (errno == EOVERFLOW)
- return _error->WarningE("statvfs",_("Couldn't determine free space in %s"),
- OutputDir.c_str());
- else
- return _error->Errno("statvfs",_("Couldn't determine free space in %s"),
- OutputDir.c_str());
- } else if (unsigned(Buf.f_bfree) < (FetchBytes - FetchPBytes)/Buf.f_bsize)
- {
- struct statfs Stat;
- if (statfs(OutputDir.c_str(),&Stat) != 0
-#if HAVE_STRUCT_STATFS_F_TYPE
- || unsigned(Stat.f_type) != RAMFS_MAGIC
-#endif
- ) {
- return _error->Error(_("You don't have enough free space in %s"),
- OutputDir.c_str());
- }
- }
-
+ if (CheckFreeSpaceBeforeDownload(".", (FetchBytes - FetchPBytes)) == false)
+ return false;
+
// Number of bytes
if (DebBytes != FetchBytes)
//TRANSLATOR: The required space between number and unit is already included
@@ -902,7 +885,7 @@ static bool DoSource(CommandLine &CmdL)
ioprintf(cout,_("Fetch source %s\n"),Dsc[I].Package.c_str());
return true;
}
-
+
// Just print out the uris an exit if the --print-uris flag was used
if (_config->FindB("APT::Get::Print-URIs") == true)
{
@@ -913,6 +896,13 @@ static bool DoSource(CommandLine &CmdL)
return true;
}
+ // Disable drop-privs if "_apt" can not write to the target dir
+ CheckDropPrivsMustBeDisabled(Fetcher);
+
+ // check authentication status of the source as well
+ if (UntrustedList != "" && !AuthPrompt(UntrustedList, false))
+ return false;
+
// Run it
bool Failed = false;
if (AcquireRun(Fetcher, 0, &Failed, NULL) == false || Failed == true)
@@ -1035,17 +1025,10 @@ static bool DoBuildDep(CommandLine &CmdL)
pkgSourceList *List = Cache.GetSourceList();
// Create the text record parsers
- pkgRecords Recs(Cache);
pkgSrcRecords SrcRecs(*List);
if (_error->PendingError() == true)
return false;
- // Create the download object
- AcqTextStatus Stat(ScreenWidth,_config->FindI("quiet",0));
- pkgAcquire Fetcher;
- if (Fetcher.Setup(&Stat) == false)
- return false;
-
bool StripMultiArch;
string hostArch = _config->Find("APT::Get::Host-Architecture");
if (hostArch.empty() == false)
@@ -1062,7 +1045,35 @@ static bool DoBuildDep(CommandLine &CmdL)
for (const char **I = CmdL.FileList + 1; *I != 0; I++, J++)
{
string Src;
- pkgSrcRecords::Parser *Last = FindSrc(*I,Recs,SrcRecs,Src,Cache);
+ pkgSrcRecords::Parser *Last = 0;
+
+ // an unpacked debian source tree
+ using APT::String::Startswith;
+ if ((Startswith(*I, "./") || Startswith(*I, "/")) &&
+ DirectoryExists(*I))
+ {
+ ioprintf(c1out, _("Note, using directory '%s' to get the build dependencies\n"), *I);
+ // FIXME: how can we make this more elegant?
+ std::string TypeName = "debian/control File Source Index";
+ pkgIndexFile::Type *Type = pkgIndexFile::Type::GetType(TypeName.c_str());
+ if(Type != NULL)
+ Last = Type->CreateSrcPkgParser(*I);
+ }
+ // if it's a local file (e.g. .dsc) use this
+ else if (FileExists(*I))
+ {
+ ioprintf(c1out, _("Note, using file '%s' to get the build dependencies\n"), *I);
+
+ // see if we can get a parser for this pkgIndexFile type
+ string TypeName = flExtension(*I) + " File Source Index";
+ pkgIndexFile::Type *Type = pkgIndexFile::Type::GetType(TypeName.c_str());
+ if(Type != NULL)
+ Last = Type->CreateSrcPkgParser(*I);
+ } else {
+ // normal case, search the cache for the source file
+ Last = FindSrc(*I,SrcRecs,Src,Cache);
+ }
+
if (Last == 0)
return _error->Error(_("Unable to find a source package for %s"),Src.c_str());
@@ -1080,7 +1091,7 @@ static bool DoBuildDep(CommandLine &CmdL)
}
else if (Last->BuildDepends(BuildDeps, _config->FindB("APT::Get::Arch-Only", false), StripMultiArch) == false)
return _error->Error(_("Unable to get build-dependency information for %s"),Src.c_str());
-
+
// Also ensure that build-essential packages are present
Configuration::Item const *Opts = _config->Tree("APT::Build-Essential");
if (Opts)
@@ -1411,21 +1422,15 @@ static bool DoBuildDep(CommandLine &CmdL)
* pool/ next to the deb itself)
* Example return: "pool/main/a/apt/apt_0.8.8ubuntu3"
*/
-static string GetChangelogPath(CacheFile &Cache,
- pkgCache::PkgIterator Pkg,
+static string GetChangelogPath(CacheFile &Cache,
pkgCache::VerIterator Ver)
{
- string path;
-
pkgRecords Recs(Cache);
pkgRecords::Parser &rec=Recs.Lookup(Ver.FileList());
- string srcpkg = rec.SourcePkg().empty() ? Pkg.Name() : rec.SourcePkg();
- string ver = Ver.VerStr();
- // if there is a source version it always wins
- if (rec.SourceVer() != "")
- ver = rec.SourceVer();
- path = flNotFile(rec.FileName());
- path += srcpkg + "_" + StripEpoch(ver);
+ string path = flNotFile(rec.FileName());
+ path.append(Ver.SourcePkgName());
+ path.append("_");
+ path.append(StripEpoch(Ver.SourceVerStr()));
return path;
}
/*}}}*/
@@ -1439,7 +1444,6 @@ static string GetChangelogPath(CacheFile &Cache,
* http://packages.medibuntu.org/pool/non-free/m/mplayer/mplayer_1.0~rc4~try1.dsfg1-1ubuntu1+medibuntu1.changelog
*/
static bool GuessThirdPartyChangelogUri(CacheFile &Cache,
- pkgCache::PkgIterator Pkg,
pkgCache::VerIterator Ver,
string &out_uri)
{
@@ -1454,7 +1458,7 @@ static bool GuessThirdPartyChangelogUri(CacheFile &Cache,
return false;
// get archive uri for the binary deb
- string path_without_dot_changelog = GetChangelogPath(Cache, Pkg, Ver);
+ string path_without_dot_changelog = GetChangelogPath(Cache, Ver);
out_uri = index->ArchiveURI(path_without_dot_changelog + ".changelog");
// now strip away the filename and add srcpkg_srcver.changelog
@@ -1472,36 +1476,34 @@ static bool DownloadChangelog(CacheFile &CacheFile, pkgAcquire &Fetcher,
* GuessThirdPartyChangelogUri for details how)
*/
{
- string path;
- string descr;
- string server;
- string changelog_uri;
-
- // data structures we need
- pkgCache::PkgIterator Pkg = Ver.ParentPkg();
-
// make the server root configurable
- server = _config->Find("Apt::Changelogs::Server",
+ string const server = _config->Find("Apt::Changelogs::Server",
"http://packages.debian.org/changelogs");
- path = GetChangelogPath(CacheFile, Pkg, Ver);
+ string const path = GetChangelogPath(CacheFile, Ver);
+ string changelog_uri;
strprintf(changelog_uri, "%s/%s/changelog", server.c_str(), path.c_str());
if (_config->FindB("APT::Get::Print-URIs", false) == true)
{
std::cout << '\'' << changelog_uri << '\'' << std::endl;
return true;
}
+ pkgCache::PkgIterator const Pkg = Ver.ParentPkg();
+ string descr;
strprintf(descr, _("Changelog for %s (%s)"), Pkg.Name(), changelog_uri.c_str());
// queue it
new pkgAcqFile(&Fetcher, changelog_uri, "", 0, descr, Pkg.Name(), "ignored", targetfile);
+ // Disable drop-privs if "_apt" can not write to the target dir
+ CheckDropPrivsMustBeDisabled(Fetcher);
+
// try downloading it, if that fails, try third-party-changelogs location
// FIXME: Fetcher.Run() is "Continue" even if I get a 404?!?
Fetcher.Run();
if (!FileExists(targetfile))
{
string third_party_uri;
- if (GuessThirdPartyChangelogUri(CacheFile, Pkg, Ver, third_party_uri))
+ if (GuessThirdPartyChangelogUri(CacheFile, Ver, third_party_uri))
{
strprintf(descr, _("Changelog for %s (%s)"), Pkg.Name(), third_party_uri.c_str());
new pkgAcqFile(&Fetcher, third_party_uri, "", 0, descr, Pkg.Name(), "ignored", targetfile);
@@ -1526,7 +1528,7 @@ static bool DoChangelog(CommandLine &CmdL)
APT::CacheSetHelper helper(c0out);
APT::VersionList verset = APT::VersionList::FromCommandLine(Cache,
- CmdL.FileList + 1, APT::VersionList::CANDIDATE, helper);
+ CmdL.FileList + 1, APT::CacheSetHelper::CANDIDATE, helper);
if (verset.empty() == true)
return false;
pkgAcquire Fetcher;
@@ -1541,7 +1543,7 @@ static bool DoChangelog(CommandLine &CmdL)
}
AcqTextStatus Stat(ScreenWidth, _config->FindI("quiet",0));
- Fetcher.Setup(&Stat);
+ Fetcher.SetLog(&Stat);
bool const downOnly = _config->FindB("APT::Get::Download-Only", false);
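Editor's note: several call sites above gain a CheckDropPrivsMustBeDisabled(Fetcher) call before the fetcher runs; per the accompanying comments, the intent is to keep privileges when the unprivileged "_apt" user could not write the download destination. As a hedged sketch of that kind of writability probe -- assuming a root process and using faccessat() with AT_EACCESS so the temporarily switched effective IDs are what gets checked; this is an illustration, not the helper apt ships:

   #include <fcntl.h>       // AT_FDCWD, AT_EACCESS
   #include <pwd.h>
   #include <stdlib.h>
   #include <string>
   #include <unistd.h>

   // Sketch only: can user 'name' (e.g. "_apt") create files in 'dir'?
   // Assumes we start as root so the effective IDs can be switched and restored.
   static bool UserCanWriteTo(char const *name, std::string const &dir)
   {
      struct passwd const * const pw = getpwnam(name);
      if (pw == NULL)
         return false;
      uid_t const olduid = geteuid();
      gid_t const oldgid = getegid();
      if (setegid(pw->pw_gid) != 0 || seteuid(pw->pw_uid) != 0)
         return false;
      // AT_EACCESS checks with the effective rather than the real IDs
      bool const ok = faccessat(AT_FDCWD, dir.c_str(), W_OK | X_OK, AT_EACCESS) == 0;
      if (seteuid(olduid) != 0 || setegid(oldgid) != 0)
         abort();           // never continue with half-restored privileges
      return ok;
   }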
diff --git a/cmdline/apt-helper.cc b/cmdline/apt-helper.cc
index dd43ea1bc..27abb2013 100644
--- a/cmdline/apt-helper.cc
+++ b/cmdline/apt-helper.cc
@@ -48,10 +48,9 @@ static bool DoDownloadFile(CommandLine &CmdL)
if (CmdL.FileSize() <= 2)
return _error->Error(_("Must specify at least one pair url/filename"));
-
- pkgAcquire Fetcher;
AcqTextStatus Stat(ScreenWidth, _config->FindI("quiet",0));
- Fetcher.Setup(&Stat);
+ pkgAcquire Fetcher(&Stat);
+
std::string download_uri = CmdL.FileList[1];
std::string targetfile = CmdL.FileList[2];
std::string hash;
@@ -60,7 +59,10 @@ static bool DoDownloadFile(CommandLine &CmdL)
// we use download_uri as descr and targetfile as short-descr
new pkgAcqFile(&Fetcher, download_uri, hash, 0, download_uri, targetfile,
"dest-dir-ignored", targetfile);
- Fetcher.Run();
+
+ // Disable drop-privs if "_apt" can not write to the target dir
+ CheckDropPrivsMustBeDisabled(Fetcher);
+
bool Failed = false;
if (AcquireRun(Fetcher, 0, &Failed, NULL) == false || Failed == true ||
FileExists(targetfile) == false)
diff --git a/cmdline/apt-internal-solver.cc b/cmdline/apt-internal-solver.cc
index 5fda7b6a0..92a4429e5 100644
--- a/cmdline/apt-internal-solver.cc
+++ b/cmdline/apt-internal-solver.cc
@@ -76,6 +76,9 @@ int main(int argc,const char *argv[]) /*{{{*/
{'o',"option",0,CommandLine::ArbItem},
{0,0,0,0}};
+ // we really don't need anything
+ DropPrivileges();
+
CommandLine CmdL(Args,_config);
if (pkgInitConfig(*_config) == false ||
CmdL.Parse(argc,argv) == false) {
@@ -172,10 +175,10 @@ int main(int argc,const char *argv[]) /*{{{*/
std::string failure;
if (upgrade == true) {
- if (pkgAllUpgrade(CacheFile) == false)
+ if (APT::Upgrade::Upgrade(CacheFile, APT::Upgrade::FORBID_REMOVE_PACKAGES | APT::Upgrade::FORBID_INSTALL_NEW_PACKAGES) == false)
failure = "ERR_UNSOLVABLE_UPGRADE";
} else if (distUpgrade == true) {
- if (pkgDistUpgrade(CacheFile) == false)
+ if (APT::Upgrade::Upgrade(CacheFile, APT::Upgrade::ALLOW_EVERYTHING) == false)
failure = "ERR_UNSOLVABLE_DIST_UPGRADE";
} else if (Fix.Resolve() == false)
failure = "ERR_UNSOLVABLE";
diff --git a/cmdline/apt-key.in b/cmdline/apt-key.in
index 0774cf4b7..7a3852ee8 100644
--- a/cmdline/apt-key.in
+++ b/cmdline/apt-key.in
@@ -3,29 +3,6 @@
set -e
unset GREP_OPTIONS
-GPG_CMD="gpg --ignore-time-conflict --no-options --no-default-keyring"
-
-# gpg needs (in different versions more or less) files to function correctly,
-# so we give it its own homedir and generate some valid content for it
-GPGHOMEDIR="$(mktemp -d)"
-CURRENTTRAP="${CURRENTTRAP} rm -rf '${GPGHOMEDIR}';"
-trap "${CURRENTTRAP}" 0 HUP INT QUIT ILL ABRT FPE SEGV PIPE TERM
-chmod 700 "$GPGHOMEDIR"
-# We don't use a secret keyring, of course, but gpg panics and
-# implodes if there isn't one available - and writeable for imports
-SECRETKEYRING="${GPGHOMEDIR}/secring.gpg"
-touch $SECRETKEYRING
-GPG_CMD="$GPG_CMD --homedir $GPGHOMEDIR"
-# create the trustdb with an (empty) dummy keyring
-# older gpgs required it, newer gpgs even warn that it isn't needed,
-# but require it nonetheless for some commands, so we just play safe
-# here for the foreseeable future and create a dummy one
-$GPG_CMD --quiet --check-trustdb --keyring $SECRETKEYRING >/dev/null 2>&1
-# tell gpg that it shouldn't try to maintain a trustdb file
-GPG_CMD="$GPG_CMD --no-auto-check-trustdb --trust-model always"
-
-GPG="$GPG_CMD"
-
APT_DIR="/"
eval $(apt-config shell APT_DIR Dir)
@@ -37,22 +14,26 @@ REMOVED_KEYS='&keyring-removed-filename;'
eval $(apt-config shell REMOVED_KEYS APT::Key::RemovedKeys)
ARCHIVE_KEYRING_URI='&keyring-uri;'
eval $(apt-config shell ARCHIVE_KEYRING_URI APT::Key::ArchiveKeyringURI)
-TMP_KEYRING=${APT_DIR}/var/lib/apt/keyrings/maybe-import-keyring.gpg
+
+aptkey_echo() { echo "$@"; }
requires_root() {
if [ "$(id -u)" -ne 0 ]; then
- echo >&1 "ERROR: This command can only be used by root."
+ echo >&2 "ERROR: This command can only be used by root."
exit 1
fi
}
-# gpg defaults to mode 0600 for new keyrings. Create one with 0644 instead.
-init_keyring() {
- for path; do
- if ! [ -e "$path" ]; then
- touch -- "$path"
- chmod 0644 -- "$path"
- fi
+get_fingerprints_of_keyring() {
+ $GPG_CMD --keyring "$1" --with-colons --fingerprint | while read publine; do
+ # search for a public key
+ if [ "${publine%%:*}" != 'pub' ]; then continue; fi
+ # search for the associated fingerprint (should be the very next line)
+ while read fprline; do
+ if [ "${fprline%%:*}" = 'sub' ]; then break; # should never happen
+ elif [ "${fprline%%:*}" != 'fpr' ]; then continue; fi
+ echo "$fprline" | cut -d':' -f 10
+ done
done
}
@@ -61,11 +42,11 @@ add_keys_with_verify_against_master_keyring() {
MASTER=$2
if [ ! -f "$ADD_KEYRING" ]; then
- echo "ERROR: '$ADD_KEYRING' not found"
+ echo >&2 "ERROR: '$ADD_KEYRING' not found"
return
- fi
+ fi
if [ ! -f "$MASTER" ]; then
- echo "ERROR: '$MASTER' not found"
+ echo >&2 "ERROR: '$MASTER' not found"
return
fi
@@ -73,7 +54,7 @@ add_keys_with_verify_against_master_keyring() {
# is honored. so:
# all keys that are exported must have a valid signature
# from a key in the $distro-master-keyring
- add_keys=`$GPG_CMD --keyring $ADD_KEYRING --with-colons --list-keys | grep ^pub | cut -d: -f5`
+ add_keys="$(get_fingerprints_of_keyring "$ADD_KEYRING")"
all_add_keys=`$GPG_CMD --keyring $ADD_KEYRING --with-colons --list-keys | grep ^[ps]ub | cut -d: -f5`
master_keys=`$GPG_CMD --keyring $MASTER --with-colons --list-keys | grep ^pub | cut -d: -f5`
@@ -86,24 +67,28 @@ add_keys_with_verify_against_master_keyring() {
fi
done
done
-
+
for add_key in $add_keys; do
# export the add keyring one-by-one
- rm -f $TMP_KEYRING
- $GPG_CMD --keyring $ADD_KEYRING --output $TMP_KEYRING --export $add_key
- # check if signed with the master key and only add in this case
- ADDED=0
+ local TMP_KEYRING="${GPGHOMEDIR}/tmp-keyring.gpg"
+ $GPG_CMD --batch --yes --keyring "$ADD_KEYRING" --output "$TMP_KEYRING" --export "$add_key"
+ if ! $GPG_CMD --batch --yes --keyring "$TMP_KEYRING" --import "$MASTER" > "${GPGHOMEDIR}/gpgoutput.log" 2>&1; then
+ cat "${GPGHOMEDIR}/gpgoutput.log"
+ false
+ fi
+ # check if signed with the master key and only add in this case
+ ADDED=0
for master_key in $master_keys; do
- if $GPG_CMD --keyring $MASTER --keyring $TMP_KEYRING --check-sigs --with-colons $add_key | grep '^sig:!:' | cut -d: -f5 | grep -q $master_key; then
- $GPG --import $TMP_KEYRING
+ if $GPG_CMD --keyring $TMP_KEYRING --check-sigs --with-colons $add_key | grep '^sig:!:' | cut -d: -f5 | grep -q $master_key; then
+ $GPG_CMD --batch --yes --keyring "$ADD_KEYRING" --export "$add_key" | $GPG --batch --yes --import
ADDED=1
fi
done
if [ $ADDED = 0 ]; then
echo >&2 "Key '$add_key' not added. It is not signed with a master key"
fi
+ rm -f "${TMP_KEYRING}"
done
- rm -f $TMP_KEYRING
}
# update the current archive signing keyring from a network URI
@@ -121,7 +106,6 @@ net_update() {
echo >&2 "ERROR: Your distribution is not supported in net-update as no uri for the archive-keyring is set"
exit 1
fi
- requires_root
# in theory we would need to depend on wget for this, but this feature
# isn't useable in debian anyway as we have no keyring uri nor a master key
if ! which wget >/dev/null 2>&1; then
@@ -142,7 +126,7 @@ net_update() {
fi
new_mtime=$(stat -c %Y $keyring)
if [ $new_mtime -ne $old_mtime ]; then
- echo "Checking for new archive signing keys now"
+ aptkey_echo "Checking for new archive signing keys now"
add_keys_with_verify_against_master_keyring $keyring $MASTER_KEYRING
fi
}
@@ -153,7 +137,6 @@ update() {
echo >&2 "Is the &keyring-package; package installed?"
exit 1
fi
- requires_root
# add new keys from the package;
@@ -166,71 +149,159 @@ update() {
if [ -r "$REMOVED_KEYS" ]; then
# remove no-longer supported/used keys
- keys=`$GPG_CMD --keyring $REMOVED_KEYS --with-colons --list-keys | grep ^pub | cut -d: -f5`
- for key in $keys; do
- if $GPG --list-keys --with-colons | grep ^pub | cut -d: -f5 | grep -q $key; then
- $GPG --quiet --batch --delete-key --yes ${key}
- fi
+ get_fingerprints_of_keyring "$REMOVED_KEYS" | while read key; do
+ foreach_keyring_do 'remove_key_from_keyring' "$key"
done
else
- echo "Warning: removed keys keyring $REMOVED_KEYS missing or not readable" >&2
+ echo >&2 "Warning: removed keys keyring $REMOVED_KEYS missing or not readable"
fi
}
remove_key_from_keyring() {
- local GPG="$GPG_CMD --keyring $1"
- # check if the key is in this keyring: the key id is in the 5 column at the end
- if ! $GPG --with-colons --list-keys 2>&1 | grep -q "^pub:[^:]*:[^:]*:[^:]*:[0-9A-F]\+$2:"; then
- return
- fi
- if [ ! -w "$1" ]; then
- echo >&2 "Key ${2} is in keyring ${1}, but can't be removed as it is read only."
- return
+ local KEYRINGFILE="$1"
+ shift
+ # non-existent keyrings have by definition no keys
+ if [ ! -e "$KEYRINGFILE" ]; then
+ return
fi
- # check if it is the only key in the keyring and if so remove the keyring altogether
- if [ '1' = "$($GPG --with-colons --list-keys | grep "^pub:[^:]*:[^:]*:[^:]*:[0-9A-F]\+:" | wc -l)" ]; then
- mv -f "$1" "${1}~" # behave like gpg
- return
- fi
- # we can't just modify pointed to files as these might be in /usr or something
- local REALTARGET
- if [ -L "$1" ]; then
- REALTARGET="$(readlink -f "$1")"
- mv -f "$1" "${1}.dpkg-tmp"
- cp -a "$REALTARGET" "$1"
- ls "$(dirname $1)"
- fi
- # delete the key from the keyring
- $GPG --batch --delete-key --yes "$2"
- if [ -n "$REALTARGET" ]; then
- # the real backup is the old link, not the copy we made
- mv -f "${1}.dpkg-tmp" "${1}~"
- fi
-}
-remove_key() {
- requires_root
+ local GPG="$GPG_CMD --keyring $KEYRINGFILE"
+ for KEY in "$@"; do
+ # check if the key is in this keyring: the key id is in the 5th column at the end
+ if ! get_fingerprints_of_keyring "$KEYRINGFILE" | grep -q "^[0-9A-F]*${KEY}$"; then
+ continue
+ fi
+ if [ ! -w "$KEYRINGFILE" ]; then
+ echo >&2 "Key ${KEY} is in keyring ${KEYRINGFILE}, but can't be removed as it is read only."
+ continue
+ fi
+ # check if it is the only key in the keyring and if so remove the keyring altogether
+ if [ '1' = "$(get_fingerprints_of_keyring "$KEYRINGFILE" | wc -l)" ]; then
+ mv -f "$KEYRINGFILE" "${KEYRINGFILE}~" # behave like gpg
+ return
+ fi
+ # we can't just modify pointed to files as these might be in /usr or something
+ local REALTARGET
+ if [ -L "$KEYRINGFILE" ]; then
+ REALTARGET="$(readlink -f "$KEYRINGFILE")"
+ mv -f "$KEYRINGFILE" "${KEYRINGFILE}.dpkg-tmp"
+ cp -a "$REALTARGET" "$KEYRINGFILE"
+ fi
+ # delete the key from the keyring
+ $GPG --batch --delete-key --yes "$KEY"
+ if [ -n "$REALTARGET" ]; then
+ # the real backup is the old link, not the copy we made
+ mv -f "${KEYRINGFILE}.dpkg-tmp" "${KEYRINGFILE}~"
+ fi
+ done
+}
- # if a --keyring was given, just remove from there
- if [ -n "$FORCED_KEYRING" ]; then
- remove_key_from_keyring "$FORCED_KEYRING" "$1"
- else
+foreach_keyring_do() {
+ local ACTION="$1"
+ shift
+ # if a --keyring was given, just remove from there
+ if [ -n "$FORCED_KEYRING" ]; then
+ $ACTION "$FORCED_KEYRING" "$@"
+ else
# otherwise all known keyrings are up for inspection
- local TRUSTEDFILE="/etc/apt/trusted.gpg"
- eval $(apt-config shell TRUSTEDFILE Apt::GPGV::TrustedKeyring)
- eval $(apt-config shell TRUSTEDFILE Dir::Etc::Trusted/f)
- remove_key_from_keyring "$TRUSTEDFILE" "$1"
- TRUSTEDPARTS="/etc/apt/trusted.gpg.d"
+ if [ -s "$TRUSTEDFILE" ]; then
+ $ACTION "$TRUSTEDFILE" "$@"
+ fi
+ local TRUSTEDPARTS="/etc/apt/trusted.gpg.d"
eval $(apt-config shell TRUSTEDPARTS Dir::Etc::TrustedParts/d)
if [ -d "$TRUSTEDPARTS" ]; then
+ # strip / suffix as gpg will double-slash in that case (#665411)
+ local STRIPPED_TRUSTEDPARTS="${TRUSTEDPARTS%/}"
+ if [ "${STRIPPED_TRUSTEDPARTS}/" = "$TRUSTEDPARTS" ]; then
+ TRUSTEDPARTS="$STRIPPED_TRUSTEDPARTS"
+ fi
for trusted in $(run-parts --list "$TRUSTEDPARTS" --regex '^.*\.gpg$'); do
- remove_key_from_keyring "$trusted" "$1"
+ if [ -s "$trusted" ]; then
+ $ACTION "$trusted" "$@"
+ fi
done
fi
+ fi
+}
+
+run_cmd_on_keyring() {
+ local KEYRINGFILE="$1"
+ shift
+ # fingerprint and co will fail if key isn't in this keyring
+ $GPG_CMD --keyring "$KEYRINGFILE" --batch "$@" 2>/dev/null || true
+}
+
+import_keys_from_keyring() {
+ local IMPORT="$1"
+ local KEYRINGFILE="$2"
+ if ! $GPG_CMD --keyring "$KEYRINGFILE" --batch --import "$IMPORT" > "${GPGHOMEDIR}/gpgoutput.log" 2>&1; then
+ cat "${GPGHOMEDIR}/gpgoutput.log"
+ false
+ fi
+}
+
+merge_keys_into_keyrings() {
+ local KEYRINGFILE="$1"
+ local IMPORT="$2"
+ if ! $GPG_CMD --keyring "$KEYRINGFILE" --batch --import --import-options 'merge-only' "$IMPORT" > "${GPGHOMEDIR}/gpgoutput.log" 2>&1; then
+ cat "${GPGHOMEDIR}/gpgoutput.log"
+ false
+ fi
+}
+
+merge_back_changes() {
+ if [ -n "$FORCED_KEYRING" ]; then
+ # if the keyring was forced merge is already done
+ return
+ fi
+ if [ -s "${GPGHOMEDIR}/pubring.gpg" ]; then
+ # merge all updated keys
+ foreach_keyring_do 'merge_keys_into_keyrings' "${GPGHOMEDIR}/pubring.gpg"
+ fi
+ # look for keys which were added or removed
+ get_fingerprints_of_keyring "${GPGHOMEDIR}/pubring.orig.gpg" > "${GPGHOMEDIR}/pubring.orig.keylst"
+ get_fingerprints_of_keyring "${GPGHOMEDIR}/pubring.gpg" > "${GPGHOMEDIR}/pubring.keylst"
+ sort "${GPGHOMEDIR}/pubring.keylst" "${GPGHOMEDIR}/pubring.orig.keylst" | uniq --unique | while read key; do
+ if grep -q "^${key}$" "${GPGHOMEDIR}/pubring.orig.keylst"; then
+ # key isn't part of new keyring, so remove
+ foreach_keyring_do 'remove_key_from_keyring' "$key"
+ elif grep -q "^${key}$" "${GPGHOMEDIR}/pubring.keylst"; then
+ # key is part of new keyring, so we need to import it
+ create_new_keyring "$TRUSTEDFILE"
+ if ! $GPG --batch --yes --export "$key" | $GPG_CMD --keyring "$TRUSTEDFILE" --batch --yes --import > "${GPGHOMEDIR}/gpgoutput.log" 2>&1; then
+ cat "${GPGHOMEDIR}/gpgoutput.log"
+ false
+ fi
+ else
+ echo >&2 "Errror: Key ${key} (dis)appeared out of nowhere"
+ fi
+ done
+}
+
+setup_merged_keyring() {
+ if [ -z "$FORCED_KEYRING" ]; then
+ foreach_keyring_do 'import_keys_from_keyring' "${GPGHOMEDIR}/pubring.gpg"
+ if [ -r "${GPGHOMEDIR}/pubring.gpg" ]; then
+ cp -a "${GPGHOMEDIR}/pubring.gpg" "${GPGHOMEDIR}/pubring.orig.gpg"
+ else
+ touch "${GPGHOMEDIR}/pubring.gpg" "${GPGHOMEDIR}/pubring.orig.gpg"
+ fi
+ GPG="$GPG --keyring ${GPGHOMEDIR}/pubring.gpg"
+ else
+ GPG="$GPG --keyring $TRUSTEDFILE"
+ create_new_keyring "$TRUSTEDFILE"
fi
- echo "OK"
}
+create_new_keyring() {
+ # gpg defaults to mode 0600 for new keyrings. Create one with 0644 instead.
+ if ! [ -e "$TRUSTEDFILE" ]; then
+ if [ -w "$(dirname "$TRUSTEDFILE")" ]; then
+ touch -- "$TRUSTEDFILE"
+ chmod 0644 -- "$TRUSTEDFILE"
+ fi
+ fi
+}
usage() {
echo "Usage: apt-key [--keyring file] [command] [arguments]"
@@ -256,17 +327,19 @@ while [ -n "$1" ]; do
shift
TRUSTEDFILE="$1"
FORCED_KEYRING="$1"
- if [ -r "$TRUSTEDFILE" ] || [ "$2" = 'add' ] || [ "$2" = 'adv' ]; then
- GPG="$GPG --keyring $TRUSTEDFILE --primary-keyring $TRUSTEDFILE"
- else
- echo >&2 "Error: The specified keyring »$TRUSTEDFILE« is missing or not readable"
- exit 1
- fi
+ ;;
+ --secret-keyring)
shift
+ FORCED_SECRET_KEYRING="$1"
+ ;;
+ --readonly)
+ merge_back_changes() { true; }
;;
--fakeroot)
requires_root() { true; }
- shift
+ ;;
+ --quiet)
+ aptkey_echo() { true; }
;;
--*)
echo >&2 "Unknown option: $1"
@@ -275,28 +348,13 @@ while [ -n "$1" ]; do
*)
break;;
esac
+ shift
done
if [ -z "$TRUSTEDFILE" ]; then
TRUSTEDFILE="/etc/apt/trusted.gpg"
eval $(apt-config shell TRUSTEDFILE Apt::GPGV::TrustedKeyring)
eval $(apt-config shell TRUSTEDFILE Dir::Etc::Trusted/f)
- if [ -r "$TRUSTEDFILE" ]; then
- GPG="$GPG --keyring $TRUSTEDFILE"
- fi
- GPG="$GPG --primary-keyring $TRUSTEDFILE"
- TRUSTEDPARTS="/etc/apt/trusted.gpg.d"
- eval $(apt-config shell TRUSTEDPARTS Dir::Etc::TrustedParts/d)
- if [ -d "$TRUSTEDPARTS" ]; then
- # strip / suffix as gpg will double-slash in that case (#665411)
- STRIPPED_TRUSTEDPARTS="${TRUSTEDPARTS%/}"
- if [ "${STRIPPED_TRUSTEDPARTS}/" = "$TRUSTEDPARTS" ]; then
- TRUSTEDPARTS="$STRIPPED_TRUSTEDPARTS"
- fi
- for trusted in $(run-parts --list "$TRUSTEDPARTS" --regex '^.*\.gpg$'); do
- GPG="$GPG --keyring $trusted"
- done
- fi
fi
command="$1"
@@ -306,52 +364,103 @@ if [ -z "$command" ]; then
fi
shift
-if [ "$command" != "help" ] && ! which gpg >/dev/null 2>&1; then
- echo >&2 "Warning: gnupg does not seem to be installed."
- echo >&2 "Warning: apt-key requires gnupg for most operations."
- echo >&2
+if [ "$command" != "help" ]; then
+ eval $(apt-config shell GPG_EXE Apt::Key::gpgcommand)
+
+ if [ -n "$GPG_EXE" ] && which "$GPG_EXE" >/dev/null 2>&1; then
+ true
+ elif which gpg >/dev/null 2>&1; then
+ GPG_EXE="gpg"
+ elif which gpg2 >/dev/null 2>&1; then
+ GPG_EXE="gpg2"
+ else
+ echo >&2 "Error: gnupg or gnupg2 do not seem to be installed,"
+ echo >&2 "Error: but apt-key requires gnupg or gnupg2 for operation."
+ echo >&2
+ exit 255
+ fi
+
+ GPG_CMD="$GPG_EXE --ignore-time-conflict --no-options --no-default-keyring"
+
+ # gpg needs (in different versions more or less) files to function correctly,
+ # so we give it its own homedir and generate some valid content for it
+ if [ ! -d "$TMPDIR" ]; then
+ unset TMPDIR
+ fi
+ GPGHOMEDIR="$(mktemp -d)"
+ CURRENTTRAP="${CURRENTTRAP} rm -rf '${GPGHOMEDIR}';"
+ trap "${CURRENTTRAP}" 0 HUP INT QUIT ILL ABRT FPE SEGV PIPE TERM
+ chmod 700 "$GPGHOMEDIR"
+ # We don't use a secret keyring, of course, but gpg panics and
+ # implodes if there isn't one available - and writeable for imports
+ SECRETKEYRING="${GPGHOMEDIR}/secring.gpg"
+ touch $SECRETKEYRING
+ GPG_CMD="$GPG_CMD --homedir $GPGHOMEDIR"
+ # create the trustdb with an (empty) dummy keyring
+ # older gpgs required it, newer gpgs even warn that it isn't needed,
+ # but require it nonetheless for some commands, so we just play safe
+ # here for the foreseeable future and create a dummy one
+ $GPG_CMD --quiet --check-trustdb --keyring $SECRETKEYRING >/dev/null 2>&1
+ # tell gpg that it shouldn't try to maintain a trustdb file
+ GPG_CMD="$GPG_CMD --no-auto-check-trustdb --trust-model always"
+ GPG="$GPG_CMD"
+
+ # for advanced operations, we might really need a secret keyring after all
+ if [ -n "$FORCED_SECRET_KEYRING" ] && [ -r "$FORCED_SECRET_KEYRING" ]; then
+ rm -f "$SECRETKEYRING"
+ cp -a "$FORCED_SECRET_KEYRING" "$SECRETKEYRING"
+ fi
fi
case "$command" in
add)
- requires_root
- init_keyring "$TRUSTEDFILE"
- $GPG --quiet --batch --import "$1"
- echo "OK"
+ requires_root
+ setup_merged_keyring
+ $GPG --quiet --batch --import "$@"
+ merge_back_changes
+ aptkey_echo "OK"
;;
del|rm|remove)
- init_keyring "$TRUSTEDFILE"
- remove_key "$1"
+ requires_root
+ foreach_keyring_do 'remove_key_from_keyring' "$@"
+ aptkey_echo "OK"
;;
update)
- init_keyring "$TRUSTEDFILE"
+ requires_root
+ setup_merged_keyring
update
+ merge_back_changes
;;
net-update)
- init_keyring "$TRUSTEDFILE"
+ requires_root
+ setup_merged_keyring
net_update
+ merge_back_changes
;;
list)
- init_keyring "$TRUSTEDFILE"
- $GPG --batch --list-keys
- ;;
+ foreach_keyring_do 'run_cmd_on_keyring' --list-keys "$@"
+ ;;
finger*)
- init_keyring "$TRUSTEDFILE"
- $GPG --batch --fingerprint
- ;;
- export)
- init_keyring "$TRUSTEDFILE"
- $GPG --armor --export "$1"
- ;;
- exportall)
- init_keyring "$TRUSTEDFILE"
- $GPG --armor --export
- ;;
+ foreach_keyring_do 'run_cmd_on_keyring' --fingerprint "$@"
+ ;;
+ export|exportall)
+ foreach_keyring_do 'import_keys_from_keyring' "${GPGHOMEDIR}/pubring.gpg"
+ $GPG_CMD --keyring "${GPGHOMEDIR}/pubring.gpg" --armor --export "$@"
+ ;;
adv*)
- init_keyring "$TRUSTEDFILE"
- echo "Executing: $GPG $*"
- $GPG $*
- ;;
+ setup_merged_keyring
+ aptkey_echo "Executing: $GPG $*"
+ $GPG "$@"
+ merge_back_changes
+ ;;
+ verify)
+ setup_merged_keyring
+ if which gpgv >/dev/null 2>&1; then
+ gpgv --homedir "${GPGHOMEDIR}" --keyring "${GPGHOMEDIR}/pubring.gpg" --ignore-time-conflict "$@"
+ else
+ $GPG --verify "$@"
+ fi
+ ;;
help)
usage
;;
diff --git a/configure.ac b/configure.ac
index 2e591cdf2..5d0e0a9db 100644
--- a/configure.ac
+++ b/configure.ac
@@ -18,7 +18,7 @@ AC_CONFIG_AUX_DIR(buildlib)
AC_CONFIG_HEADER(include/config.h:buildlib/config.h.in include/apti18n.h:buildlib/apti18n.h.in)
PACKAGE="apt"
-PACKAGE_VERSION="1.0.9.1"
+PACKAGE_VERSION="1.1~exp3"
PACKAGE_MAIL="APT Development Team <deity@lists.debian.org>"
AC_DEFINE_UNQUOTED(PACKAGE,"$PACKAGE")
AC_DEFINE_UNQUOTED(PACKAGE_VERSION,"$PACKAGE_VERSION")
@@ -172,6 +172,12 @@ AC_EGREP_HEADER(h_errno, netdb.h, [AC_MSG_RESULT(normal)],
[AC_MSG_ERROR("not found.")])
])
+
+dnl check for setuid checking function
+AC_CHECK_FUNCS(getresuid getresgid)
+AC_SUBST(HAVE_GETRESUID)
+AC_SUBST(HAVE_GETRESGID)
+
dnl Check for doxygen
AC_PATH_PROG(DOXYGEN, doxygen)
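Editor's note: the getresuid/getresgid probes back the changelog entry about also verifying the saved set-user-ID and set-group-ID after dropping privileges, since plain getuid()/geteuid() would not notice a lingering saved root ID. A sketch of such a post-drop assertion, assuming the HAVE_* macros defined by the AC_CHECK_FUNCS probe above (again an illustration, not apt's code):

   #define _GNU_SOURCE
   #include <stdlib.h>
   #include <unistd.h>

   // Sketch only: after dropping privileges, make sure no root ID survived in
   // the real, effective or saved set-user/group-ID slots.
   static void AssertPrivilegesDropped(uid_t const want_uid, gid_t const want_gid)
   {
   #if defined(HAVE_GETRESUID) && defined(HAVE_GETRESGID)
      uid_t ruid, euid, suid;
      gid_t rgid, egid, sgid;
      if (getresuid(&ruid, &euid, &suid) != 0 || getresgid(&rgid, &egid, &sgid) != 0)
         abort();
      if (ruid != want_uid || euid != want_uid || suid != want_uid ||
          rgid != want_gid || egid != want_gid || sgid != want_gid)
         abort();
   #else
      // without getresuid/getresgid only the real and effective IDs can be checked
      if (getuid() != want_uid || geteuid() != want_uid ||
          getgid() != want_gid || getegid() != want_gid)
         abort();
   #endif
   }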
diff --git a/debian/apt.postinst b/debian/apt.postinst
index fd3e273bb..b0a5da7d8 100644..100755
--- a/debian/apt.postinst
+++ b/debian/apt.postinst
@@ -15,6 +15,19 @@ set -e
case "$1" in
configure)
+ if dpkg --compare-versions "$2" lt 1.1~exp4; then
+ # apt-key before 0.9.10 could leave empty keyrings around
+ find /etc/apt/trusted.gpg.d/ -name '*.gpg' | while read keyring; do
+ if ! test -s "$keyring"; then
+ rm -f "$keyring"
+ fi
+ done
+ # apt-key before 0.9.8.2 could create 0600 trusted.gpg file
+ if test -e /etc/apt/trusted.gpg ; then
+ chmod -f 0644 /etc/apt/trusted.gpg || true
+ fi
+ fi
+
if dpkg --compare-versions "$2" lt-nl 0.9.9.5; then
# we are using tmpfiles for both
rm -f /etc/apt/trustdb.gpg
@@ -26,6 +39,16 @@ case "$1" in
fi
fi
+ # add unprivileged user for the apt methods
+ adduser --force-badname --system --home /var/empty \
+ --no-create-home --quiet _apt || true
+
+ # deal with upgrades from experimental
+ if dpkg --compare-versions "$2" 'eq' '1.1~exp3'; then
+ # libapt will setup partial/ at runtime
+ chown -R root:root /var/lib/apt/lists /var/cache/apt/archives || true
+ fi
+
# ensure tighter permissons on the logs, see LP: #975199
if dpkg --compare-versions "$2" lt-nl 0.9.7.7; then
# ensure permissions are right
diff --git a/debian/postrm b/debian/apt.postrm
index ae1e18d33..ae1e18d33 100755
--- a/debian/postrm
+++ b/debian/apt.postrm
diff --git a/debian/changelog b/debian/changelog
index 3896c0531..9d13e9da0 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,210 @@
+apt (1.1~exp6) experimental; urgency=medium
+
+ [ josch ]
+ * implement the updated build profile spec
+
+ [ Michael Vogt ]
+ * methods/rsh.cc: replace strcat with std::string (Closes: #76442)
+ * Add new configallowinsecurerepositories to the test framework
+
+ [ Guillem Jover ]
+ * Update Status field values handling
+
+ [ David Kalnischkies ]
+ * don't drop privileges if _apt has not enough rights
+ * check for available space, excluding root reserved blocks
+
+ -- Michael Vogt <mvo@debian.org> Wed, 15 Oct 2014 07:47:36 +0200
+
+apt (1.1~exp5) experimental; urgency=medium
+
+ [ Michael Vogt ]
+ * Only rename StatError files in AbortTransaction()
+ * Document Acquire{MaxReleaseFileSize,AllowInsecureRepositories,
+ AllowDowngradeToInsecureRepositories} and
+ --no-allow-insecure-repositories
+ * Fix backward compatibility of the new pkgAcquireMethod::DropPrivsOrDie()
+ * Change default of Acquire::AllowInsecureRepositories to "true"
+ so that this change is less disruptive, this will be switched
+ to "false" again after jessie
+
+ [ David Kalnischkies ]
+ * remove useless pdiff filename output (Closes: 764737)
+ * make --allow-insecure-repositories message an error
+ * display a warning for unsigned repos
+ * trusted=yes sources are secure, we just don't know why
+
+ -- Michael Vogt <mvo@debian.org> Mon, 13 Oct 2014 16:15:22 +0200
+
+apt (1.1~exp4) experimental; urgency=medium
+
+ [ Michael Vogt ]
+ * Merge sid version 1.0.9.2
+ * feature/acq-trans:
+ - Make apt-get update more transactional by keeping all data from
+ a sources.list line in partial/ until all data is good and only
+ then move it into lists/ in one step
+ - add new -o Debug::Acquire::Transaction=1 debug option
+ * feature/expected-size:
+ Do not download more data in the mehotds than expected if we know
+ the size. For the InRelease/Release/Release.gpg add new
+ Acquire::MaxReleaseFileSize that defaults to 10Mb for now
+ * Verify the hashes of the downloaded compressed files early
+ * Only load unauthenticated data into our parsers when the user
+ explicitly asked for it via --allow-insecure-repositories
+ (Acquire::AllowInsecureRepositories)
+ * Print warning when trying to use unauthenticated repositories
+ * Use /var/empty as the homedir for _apt
+ * Revert making pkgAcquire::Item::DescURI() "const" to not break
+ API
+ * Do not allow going from an authenticated to an unauthenticated repository
+ * Add missing "adduser" dependency (for the new _apt user)
+ Thanks to Russ Allbery (Closes: #763004)
+ * Test if TMPDIR is a directory in apt-key and if not unset it
+ * add early verification for the .diff/Index download
+ * Bump library version to libapt-pkg4.14
+ * Rework pkgAcqMeta{Index,Sig,ClearSig}::{Done,Failed}() for readability
+ * Ignore EINVAL from prctl(PR_SET_NO_NEW_PRIVS) (closes: 764066)
+
+ [ David Kalnischkies ]
+ * deprecate Pkg->Name in favor of Grp->Name
+ * drop stored StringItems in favor of in-memory mappings
+ * de-duplicate version strings in the cache
+ * fix progress output for (dist-)upgrade calculation
+ * move PCI::From* methods into CacheSetHelper class (Closes: 686221)
+ * add a (hidden) --quiet option for apt-key
+ * only create new trusted.gpg if directory is writeable
+ * support (multiple) arguments properly in apt-key
+ * set a primary-keyring only if we have access to it
+ * merge fragment keyrings in apt-key to avoid hitting gpg limits
+ (Closes: 733028)
+ * use apt-key adv (+ gnupg) instead of gpgv for verify
+ * support gnupg2 as drop-in replacement for gnupg
+ * allow to specify fingerprints in 'apt-key del'
+ * use only one --keyring in gpg interactions
+ * add and use 'apt-key verify' which prefers gpgv over gpg
+ * remove empty keyrings in trusted.gpg.d on upgrade
+ * store source name and version in binary cache
+ * allow fetcher setup without directory creation (Closes: 762898)
+ * cleanup partial directory of lists in apt-get clean (Closes: #762889)
+ * allow options between command and -- on commandline
+ * update symbols file
+ * support parsing of all hashes for pdiff
+ * ensure world-readability for trusted.gpg in postinst (Closes: 647001)
+ * ensure partial dirs are 0700 and owned by _apt:root
+ * use _apt:root only for partial directories
+ * display errortext for all Err
+ * set PR_SET_NO_NEW_PRIVS also if run as non-root
+
+ [ James McCoy ]
+ * ensure apt-key del handles 16-byte key ids (Closes: 754436)
+
+ [ Kenshi Muto ]
+ * Japanese program translation update (Closes: 763033)
+
+ [ Trần Ngọc Quân ]
+ * Set STRIP_FROM_PATH for doxygen
+
+ [ Mert Dirik ]
+ * Turkish program translation update (Closes: 763379)
+
+ [ Guillem Jover ]
+ * apt-get: Create the temporary downloaded changelog inside tmpdir
+
+ [ Miroslav Kure ]
+ * [l10n] Updated Czech translation of apt (Closes: #764055)
+
+ -- Michael Vogt <mvo@ubuntu.com> Wed, 08 Oct 2014 09:37:35 +0200
+
+apt (1.1~exp3) experimental; urgency=medium
+
+ [ Michael Vogt ]
+ * merged changes from debian/sid up to 1.0.9.1
+ * Make /var/lib/apt/lists and /var/cache/apt/archives owned
+ by the new _apt user
+ * Drop Privileges in the following acquire methods:
+ copy, http, https, ftp, gpgv, gzip/bzip2/lzma/xz
+ * DropPrivs: Improvements based on feedback from error@debian.org
+
+ [ Julian Andres Klode ]
+ * DropPriv: Really call seteuid and not setuid, and add more checks
+ * Use _apt as our unprivileged user name
+ * DropPrivs: Also check for saved set-user-ID and set-group-ID
+ * methods: Fail if we cannot drop privileges
+ * DropPrivs: Also check for saved set-user-ID and set-group-ID
+
+ -- Michael Vogt <mvo@debian.org> Wed, 24 Sep 2014 22:30:09 +0200
+
+apt (1.1~exp2) experimental; urgency=medium
+
+ [ Guillem Jover ]
+ * Add new Base256ToNum long long overload function
+ * Fix ar and tar code to be LFS-safe (Closes: #742882)
+
+ [ Michael Vogt ]
+ * increase libapt-inst to version 1.6
+ * Only allow "apt-get build-dep path" when path starts with ./ or /
+ * Allow passing a full path to apt-get install /foo/bar.deb (Closes: #752327)
+ * merge changes from the 1.0.6 upload
+
+ -- Michael Vogt <mvo@debian.org> Thu, 10 Jul 2014 13:18:08 +0200
+
+apt (1.1~exp1) experimental; urgency=low
+
+ [ David Kalnischkies ]
+ * [API Break] change "std::string pkgAcquire::Item::DescURI()" to
+ "std::string pkgAcquire::Item::DescURI() const"
+ * [ABI-Break] increase hashtable size for packages/groups by factor 5
+ * [ABI-Break] cleanup datatypes mix used in binary cache
+ * [internal API-Break] remove the Section member from package struct
+ * use 'best' hash for source authentication (LP: 1098738)
+ * use HashStringList in the acquire system
+ * deal with hashes in ftparchive more dynamic as well
+ * reenable pipelining via hashsum reordering support
+ * parse and retrieve multiple Descriptions in one record
+ * improve pkgTagSection scanning and parsing
+ * invalid cache if architecture set doesn't match (Closes: 745036)
+
+ [ Michael Vogt ]
+ * add support for "apt-get build-dep foo.dsc"
+ * add support for "apt-get build-dep unpacked-source-dir"
+ * add support for "apt-get install foo_1.0_all.deb"
+ * make "apt-get update" progress much more accurate by loading the
+ sizes of the targets into the fetcher early
+ * Implement simple by-hash for apt update to improve reliability of
+ the update. Apt will try to fetch the Packages file via
+ /by-hash/$hash_type/$hash_value if the repo supports that.
+ - add APT::Acquire::$(host)::By-Hash=1 knob
+ - add Acquire-By-Hash=1 to Release file
+ * add Debug::Acquire::Progress debug option
+ * [ABI-Break] lp:~mvo/apt/source-hashes:
+ - use sha{512,256,1} for deb-src when available LP: #1098738
+ * [ABI-Break] stop exporting the accidentally exported parsenetrc() symbol
+ * [ABI-Break] remove the PACKAGE_MATCHER_ABI_COMPAT defines
+ * [ABI BREAK] apt-pkg/pkgcache.h:
+ - adjust pkgCache::State::VerPriority enum, to match reality
+ * test/integration/test-debsrc-hashes:
+ - add integration test, thanks to Daniel Hartwig
+ * [ABI-Break] remove the PACKAGE_MATCHER_ABI_COMPAT defines
+ * [ABI-Break] Pass struct IndexTarget/indexRecords to
+ pkgAcqIndex{,Merge}Diffs
+ * [internal API-Break] rename pkgCache::Package::NextPackage to
+ pkgCache::Package::Next
+ * Calculate Percent as part of pkgAcquireStatus to provide a weighted
+ percent for both items and bytes
+ * apt-pkg/contrib/macros.h: bump library version to 4.13
+ * apt-private/acqprogress.cc: do not show file size on IMSHit, it wasn't
+ fetched
+ * Fix warnings from clang -Wall/clang -fsanitize=address
+ * add DropPrivs() and drop privileges to nobody when running the
+ builtin apt and dump solvers
+ * lp:~mvo/apt/webserver-simulate-broken-with-fix346386:
+ - fix invalid InRelease file download checking and add regression
+ test to serve broken files to the builtin test webserver
+ - add regression test for LP: #346386
+
+ -- Michael Vogt <mvo@debian.org> Thu, 19 Jun 2014 12:01:48 +0200
+
apt (1.0.9.2) unstable; urgency=medium
[ Michael Vogt ]
diff --git a/debian/control b/debian/control
index 0437aa737..0ff611dcd 100644
--- a/debian/control
+++ b/debian/control
@@ -18,7 +18,7 @@ XS-Testsuite: autopkgtest
Package: apt
Architecture: any
-Depends: ${shlibs:Depends}, ${misc:Depends}, ${apt:keyring}, gnupg
+Depends: ${shlibs:Depends}, ${misc:Depends}, ${apt:keyring}, gnupg | gnupg2, adduser
Replaces: manpages-pl (<< 20060617-3~), manpages-it (<< 2.80-4~), sun-java6-jdk (>> 0), sun-java5-jdk (>> 0), openjdk-6-jdk (<< 6b24-1.11-0ubuntu1~)
Breaks: manpages-pl (<< 20060617-3~), manpages-it (<< 2.80-4~), sun-java6-jdk (>> 0), sun-java5-jdk (>> 0), openjdk-6-jdk (<< 6b24-1.11-0ubuntu1~)
Conflicts: python-apt (<< 0.7.93.2~)
@@ -38,12 +38,12 @@ Description: commandline package manager
* apt-config as an interface to the configuration settings
* apt-key as an interface to manage authentication keys
-Package: libapt-pkg4.12
+Package: libapt-pkg4.14
Architecture: any
Multi-Arch: same
Pre-Depends: ${misc:Pre-Depends}
Depends: ${shlibs:Depends}, ${misc:Depends}
-Breaks: apt (<< 0.9.4~), libapt-inst1.5 (<< 0.9.9~)
+Breaks: apt (<< 1.1~exp4), libapt-inst1.5 (<< 0.9.9~)
Section: libs
Description: package management runtime library
This library provides the common functionality for searching and
@@ -61,7 +61,7 @@ Description: package management runtime library
http, rsh as well as an interface to add more transports like
https (apt-transport-https) and debtorrent (apt-transport-debtorrent).
-Package: libapt-inst1.5
+Package: libapt-inst1.6
Architecture: any
Multi-Arch: same
Pre-Depends: ${misc:Pre-Depends}
diff --git a/debian/gbp.conf b/debian/gbp.conf
index ec6d9894e..135522d40 100644
--- a/debian/gbp.conf
+++ b/debian/gbp.conf
@@ -1,7 +1,7 @@
[DEFAULT]
prebuild = ./prepare-release pre-export
postbuild = ./prepare-release post-build
-debian-branch = debian/sid
+debian-branch = debian/experimental
debian-tag = %(version)s
export-dir = ../build-area
sign-tags = True
\ No newline at end of file
diff --git a/debian/libapt-inst1.5.install.in b/debian/libapt-inst1.6.install.in
index 8bcce2c28..8bcce2c28 100644
--- a/debian/libapt-inst1.5.install.in
+++ b/debian/libapt-inst1.6.install.in
diff --git a/debian/libapt-inst1.5.symbols b/debian/libapt-inst1.6.symbols
index 8ce707287..74c4665a2 100644
--- a/debian/libapt-inst1.5.symbols
+++ b/debian/libapt-inst1.6.symbols
@@ -1,9 +1,9 @@
-libapt-inst.so.1.5 libapt-inst1.5 #MINVER#
+libapt-inst.so.1.6 libapt-inst1.6 #MINVER#
* Build-Depends-Package: libapt-pkg-dev
(c++)"ExtractTar::Done(bool)@Base" 0.8.0
(c++)"ExtractTar::Go(pkgDirStream&)@Base" 0.8.0
(c++)"ExtractTar::StartGzip()@Base" 0.8.0
- (c++)"ExtractTar::ExtractTar(FileFd&, unsigned long, std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 0.8.0
+ (c++)"ExtractTar::ExtractTar(FileFd&, unsigned long long, std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 1.0.5
(c++)"ExtractTar::~ExtractTar()@Base" 0.8.0
(c++)"debDebFile::GotoMember(char const*)@Base" 0.8.0
(c++)"debDebFile::CheckMember(char const*)@Base" 0.8.0
@@ -11,10 +11,10 @@ libapt-inst.so.1.5 libapt-inst1.5 #MINVER#
(c++)"debDebFile::ControlExtract::~ControlExtract()@Base" 0.8.0
(c++)"debDebFile::ExtractTarMember(pkgDirStream&, char const*)@Base" 0.9.15.4
(c++)"debDebFile::ExtractArchive(pkgDirStream&)@Base" 0.8.0
- (c++)"debDebFile::MemControlExtract::TakeControl(void const*, unsigned long)@Base" 0.8.0
+ (c++)"debDebFile::MemControlExtract::TakeControl(void const*, unsigned long long)@Base" 1.0.5
(c++)"debDebFile::MemControlExtract::Read(debDebFile&)@Base" 0.8.0
(c++)"debDebFile::MemControlExtract::DoItem(pkgDirStream::Item&, int&)@Base" 0.8.0
- (c++)"debDebFile::MemControlExtract::Process(pkgDirStream::Item&, unsigned char const*, unsigned long, unsigned long)@Base" 0.8.0
+ (c++)"debDebFile::MemControlExtract::Process(pkgDirStream::Item&, unsigned char const*, unsigned long long, unsigned long long)@Base" 1.0.5
(c++)"debDebFile::MemControlExtract::~MemControlExtract()@Base" 0.8.0
(c++)"debDebFile::debDebFile(FileFd&)@Base" 0.8.0
(c++)"pkgExtract::FinishedFile(pkgDirStream::Item&, int)@Base" 0.8.0
@@ -41,7 +41,7 @@ libapt-inst.so.1.5 libapt-inst1.5 #MINVER#
(c++)"pkgDirStream::FinishedFile(pkgDirStream::Item&, int)@Base" 0.8.0
(c++)"pkgDirStream::Fail(pkgDirStream::Item&, int)@Base" 0.8.0
(c++)"pkgDirStream::DoItem(pkgDirStream::Item&, int&)@Base" 0.8.0
- (c++)"pkgDirStream::Process(pkgDirStream::Item&, unsigned char const*, unsigned long, unsigned long)@Base" 0.8.0
+ (c++)"pkgDirStream::Process(pkgDirStream::Item&, unsigned char const*, unsigned long long, unsigned long long)@Base" 1.0.5
(c++)"pkgDirStream::~pkgDirStream()@Base" 0.8.0
(c++|optional)"pkgCache::DepIterator::operator++(int)@Base" 0.8.0
(c++|optional)"pkgCache::DepIterator::operator++()@Base" 0.8.0
diff --git a/debian/libapt-pkg4.12.install.in b/debian/libapt-pkg4.14.install.in
index 56bed39d3..56bed39d3 100644
--- a/debian/libapt-pkg4.12.install.in
+++ b/debian/libapt-pkg4.14.install.in
diff --git a/debian/libapt-pkg4.12.symbols b/debian/libapt-pkg4.14.symbols
index 3fa128cff..e2efc57f6 100644
--- a/debian/libapt-pkg4.12.symbols
+++ b/debian/libapt-pkg4.14.symbols
@@ -1,4 +1,4 @@
-libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
+libapt-pkg.so.4.14 libapt-pkg4.14 #MINVER#
* Build-Depends-Package: libapt-pkg-dev
TFRewritePackageOrder@Base 0.8.0
TFRewriteSourceOrder@Base 0.8.0
@@ -10,7 +10,6 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"ReadPinDir(pkgPolicy&, std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 0.8.0
(c++)"RunScripts(char const*)@Base" 0.8.0
(c++)"SafeGetCWD()@Base" 0.8.0
- (c++)"parsenetrc(char*, char*, char*, char*)@Base" 0.8.0
(c++)"QuoteString(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, char const*)@Base" 0.8.0
(c++)"ReadPinFile(pkgPolicy&, std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 0.8.0
(c++)"RegexChoice(RxChoiceList*, char const**, char const**)@Base" 0.8.0
@@ -22,6 +21,7 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"StringToBool(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, int)@Base" 0.8.0
(c++)"UnmountCdrom(std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 0.8.0
(c++)"_GetErrorObj()@Base" 0.8.0
+ (c++)"Base256ToNum(char const*, unsigned long long&, unsigned int)@Base" 1.0.5
(c++)"pkgFixBroken(pkgDepCache&)@Base" 0.8.0
(c++)"DeQuoteString(__gnu_cxx::__normal_iterator<char const*, std::basic_string<char, std::char_traits<char>, std::allocator<char> > > const&, __gnu_cxx::__normal_iterator<char const*, std::basic_string<char, std::char_traits<char>, std::allocator<char> > > const&)@Base" 0.8.0
(c++)"DeQuoteString(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 0.8.0
@@ -29,13 +29,12 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"ReadConfigDir(Configuration&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, bool const&, unsigned int const&)@Base" 0.8.0
(c++)"URItoFileName(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 0.8.0
(c++)"UTF8ToCodeset(char const*, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> >*)@Base" 0.8.0
- (c++)"pkgAllUpgrade(pkgDepCache&)@Base" 0.8.0
(c++)"pkgInitConfig(Configuration&)@Base" 0.8.0
(c++)"pkgInitSystem(Configuration&, pkgSystem*&)@Base" 0.8.0
(c++)"safe_snprintf(char*, char*, char const*, ...)@Base" 0.8.0
(c++)"stringcasecmp(__gnu_cxx::__normal_iterator<char const*, std::basic_string<char, std::char_traits<char>, std::allocator<char> > >, __gnu_cxx::__normal_iterator<char const*, std::basic_string<char, std::char_traits<char>, std::allocator<char> > >, char const*, char const*)@Base" 0.8.0
(c++)"stringcasecmp(__gnu_cxx::__normal_iterator<char const*, std::basic_string<char, std::char_traits<char>, std::allocator<char> > >, __gnu_cxx::__normal_iterator<char const*, std::basic_string<char, std::char_traits<char>, std::allocator<char> > >, __gnu_cxx::__normal_iterator<char const*, std::basic_string<char, std::char_traits<char>, std::allocator<char> > >, __gnu_cxx::__normal_iterator<char const*, std::basic_string<char, std::char_traits<char>, std::allocator<char> > >)@Base" 0.8.0
- (c++)"stringcasecmp(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, char const*)@Base" 0.8.0
+# (c++|optional=inline)"stringcasecmp(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, char const*)@Base" 0.8.0
(c++)"stringcasecmp(char const*, char const*, char const*, char const*)@Base" 0.8.0
(c++)"tolower_ascii(int)@Base" 0.8.0
(c++)"ParseQuoteWord(char const*&, std::basic_string<char, std::char_traits<char>, std::allocator<char> >&)@Base" 0.8.0
@@ -43,7 +42,6 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"TokSplitString(char, char*, char**, unsigned long)@Base" 0.8.0
(c++)"maybe_add_auth(URI&, std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 0.8.0
(c++)"pkgApplyStatus(pkgDepCache&)@Base" 0.8.0
- (c++)"pkgDistUpgrade(pkgDepCache&)@Base" 0.8.0
(c++)"CheckDomainList(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 0.8.0
(c++)"CreateDirectory(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 0.8.0
(c++)"DirectoryExists(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 0.8.0
@@ -100,10 +98,8 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"SourceCopy::RewriteEntry(_IO_FILE*, std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 0.8.0
(c++)"SourceCopy::Type()@Base" 0.8.0
(c++)"SourceCopy::~SourceCopy()@Base" 0.8.0
- (c++)"pkgAcqFile::Custom600Headers()@Base" 0.8.0
(c++)"pkgAcqFile::Failed(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, pkgAcquire::MethodConfig*)@Base" 0.8.0
(c++)"pkgAcqFile::DescURI()@Base" 0.8.0
- (c++)"pkgAcqFile::HashSum()@Base" 0.8.0
(c++)"pkgAcqFile::~pkgAcqFile()@Base" 0.8.0
(c++)"pkgAcquire::WorkerStep(pkgAcquire::Worker*)@Base" 0.8.0
(c++)"pkgAcquire::FetchNeeded()@Base" 0.8.0
@@ -114,15 +110,11 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"pkgAcquire::Add(pkgAcquire::Worker*)@Base" 0.8.0
(c++)"pkgAcquire::Run(int)@Base" 0.8.0
(c++)"pkgAcquire::Bump()@Base" 0.8.0
- (c++)"pkgAcquire::Item::Custom600Headers()@Base" 0.8.0
(c++)"pkgAcquire::Item::ReportMirrorFailure(std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 0.8.0
(c++)"pkgAcquire::Item::Failed(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, pkgAcquire::MethodConfig*)@Base" 0.8.0
(c++)"pkgAcquire::Item::Rename(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 0.8.0
- (c++)"pkgAcquire::Item::HashSum()@Base" 0.8.0
(c++)"pkgAcquire::Item::Finished()@Base" 0.8.0
- (c++)"pkgAcquire::Item::IsTrusted()@Base" 0.8.0
(c++)"pkgAcquire::Item::ShortDesc()@Base" 0.8.0
- (c++)"pkgAcquire::Item::Item(pkgAcquire*)@Base" 0.8.0
(c++)"pkgAcquire::Item::~Item()@Base" 0.8.0
(c++)"pkgAcquire::Clean(std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 0.8.0
(c++)"pkgAcquire::Queue::Bump()@Base" 0.8.0
@@ -135,7 +127,6 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"pkgAcquire::Queue::Shutdown(bool)@Base" 0.8.0
(c++)"pkgAcquire::Queue::Queue(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, pkgAcquire*)@Base" 0.8.0
(c++)"pkgAcquire::Queue::~Queue()@Base" 0.8.0
- (c++)"pkgAcquire::Setup(pkgAcquireStatus*, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 0.8.0
(c++)"pkgAcquire::Remove(pkgAcquire::Item*)@Base" 0.8.0
(c++)"pkgAcquire::Remove(pkgAcquire::Worker*)@Base" 0.8.0
(c++)"pkgAcquire::RunFds(fd_set*, fd_set*)@Base" 0.8.0
@@ -170,15 +161,10 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"pkgRecords::Lookup(pkgCache::VerFileIterator const&)@Base" 0.8.0
(c++)"pkgRecords::Lookup(pkgCache::DescFileIterator const&)@Base" 0.8.0
(c++)"pkgRecords::Parser::Maintainer()@Base" 0.8.0
- (c++)"pkgRecords::Parser::SHA256Hash()@Base" 0.8.0
(c++)"pkgRecords::Parser::Name()@Base" 0.8.0
(c++)"pkgRecords::Parser::GetRec(char const*&, char const*&)@Base" 0.8.0
- (c++)"pkgRecords::Parser::MD5Hash()@Base" 0.8.0
(c++)"pkgRecords::Parser::FileName()@Base" 0.8.0
(c++)"pkgRecords::Parser::Homepage()@Base" 0.8.0
- (c++)"pkgRecords::Parser::LongDesc()@Base" 0.8.0
- (c++)"pkgRecords::Parser::SHA1Hash()@Base" 0.8.0
- (c++)"pkgRecords::Parser::ShortDesc()@Base" 0.8.0
(c++)"pkgRecords::Parser::SourcePkg()@Base" 0.8.0
(c++)"pkgRecords::Parser::SourceVer()@Base" 0.8.0
(c++)"pkgRecords::pkgRecords(pkgCache&)@Base" 0.8.0
@@ -221,11 +207,8 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"PackageCopy::RewriteEntry(_IO_FILE*, std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 0.8.0
(c++)"PackageCopy::Type()@Base" 0.8.0
(c++)"PackageCopy::~PackageCopy()@Base" 0.8.0
- (c++)"pkgAcqIndex::Custom600Headers()@Base" 0.8.0
(c++)"pkgAcqIndex::Failed(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, pkgAcquire::MethodConfig*)@Base" 0.8.0
(c++)"pkgAcqIndex::DescURI()@Base" 0.8.0
- (c++)"pkgAcqIndex::HashSum()@Base" 0.8.0
- (c++)"pkgAcqIndex::pkgAcqIndex(pkgAcquire*, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, HashString, std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 0.8.0
(c++)"pkgAcqIndex::~pkgAcqIndex()@Base" 0.8.0
(c++)"pkgDepCache::IsDeleteOk(pkgCache::PkgIterator const&, bool, unsigned long, bool)@Base" 0.8.0
(c++)"pkgDepCache::MarkDelete(pkgCache::PkgIterator const&, bool, unsigned long, bool)@Base" 0.8.0
@@ -236,8 +219,6 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"pkgDepCache::ActionGroup::~ActionGroup()@Base" 0.8.0
(c++)"pkgDepCache::IsInstallOk(pkgCache::PkgIterator const&, bool, unsigned long, bool)@Base" 0.8.0
(c++)"pkgDepCache::MarkInstall(pkgCache::PkgIterator const&, bool, unsigned long, bool, bool)@Base" 0.8.0
- (c++)"pkgDepCache::MarkPackage(pkgCache::PkgIterator const&, pkgCache::VerIterator const&, bool const&, bool const&)@Base" 0.8.0
- (c++)"pkgDepCache::MarkRequired(pkgDepCache::InRootSetFunc&)@Base" 0.8.0
(c++)"pkgDepCache::SetReInstall(pkgCache::PkgIterator const&, bool)@Base" 0.8.0
(c++)"pkgDepCache::VersionState(pkgCache::DepIterator, unsigned char, unsigned char, unsigned char)@Base" 0.8.0
(c++)"pkgDepCache::BuildGroupOrs(pkgCache::VerIterator const&)@Base" 0.8.0
@@ -253,7 +234,6 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"pkgDepCache::MarkFollowsSuggests()@Base" 0.8.0
(c++)"pkgDepCache::MarkFollowsRecommends()@Base" 0.8.0
(c++)"pkgDepCache::Init(OpProgress*)@Base" 0.8.0
- (c++)"pkgDepCache::Sweep()@Base" 0.8.0
(c++)"pkgDepCache::Policy::IsImportantDep(pkgCache::DepIterator const&)@Base" 0.8.0
(c++)"pkgDepCache::Policy::GetCandidateVer(pkgCache::PkgIterator const&)@Base" 0.8.0
(c++)"pkgDepCache::Policy::~Policy()@Base" 0.8.0
@@ -265,12 +245,10 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"pkgDepCache::MarkKeep(pkgCache::PkgIterator const&, bool, bool, unsigned long)@Base" 0.8.0
(c++)"pkgDepCache::pkgDepCache(pkgCache*, pkgDepCache::Policy*)@Base" 0.8.0
(c++)"pkgDepCache::~pkgDepCache()@Base" 0.8.0
- (c++)"pkgSimulate::ShortBreaks()@Base" 0.8.0
(c++)"pkgSimulate::Policy::GetCandidateVer(pkgCache::PkgIterator const&)@Base" 0.8.0
(c++)"pkgSimulate::Policy::~Policy()@Base" 0.8.0
(c++)"pkgSimulate::Remove(pkgCache::PkgIterator, bool)@Base" 0.8.0
(c++)"pkgSimulate::Install(pkgCache::PkgIterator, std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 0.8.0
- (c++)"pkgSimulate::Describe(pkgCache::PkgIterator, std::basic_ostream<char, std::char_traits<char> >&, bool, bool)@Base" 0.8.0
(c++)"pkgSimulate::Configure(pkgCache::PkgIterator)@Base" 0.8.0
(c++)"pkgSimulate::pkgSimulate(pkgDepCache*)@Base" 0.8.0
(c++)"pkgSimulate::~pkgSimulate()@Base" 0.8.0
@@ -280,8 +258,6 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"indexRecords::Load(std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 0.8.0
(c++)"indexRecords::Lookup(std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 0.8.0
(c++)"indexRecords::MetaKeys()@Base" 0.8.0
- (c++)"indexRecords::indexRecords(std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 0.8.0
- (c++)"indexRecords::indexRecords()@Base" 0.8.0
(c++)"indexRecords::~indexRecords()@Base" 0.8.0
(c++)"pkgAcqMethod::FetchResult::TakeHashes(Hashes&)@Base" 0.8.0
(c++)"pkgAcqMethod::FetchResult::FetchResult()@Base" 0.8.0
@@ -356,7 +332,6 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"WeakPointable::~WeakPointable()@Base" 0.8.0
(c++)"debListParser::NewVersion(pkgCache::VerIterator&)@Base" 0.8.0
(c++)"debListParser::UsePackage(pkgCache::PkgIterator&, pkgCache::VerIterator&)@Base" 0.8.0
- (c++)"debListParser::Description()@Base" 0.8.0
(c++)"debListParser::ParseStatus(pkgCache::PkgIterator&, pkgCache::VerIterator&)@Base" 0.8.0
(c++)"debListParser::VersionHash()@Base" 0.8.0
(c++)"debListParser::Architecture()@Base" 0.8.0
@@ -367,8 +342,6 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"debListParser::ConvertRelation(char const*, unsigned int&)@Base" 0.8.0
(c++)"debListParser::Description_md5()@Base" 0.8.0
(c++)"debListParser::LoadReleaseInfo(pkgCache::PkgFileIterator&, FileFd&, std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 0.8.0
- (c++)"debListParser::UniqFindTagWrite(char const*)@Base" 0.8.0
- (c++)"debListParser::DescriptionLanguage()@Base" 0.8.0
(c++)"debListParser::Size()@Base" 0.8.0
(c++)"debListParser::Step()@Base" 0.8.0
(c++)"debListParser::Offset()@Base" 0.8.0
@@ -380,14 +353,11 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"debListParser::~debListParser()@Base" 0.8.0
(c++)"pkgAcqArchive::Failed(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, pkgAcquire::MethodConfig*)@Base" 0.8.0
(c++)"pkgAcqArchive::DescURI()@Base" 0.8.0
- (c++)"pkgAcqArchive::HashSum()@Base" 0.8.0
(c++)"pkgAcqArchive::Finished()@Base" 0.8.0
- (c++)"pkgAcqArchive::IsTrusted()@Base" 0.8.0
(c++)"pkgAcqArchive::QueueNext()@Base" 0.8.0
(c++)"pkgAcqArchive::ShortDesc()@Base" 0.8.0
(c++)"pkgAcqArchive::pkgAcqArchive(pkgAcquire*, pkgSourceList*, pkgRecords*, pkgCache::VerIterator const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> >&)@Base" 0.8.0
(c++)"pkgAcqArchive::~pkgAcqArchive()@Base" 0.8.0
- (c++)"pkgAcqMetaSig::Custom600Headers()@Base" 0.8.0
(c++)"pkgAcqMetaSig::Failed(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, pkgAcquire::MethodConfig*)@Base" 0.8.0
(c++)"pkgAcqMetaSig::DescURI()@Base" 0.8.0
(c++)"pkgAcqMetaSig::~pkgAcqMetaSig()@Base" 0.8.0
@@ -411,7 +381,6 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"pkgSrcRecords::pkgSrcRecords(pkgSourceList&)@Base" 0.8.0
(c++)"pkgSrcRecords::~pkgSrcRecords()@Base" 0.8.0
(c++)"pkgTagSection::TrimRecord(bool, char const*&)@Base" 0.8.0
- (c++)"pkgTagSection::Scan(char const*, unsigned long)@Base" 0.8.0
(c++)"pkgTagSection::Trim()@Base" 0.8.0
(c++)"pkgVendorList::CreateList(Configuration&)@Base" 0.8.0
(c++)"pkgVendorList::FindVendor(std::vector<std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::allocator<std::basic_string<char, std::char_traits<char>, std::allocator<char> > > >)@Base" 0.8.0
@@ -429,17 +398,12 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"debStatusIndex::~debStatusIndex()@Base" 0.8.0
(c++)"debIFTypeStatus::~debIFTypeStatus()@Base" 0.8.0
(c++)"debRecordParser::Maintainer()@Base" 0.8.0
- (c++)"debRecordParser::SHA256Hash()@Base" 0.8.0
(c++)"debRecordParser::Jump(pkgCache::VerFileIterator const&)@Base" 0.8.0
(c++)"debRecordParser::Jump(pkgCache::DescFileIterator const&)@Base" 0.8.0
(c++)"debRecordParser::Name()@Base" 0.8.0
(c++)"debRecordParser::GetRec(char const*&, char const*&)@Base" 0.8.0
- (c++)"debRecordParser::MD5Hash()@Base" 0.8.0
(c++)"debRecordParser::FileName()@Base" 0.8.0
(c++)"debRecordParser::Homepage()@Base" 0.8.0
- (c++)"debRecordParser::LongDesc()@Base" 0.8.0
- (c++)"debRecordParser::SHA1Hash()@Base" 0.8.0
- (c++)"debRecordParser::ShortDesc()@Base" 0.8.0
(c++)"debRecordParser::SourcePkg()@Base" 0.8.0
(c++)"debRecordParser::SourceVer()@Base" 0.8.0
(c++)"debRecordParser::debRecordParser(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, pkgCache&)@Base" 0.8.0
@@ -455,19 +419,11 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"debSourcesIndex::debSourcesIndex(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, bool)@Base" 0.8.0
(c++)"debSourcesIndex::~debSourcesIndex()@Base" 0.8.0
(c++)"pkgAcqDiffIndex::ParseDiffIndex(std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 0.8.0
- (c++)"pkgAcqDiffIndex::Custom600Headers()@Base" 0.8.0
(c++)"pkgAcqDiffIndex::Failed(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, pkgAcquire::MethodConfig*)@Base" 0.8.0
(c++)"pkgAcqDiffIndex::DescURI()@Base" 0.8.0
- (c++)"pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire*, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, HashString)@Base" 0.8.0
(c++)"pkgAcqDiffIndex::~pkgAcqDiffIndex()@Base" 0.8.0
- (c++)"pkgAcqMetaIndex::QueueIndexes(bool)@Base" 0.8.0
- (c++)"pkgAcqMetaIndex::VerifyVendor(std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 0.8.0
- (c++)"pkgAcqMetaIndex::RetrievalDone(std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 0.8.0
- (c++)"pkgAcqMetaIndex::Custom600Headers()@Base" 0.8.0
(c++)"pkgAcqMetaIndex::Failed(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, pkgAcquire::MethodConfig*)@Base" 0.8.0
(c++)"pkgAcqMetaIndex::DescURI()@Base" 0.8.0
- (c++)"pkgAcqMetaIndex::AuthDone(std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 0.8.0
- (c++)"pkgAcqMetaIndex::pkgAcqMetaIndex(pkgAcquire*, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::vector<IndexTarget*, std::allocator<IndexTarget*> > const*, indexRecords*)@Base" 0.8.0
(c++)"pkgAcqMetaIndex::~pkgAcqMetaIndex()@Base" 0.8.0
(c++)"pkgVersionMatch::ExpressionMatches(char const*, char const*)@Base" 0.8.0
(c++)"pkgVersionMatch::ExpressionMatches(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, char const*)@Base" 0.8.0
@@ -479,15 +435,10 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"TranslationsCopy::CopyTranslations(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::vector<std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::allocator<std::basic_string<char, std::char_traits<char>, std::allocator<char> > > >&, pkgCdromStatus*)@Base" 0.8.0
(c++)"debPackagesIndex::debPackagesIndex(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, bool const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 0.8.0
(c++)"debPackagesIndex::~debPackagesIndex()@Base" 0.8.0
- (c++)"pkgAcqIndexDiffs::QueueNextDiff()@Base" 0.8.0
(c++)"pkgAcqIndexDiffs::Failed(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, pkgAcquire::MethodConfig*)@Base" 0.8.0
- (c++)"pkgAcqIndexDiffs::Finish(bool)@Base" 0.8.0
(c++)"pkgAcqIndexDiffs::DescURI()@Base" 0.8.0
- (c++)"pkgAcqIndexDiffs::pkgAcqIndexDiffs(pkgAcquire*, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, HashString, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::vector<DiffInfo, std::allocator<DiffInfo> >)@Base" 0.8.0
(c++)"pkgAcqIndexDiffs::~pkgAcqIndexDiffs()@Base" 0.8.0
- (c++)"pkgAcqIndexTrans::Custom600Headers()@Base" 0.8.0
(c++)"pkgAcqIndexTrans::Failed(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, pkgAcquire::MethodConfig*)@Base" 0.8.0
- (c++)"pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire*, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 0.8.0
(c++)"pkgAcqIndexTrans::~pkgAcqIndexTrans()@Base" 0.8.0
(c++)"pkgAcquireStatus::Done(pkgAcquire::ItemDesc&)@Base" 0.8.0
(c++)"pkgAcquireStatus::Fail(pkgAcquire::ItemDesc&)@Base" 0.8.0
@@ -508,7 +459,6 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"pkgCacheGenerator::FinishCache(OpProgress*)@Base" 0.8.0
(c++)"pkgCacheGenerator::NewFileDesc(pkgCache::DescIterator&, pkgCacheGenerator::ListParser&)@Base" 0.8.0
(c++)"pkgCacheGenerator::MakeStatusCache(pkgSourceList&, OpProgress*, MMap**, bool)@Base" 0.8.0
- (c++)"pkgCacheGenerator::WriteUniqString(char const*, unsigned int)@Base" 0.8.0
(c++)"pkgCacheGenerator::CreateDynamicMMap(FileFd*, unsigned long)@Base" 0.8.0
(c++)"pkgCacheGenerator::MergeFileProvides(pkgCacheGenerator::ListParser&)@Base" 0.8.0
(c++)"pkgCacheGenerator::MakeOnlyStatusCache(OpProgress*, DynamicMMap**)@Base" 0.8.0
@@ -553,13 +503,8 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"debSrcRecordParser::Restart()@Base" 0.8.0
(c++)"debSrcRecordParser::Binaries()@Base" 0.8.0
(c++)"debSrcRecordParser::~debSrcRecordParser()@Base" 0.8.0
- (c++)"pkgProblemResolver::MakeScores()@Base" 0.8.0
- (c++)"pkgProblemResolver::ResolveByKeep()@Base" 0.8.0
(c++)"pkgProblemResolver::InstallProtect()@Base" 0.8.0
(c++)"pkgProblemResolver::This@Base" 0.8.0
- (c++)"pkgProblemResolver::Resolve(bool)@Base" 0.8.0
- (c++)"pkgProblemResolver::DoUpgrade(pkgCache::PkgIterator)@Base" 0.8.0
- (c++)"pkgProblemResolver::ScoreSort(void const*, void const*)@Base" 0.8.0
(c++)"pkgProblemResolver::pkgProblemResolver(pkgDepCache*)@Base" 0.8.0
(c++)"pkgProblemResolver::~pkgProblemResolver()@Base" 0.8.0
(c++)"debVersioningSystem::CmpFragment(char const*, char const*, char const*, char const*)@Base" 0.8.0
@@ -636,7 +581,6 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"pkgCache::VerFileIterator::operator++()@Base" 0.8.0
(c++)"pkgCache::DescFileIterator::operator++(int)@Base" 0.8.0
(c++)"pkgCache::DescFileIterator::operator++()@Base" 0.8.0
- (c++)"pkgCache::SingleArchFindPkg(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 0.8.0
(c++)"pkgCache::ReMap(bool const&)@Base" 0.8.0
(c++)"pkgCache::Header::Header()@Base" 0.8.0
(c++)"pkgCache::DepType(unsigned char)@Base" 0.8.0
@@ -663,11 +607,8 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"IndexCopy::ChopDirs(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, unsigned int)@Base" 0.8.0
(c++)"IndexCopy::GrabFirst(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >&, unsigned int)@Base" 0.8.0
(c++)"SigVerify::CopyAndVerify(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::vector<std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::allocator<std::basic_string<char, std::char_traits<char>, std::allocator<char> > > >&, std::vector<std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::allocator<std::basic_string<char, std::char_traits<char>, std::allocator<char> > > >, std::vector<std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::allocator<std::basic_string<char, std::char_traits<char>, std::allocator<char> > > >)@Base" 0.8.0
- (c++)"SigVerify::CopyMetaIndex(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 0.8.0
- (c++)"SigVerify::Verify(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, indexRecords*)@Base" 0.8.0
(c++)"SigVerify::RunGPGV(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, int const&, int*)@Base" 0.8.0
(c++)"debSystem::Initialize(Configuration&)@Base" 0.8.0
- (c++)"debSystem::CheckUpdates()@Base" 0.8.0
(c++)"debSystem::AddStatusFiles(std::vector<pkgIndexFile*, std::allocator<pkgIndexFile*> >&)@Base" 0.8.0
(c++)"debSystem::ArchiveSupported(char const*)@Base" 0.8.0
(c++)"debSystem::Lock()@Base" 0.8.0
@@ -680,7 +621,6 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"pkgDPkgPM::WriteHistoryTag(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 0.8.0
(c++)"pkgDPkgPM::WriteApportReport(char const*, char const*)@Base" 0.8.0
(c++)"pkgDPkgPM::RunScriptsWithPkgs(char const*)@Base" 0.8.0
- (c++)"pkgDPkgPM::handleDisappearAction(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 0.8.0
(c++)"pkgDPkgPM::Go(int)@Base" 0.8.0
(c++)"pkgDPkgPM::Reset()@Base" 0.8.0
(c++)"pkgDPkgPM::Remove(pkgCache::PkgIterator, bool)@Base" 0.8.0
@@ -727,7 +667,6 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"pkgIndexFile::Type::CreatePkgParser(pkgCache::PkgFileIterator) const@Base" 0.8.0
(c++)"pkgIndexFile::Merge(pkgCacheGenerator&, OpProgress*) const@Base" 0.8.0
(c++)"pkgIndexFile::Merge(pkgCacheGenerator&, OpProgress&) const@Base" 0.8.0
- (c++)"Configuration::FindVector(char const*) const@Base" 0.8.0
(c++)"Configuration::MatchAgainstConfig::Match(char const*) const@Base" 0.8.0
(c++)"Configuration::Find(char const*, char const*) const@Base" 0.8.0
(c++)"Configuration::Item::FullTag(Configuration::Item const*) const@Base" 0.8.0
@@ -774,23 +713,19 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"debSourcesIndex::SourceInfo(pkgSrcRecords::Parser const&, pkgSrcRecords::File const&) const@Base" 0.8.0
(c++)"debSourcesIndex::HasPackages() const@Base" 0.8.0
(c++)"debSourcesIndex::CreateSrcParser() const@Base" 0.8.0
- (c++)"debSourcesIndex::Info(char const*) const@Base" 0.8.0
(c++)"debSourcesIndex::Size() const@Base" 0.8.0
(c++)"debSourcesIndex::Exists() const@Base" 0.8.0
(c++)"debSourcesIndex::GetType() const@Base" 0.8.0
(c++)"debSourcesIndex::Describe(bool) const@Base" 0.8.0
- (c++)"debSourcesIndex::IndexURI(char const*) const@Base" 0.8.0
(c++)"debPackagesIndex::ArchiveURI(std::basic_string<char, std::char_traits<char>, std::allocator<char> >) const@Base" 0.8.0
(c++)"debPackagesIndex::ArchiveInfo(pkgCache::VerIterator) const@Base" 0.8.0
(c++)"debPackagesIndex::FindInCache(pkgCache&) const@Base" 0.8.0
(c++)"debPackagesIndex::HasPackages() const@Base" 0.8.0
- (c++)"debPackagesIndex::Info(char const*) const@Base" 0.8.0
(c++)"debPackagesIndex::Size() const@Base" 0.8.0
(c++)"debPackagesIndex::Merge(pkgCacheGenerator&, OpProgress*) const@Base" 0.8.0
(c++)"debPackagesIndex::Exists() const@Base" 0.8.0
(c++)"debPackagesIndex::GetType() const@Base" 0.8.0
(c++)"debPackagesIndex::Describe(bool) const@Base" 0.8.0
- (c++)"debPackagesIndex::IndexURI(char const*) const@Base" 0.8.0
(c++)"debSrcRecordParser::Maintainer() const@Base" 0.8.0
(c++)"debSrcRecordParser::Package() const@Base" 0.8.0
(c++)"debSrcRecordParser::Section() const@Base" 0.8.0
@@ -798,13 +733,11 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"debTranslationsIndex::GetIndexes(pkgAcquire*) const@Base" 0.8.0
(c++)"debTranslationsIndex::FindInCache(pkgCache&) const@Base" 0.8.0
(c++)"debTranslationsIndex::HasPackages() const@Base" 0.8.0
- (c++)"debTranslationsIndex::Info(char const*) const@Base" 0.8.0
(c++)"debTranslationsIndex::Size() const@Base" 0.8.0
(c++)"debTranslationsIndex::Merge(pkgCacheGenerator&, OpProgress*) const@Base" 0.8.0
(c++)"debTranslationsIndex::Exists() const@Base" 0.8.0
(c++)"debTranslationsIndex::GetType() const@Base" 0.8.0
(c++)"debTranslationsIndex::Describe(bool) const@Base" 0.8.0
- (c++)"debTranslationsIndex::IndexURI(char const*) const@Base" 0.8.0
(c++)"Vendor::GetVendorID() const@Base" 0.8.0
(c++)"Vendor::LookupFingerprint(std::basic_string<char, std::char_traits<char>, std::allocator<char> >) const@Base" 0.8.0
(c++)"pkgCache::DepIterator::AllTargets() const@Base" 0.8.0
@@ -1101,6 +1034,14 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
debVS@Base 0.8.0
pkgLibVersion@Base 0.8.0
pkgVersion@Base 0.8.0
+ (c++)"pkgAcquireStatus::~pkgAcquireStatus()@Base" 0.8.0
+ (c++)"IndexCopy::~IndexCopy()@Base" 0.8.0
+ (c++)"pkgIndexFile::Type::~Type()@Base" 0.8.0
+ (c++)"pkgAcqBaseIndex::~pkgAcqBaseIndex()@Base" 0.8.0
+ (c++)"pkgArchiveCleaner::~pkgArchiveCleaner()@Base" 0.8.0
+ (c++)"typeinfo for pkgArchiveCleaner@Base" 0.8.0
+ (c++)"typeinfo name for pkgArchiveCleaner@Base" 0.8.0
+ (c++)"vtable for pkgArchiveCleaner@Base" 0.8.0
### architecture specific: va_list
(arch=armel armhf|c++)"pkgAcqMethod::PrintStatus(char const*, char const*, std::__va_list&) const@Base" 0.8.15~exp1
(arch=i386 hurd-i386 kfreebsd-i386 ppc64|c++)"pkgAcqMethod::PrintStatus(char const*, char const*, char*&) const@Base" 0.8.15~exp1
@@ -1140,7 +1081,6 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(arch=!x32|c++)"RFC1123StrToTime(char const*, long&)@Base" 0.8.0
(arch=x32|c++)"RFC1123StrToTime(char const*, long long&)@Base" 0.8.0
###
- (c++)"Configuration::MatchAgainstConfig::clearPatterns()@Base" 0.8.1
(c++)"CreateAPTDirectoryIfNeeded(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 0.8.2
(c++)"FileFd::FileSize()@Base" 0.8.8
(c++)"Base256ToNum(char const*, unsigned long&, unsigned int)@Base" 0.8.11
@@ -1149,27 +1089,15 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"RealFileExists(std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 0.8.11
(c++)"StripEpoch(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 0.8.11
(c++)"pkgAcqIndex::Init(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 0.8.11
- (c++)"pkgAcqIndex::pkgAcqIndex(pkgAcquire*, IndexTarget const*, HashString const&, indexRecords const*)@Base" 0.8.11
(c++)"pkgTagSection::FindFlag(unsigned long&, unsigned long, char const*, char const*)@Base" 0.8.11
- (c++)"pkgAcqSubIndex::ParseIndex(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 0.8.11
- (c++)"pkgAcqSubIndex::Custom600Headers()@Base" 0.8.11
- (c++)"pkgAcqSubIndex::Failed(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, pkgAcquire::MethodConfig*)@Base" 0.8.11
- (c++)"pkgAcqSubIndex::DescURI()@Base" 0.8.11
- (c++)"pkgAcqSubIndex::pkgAcqSubIndex(pkgAcquire*, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, HashString const&)@Base" 0.8.11
- (c++)"pkgAcqSubIndex::~pkgAcqSubIndex()@Base" 0.8.11
(c++)"pkgAcqMetaClearSig::Failed(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, pkgAcquire::MethodConfig*)@Base" 0.8.11
(c++)"pkgAcqMetaClearSig::pkgAcqMetaClearSig(pkgAcquire*, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::vector<IndexTarget*, std::allocator<IndexTarget*> > const*, indexRecords*)@Base" 0.8.11
(c++)"pkgAcqMetaClearSig::~pkgAcqMetaClearSig()@Base" 0.8.11
- (c++)"pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire*, IndexTarget const*, HashString const&, indexRecords const*)@Base" 0.8.11
(c++)"IndexTarget::IsOptional() const@Base" 0.8.11
- (c++)"IndexTarget::IsSubIndex() const@Base" 0.8.11
(c++)"debReleaseIndex::TranslationIndexURI(char const*, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&) const@Base" 0.8.11
(c++)"debReleaseIndex::TranslationIndexURISuffix(char const*, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&) const@Base" 0.8.11
- (c++)"typeinfo for pkgAcqSubIndex@Base" 0.8.11
(c++)"typeinfo for pkgAcqMetaClearSig@Base" 0.8.11
- (c++)"typeinfo name for pkgAcqSubIndex@Base" 0.8.11
(c++)"typeinfo name for pkgAcqMetaClearSig@Base" 0.8.11
- (c++)"vtable for pkgAcqSubIndex@Base" 0.8.11
(c++)"vtable for pkgAcqMetaClearSig@Base" 0.8.11
(c++)"FindMountPointForDevice(char const*)@Base" 0.8.12
(c++)"pkgUdevCdromDevices::ScanForRemovable(bool)@Base" 0.8.12
@@ -1177,10 +1105,7 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"APT::Configuration::Compressor::~Compressor()@Base" 0.8.12
(c++)"APT::Configuration::getCompressors(bool)@Base" 0.8.12
(c++)"APT::Configuration::getCompressorExtensions()@Base" 0.8.12
- (c++)"APT::Configuration::setDefaultConfigurationForCompressors()@Base" 0.8.12
- (c++)"pkgAcqMetaClearSig::Custom600Headers()@Base" 0.8.13
(c++)"debListParser::NewProvidesAllArch(pkgCache::VerIterator&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 0.8.13.2
- (c++)"pkgDepCache::IsModeChangeOk(pkgDepCache::ModeList, pkgCache::PkgIterator const&, unsigned long, bool)@Base" 0.8.13.2
(c++)"pkgCache::DepIterator::IsNegative() const@Base" 0.8.15~exp1
(c++)"Configuration::CndSet(char const*, int)@Base" 0.8.15.3
(c++)"pkgProblemResolver::InstOrNewPolicyBroken(pkgCache::PkgIterator)@Base" 0.8.15.3
@@ -1253,9 +1178,6 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"SHA1Summation::Result()@Base" 0.8.16~exp2
(c++)"SHA256Summation::Add(unsigned char const*, unsigned long long)@Base" 0.8.16~exp6
(c++)"SHA512Summation::Add(unsigned char const*, unsigned long long)@Base" 0.8.16~exp6
- (c++)"debRecordParser::SHA512Hash()@Base" 0.8.16~exp2
- (c++)"pkgRecords::Parser::SHA512Hash()@Base" 0.8.16~exp6
- (c++)"Hashes::AddFD(int, unsigned long long, bool, bool, bool, bool)@Base" 0.8.16~exp6
(c++)"SummationImplementation::AddFD(int, unsigned long long)@Base" 0.8.16~exp6
(c++)"typeinfo for MD5Summation@Base" 0.8.16~exp6
(c++)"typeinfo for SHA1Summation@Base" 0.8.16~exp6
@@ -1281,11 +1203,8 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"OpProgress::OverallProgress(unsigned long long, unsigned long long, unsigned long long, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 0.8.16~exp6
(c++)"OpProgress::Progress(unsigned long long)@Base" 0.8.16~exp6
(c++)"SourceCopy::GetFile(std::basic_string<char, std::char_traits<char>, std::allocator<char> >&, unsigned long long&)@Base" 0.8.16~exp6
- (c++)"pkgAcqFile::Done(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, unsigned long long, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, pkgAcquire::MethodConfig*)@Base" 0.8.16~exp6
- (c++)"pkgAcqFile::pkgAcqFile(pkgAcquire*, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, unsigned long long, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, bool)@Base" 0.8.16~exp6
(c++)"pkgAcquire::UriIterator::~UriIterator()@Base" 0.8.16~exp6
(c++)"pkgAcquire::MethodConfig::~MethodConfig()@Base" 0.8.16~exp6
- (c++)"pkgAcquire::Item::Done(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, unsigned long long, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, pkgAcquire::MethodConfig*)@Base" 0.8.16~exp6
(c++)"pkgAcquire::Item::Start(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, unsigned long long)@Base" 0.8.16~exp6
(c++)"pkgRecords::Parser::RecordField(char const*)@Base" 0.8.16~exp6
(c++)"pkgTagFile::Jump(pkgTagSection&, unsigned long long)@Base" 0.8.16~exp6
@@ -1293,23 +1212,13 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"pkgTagFile::pkgTagFile(FileFd*, unsigned long long)@Base" 0.8.16~exp6
(c++)"DynamicMMap::RawAllocate(unsigned long long, unsigned long)@Base" 0.8.16~exp6
(c++)"PackageCopy::GetFile(std::basic_string<char, std::char_traits<char>, std::allocator<char> >&, unsigned long long&)@Base" 0.8.16~exp6
- (c++)"pkgAcqIndex::Done(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, unsigned long long, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, pkgAcquire::MethodConfig*)@Base" 0.8.16~exp6
- (c++)"indexRecords::parseSumData(char const*&, char const*, std::basic_string<char, std::char_traits<char>, std::allocator<char> >&, std::basic_string<char, std::char_traits<char>, std::allocator<char> >&, unsigned long long&)@Base" 0.8.16~exp6
- (c++)"pkgAcqArchive::Done(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, unsigned long long, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, pkgAcquire::MethodConfig*)@Base" 0.8.16~exp6
(c++)"pkgTagSection::~pkgTagSection()@Base" 0.8.16~exp6
- (c++)"pkgAcqSubIndex::Done(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, unsigned long long, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, pkgAcquire::MethodConfig*)@Base" 0.8.16~exp6
(c++)"debRecordParser::RecordField(char const*)@Base" 0.8.16~exp6
(c++)"debReleaseIndex::SetTrusted(bool)@Base" 0.8.16~exp6
(c++)"debReleaseIndex::debReleaseIndex(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, bool)@Base" 0.8.16~exp6
- (c++)"pkgAcqMetaIndex::Done(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, unsigned long long, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, pkgAcquire::MethodConfig*)@Base" 0.8.16~exp6
- (c++)"pkgAcqIndexDiffs::Done(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, unsigned long long, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, pkgAcquire::MethodConfig*)@Base" 0.8.16~exp6
- (c++)"pkgAcqMetaSig::Done(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, unsigned long long, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, pkgAcquire::MethodConfig*)@Base" 0.8.16~exp6
- (c++)"pkgAcqDiffIndex::Done(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, unsigned long long, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, pkgAcquire::MethodConfig*)@Base" 0.8.16~exp6
(c++)"pkgAcquireStatus::Fetched(unsigned long long, unsigned long long)@Base" 0.8.16~exp6
(c++)"PreferenceSection::~PreferenceSection()@Base" 0.8.16~exp6
(c++)"pkgCacheGenerator::NewDescription(pkgCache::DescIterator&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, HashSumValue<128> const&, unsigned int)@Base" 0.8.16~exp6
- (c++)"pkgProblemResolver::ResolveInternal(bool)@Base" 0.8.16~exp6
- (c++)"pkgProblemResolver::ResolveByKeepInternal()@Base" 0.8.16~exp6
(c++)"FileFd::Read(void*, unsigned long long, unsigned long long*)@Base" 0.8.16~exp6
(c++)"FileFd::Seek(unsigned long long)@Base" 0.8.16~exp6
(c++)"FileFd::Skip(unsigned long long)@Base" 0.8.16~exp6
@@ -1362,14 +1271,11 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"FileFd::Open(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, unsigned int, APT::Configuration::Compressor const&, unsigned long)@Base" 0.8.16~exp9
(c++)"FileFd::ReadLine(char*, unsigned long long)@Base" 0.8.16~exp9
(c++)"SummationImplementation::AddFD(FileFd&, unsigned long long)@Base" 0.8.16~exp9
- (c++)"Hashes::AddFD(FileFd&, unsigned long long, bool, bool, bool, bool)@Base" 0.8.16~exp9
(c++|optional=deprecated,previous-inline)"FileFd::gzFd()@Base" 0.8.0
### CacheSet rework: making them real containers breaks bigtime the API (for the CacheSetHelper)
(c++)"APT::PackageContainer<std::set<pkgCache::PkgIterator, std::less<pkgCache::PkgIterator>, std::allocator<pkgCache::PkgIterator> > >::const_iterator::getPkg() const@Base" 0.8.16~exp9
- (c++)"APT::PackageContainer<std::set<pkgCache::PkgIterator, std::less<pkgCache::PkgIterator>, std::allocator<pkgCache::PkgIterator> > >::getConstructor() const@Base" 0.8.16~exp9
(c++)"APT::PackageContainer<std::set<pkgCache::PkgIterator, std::less<pkgCache::PkgIterator>, std::allocator<pkgCache::PkgIterator> > >::empty() const@Base" 0.8.16~exp9
(c++)"APT::PackageContainer<std::list<pkgCache::PkgIterator, std::allocator<pkgCache::PkgIterator> > >::const_iterator::getPkg() const@Base" 0.8.16~exp9
- (c++)"APT::PackageContainer<std::list<pkgCache::PkgIterator, std::allocator<pkgCache::PkgIterator> > >::getConstructor() const@Base" 0.8.16~exp9
(c++)"APT::PackageContainer<std::list<pkgCache::PkgIterator, std::allocator<pkgCache::PkgIterator> > >::empty() const@Base" 0.8.16~exp9
(c++)"APT::VersionContainer<std::list<pkgCache::VerIterator, std::allocator<pkgCache::VerIterator> > >::empty() const@Base" 0.8.16~exp9
(c++)"APT::VersionContainer<std::list<pkgCache::VerIterator, std::allocator<pkgCache::VerIterator> > >::iterator::getVer() const@Base" 0.8.16~exp9
@@ -1382,23 +1288,12 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"APT::CacheSetHelper::showSelectedVersion(pkgCache::PkgIterator const&, pkgCache::VerIterator, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, bool)@Base" 0.8.16~exp9
(c++)"APT::CacheSetHelper::canNotFindCandInstVer(APT::VersionContainerInterface*, pkgCacheFile&, pkgCache::PkgIterator const&)@Base" 0.8.16~exp9
(c++)"APT::CacheSetHelper::canNotFindInstCandVer(APT::VersionContainerInterface*, pkgCacheFile&, pkgCache::PkgIterator const&)@Base" 0.8.16~exp9
- (c++)"APT::PackageContainer<std::set<pkgCache::PkgIterator, std::less<pkgCache::PkgIterator>, std::allocator<pkgCache::PkgIterator> > >::setConstructor(APT::PackageContainerInterface::Constructor const&)@Base" 0.8.16~exp9
(c++)"APT::PackageContainer<std::set<pkgCache::PkgIterator, std::less<pkgCache::PkgIterator>, std::allocator<pkgCache::PkgIterator> > >::clear()@Base" 0.8.16~exp9
(c++)"APT::PackageContainer<std::set<pkgCache::PkgIterator, std::less<pkgCache::PkgIterator>, std::allocator<pkgCache::PkgIterator> > >::insert(pkgCache::PkgIterator const&)@Base" 0.8.16~exp9
- (c++)"APT::PackageContainer<std::list<pkgCache::PkgIterator, std::allocator<pkgCache::PkgIterator> > >::setConstructor(APT::PackageContainerInterface::Constructor const&)@Base" 0.8.16~exp9
(c++)"APT::PackageContainer<std::list<pkgCache::PkgIterator, std::allocator<pkgCache::PkgIterator> > >::clear()@Base" 0.8.16~exp9
(c++)"APT::PackageContainer<std::list<pkgCache::PkgIterator, std::allocator<pkgCache::PkgIterator> > >::insert(pkgCache::PkgIterator const&)@Base" 0.8.16~exp9
(c++)"APT::VersionContainer<std::list<pkgCache::VerIterator, std::allocator<pkgCache::VerIterator> > >::clear()@Base" 0.8.16~exp9
(c++)"APT::VersionContainer<std::list<pkgCache::VerIterator, std::allocator<pkgCache::VerIterator> > >::insert(pkgCache::VerIterator const&)@Base" 0.8.16~exp9
- (c++)"APT::PackageContainerInterface::FromString(APT::PackageContainerInterface*, pkgCacheFile&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, APT::CacheSetHelper&)@Base" 0.8.16~exp9
- (c++)"APT::PackageContainerInterface::FromCommandLine(APT::PackageContainerInterface*, pkgCacheFile&, char const**, APT::CacheSetHelper&)@Base" 0.8.16~exp9
- (c++)"APT::PackageContainerInterface::FromModifierCommandLine(unsigned short&, APT::PackageContainerInterface*, pkgCacheFile&, char const*, std::list<APT::PackageContainerInterface::Modifier, std::allocator<APT::PackageContainerInterface::Modifier> > const&, APT::CacheSetHelper&)@Base" 0.8.16~exp9
- (c++)"APT::PackageContainerInterface::FromName(pkgCacheFile&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, APT::CacheSetHelper&)@Base" 0.8.16~exp9
- (c++)"APT::PackageContainerInterface::FromTask(APT::PackageContainerInterface*, pkgCacheFile&, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, APT::CacheSetHelper&)@Base" 0.8.16~exp9
- (c++)"APT::PackageContainerInterface::FromRegEx(APT::PackageContainerInterface*, pkgCacheFile&, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, APT::CacheSetHelper&)@Base" 0.8.16~exp9
- (c++)"APT::VersionContainerInterface::FromString(APT::VersionContainerInterface*, pkgCacheFile&, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, APT::VersionContainerInterface::Version const&, APT::CacheSetHelper&, bool)@Base" 0.8.16~exp9
- (c++)"APT::VersionContainerInterface::FromPackage(APT::VersionContainerInterface*, pkgCacheFile&, pkgCache::PkgIterator const&, APT::VersionContainerInterface::Version const&, APT::CacheSetHelper&)@Base" 0.8.16~exp9
- (c++)"APT::VersionContainerInterface::FromCommandLine(APT::VersionContainerInterface*, pkgCacheFile&, char const**, APT::VersionContainerInterface::Version const&, APT::CacheSetHelper&)@Base" 0.8.16~exp9
(c++)"APT::VersionContainerInterface::getCandidateVer(pkgCacheFile&, pkgCache::PkgIterator const&, APT::CacheSetHelper&)@Base" 0.8.16~exp9
(c++)"APT::VersionContainerInterface::getInstalledVer(pkgCacheFile&, pkgCache::PkgIterator const&, APT::CacheSetHelper&)@Base" 0.8.16~exp9
(c++)"APT::VersionContainerInterface::FromModifierCommandLine(unsigned short&, APT::VersionContainerInterface*, pkgCacheFile&, char const*, std::list<APT::VersionContainerInterface::Modifier, std::allocator<APT::VersionContainerInterface::Modifier> > const&, APT::CacheSetHelper&)@Base" 0.8.16~exp9
@@ -1469,10 +1364,8 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"APT::Progress::PackageManagerText::~PackageManagerText()@Base" 0.9.13~exp1
(c++)"APT::Progress::PackageManagerText::StatusChanged(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, unsigned int, unsigned int, std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 0.9.13~exp1
(c++)"APT::String::Strip(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 0.9.13~exp1
- (c++)"APT::Upgrade::Upgrade(pkgDepCache&, int)@Base" 0.9.13~exp1
(c++)"pkgDPkgPM::BuildPackagesProgressMap()@Base" 0.9.13~exp1
(c++)"pkgDPkgPM::DoDpkgStatusFd(int)@Base" 0.9.13~exp1
- (c++)"pkgDPkgPM::GoNoABIBreak(APT::Progress::PackageManager*)@Base" 0.9.13~exp1
(c++)"pkgDPkgPM::ProcessDpkgStatusLine(char*)@Base" 0.9.13~exp1
(c++)"pkgDPkgPM::StartPtyMagic()@Base" 0.9.13~exp1
(c++)"pkgDPkgPM::StopPtyMagic()@Base" 0.9.13~exp1
@@ -1493,14 +1386,11 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"vtable for APT::Progress::PackageManagerText@Base" 0.9.13~exp1
(c++)"APT::Progress::PackageManagerFancy::instances@Base" 0.9.14.2
(c++)"APT::Progress::PackageManagerFancy::Start(int)@Base" 0.9.14.2
- (c++)"APT::Progress::PackageManagerFancy::staticSIGWINCH(int)@Base" 0.9.14.2
(c++)"APT::Progress::PackageManager::Start(int)@Base" 0.9.14.2
### client-side merged pdiffs
(c++)"pkgAcqIndexMergeDiffs::DescURI()@Base" 0.9.14.3~exp1
- (c++)"pkgAcqIndexMergeDiffs::Done(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, unsigned long long, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, pkgAcquire::MethodConfig*)@Base" 0.9.14.3~exp1
(c++)"pkgAcqIndexMergeDiffs::Failed(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, pkgAcquire::MethodConfig*)@Base" 0.9.14.3~exp1
(c++)"pkgAcqIndexMergeDiffs::~pkgAcqIndexMergeDiffs()@Base" 0.9.14.3~exp1
- (c++)"pkgAcqIndexMergeDiffs::pkgAcqIndexMergeDiffs(pkgAcquire*, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, HashString const&, DiffInfo const&, std::vector<pkgAcqIndexMergeDiffs*, std::allocator<pkgAcqIndexMergeDiffs*> > const*)@Base" 0.9.14.3~exp1
(c++)"typeinfo for pkgAcqIndexMergeDiffs@Base" 0.9.14.3~exp1
(c++)"typeinfo name for pkgAcqIndexMergeDiffs@Base" 0.9.14.3~exp1
(c++)"vtable for pkgAcqIndexMergeDiffs@Base" 0.9.14.3~exp1
@@ -1508,12 +1398,310 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"pkgSourceList::ParseFileDeb822(std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 0.9.14.3~exp1
(c++)"pkgSourceList::ParseFileOldStyle(std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 0.9.14.3~exp1
(c++)"pkgSourceList::Type::ParseStanza(std::vector<metaIndex*, std::allocator<metaIndex*> >&, pkgTagSection&, int, FileFd&)@Base" 0.9.14.3~exp1
+### install foo.deb support
+ (c++)"debDebFileMetaIndex::ArchiveURI(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&) const@Base" 1.1~exp1
+ (c++)"debDebFileMetaIndex::~debDebFileMetaIndex()@Base" 1.1~exp1
+ (c++)"debDebFileMetaIndex::debDebFileMetaIndex(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 1.1~exp1
+ (c++)"debDebFileMetaIndex::GetIndexes(pkgAcquire*, bool const&) const@Base" 1.1~exp1
+ (c++)"debDebFileMetaIndex::GetIndexFiles()@Base" 1.1~exp1
+ (c++)"debDebFileMetaIndex::IsTrusted() const@Base" 1.1~exp1
+ (c++)"debDebFileParser::~debDebFileParser()@Base" 1.1~exp1
+ (c++)"debDebFileParser::debDebFileParser(FileFd*, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 1.1~exp1
+ (c++)"debDebFileParser::UsePackage(pkgCache::PkgIterator&, pkgCache::VerIterator&)@Base" 1.1~exp1
+ (c++)"debDebFileRecordParser::~debDebFileRecordParser()@Base" 1.1~exp1
+ (c++)"debDebFileRecordParser::FileName()@Base" 1.1~exp1
+ (c++)"debDebianSourceDirIndex::~debDebianSourceDirIndex()@Base" 1.1~exp1
+ (c++)"debDebianSourceDirIndex::GetType() const@Base" 1.1~exp1
+ (c++)"debDebPkgFileIndex::ArchiveURI(std::basic_string<char, std::char_traits<char>, std::allocator<char> >) const@Base" 1.1~exp1
+ (c++)"debDebPkgFileIndex::~debDebPkgFileIndex()@Base" 1.1~exp1
+ (c++)"debDebPkgFileIndex::debDebPkgFileIndex(std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 1.1~exp1
+ (c++)"debDebPkgFileIndex::Describe(bool) const@Base" 1.1~exp1
+ (c++)"debDebPkgFileIndex::Exists() const@Base" 1.1~exp1
+ (c++)"debDebPkgFileIndex::FindInCache(pkgCache&) const@Base" 1.1~exp1
+ (c++)"debDebPkgFileIndex::GetType() const@Base" 1.1~exp1
+ (c++)"debDebPkgFileIndex::HasPackages() const@Base" 1.1~exp1
+ (c++)"debDebPkgFileIndex::Merge(pkgCacheGenerator&, OpProgress*) const@Base" 1.1~exp1
+ (c++)"debDebPkgFileIndex::Size() const@Base" 1.1~exp1
+ (c++)"debDscFileIndex::CreateSrcParser() const@Base" 1.1~exp1
+ (c++)"debDscFileIndex::~debDscFileIndex()@Base" 1.1~exp1
+ (c++)"debDscFileIndex::debDscFileIndex(std::basic_string<char, std::char_traits<char>, std::allocator<char> >&)@Base" 1.1~exp1
+ (c++)"debDscFileIndex::Describe(bool) const@Base" 1.1~exp1
+ (c++)"debDscFileIndex::Exists() const@Base" 1.1~exp1
+ (c++)"debDscFileIndex::GetType() const@Base" 1.1~exp1
+ (c++)"debDscFileIndex::HasPackages() const@Base" 1.1~exp1
+ (c++)"debDscFileIndex::Size() const@Base" 1.1~exp1
+ (c++)"debDscRecordParser::~debDscRecordParser()@Base" 1.1~exp1
+ (c++)"debDscRecordParser::debDscRecordParser(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, pkgIndexFile const*)@Base" 1.1~exp1
+ (c++)"debIFTypeDebianSourceDir::CreateSrcPkgParser(std::basic_string<char, std::char_traits<char>, std::allocator<char> >) const@Base" 1.1~exp1
+ (c++)"debIFTypeDebianSourceDir::~debIFTypeDebianSourceDir()@Base" 1.1~exp1
+ (c++)"debIFTypeDebPkgFile::CreatePkgParser(pkgCache::PkgFileIterator) const@Base" 1.1~exp1
+ (c++)"debIFTypeDebPkgFile::~debIFTypeDebPkgFile()@Base" 1.1~exp1
+ (c++)"debIFTypeDscFile::CreateSrcPkgParser(std::basic_string<char, std::char_traits<char>, std::allocator<char> >) const@Base" 1.1~exp1
+ (c++)"debIFTypeDscFile::~debIFTypeDscFile()@Base" 1.1~exp1
+ (c++)"debListParser::AvailableDescriptionLanguages()@Base" 1.1~exp1
+ (c++)"debListParser::Description(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 1.1~exp1
+ (c++)"debListParser::SameVersion(unsigned short, pkgCache::VerIterator const&)@Base" 1.1~exp1
+ (c++)"debReleaseIndex::LocalFileName() const@Base" 1.1~exp1
+ (c++)"debSLTypeDebFile::CreateItem(std::vector<metaIndex*, std::allocator<metaIndex*> >&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::map<std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::less<std::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::allocator<std::pair<std::basic_string<char, std::char_traits<char>, std::allocator<char> > const, std::basic_string<char, std::char_traits<char>, std::allocator<char> > > > > const&) const@Base" 1.1~exp1
+ (c++)"debSLTypeDebFile::~debSLTypeDebFile()@Base" 1.1~exp1
+ (c++)"flAbsPath(std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 1.1~exp1
+ (c++)"GetTempFile(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, bool)@Base" 1.1~exp1
+ (c++)"pkgIndexFile::Type::CreateSrcPkgParser(std::basic_string<char, std::char_traits<char>, std::allocator<char> >) const@Base" 1.1~exp1
+ (c++)"metaIndex::LocalFileName() const@Base" 1.1~exp1
+ (c++)"metaIndex::~metaIndex()@Base" 1.1~exp1
+ (c++)"typeinfo for debDebFileMetaIndex@Base" 1.1~exp1
+ (c++)"typeinfo for debDebFileParser@Base" 1.1~exp1
+ (c++)"typeinfo for debDebFileRecordParser@Base" 1.1~exp1
+ (c++)"typeinfo for debDebianSourceDirIndex@Base" 1.1~exp1
+ (c++)"typeinfo for debDebPkgFileIndex@Base" 1.1~exp1
+ (c++)"typeinfo for debDscFileIndex@Base" 1.1~exp1
+ (c++)"typeinfo for debDscRecordParser@Base" 1.1~exp1
+ (c++)"typeinfo for debIFTypeDebianSourceDir@Base" 1.1~exp1
+ (c++)"typeinfo for debIFTypeDebPkgFile@Base" 1.1~exp1
+ (c++)"typeinfo for debIFTypeDscFile@Base" 1.1~exp1
+ (c++)"typeinfo for debSLTypeDebFile@Base" 1.1~exp1
+ (c++)"typeinfo name for debDebFileMetaIndex@Base" 1.1~exp1
+ (c++)"typeinfo name for debDebFileParser@Base" 1.1~exp1
+ (c++)"typeinfo name for debDebFileRecordParser@Base" 1.1~exp1
+ (c++)"typeinfo name for debDebianSourceDirIndex@Base" 1.1~exp1
+ (c++)"typeinfo name for debDebPkgFileIndex@Base" 1.1~exp1
+ (c++)"typeinfo name for debDscFileIndex@Base" 1.1~exp1
+ (c++)"typeinfo name for debDscRecordParser@Base" 1.1~exp1
+ (c++)"typeinfo name for debIFTypeDebianSourceDir@Base" 1.1~exp1
+ (c++)"typeinfo name for debIFTypeDebPkgFile@Base" 1.1~exp1
+ (c++)"typeinfo name for debIFTypeDscFile@Base" 1.1~exp1
+ (c++)"typeinfo name for debSLTypeDebFile@Base" 1.1~exp1
+ (c++)"vtable for debDebFileMetaIndex@Base" 1.1~exp1
+ (c++)"vtable for debDebFileParser@Base" 1.1~exp1
+ (c++)"vtable for debDebFileRecordParser@Base" 1.1~exp1
+ (c++)"vtable for debDebianSourceDirIndex@Base" 1.1~exp1
+ (c++)"vtable for debDebPkgFileIndex@Base" 1.1~exp1
+ (c++)"vtable for debDscFileIndex@Base" 1.1~exp1
+ (c++)"vtable for debDscRecordParser@Base" 1.1~exp1
+ (c++)"vtable for debIFTypeDebianSourceDir@Base" 1.1~exp1
+ (c++)"vtable for debIFTypeDebPkgFile@Base" 1.1~exp1
+ (c++)"vtable for debIFTypeDscFile@Base" 1.1~exp1
+ (c++)"vtable for debSLTypeDebFile@Base" 1.1~exp1
+ _apt_DebFileType@Base 1.1~exp1
+### CacheFilter functors
+ (c++)"APT::CacheFilter::ANDMatcher::AND(APT::CacheFilter::Matcher*)@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::ANDMatcher::ANDMatcher(APT::CacheFilter::Matcher*, APT::CacheFilter::Matcher*, APT::CacheFilter::Matcher*, APT::CacheFilter::Matcher*, APT::CacheFilter::Matcher*)@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::ANDMatcher::ANDMatcher(APT::CacheFilter::Matcher*, APT::CacheFilter::Matcher*, APT::CacheFilter::Matcher*, APT::CacheFilter::Matcher*)@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::ANDMatcher::ANDMatcher(APT::CacheFilter::Matcher*, APT::CacheFilter::Matcher*, APT::CacheFilter::Matcher*)@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::ANDMatcher::ANDMatcher(APT::CacheFilter::Matcher*, APT::CacheFilter::Matcher*)@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::ANDMatcher::ANDMatcher(APT::CacheFilter::Matcher*)@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::ANDMatcher::ANDMatcher()@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::ANDMatcher::~ANDMatcher()@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::ANDMatcher::operator()(pkgCache::GrpIterator const&)@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::ANDMatcher::operator()(pkgCache::PkgIterator const&)@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::ANDMatcher::operator()(pkgCache::VerIterator const&)@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::FalseMatcher::~FalseMatcher()@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::FalseMatcher::operator()(pkgCache::GrpIterator const&)@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::FalseMatcher::operator()(pkgCache::PkgIterator const&)@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::FalseMatcher::operator()(pkgCache::VerIterator const&)@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::Matcher::~Matcher()@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::NOTMatcher::NOTMatcher(APT::CacheFilter::Matcher*)@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::NOTMatcher::~NOTMatcher()@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::NOTMatcher::operator()(pkgCache::GrpIterator const&)@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::NOTMatcher::operator()(pkgCache::PkgIterator const&)@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::NOTMatcher::operator()(pkgCache::VerIterator const&)@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::ORMatcher::operator()(pkgCache::GrpIterator const&)@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::ORMatcher::operator()(pkgCache::PkgIterator const&)@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::ORMatcher::operator()(pkgCache::VerIterator const&)@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::ORMatcher::OR(APT::CacheFilter::Matcher*)@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::ORMatcher::ORMatcher(APT::CacheFilter::Matcher*, APT::CacheFilter::Matcher*, APT::CacheFilter::Matcher*, APT::CacheFilter::Matcher*, APT::CacheFilter::Matcher*)@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::ORMatcher::ORMatcher(APT::CacheFilter::Matcher*, APT::CacheFilter::Matcher*, APT::CacheFilter::Matcher*, APT::CacheFilter::Matcher*)@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::ORMatcher::ORMatcher(APT::CacheFilter::Matcher*, APT::CacheFilter::Matcher*, APT::CacheFilter::Matcher*)@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::ORMatcher::ORMatcher(APT::CacheFilter::Matcher*, APT::CacheFilter::Matcher*)@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::ORMatcher::ORMatcher(APT::CacheFilter::Matcher*)@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::ORMatcher::~ORMatcher()@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::ORMatcher::ORMatcher()@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::PackageIsNewInstall::operator()(pkgCache::PkgIterator const&)@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::PackageIsNewInstall::~PackageIsNewInstall()@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::PackageIsNewInstall::PackageIsNewInstall(pkgCacheFile*)@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::PackageMatcher::operator()(pkgCache::GrpIterator const&)@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::PackageMatcher::operator()(pkgCache::VerIterator const&)@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::PackageMatcher::~PackageMatcher()@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::PackageNameMatchesFnmatch::~PackageNameMatchesFnmatch()@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::PackageNameMatchesFnmatch::PackageNameMatchesFnmatch(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::TrueMatcher::operator()(pkgCache::GrpIterator const&)@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::TrueMatcher::operator()(pkgCache::PkgIterator const&)@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::TrueMatcher::operator()(pkgCache::VerIterator const&)@Base" 1.1~exp4
+ (c++)"APT::CacheFilter::TrueMatcher::~TrueMatcher()@Base" 1.1~exp4
+ (c++)"typeinfo for APT::CacheFilter::ANDMatcher@Base" 1.1~exp4
+ (c++)"typeinfo for APT::CacheFilter::FalseMatcher@Base" 1.1~exp4
+ (c++)"typeinfo for APT::CacheFilter::Matcher@Base" 1.1~exp4
+ (c++)"typeinfo for APT::CacheFilter::NOTMatcher@Base" 1.1~exp4
+ (c++)"typeinfo for APT::CacheFilter::ORMatcher@Base" 1.1~exp4
+ (c++)"typeinfo for APT::CacheFilter::PackageArchitectureMatchesSpecification@Base" 1.1~exp4
+ (c++)"typeinfo for APT::CacheFilter::PackageIsNewInstall@Base" 1.1~exp4
+ (c++)"typeinfo for APT::CacheFilter::PackageMatcher@Base" 1.1~exp4
+ (c++)"typeinfo for APT::CacheFilter::PackageNameMatchesFnmatch@Base" 1.1~exp4
+ (c++)"typeinfo for APT::CacheFilter::PackageNameMatchesRegEx@Base" 1.1~exp4
+ (c++)"typeinfo for APT::CacheFilter::TrueMatcher@Base" 1.1~exp4
+ (c++)"typeinfo name for APT::CacheFilter::ANDMatcher@Base" 1.1~exp4
+ (c++)"typeinfo name for APT::CacheFilter::FalseMatcher@Base" 1.1~exp4
+ (c++)"typeinfo name for APT::CacheFilter::Matcher@Base" 1.1~exp4
+ (c++)"typeinfo name for APT::CacheFilter::NOTMatcher@Base" 1.1~exp4
+ (c++)"typeinfo name for APT::CacheFilter::ORMatcher@Base" 1.1~exp4
+ (c++)"typeinfo name for APT::CacheFilter::PackageArchitectureMatchesSpecification@Base" 1.1~exp4
+ (c++)"typeinfo name for APT::CacheFilter::PackageIsNewInstall@Base" 1.1~exp4
+ (c++)"typeinfo name for APT::CacheFilter::PackageMatcher@Base" 1.1~exp4
+ (c++)"typeinfo name for APT::CacheFilter::PackageNameMatchesFnmatch@Base" 1.1~exp4
+ (c++)"typeinfo name for APT::CacheFilter::PackageNameMatchesRegEx@Base" 1.1~exp4
+ (c++)"typeinfo name for APT::CacheFilter::TrueMatcher@Base" 1.1~exp4
+ (c++)"vtable for APT::CacheFilter::ANDMatcher@Base" 1.1~exp4
+ (c++)"vtable for APT::CacheFilter::FalseMatcher@Base" 1.1~exp4
+ (c++)"vtable for APT::CacheFilter::Matcher@Base" 1.1~exp4
+ (c++)"vtable for APT::CacheFilter::NOTMatcher@Base" 1.1~exp4
+ (c++)"vtable for APT::CacheFilter::ORMatcher@Base" 1.1~exp4
+ (c++)"vtable for APT::CacheFilter::PackageArchitectureMatchesSpecification@Base" 1.1~exp4
+ (c++)"vtable for APT::CacheFilter::PackageIsNewInstall@Base" 1.1~exp4
+ (c++)"vtable for APT::CacheFilter::PackageMatcher@Base" 1.1~exp4
+ (c++)"vtable for APT::CacheFilter::PackageNameMatchesFnmatch@Base" 1.1~exp4
+ (c++)"vtable for APT::CacheFilter::PackageNameMatchesRegEx@Base" 1.1~exp4
+ (c++)"vtable for APT::CacheFilter::TrueMatcher@Base" 1.1~exp4
+### cacheset redesign (API, but not ABI compatible)
+# (c++|optional=inline)"APT::PackageContainerInterface::FromCommandLine(APT::PackageContainerInterface*, pkgCacheFile&, char const**, APT::CacheSetHelper&)@Base" 0.8.16~exp9
+# (c++|optional=inline)"APT::PackageContainerInterface::FromModifierCommandLine(unsigned short&, APT::PackageContainerInterface*, pkgCacheFile&, char const*, std::list<APT::PackageContainerInterface::Modifier, std::allocator<APT::PackageContainerInterface::Modifier> > const&, APT::CacheSetHelper&)@Base" 0.8.16~exp9
+# (c++|optional=inline)"APT::PackageContainerInterface::FromName(pkgCacheFile&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, APT::CacheSetHelper&)@Base" 0.8.16~exp9
+# (c++|optional=inline)"APT::PackageContainerInterface::FromTask(APT::PackageContainerInterface*, pkgCacheFile&, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, APT::CacheSetHelper&)@Base" 0.8.16~exp9
+# (c++|optional=inline)"APT::PackageContainerInterface::FromRegEx(APT::PackageContainerInterface*, pkgCacheFile&, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, APT::CacheSetHelper&)@Base" 0.8.16~exp9
+# (c++|optional=inline)"APT::VersionContainerInterface::FromString(APT::VersionContainerInterface*, pkgCacheFile&, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, APT::VersionContainerInterface::Version const&, APT::CacheSetHelper&, bool)@Base" 0.8.16~exp9
+# (c++|optional=inline)"APT::VersionContainerInterface::FromPackage(APT::VersionContainerInterface*, pkgCacheFile&, pkgCache::PkgIterator const&, APT::VersionContainerInterface::Version const&, APT::CacheSetHelper&)@Base" 0.8.16~exp9
+# (c++|optional=inline)"APT::VersionContainerInterface::FromCommandLine(APT::VersionContainerInterface*, pkgCacheFile&, char const**, APT::VersionContainerInterface::Version const&, APT::CacheSetHelper&)@Base" 0.8.16~exp9
+# (c++)"APT::PackageContainerInterface::FromString(APT::PackageContainerInterface*, pkgCacheFile&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, APT::CacheSetHelper&)@Base" 0.8.16~exp9
+# (c++)"APT::PackageContainerInterface::FromGroup(APT::PackageContainerInterface*, pkgCacheFile&, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, APT::CacheSetHelper&)@Base" 0.9.7
+# (c++)"APT::PackageContainerInterface::FromFnmatch(APT::PackageContainerInterface*, pkgCacheFile&, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, APT::CacheSetHelper&)@Base" 0.9.11
+ (c++)"APT::CacheSetHelper::canNotFindFnmatch(APT::PackageContainerInterface*, pkgCacheFile&, std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 1.1~exp4
+ (c++)"APT::CacheSetHelper::canNotFindPackage(APT::CacheSetHelper::PkgSelector, APT::PackageContainerInterface*, pkgCacheFile&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 1.1~exp4
+ (c++)"APT::CacheSetHelper::canNotFindVersion(APT::CacheSetHelper::VerSelector, APT::VersionContainerInterface*, pkgCacheFile&, pkgCache::PkgIterator const&)@Base" 1.1~exp4
+ (c++)"APT::CacheSetHelper::canNotGetCandInstVer(pkgCacheFile&, pkgCache::PkgIterator const&)@Base" 1.1~exp4
+ (c++)"APT::CacheSetHelper::canNotGetInstCandVer(pkgCacheFile&, pkgCache::PkgIterator const&)@Base" 1.1~exp4
+ (c++)"APT::CacheSetHelper::canNotGetVersion(APT::CacheSetHelper::VerSelector, pkgCacheFile&, pkgCache::PkgIterator const&)@Base" 1.1~exp4
+ (c++)"APT::CacheSetHelper::PackageFrom(APT::CacheSetHelper::PkgSelector, APT::PackageContainerInterface*, pkgCacheFile&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 1.1~exp4
+ (c++)"APT::CacheSetHelper::PackageFromCommandLine(APT::PackageContainerInterface*, pkgCacheFile&, char const**)@Base" 1.1~exp4
+ (c++)"APT::CacheSetHelper::PackageFromFnmatch(APT::PackageContainerInterface*, pkgCacheFile&, std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 1.1~exp4
+ (c++)"APT::CacheSetHelper::PackageFromModifierCommandLine(unsigned short&, APT::PackageContainerInterface*, pkgCacheFile&, char const*, std::list<APT::CacheSetHelper::PkgModifier, std::allocator<APT::CacheSetHelper::PkgModifier> > const&)@Base" 1.1~exp4
+ (c++)"APT::CacheSetHelper::PackageFromName(pkgCacheFile&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 1.1~exp4
+ (c++)"APT::CacheSetHelper::PackageFromPackageName(APT::PackageContainerInterface*, pkgCacheFile&, std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 1.1~exp4
+ (c++)"APT::CacheSetHelper::PackageFromRegEx(APT::PackageContainerInterface*, pkgCacheFile&, std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 1.1~exp4
+ (c++)"APT::CacheSetHelper::PackageFromString(APT::PackageContainerInterface*, pkgCacheFile&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 1.1~exp4
+ (c++)"APT::CacheSetHelper::PackageFromTask(APT::PackageContainerInterface*, pkgCacheFile&, std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 1.1~exp4
+ (c++)"APT::CacheSetHelper::showFnmatchSelection(pkgCache::PkgIterator const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 1.1~exp4
+ (c++)"APT::CacheSetHelper::showPackageSelection(pkgCache::PkgIterator const&, APT::CacheSetHelper::PkgSelector, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 1.1~exp4
+ (c++)"APT::CacheSetHelper::showVersionSelection(pkgCache::PkgIterator const&, pkgCache::VerIterator const&, APT::CacheSetHelper::VerSelector, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 1.1~exp4
+ (c++)"APT::VersionContainerInterface::FromCommandLine(APT::VersionContainerInterface*, pkgCacheFile&, char const**, APT::CacheSetHelper::VerSelector, APT::CacheSetHelper&)@Base" 1.1~exp4
+ (c++)"APT::VersionContainerInterface::FromPackage(APT::VersionContainerInterface*, pkgCacheFile&, pkgCache::PkgIterator const&, APT::CacheSetHelper::VerSelector, APT::CacheSetHelper&)@Base" 1.1~exp4
+ (c++)"APT::VersionContainerInterface::FromString(APT::VersionContainerInterface*, pkgCacheFile&, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, APT::CacheSetHelper::VerSelector, APT::CacheSetHelper&, bool)@Base" 1.1~exp4
+### all the hashes are belong to us
+# (c++|optional=inline)"Hashes::AddFD(int, unsigned long long, bool, bool, bool, bool)@Base" 0.8.16~exp6
+# (c++|optional=inline)"Hashes::AddFD(FileFd&, unsigned long long, bool, bool, bool, bool)@Base" 0.8.16~exp9
+# (c++|optional=inline)"pkgRecords::Parser::MD5Hash()@Base" 0.8.0
+# (c++|optional=inline)"pkgRecords::Parser::SHA1Hash()@Base" 0.8.0
+# (c++|optional=inline)"pkgRecords::Parser::SHA256Hash()@Base" 0.8.0
+# (c++|optional=inline)"pkgRecords::Parser::SHA512Hash()@Base" 0.8.16~exp6
+ (c++)"debRecordParser::Hashes() const@Base" 1.1~exp1
+ (c++)"debRecordParser::LongDesc(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 1.1~exp1
+ (c++)"debRecordParser::ShortDesc(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 1.1~exp1
+ (c++)"Hashes::AddFD(FileFd&, unsigned long long, unsigned int)@Base" 1.1~exp1
+ (c++)"Hashes::AddFD(int, unsigned long long, unsigned int)@Base" 1.1~exp1
+ (c++)"Hashes::Add(unsigned char const*, unsigned long long, unsigned int)@Base" 1.1~exp1
+ (c++)"Hashes::GetHashStringList()@Base" 1.1~exp1
+ (c++)"Hashes::Hashes()@Base" 1.1~exp1
+ (c++)"Hashes::~Hashes()@Base" 1.1~exp1
+ (c++)"HashStringList::find(char const*) const@Base" 1.1~exp1
+ (c++)"HashStringList::operator==(HashStringList const&) const@Base" 1.1~exp1
+ (c++)"HashStringList::operator!=(HashStringList const&) const@Base" 1.1~exp1
+ (c++)"HashStringList::push_back(HashString const&)@Base" 1.1~exp1
+ (c++)"HashStringList::supported(char const*)@Base" 1.1~exp1
+ (c++)"HashStringList::usable() const@Base" 1.1~exp1
+ (c++)"HashStringList::VerifyFile(std::basic_string<char, std::char_traits<char>, std::allocator<char> >) const@Base" 1.1~exp1
+ (c++)"HashString::operator==(HashString const&) const@Base" 1.1~exp1
+ (c++)"HashString::operator!=(HashString const&) const@Base" 1.1~exp1
+ (c++)"indexRecords::GetSupportsAcquireByHash() const@Base" 1.1~exp1
+ (c++)"pkgAcqArchive::Done(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, unsigned long long, HashStringList const&, pkgAcquire::MethodConfig*)@Base" 1.1~exp1
+ (c++)"pkgAcqArchive::IsTrusted() const@Base" 1.1~exp1
+ (c++)"pkgAcqDiffIndex::Custom600Headers() const@Base" 1.1~exp1
+ (c++)"pkgAcqDiffIndex::Done(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, unsigned long long, HashStringList const&, pkgAcquire::MethodConfig*)@Base" 1.1~exp1
+ (c++)"pkgAcqFile::Custom600Headers() const@Base" 1.1~exp1
+ (c++)"pkgAcqFile::Done(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, unsigned long long, HashStringList const&, pkgAcquire::MethodConfig*)@Base" 1.1~exp1
+ (c++)"pkgAcqFile::pkgAcqFile(pkgAcquire*, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, HashStringList const&, unsigned long long, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, bool)@Base" 1.1~exp1
+ (c++)"pkgAcqIndex::Custom600Headers() const@Base" 1.1~exp1
+ (c++)"pkgAcqIndexDiffs::Done(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, unsigned long long, HashStringList const&, pkgAcquire::MethodConfig*)@Base" 1.1~exp1
+ (c++)"pkgAcqIndex::Done(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, unsigned long long, HashStringList const&, pkgAcquire::MethodConfig*)@Base" 1.1~exp1
+ (c++)"pkgAcqIndex::InitByHashIfNeeded(std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 1.1~exp1
+ (c++)"pkgAcqIndexMergeDiffs::Done(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, unsigned long long, HashStringList const&, pkgAcquire::MethodConfig*)@Base" 1.1~exp1
+ (c++)"pkgAcqIndexTrans::Custom600Headers() const@Base" 1.1~exp1
+ (c++)"pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire*, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 1.1~exp1
+ (c++)"pkgAcqMetaClearSig::Custom600Headers() const@Base" 1.1~exp1
+ (c++)"pkgAcqMetaIndex::Custom600Headers() const@Base" 1.1~exp1
+ (c++)"pkgAcqMetaIndex::Done(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, unsigned long long, HashStringList const&, pkgAcquire::MethodConfig*)@Base" 1.1~exp1
+ (c++)"pkgAcqMetaSig::Custom600Headers() const@Base" 1.1~exp1
+ (c++)"pkgAcqMetaSig::Done(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, unsigned long long, HashStringList const&, pkgAcquire::MethodConfig*)@Base" 1.1~exp1
+ (c++)"pkgAcqMethod::DropPrivsOrDie()@Base" 1.1~exp1
+ (c++)"pkgAcquire::Item::Custom600Headers() const@Base" 1.1~exp1
+ (c++)"pkgAcquire::Item::Done(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, unsigned long long, HashStringList const&, pkgAcquire::MethodConfig*)@Base" 1.1~exp1
+ (c++)"pkgAcquire::Item::IsTrusted() const@Base" 1.1~exp1
+ (c++)"pkgRecords::Parser::Hashes() const@Base" 1.1~exp1
+ (c++)"pkgRecords::Parser::LongDesc(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 1.1~exp1
+ (c++)"pkgRecords::Parser::ShortDesc(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 1.1~exp1
+ (c++)"typeinfo for Hashes@Base" 1.1~exp1
+ (c++)"typeinfo name for Hashes@Base" 1.1~exp1
+ (c++)"vtable for Hashes@Base" 1.1~exp1
+ (c++)"typeinfo for pkgAcqBaseIndex@Base" 1.1~exp1
+ (c++)"typeinfo name for pkgAcqBaseIndex@Base" 1.1~exp1
+ (c++)"vtable for pkgAcqBaseIndex@Base" 1.1~exp1
+ (c++)"DiffInfo::DiffInfo(DiffInfo const&)@Base" 1.1~exp4
+### more transactional update
+ (c++)"pkgAcqBaseIndex::VerifyHashByMetaKey(HashStringList const&)@Base" 1.1~exp4
+ (c++)"pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire*, pkgAcqMetaBase*, IndexTarget const*, HashStringList const&, indexRecords*)@Base" 1.1~exp4
+ (c++)"pkgAcqIndex::AutoSelectCompression()@Base" 1.1~exp4
+ (c++)"pkgAcqIndexDiffs::pkgAcqIndexDiffs(pkgAcquire*, pkgAcqMetaBase*, IndexTarget const*, HashStringList const&, indexRecords*, std::vector<DiffInfo, std::allocator<DiffInfo> >)@Base" 1.1~exp4
+ (c++)"pkgAcqIndexMergeDiffs::pkgAcqIndexMergeDiffs(pkgAcquire*, pkgAcqMetaBase*, IndexTarget const*, HashStringList const&, indexRecords*, DiffInfo const&, std::vector<pkgAcqIndexMergeDiffs*, std::allocator<pkgAcqIndexMergeDiffs*> > const*)@Base" 1.1~exp4
+ (c++)"pkgAcqIndex::pkgAcqIndex(pkgAcquire*, pkgAcqMetaBase*, IndexTarget const*, HashStringList const&, indexRecords*)@Base" 1.1~exp4
+ (c++)"pkgAcqIndex::pkgAcqIndex(pkgAcquire*, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, HashStringList const&)@Base" 1.1~exp4
+ (c++)"pkgAcqIndex::ReverifyAfterIMS()@Base" 1.1~exp4
+ (c++)"pkgAcqIndex::StageDecompressDone(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, HashStringList const&, pkgAcquire::MethodConfig*)@Base" 1.1~exp4
+ (c++)"pkgAcqIndex::StageDownloadDone(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, HashStringList const&, pkgAcquire::MethodConfig*)@Base" 1.1~exp4
+ (c++)"pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire*, pkgAcqMetaBase*, IndexTarget const*, HashStringList const&, indexRecords*)@Base" 1.1~exp4
+ (c++)"pkgAcqIndex::ValidateFile(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 1.1~exp4
+ (c++)"pkgAcqMetaBase::AbortTransaction()@Base" 1.1~exp4
+ (c++)"pkgAcqMetaBase::Add(pkgAcquire::Item*)@Base" 1.1~exp4
+ (c++)"pkgAcqMetaBase::CheckAuthDone(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 1.1~exp4
+ (c++)"pkgAcqMetaBase::CheckDownloadDone(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 1.1~exp4
+ (c++)"pkgAcqMetaBase::CheckStopAuthentication(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 1.1~exp4
+ (c++)"pkgAcqMetaBase::CommitTransaction()@Base" 1.1~exp4
+ (c++)"pkgAcqMetaBase::GetCustom600Headers(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&) const@Base" 1.1~exp4
+ (c++)"pkgAcqMetaBase::QueueForSignatureVerify(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 1.1~exp4
+ (c++)"pkgAcqMetaBase::QueueIndexes(bool)@Base" 1.1~exp4
+ (c++)"pkgAcqMetaBase::TransactionHasError()@Base" 1.1~exp4
+ (c++)"pkgAcqMetaBase::TransactionStageCopy(pkgAcquire::Item*, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 1.1~exp4
+ (c++)"pkgAcqMetaBase::TransactionStageRemoval(pkgAcquire::Item*, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 1.1~exp4
+ (c++)"pkgAcqMetaBase::VerifyVendor(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 1.1~exp4
+ (c++)"pkgAcqMetaClearSig::Done(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, unsigned long long, HashStringList const&, pkgAcquire::MethodConfig*)@Base" 1.1~exp4
+ (c++)"pkgAcqMetaIndex::Finished()@Base" 1.1~exp4
+ (c++)"pkgAcqMetaIndex::Init(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 1.1~exp4
+ (c++)"pkgAcqMetaIndex::pkgAcqMetaIndex(pkgAcquire*, pkgAcqMetaBase*, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::vector<IndexTarget*, std::allocator<IndexTarget*> > const*, indexRecords*)@Base" 1.1~exp4
+ (c++)"pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire*, pkgAcqMetaBase*, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::vector<IndexTarget*, std::allocator<IndexTarget*> > const*, indexRecords*)@Base" 1.1~exp4
+ (c++)"pkgAcquire::GetLock(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 1.1~exp4
+ (c++)"pkgAcquire::Item::Dequeue()@Base" 1.1~exp4
+ (c++)"pkgAcquire::Item::Item(pkgAcquire*, HashStringList const&, pkgAcqMetaBase*)@Base" 1.1~exp4
+ (c++)"pkgAcquire::Item::QueueURI(pkgAcquire::ItemDesc&)@Base" 1.1~exp4
+ (c++)"pkgAcquire::Item::SetActiveSubprocess(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 1.1~exp4
+ (c++)"pkgAcquire::Setup(pkgAcquireStatus*, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 1.1~exp4
+ (c++)"pkgArchiveCleaner::Erase(char const*, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, stat&)@Base" 1.1~exp4
+ (c++)"pkgDepCache::MarkAndSweep()@Base" 1.1~exp4
+ (c++)"pkgDepCache::MarkAndSweep(pkgDepCache::InRootSetFunc&)@Base" 1.1~exp4
+ (c++)"pkgAcqMetaBase::~pkgAcqMetaBase()@Base" 1.1~exp4
+ (c++)"typeinfo for pkgAcqMetaBase@Base" 1.1~exp4
+ (c++)"typeinfo name for pkgAcqMetaBase@Base" 1.1~exp4
+ (c++)"vtable for pkgAcqMetaBase@Base" 1.1~exp4
### mixed stuff
(c++)"GetListOfFilesInDir(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, bool)@Base" 0.8.16~exp13
(c++)"pkgCache::DepIterator::IsIgnorable(pkgCache::PkgIterator const&) const@Base" 0.8.16~exp10
(c++)"pkgCache::DepIterator::IsIgnorable(pkgCache::PrvIterator const&) const@Base" 0.8.16~exp10
(c++)"FileFd::Write(int, void const*, unsigned long long)@Base" 0.8.16~exp14
- (c++)"pkgTagSection::Exists(char const*)@Base" 0.9.7.9~exp1
(c++)"_strrstrip(char*)@Base" 0.9.7.9~exp2
(c++)"SplitClearSignedFile(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, FileFd*, std::vector<std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::allocator<std::basic_string<char, std::char_traits<char>, std::allocator<char> > > >*, FileFd*)@Base" 0.9.7.9~exp2
(c++)"OpenMaybeClearSignedFile(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, FileFd&)@Base" 0.9.7.9~exp2
@@ -1523,30 +1711,27 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"AcquireUpdate(pkgAcquire&, int, bool, bool)@Base" 0.9.3
(c++)"pkgCache::DepIterator::IsMultiArchImplicit() const@Base" 0.9.6
(c++)"pkgCache::PrvIterator::IsMultiArchImplicit() const@Base" 0.9.6
- (c++)"APT::PackageContainerInterface::FromGroup(APT::PackageContainerInterface*, pkgCacheFile&, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, APT::CacheSetHelper&)@Base" 0.9.7
(c++)"APT::CacheFilter::PackageArchitectureMatchesSpecification::PackageArchitectureMatchesSpecification(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, bool)@Base" 0.9.7
(c++)"APT::CacheFilter::PackageArchitectureMatchesSpecification::~PackageArchitectureMatchesSpecification()@Base" 0.9.7
(c++)"APT::CacheFilter::PackageArchitectureMatchesSpecification::operator()(pkgCache::PkgIterator const&)@Base" 0.9.7
- (c++)"APT::CacheFilter::PackageArchitectureMatchesSpecification::operator()(pkgCache::VerIterator const&)@Base" 0.9.7
(c++)"APT::CacheFilter::PackageArchitectureMatchesSpecification::operator()(char const* const&)@Base" 0.9.7
(c++)"APT::Configuration::checkLanguage(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, bool)@Base" 0.9.7.5
(c++)"pkgCdrom::DropTranslation(std::vector<std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::allocator<std::basic_string<char, std::char_traits<char>, std::allocator<char> > > >&)@Base" 0.9.7.5
(c++)"pkgCache::DepIterator::IsSatisfied(pkgCache::PrvIterator const&) const@Base" 0.9.8
(c++)"pkgCache::DepIterator::IsSatisfied(pkgCache::VerIterator const&) const@Base" 0.9.8
(c++)"pkgCacheGenerator::NewDepends(pkgCache::PkgIterator&, pkgCache::VerIterator&, unsigned int, unsigned int const&, unsigned int const&, unsigned int*&)@Base" 0.9.8
- (c++)"pkgCacheGenerator::NewVersion(pkgCache::VerIterator&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, unsigned int, unsigned long, unsigned long)@Base" 0.9.8
(c++)"operator<<(std::basic_ostream<char, std::char_traits<char> >&, GlobalError::Item)@Base" 0.9.9
(c++)"pkgDepCache::IsDeleteOkProtectInstallRequests(pkgCache::PkgIterator const&, bool, unsigned long, bool)@Base" 0.9.9.1
(c++)"pkgDepCache::IsInstallOkMultiArchSameVersionSynced(pkgCache::PkgIterator const&, bool, unsigned long, bool)@Base" 0.9.9.1
(c++)"pkgDPkgPM::SendPkgsInfo(_IO_FILE*, unsigned int const&)@Base" 0.9.9.1
(c++)"pkgCache::VerIterator::MultiArchType() const@Base" 0.9.9.1
+ (c++)"AutoDetectProxy(URI&)@Base" 0.9.10
(c++)"CommandLine::GetCommand(CommandLine::Dispatch const*, unsigned int, char const* const*)@Base" 0.9.11
(c++)"CommandLine::MakeArgs(char, char const*, char const*, unsigned long)@Base" 0.9.11
(c++)"Configuration::Clear()@Base" 0.9.11
(c++)"Glob(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, int)@Base" 0.9.11
(c++)"APT::CacheFilter::PackageNameMatchesFnmatch::operator()(pkgCache::GrpIterator const&)@Base" 0.9.11
(c++)"APT::CacheFilter::PackageNameMatchesFnmatch::operator()(pkgCache::PkgIterator const&)@Base" 0.9.11
- (c++)"APT::PackageContainerInterface::FromFnmatch(APT::PackageContainerInterface*, pkgCacheFile&, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, APT::CacheSetHelper&)@Base" 0.9.11
(c++)"pkgTagSection::pkgTagSection()@Base" 0.9.11
(c++)"strv_length(char const**)@Base" 0.9.11
(c++)"StringSplit(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, unsigned int)@Base" 0.9.11.3
@@ -1579,7 +1764,42 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
(c++)"typeinfo for debTranslationsParser@Base" 1.0.4
(c++)"typeinfo name for debTranslationsParser@Base" 1.0.4
(c++)"vtable for debTranslationsParser@Base" 1.0.4
+ (c++)"pkgSrcRecords::Step()@Base" 1.0.4
+ (c++)"pkgDPkgPM::SetupSlavePtyMagic()@Base" 1.0.8
+ (c++)"APT::Progress::PackageManager::PackageManager()@Base" 1.1~exp1
+ (c++)"pkgDPkgPM::Go(APT::Progress::PackageManager*)@Base" 1.1~exp1
+ (c++)"pkgPackageManager::DoInstall(APT::Progress::PackageManager*)@Base" 1.1~exp1
+ (c++)"pkgPackageManager::DoInstallPostFork(APT::Progress::PackageManager*)@Base" 1.1~exp1
+ (c++)"pkgPackageManager::Go(APT::Progress::PackageManager*)@Base" 1.1~exp1
+ (c++)"pkgTagFile::Init(FileFd*, unsigned long long)@Base" 1.1~exp1
+ (c++)"pkgTagSection::Count() const@Base" 1.1~exp1
+ (c++)"pkgTagSection::Exists(char const*) const@Base" 1.1~exp1
+ (c++)"pkgTagSection::FindB(char const*, bool const&) const@Base" 1.1~exp1
+ (c++)"pkgTagSection::Scan(char const*, unsigned long, bool)@Base" 1.1~exp1
+ (c++)"StartsWithGPGClearTextSignature(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 1.1~exp1
+ (c++)"Popen(char const**, FileFd&, int&, FileFd::OpenMode)@Base" 1.1~exp1
+ (c++)"APT::String::Startswith(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 1.1~exp2
+ (c++)"APT::Upgrade::Upgrade(pkgDepCache&, int, OpProgress*)@Base" 1.1~exp4
+ (c++)"pkgAllUpgrade(pkgDepCache&, OpProgress*)@Base" 1.1~exp4
+ (c++)"pkgDistUpgrade(pkgDepCache&, OpProgress*)@Base" 1.1~exp4
+ (c++)"pkgProblemResolver::Resolve(bool, OpProgress*)@Base" 1.1~exp4
+ (c++)"pkgProblemResolver::ResolveByKeep(OpProgress*)@Base" 1.1~exp4
+ (c++)"pkgCacheGenerator::NewVersion(pkgCache::VerIterator&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, unsigned int, unsigned short, unsigned int)@Base" 1.1~exp4
+ (c++)"pkgCacheGenerator::StoreString(pkgCacheGenerator::StringType, char const*, unsigned int)@Base" 1.1~exp4
+ (c++)"pkgCache::PkgIterator::Section() const@Base" 1.1~exp4
+ (c++)"APT::PackageContainer<std::set<pkgCache::PkgIterator, std::less<pkgCache::PkgIterator>, std::allocator<pkgCache::PkgIterator> > >::iterator::getPkg() const@Base" 1.1~exp4
+ (c++)"typeinfo for APT::PackageContainer<std::set<pkgCache::PkgIterator, std::less<pkgCache::PkgIterator>, std::allocator<pkgCache::PkgIterator> > >::iterator@Base" 1.1~exp4
+ (c++)"typeinfo name for APT::PackageContainer<std::set<pkgCache::PkgIterator, std::less<pkgCache::PkgIterator>, std::allocator<pkgCache::PkgIterator> > >::iterator@Base" 1.1~exp4
+ (c++)"vtable for APT::PackageContainer<std::set<pkgCache::PkgIterator, std::less<pkgCache::PkgIterator>, std::allocator<pkgCache::PkgIterator> > >::iterator@Base" 1.1~exp4
+ (c++)"pkgAcqIndex::GetFinalFilename() const@Base" 1.1~exp4
+ (c++)"DropPrivileges()@Base" 1.1~exp4
+ (c++)"FileFd::FileFd(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, unsigned int, unsigned long)@Base" 1.1~exp4
+ (c++)"indexRecords::indexRecords(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&)@Base" 1.1~exp5
+ (c++)"indexRecords::IsAlwaysTrusted() const@Base" 1.1~exp5
+ (c++)"indexRecords::IsNeverTrusted() const@Base" 1.1~exp5
+ (c++)"indexRecords::SetTrusted(bool)@Base" 1.1~exp5
### demangle strangeness - buildd report it as MISSING and as new…
+ (c++)"pkgAcqMetaIndex::pkgAcqMetaIndex(pkgAcquire*, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::vector<IndexTarget*, std::allocator<IndexTarget*> > const*, indexRecords*)@Base" 0.8.0
(c++)"pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire*, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::vector<IndexTarget*, std::allocator<IndexTarget*> > const*, indexRecords*)@Base" 0.8.0
### gcc-4.6 artefacts
# (c++|optional=implicit)"HashString::operator=(HashString const&)@Base" 0.8.0
@@ -1597,9 +1817,7 @@ libapt-pkg.so.4.12 libapt-pkg4.12 #MINVER#
# (c++|optional=inline)"pkgSourceList::Type::~Type()@Base" 0.8.0
# (c++|optional=inline)"pkgIndexFile::~pkgIndexFile()@Base" 0.8.0
# (c++|optional=inline)"pkgCacheGenerator::ListParser::~ListParser()@Base" 0.8.0
-# (c++|optional=inline)"pkgAcquireStatus::~pkgAcquireStatus()@Base" 0.8.0
# (c++|optional=inline)"metaIndex::~metaIndex()@Base" 0.8.0
-# (c++|optional=inline)"IndexCopy::~IndexCopy()@Base" 0.8.0
### std library artefacts
(c++|regex|optional=std)"^std::vector<DiffInfo, .+@Base$" 0.8.0
(c++|regex|optional=std)"^std::vector<.+ >::(vector|push_back|erase|_[^ ]+)\(.+\)( const|)@Base$" 0.8.0
diff --git a/debian/postinst b/debian/postinst
deleted file mode 100755
index 1588f5241..000000000
--- a/debian/postinst
+++ /dev/null
@@ -1,39 +0,0 @@
-#! /bin/sh
-
-# apt postinst, based liberally on James Troup's gpm postinst
-# Copyright (C) 1998, Ben Gertzfield <che@debian.org>
-
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-
-set -e
-
-create_apt_conf ()
-{
- EXAMPLE_SOURCE=/usr/share/doc/apt/examples/sources.list
- if [ -f $EXAMPLE_SOURCE ]; then
- cp $EXAMPLE_SOURCE /etc/apt/sources.list
- fi
-}
-
-check_apt_conf ()
-{
- true
- # this is for future expansion
-}
-
-#DEBHELPER#
-
-case "$1" in
- configure)
- #
- # If there is no /etc/apt/sources.list then create a default
- #
- if [ ! -f /etc/apt/sources.list ]; then
- create_apt_conf
- else
- check_apt_conf
- fi
-esac
diff --git a/debian/tests/control b/debian/tests/control
index f7e47c5f3..0e774d4ca 100644
--- a/debian/tests/control
+++ b/debian/tests/control
@@ -1,3 +1,3 @@
Tests: run-tests
-Restrictions: allow-stderr
-Depends: @, @builddeps@, fakeroot, wget, stunnel4, db-util
+Restrictions: allow-stderr
+Depends: @, @builddeps@, fakeroot, wget, stunnel4, db-util, gnupg, gnupg2
diff --git a/doc/apt-get.8.xml b/doc/apt-get.8.xml
index 80b3be639..a372a0d30 100644
--- a/doc/apt-get.8.xml
+++ b/doc/apt-get.8.xml
@@ -525,6 +525,14 @@
Configuration Item: <literal>APT::Get::AllowUnauthenticated</literal>.</para></listitem>
</varlistentry>
+ <varlistentry><term><option>--no-allow-insecure-repositories</option></term>
+ <listitem><para>Forbid the update command from acquiring unverifiable
+ data from configured sources. APT will fail the update operation
+ for repositories without valid cryptographic signatures.
+
+ Configuration Item: <literal>Acquire::AllowInsecureRepositories</literal>.</para></listitem>
+ </varlistentry>
+
<varlistentry><term><option>--show-progress</option></term>
<listitem><para>Show user friendly progress information in the
terminal window when packages are installed, upgraded or
diff --git a/doc/apt.conf.5.xml b/doc/apt.conf.5.xml
index df6c77ff0..efe986ea8 100644
--- a/doc/apt.conf.5.xml
+++ b/doc/apt.conf.5.xml
@@ -384,9 +384,9 @@ DPkg::Pre-Install-Pkgs {"/usr/sbin/dpkg-preconfigure --apt";};
<para>The setting <literal>Acquire::http::Pipeline-Depth</literal> can be used to
enable HTTP pipelining (RFC 2616 section 8.1.2.2) which can be beneficial e.g. on
high-latency connections. It specifies how many requests are sent in a pipeline.
- Previous APT versions had a default of 10 for this setting, but the default value
- is now 0 (= disabled) to avoid problems with the ever-growing amount of webservers
- and proxies which choose to not conform to the HTTP/1.1 specification.</para>
+ APT tries to detect and work around misbehaving webservers and proxies at runtime, but
+ if you know that yours does not conform to the HTTP/1.1 specification, pipelining can
+ be disabled by setting the value to 0. It is enabled by default with the value 10.</para>
<para><literal>Acquire::http::AllowRedirect</literal> controls whether APT will follow
redirects, which is enabled by default.</para>
@@ -586,6 +586,38 @@ DPkg::Pre-Install-Pkgs {"/usr/sbin/dpkg-preconfigure --apt";};
</para></listitem>
</varlistentry>
+ <varlistentry><term><option>MaxReleaseFileSize</option></term>
+ <listitem><para>
+ The maximum file size of Release/Release.gpg/InRelease files.
+ The default is 10MB.
+ </para></listitem>
+ </varlistentry>
+
+ <varlistentry><term><option>AllowInsecureRepositories</option></term>
+ <listitem><para>
+ Allow the update operation to load data files from
+ a repository without a trusted signature. If this option is
+ disabled, no data files will be loaded and the update
+ operation fails with an error for this source. The default
+ is false for backward compatibility. This will be changed
+ in the future.
+ </para></listitem>
+ </varlistentry>
+
+ <varlistentry><term><option>AllowDowngradeToInsecureRepositories</option></term>
+ <listitem><para>
+ Allow a repository that was previously gpg signed to become
+ unsigned during an update operation. When there is no valid signature
+ for a previously trusted repository, apt will refuse the update. This
+ option can be used to override this protection. You almost certainly
+ never want to enable this. The default is false.
+
+ Note that apt will still consider packages from this source
+ untrusted and warn about them if you try to install
+ them.
+ </para></listitem>
+ </varlistentry>
+
</variablelist>
</refsect1>
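
The Acquire options documented above map directly onto apt.conf entries (the --no-allow-insecure-repositories switch added to apt-get.8.xml earlier in this patch corresponds to setting the first one to false). A minimal illustrative apt.conf snippet, not part of this patch: the two Allow* values restate the documented defaults, and Pipeline-Depth is shown set to 0 as the documented way to disable pipelining for a non-conforming server or proxy.

    Acquire::http::Pipeline-Depth "0";                     // disable pipelining for a non-conforming server/proxy
    Acquire::AllowInsecureRepositories "false";            // make update fail on unsigned repositories
    Acquire::AllowDowngradeToInsecureRepositories "false"; // never let a previously signed repository turn unsigned
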
diff --git a/doc/examples/configure-index b/doc/examples/configure-index
index 2d9f829ba..2925733d7 100644
--- a/doc/examples/configure-index
+++ b/doc/examples/configure-index
@@ -34,7 +34,7 @@ APT
{
Host-Architecture "armel";
Arch-Only "false";
- AllowUnauthenticated "false";
+ AllowUnauthenticated "false"; // packages from unauthenticated sources
AutomaticRemove "false";
HideAutoRemove "false";
Download-Only "false";
diff --git a/doc/po/pl.po b/doc/po/pl.po
index b0a6514b9..8adbcfde7 100644
--- a/doc/po/pl.po
+++ b/doc/po/pl.po
@@ -3996,10 +3996,10 @@ msgid ""
"The setting <literal>Acquire::http::Pipeline-Depth</literal> can be used to "
"enable HTTP pipelining (RFC 2616 section 8.1.2.2) which can be beneficial e."
"g. on high-latency connections. It specifies how many requests are sent in a "
-"pipeline. Previous APT versions had a default of 10 for this setting, but "
-"the default value is now 0 (= disabled) to avoid problems with the ever-"
-"growing amount of webservers and proxies which choose to not conform to the "
-"HTTP/1.1 specification."
+"pipeline. APT tries to detect and workaround misbehaving webservers and "
+"proxies at runtime, but if you know that yours does not conform to the "
+"HTTP/1.1 specification pipelining can be disabled by setting the value to 0. "
+"It is enabled by default with the value 10."
msgstr ""
#. type: Content of: <refentry><refsect1><variablelist><varlistentry><listitem><para>
diff --git a/doc/po/pt_BR.po b/doc/po/pt_BR.po
index d28c1e633..9d205e82e 100644
--- a/doc/po/pt_BR.po
+++ b/doc/po/pt_BR.po
@@ -2892,10 +2892,10 @@ msgid ""
"The setting <literal>Acquire::http::Pipeline-Depth</literal> can be used to "
"enable HTTP pipelining (RFC 2616 section 8.1.2.2) which can be beneficial e."
"g. on high-latency connections. It specifies how many requests are sent in a "
-"pipeline. Previous APT versions had a default of 10 for this setting, but "
-"the default value is now 0 (= disabled) to avoid problems with the ever-"
-"growing amount of webservers and proxies which choose to not conform to the "
-"HTTP/1.1 specification."
+"pipeline. APT tries to detect and workaround misbehaving webservers and "
+"proxies at runtime, but if you know that yours does not conform to the "
+"HTTP/1.1 specification pipelining can be disabled by setting the value to 0. "
+"It is enabled by default with the value 10."
msgstr ""
#. type: Content of: <refentry><refsect1><variablelist><varlistentry><listitem><para>
diff --git a/ftparchive/cachedb.cc b/ftparchive/cachedb.cc
index 0901492f7..da45eb8d2 100644
--- a/ftparchive/cachedb.cc
+++ b/ftparchive/cachedb.cc
@@ -21,29 +21,31 @@
#include <apt-pkg/fileutl.h>
#include <apt-pkg/debfile.h>
#include <apt-pkg/gpgv.h>
+#include <apt-pkg/hashes.h>
#include <netinet/in.h> // htonl, etc
#include <ctype.h>
#include <stddef.h>
#include <sys/stat.h>
+#include <strings.h>
#include "cachedb.h"
#include <apti18n.h>
/*}}}*/
-CacheDB::CacheDB(std::string const &DB)
+CacheDB::CacheDB(std::string const &DB)
: Dbp(0), Fd(NULL), DebFile(0)
{
TmpKey[0]='\0';
ReadyDB(DB);
-};
+}
CacheDB::~CacheDB()
{
ReadyDB();
delete DebFile;
-};
+}
// CacheDB::ReadyDB - Ready the DB2 /*{{{*/
// ---------------------------------------------------------------------
@@ -268,15 +270,10 @@ bool CacheDB::GetCurStat()
/*}}}*/
// CacheDB::GetFileInfo - Get all the info about the file /*{{{*/
// ---------------------------------------------------------------------
-bool CacheDB::GetFileInfo(std::string const &FileName, bool const &DoControl,
- bool const &DoContents,
- bool const &GenContentsOnly,
- bool const &DoSource,
- bool const &DoMD5, bool const &DoSHA1,
- bool const &DoSHA256, bool const &DoSHA512,
+bool CacheDB::GetFileInfo(std::string const &FileName, bool const &DoControl, bool const &DoContents,
+ bool const &GenContentsOnly, bool const DoSource, unsigned int const DoHashes,
bool const &checkMtime)
{
- bool result = true;
this->FileName = FileName;
if (GetCurStat() == false)
@@ -284,31 +281,28 @@ bool CacheDB::GetFileInfo(std::string const &FileName, bool const &DoControl,
OldStat = CurStat;
if (GetFileStat(checkMtime) == false)
- return false;
+ return false;
/* if mtime changed, update CurStat from disk */
if (checkMtime == true && OldStat.mtime != CurStat.mtime)
CurStat.Flags = FlSize;
Stats.Bytes += CurStat.FileSize;
- Stats.Packages++;
+ ++Stats.Packages;
if ((DoControl && LoadControl() == false)
- || (DoContents && LoadContents(GenContentsOnly) == false)
- || (DoSource && LoadSource() == false)
- || (DoMD5 && GetMD5(false) == false)
- || (DoSHA1 && GetSHA1(false) == false)
- || (DoSHA256 && GetSHA256(false) == false)
- || (DoSHA512 && GetSHA512(false) == false) )
+ || (DoContents && LoadContents(GenContentsOnly) == false)
+ || (DoSource && LoadSource() == false)
+ || (DoHashes != 0 && GetHashes(false, DoHashes) == false)
+ )
{
- result = false;
+ return false;
}
-
- return result;
+
+ return true;
}
/*}}}*/
-
-bool CacheDB::LoadSource()
+bool CacheDB::LoadSource() /*{{{*/
{
// Try to read the control information out of the DB.
if ((CurStat.Flags & FlSource) == FlSource)
@@ -338,7 +332,7 @@ bool CacheDB::LoadSource()
return true;
}
-
+ /*}}}*/
// CacheDB::LoadControl - Load Control information /*{{{*/
// ---------------------------------------------------------------------
/* */
@@ -407,7 +401,7 @@ bool CacheDB::LoadContents(bool const &GenOnly)
return true;
}
/*}}}*/
-
+// CacheDB::GetHashes - Get the hashs /*{{{*/
static std::string bytes2hex(uint8_t *bytes, size_t length) {
char buf[3];
std::string space;
@@ -437,125 +431,59 @@ static void hex2bytes(uint8_t *bytes, const char *hex, int length) {
bytes++;
}
}
-
-// CacheDB::GetMD5 - Get the MD5 hash /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-bool CacheDB::GetMD5(bool const &GenOnly)
+bool CacheDB::GetHashes(bool const GenOnly, unsigned int const DoHashes)
{
- // Try to read the control information out of the DB.
- if ((CurStat.Flags & FlMD5) == FlMD5)
- {
- if (GenOnly == true)
- return true;
-
- MD5Res = bytes2hex(CurStat.MD5, sizeof(CurStat.MD5));
- return true;
- }
-
- Stats.MD5Bytes += CurStat.FileSize;
-
- if (OpenFile() == false)
- return false;
+ unsigned int FlHashes = DoHashes & (Hashes::MD5SUM | Hashes::SHA1SUM | Hashes::SHA256SUM | Hashes::SHA512SUM);
+ HashesList.clear();
- MD5Summation MD5;
- if (Fd->Seek(0) == false || MD5.AddFD(*Fd, CurStat.FileSize) == false)
- return false;
-
- MD5Res = MD5.Result();
- hex2bytes(CurStat.MD5, MD5Res.data(), sizeof(CurStat.MD5));
- CurStat.Flags |= FlMD5;
- return true;
-}
- /*}}}*/
-// CacheDB::GetSHA1 - Get the SHA1 hash /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-bool CacheDB::GetSHA1(bool const &GenOnly)
-{
- // Try to read the control information out of the DB.
- if ((CurStat.Flags & FlSHA1) == FlSHA1)
+ if (FlHashes != 0)
{
- if (GenOnly == true)
- return true;
+ if (OpenFile() == false)
+ return false;
- SHA1Res = bytes2hex(CurStat.SHA1, sizeof(CurStat.SHA1));
- return true;
- }
-
- Stats.SHA1Bytes += CurStat.FileSize;
-
- if (OpenFile() == false)
- return false;
+ Hashes hashes;
+ if (Fd->Seek(0) == false || hashes.AddFD(*Fd, CurStat.FileSize, FlHashes) == false)
+ return false;
- SHA1Summation SHA1;
- if (Fd->Seek(0) == false || SHA1.AddFD(*Fd, CurStat.FileSize) == false)
- return false;
-
- SHA1Res = SHA1.Result();
- hex2bytes(CurStat.SHA1, SHA1Res.data(), sizeof(CurStat.SHA1));
- CurStat.Flags |= FlSHA1;
- return true;
-}
- /*}}}*/
-// CacheDB::GetSHA256 - Get the SHA256 hash /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-bool CacheDB::GetSHA256(bool const &GenOnly)
-{
- // Try to read the control information out of the DB.
- if ((CurStat.Flags & FlSHA256) == FlSHA256)
- {
- if (GenOnly == true)
- return true;
-
- SHA256Res = bytes2hex(CurStat.SHA256, sizeof(CurStat.SHA256));
- return true;
+ HashStringList hl = hashes.GetHashStringList();
+ for (HashStringList::const_iterator hs = hl.begin(); hs != hl.end(); ++hs)
+ {
+ HashesList.push_back(*hs);
+ if (strcasecmp(hs->HashType().c_str(), "SHA512") == 0)
+ {
+ Stats.SHA512Bytes += CurStat.FileSize;
+ hex2bytes(CurStat.SHA512, hs->HashValue().data(), sizeof(CurStat.SHA512));
+ CurStat.Flags |= FlSHA512;
+ }
+ else if (strcasecmp(hs->HashType().c_str(), "SHA256") == 0)
+ {
+ Stats.SHA256Bytes += CurStat.FileSize;
+ hex2bytes(CurStat.SHA256, hs->HashValue().data(), sizeof(CurStat.SHA256));
+ CurStat.Flags |= FlSHA256;
+ }
+ else if (strcasecmp(hs->HashType().c_str(), "SHA1") == 0)
+ {
+ Stats.SHA1Bytes += CurStat.FileSize;
+ hex2bytes(CurStat.SHA1, hs->HashValue().data(), sizeof(CurStat.SHA1));
+ CurStat.Flags |= FlSHA1;
+ }
+ else if (strcasecmp(hs->HashType().c_str(), "MD5Sum") == 0)
+ {
+ Stats.MD5Bytes += CurStat.FileSize;
+ hex2bytes(CurStat.MD5, hs->HashValue().data(), sizeof(CurStat.MD5));
+ CurStat.Flags |= FlMD5;
+ }
+ else
+ return _error->Error("Got unknown unrequested hashtype %s", hs->HashType().c_str());
+ }
}
-
- Stats.SHA256Bytes += CurStat.FileSize;
-
- if (OpenFile() == false)
- return false;
-
- SHA256Summation SHA256;
- if (Fd->Seek(0) == false || SHA256.AddFD(*Fd, CurStat.FileSize) == false)
- return false;
-
- SHA256Res = SHA256.Result();
- hex2bytes(CurStat.SHA256, SHA256Res.data(), sizeof(CurStat.SHA256));
- CurStat.Flags |= FlSHA256;
- return true;
-}
- /*}}}*/
-// CacheDB::GetSHA256 - Get the SHA256 hash /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-bool CacheDB::GetSHA512(bool const &GenOnly)
-{
- // Try to read the control information out of the DB.
- if ((CurStat.Flags & FlSHA512) == FlSHA512)
- {
- if (GenOnly == true)
- return true;
-
- SHA512Res = bytes2hex(CurStat.SHA512, sizeof(CurStat.SHA512));
+ if (GenOnly == true)
return true;
- }
-
- Stats.SHA512Bytes += CurStat.FileSize;
-
- if (OpenFile() == false)
- return false;
- SHA512Summation SHA512;
- if (Fd->Seek(0) == false || SHA512.AddFD(*Fd, CurStat.FileSize) == false)
- return false;
-
- SHA512Res = SHA512.Result();
- hex2bytes(CurStat.SHA512, SHA512Res.data(), sizeof(CurStat.SHA512));
- CurStat.Flags |= FlSHA512;
- return true;
+ return HashesList.push_back(HashString("MD5Sum", bytes2hex(CurStat.MD5, sizeof(CurStat.MD5)))) &&
+ HashesList.push_back(HashString("SHA1", bytes2hex(CurStat.SHA1, sizeof(CurStat.SHA1)))) &&
+ HashesList.push_back(HashString("SHA256", bytes2hex(CurStat.SHA256, sizeof(CurStat.SHA256)))) &&
+ HashesList.push_back(HashString("SHA512", bytes2hex(CurStat.SHA512, sizeof(CurStat.SHA512))));
}
/*}}}*/
// CacheDB::Finish - Write back the cache structure /*{{{*/
diff --git a/ftparchive/cachedb.h b/ftparchive/cachedb.h
index 29d710d2c..613963f6f 100644
--- a/ftparchive/cachedb.h
+++ b/ftparchive/cachedb.h
@@ -12,6 +12,7 @@
#ifndef CACHEDB_H
#define CACHEDB_H
+#include <apt-pkg/hashes.h>
#include <apt-pkg/debfile.h>
#include <db.h>
@@ -94,15 +95,12 @@ class CacheDB
bool LoadControl();
bool LoadContents(bool const &GenOnly);
bool LoadSource();
- bool GetMD5(bool const &GenOnly);
- bool GetSHA1(bool const &GenOnly);
- bool GetSHA256(bool const &GenOnly);
- bool GetSHA512(bool const &GenOnly);
-
+ bool GetHashes(bool const GenOnly, unsigned int const DoHashes);
+
// Stat info stored in the DB, Fixed types since it is written to disk.
enum FlagList {FlControl = (1<<0),FlMD5=(1<<1),FlContents=(1<<2),
- FlSize=(1<<3), FlSHA1=(1<<4), FlSHA256=(1<<5),
- FlSHA512=(1<<6), FlSource=(1<<7),
+ FlSize=(1<<3), FlSHA1=(1<<4), FlSHA256=(1<<5),
+ FlSHA512=(1<<6), FlSource=(1<<7)
};
// the on-disk format changed (FileSize increased to 64bit) in
@@ -142,12 +140,8 @@ class CacheDB
debDebFile::MemControlExtract Control;
ContentsExtract Contents;
DscExtract Dsc;
+ HashStringList HashesList;
- std::string MD5Res;
- std::string SHA1Res;
- std::string SHA256Res;
- std::string SHA512Res;
-
// Runtime statistics
struct Stats
{
@@ -183,16 +177,13 @@ class CacheDB
bool SetFile(std::string const &FileName,struct stat St,FileFd *Fd);
// terrible old overloaded interface
- bool GetFileInfo(std::string const &FileName,
- bool const &DoControl,
- bool const &DoContents,
- bool const &GenContentsOnly,
- bool const &DoSource,
- bool const &DoMD5,
- bool const &DoSHA1,
- bool const &DoSHA256,
- bool const &DoSHA512,
- bool const &checkMtime = false);
+ bool GetFileInfo(std::string const &FileName,
+ bool const &DoControl,
+ bool const &DoContents,
+ bool const &GenContentsOnly,
+ bool const DoSource,
+ unsigned int const DoHashes,
+ bool const &checkMtime = false);
bool Finish();
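
To make the interface consolidation above concrete, here is a hedged caller sketch (illustrative only, not part of this patch; the helper name and the file name it receives are invented): the four per-hash booleans of the old GetFileInfo() collapse into a single Hashes::* bit mask, and results are read back from the new HashesList member instead of the dropped MD5Res/SHA1Res/SHA256Res/SHA512Res strings.

    #include <apt-pkg/hashes.h>
    #include <string>
    #include "cachedb.h"

    // Illustrative sketch: request only MD5 and SHA256 for one archive and
    // pull a single digest back out of the consolidated HashStringList.
    static bool HashOneArchive(CacheDB &Db, std::string const &FileName)
    {
       unsigned int const DoHashes = Hashes::MD5SUM | Hashes::SHA256SUM;
       if (Db.GetFileInfo(FileName,
                          true,      /* DoControl */
                          false,     /* DoContents */
                          false,     /* GenContentsOnly */
                          false,     /* DoSource */
                          DoHashes,  /* which checksums to compute */
                          false      /* checkMtime */) == false)
          return false;
       HashString const * const sha256 = Db.HashesList.find("SHA256");
       return sha256 != NULL && sha256->HashValue().empty() == false;
    }

The PackagesWriter and SourcesWriter hunks below do essentially this, with DoHashes filled in from the APT::FTPArchive::* configuration via ConfigToDoHashes().
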
diff --git a/ftparchive/contents.cc b/ftparchive/contents.cc
index 91dd2b8bd..8c4181eda 100644
--- a/ftparchive/contents.cc
+++ b/ftparchive/contents.cc
@@ -302,17 +302,17 @@ void GenContents::DoPrint(FILE *Out,GenContents::Node *Top, char *Buf)
DoPrint(Out,Top->BTreeRight,Buf);
}
/*}}}*/
-// ContentsExtract Constructor /*{{{*/
+// ContentsExtract Constructor /*{{{*/
ContentsExtract::ContentsExtract()
- : Data(0), MaxSize(0), CurSize(0)
+ : Data(0), MaxSize(0), CurSize(0)
{
-};
+}
/*}}}*/
-// ContentsExtract Destructor /*{{{*/
+// ContentsExtract Destructor /*{{{*/
ContentsExtract::~ContentsExtract()
{
free(Data);
-};
+}
/*}}}*/
// ContentsExtract::Read - Read the archive /*{{{*/
// ---------------------------------------------------------------------
diff --git a/ftparchive/sources.h b/ftparchive/sources.h
index 91e0b1376..9ada15728 100644
--- a/ftparchive/sources.h
+++ b/ftparchive/sources.h
@@ -17,7 +17,7 @@ class DscExtract
bool TakeDsc(const void *Data, unsigned long Size);
bool Read(std::string FileName);
- DscExtract() : Data(0), Length(0) {
+ DscExtract() : Data(0), Length(0), IsClearSigned(false) {
Data = new char[maxSize];
};
~DscExtract() {
diff --git a/ftparchive/writer.cc b/ftparchive/writer.cc
index 7c1c9cc03..db617e92a 100644
--- a/ftparchive/writer.cc
+++ b/ftparchive/writer.cc
@@ -54,29 +54,42 @@ FTWScanner *FTWScanner::Owner;
// SetTFRewriteData - Helper for setting rewrite lists /*{{{*/
// ---------------------------------------------------------------------
/* */
-inline void SetTFRewriteData(struct TFRewriteData &tfrd,
- const char *tag,
+static inline TFRewriteData SetTFRewriteData(const char *tag,
const char *rewrite,
const char *newtag = 0)
{
- tfrd.Tag = tag;
- tfrd.Rewrite = rewrite;
- tfrd.NewTag = newtag;
+ TFRewriteData tfrd;
+ tfrd.Tag = tag;
+ tfrd.Rewrite = rewrite;
+ tfrd.NewTag = newtag;
+ return tfrd;
+}
+ /*}}}*/
+// ConfigToDoHashes - which hashes to generate /*{{{*/
+static void SingleConfigToDoHashes(unsigned int &DoHashes, std::string const &Conf, unsigned int const Flag)
+{
+ if (_config->FindB(Conf, true) == true)
+ DoHashes |= Flag;
+ else
+ DoHashes &= ~Flag;
+}
+static void ConfigToDoHashes(unsigned int &DoHashes, std::string const &Conf)
+{
+ SingleConfigToDoHashes(DoHashes, Conf + "::MD5", Hashes::MD5SUM);
+ SingleConfigToDoHashes(DoHashes, Conf + "::SHA1", Hashes::SHA1SUM);
+ SingleConfigToDoHashes(DoHashes, Conf + "::SHA256", Hashes::SHA256SUM);
+ SingleConfigToDoHashes(DoHashes, Conf + "::SHA512", Hashes::SHA512SUM);
}
/*}}}*/
// FTWScanner::FTWScanner - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
-FTWScanner::FTWScanner(string const &Arch): Arch(Arch)
+FTWScanner::FTWScanner(string const &Arch): Arch(Arch), DoHashes(~0)
{
ErrorPrinted = false;
NoLinkAct = !_config->FindB("APT::FTPArchive::DeLinkAct",true);
-
- DoMD5 = _config->FindB("APT::FTPArchive::MD5",true);
- DoSHA1 = _config->FindB("APT::FTPArchive::SHA1",true);
- DoSHA256 = _config->FindB("APT::FTPArchive::SHA256",true);
- DoSHA512 = _config->FindB("APT::FTPArchive::SHA512",true);
+ ConfigToDoHashes(DoHashes, "APT::FTPArchive");
}
/*}}}*/
// FTWScanner::Scanner - FTW Scanner /*{{{*/
@@ -327,10 +340,7 @@ PackagesWriter::PackagesWriter(string const &DB,string const &Overrides,string c
DeLinkLimit = 0;
// Process the command line options
- DoMD5 = _config->FindB("APT::FTPArchive::Packages::MD5",DoMD5);
- DoSHA1 = _config->FindB("APT::FTPArchive::Packages::SHA1",DoSHA1);
- DoSHA256 = _config->FindB("APT::FTPArchive::Packages::SHA256",DoSHA256);
- DoSHA512 = _config->FindB("APT::FTPArchive::Packages::SHA512",DoSHA512);
+ ConfigToDoHashes(DoHashes, "APT::FTPArchive::Packages");
DoAlwaysStat = _config->FindB("APT::FTPArchive::AlwaysStat", false);
DoContents = _config->FindB("APT::FTPArchive::Contents",true);
NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false);
@@ -385,12 +395,12 @@ bool FTWScanner::SetExts(string const &Vals)
bool PackagesWriter::DoPackage(string FileName)
{
// Pull all the data we need form the DB
- if (Db.GetFileInfo(FileName,
- true, /* DoControl */
- DoContents,
- true, /* GenContentsOnly */
- false, /* DoSource */
- DoMD5, DoSHA1, DoSHA256, DoSHA512, DoAlwaysStat) == false)
+ if (Db.GetFileInfo(FileName,
+ true, /* DoControl */
+ DoContents,
+ true, /* GenContentsOnly */
+ false, /* DoSource */
+ DoHashes, DoAlwaysStat) == false)
{
return false;
}
@@ -454,30 +464,27 @@ bool PackagesWriter::DoPackage(string FileName)
}
// This lists all the changes to the fields we are going to make.
- // (7 hardcoded + maintainer + suggests + end marker)
- TFRewriteData Changes[6+2+OverItem->FieldOverride.size()+1+1];
-
- unsigned int End = 0;
- SetTFRewriteData(Changes[End++], "Size", Size);
- if (DoMD5 == true)
- SetTFRewriteData(Changes[End++], "MD5sum", Db.MD5Res.c_str());
- if (DoSHA1 == true)
- SetTFRewriteData(Changes[End++], "SHA1", Db.SHA1Res.c_str());
- if (DoSHA256 == true)
- SetTFRewriteData(Changes[End++], "SHA256", Db.SHA256Res.c_str());
- if (DoSHA512 == true)
- SetTFRewriteData(Changes[End++], "SHA512", Db.SHA512Res.c_str());
- SetTFRewriteData(Changes[End++], "Filename", NewFileName.c_str());
- SetTFRewriteData(Changes[End++], "Priority", OverItem->Priority.c_str());
- SetTFRewriteData(Changes[End++], "Status", 0);
- SetTFRewriteData(Changes[End++], "Optional", 0);
+ std::vector<TFRewriteData> Changes;
+
+ Changes.push_back(SetTFRewriteData("Size", Size));
+ for (HashStringList::const_iterator hs = Db.HashesList.begin(); hs != Db.HashesList.end(); ++hs)
+ {
+ if (hs->HashType() == "MD5Sum")
+ Changes.push_back(SetTFRewriteData("MD5sum", hs->HashValue().c_str()));
+ else
+ Changes.push_back(SetTFRewriteData(hs->HashType().c_str(), hs->HashValue().c_str()));
+ }
+ Changes.push_back(SetTFRewriteData("Filename", NewFileName.c_str()));
+ Changes.push_back(SetTFRewriteData("Priority", OverItem->Priority.c_str()));
+ Changes.push_back(SetTFRewriteData("Status", 0));
+ Changes.push_back(SetTFRewriteData("Optional", 0));
string DescriptionMd5;
if (LongDescription == false) {
MD5Summation descmd5;
descmd5.Add(desc.c_str());
DescriptionMd5 = descmd5.Result().Value();
- SetTFRewriteData(Changes[End++], "Description-md5", DescriptionMd5.c_str());
+ Changes.push_back(SetTFRewriteData("Description-md5", DescriptionMd5.c_str()));
if (TransWriter != NULL)
TransWriter->DoPackage(Package, desc, DescriptionMd5);
}
@@ -492,12 +499,12 @@ bool PackagesWriter::DoPackage(string FileName)
NewLine(1);
ioprintf(c1out, _(" %s maintainer is %s not %s\n"),
Package.c_str(), Tags.FindS("Maintainer").c_str(), OverItem->OldMaint.c_str());
- }
+ }
}
-
+
if (NewMaint.empty() == false)
- SetTFRewriteData(Changes[End++], "Maintainer", NewMaint.c_str());
-
+ Changes.push_back(SetTFRewriteData("Maintainer", NewMaint.c_str()));
+
/* Get rid of the Optional tag. This is an ugly, ugly, ugly hack that
dpkg-scanpackages does. Well sort of. dpkg-scanpackages just does renaming
but dpkg does this append bit. So we do the append bit, at least that way the
@@ -508,17 +515,17 @@ bool PackagesWriter::DoPackage(string FileName)
{
if (Tags.FindS("Suggests").empty() == false)
OptionalStr = Tags.FindS("Suggests") + ", " + OptionalStr;
- SetTFRewriteData(Changes[End++], "Suggests", OptionalStr.c_str());
+ Changes.push_back(SetTFRewriteData("Suggests", OptionalStr.c_str()));
}
- for (map<string,string>::const_iterator I = OverItem->FieldOverride.begin();
+ for (map<string,string>::const_iterator I = OverItem->FieldOverride.begin();
I != OverItem->FieldOverride.end(); ++I)
- SetTFRewriteData(Changes[End++],I->first.c_str(),I->second.c_str());
+ Changes.push_back(SetTFRewriteData(I->first.c_str(),I->second.c_str()));
- SetTFRewriteData(Changes[End++], 0, 0);
+ Changes.push_back(SetTFRewriteData( 0, 0));
// Rewrite and store the fields.
- if (TFRewrite(Output,Tags,TFRewritePackageOrder,Changes) == false)
+ if (TFRewrite(Output,Tags,TFRewritePackageOrder,Changes.data()) == false)
return false;
fprintf(Output,"\n");
@@ -589,10 +596,7 @@ SourcesWriter::SourcesWriter(string const &DB, string const &BOverrides,string c
BufSize = 0;
// Process the command line options
- DoMD5 = _config->FindB("APT::FTPArchive::Sources::MD5",DoMD5);
- DoSHA1 = _config->FindB("APT::FTPArchive::Sources::SHA1",DoSHA1);
- DoSHA256 = _config->FindB("APT::FTPArchive::Sources::SHA256",DoSHA256);
- DoSHA512 = _config->FindB("APT::FTPArchive::Sources::SHA512",DoSHA512);
+ ConfigToDoHashes(DoHashes, "APT::FTPArchive::Sources");
NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false);
DoAlwaysStat = _config->FindB("APT::FTPArchive::AlwaysStat", false);
@@ -614,17 +618,25 @@ SourcesWriter::SourcesWriter(string const &DB, string const &BOverrides,string c
}
/*}}}*/
// SourcesWriter::DoPackage - Process a single package /*{{{*/
-// ---------------------------------------------------------------------
-/* */
+static std::ostream& addDscHash(std::ostream &out, unsigned int const DoHashes,
+ Hashes::SupportedHashes const DoIt, pkgTagSection &Tags, char const * const FieldName,
+ HashString const * const Hash, unsigned long long Size, std::string FileName)
+{
+ if ((DoHashes & DoIt) != DoIt || Tags.Exists(FieldName) == false || Hash == NULL)
+ return out;
+ out << "\n " << Hash->HashValue() << " " << Size << " " << FileName
+ << "\n " << Tags.FindS(FieldName);
+ return out;
+}
bool SourcesWriter::DoPackage(string FileName)
{
// Pull all the data we need form the DB
if (Db.GetFileInfo(FileName,
- false, /* DoControl */
- false, /* DoContents */
- false, /* GenContentsOnly */
- true, /* DoSource */
- DoMD5, DoSHA1, DoSHA256, DoSHA512, DoAlwaysStat) == false)
+ false, /* DoControl */
+ false, /* DoContents */
+ false, /* GenContentsOnly */
+ true, /* DoSource */
+ DoHashes, DoAlwaysStat) == false)
{
return false;
}
@@ -712,29 +724,19 @@ bool SourcesWriter::DoPackage(string FileName)
*SOverItem = *OverItem;
}
}
-
+
// Add the dsc to the files hash list
string const strippedName = flNotDir(FileName);
std::ostringstream ostreamFiles;
- if (DoMD5 == true && Tags.Exists("Files"))
- ostreamFiles << "\n " << Db.MD5Res.c_str() << " " << St.st_size << " "
- << strippedName << "\n " << Tags.FindS("Files");
+ addDscHash(ostreamFiles, DoHashes, Hashes::MD5SUM, Tags, "Files", Db.HashesList.find("MD5Sum"), St.st_size, strippedName);
string const Files = ostreamFiles.str();
std::ostringstream ostreamSha1;
- if (DoSHA1 == true && Tags.Exists("Checksums-Sha1"))
- ostreamSha1 << "\n " << string(Db.SHA1Res.c_str()) << " " << St.st_size << " "
- << strippedName << "\n " << Tags.FindS("Checksums-Sha1");
-
+ addDscHash(ostreamSha1, DoHashes, Hashes::SHA1SUM, Tags, "Checksums-Sha1", Db.HashesList.find("SHA1"), St.st_size, strippedName);
std::ostringstream ostreamSha256;
- if (DoSHA256 == true && Tags.Exists("Checksums-Sha256"))
- ostreamSha256 << "\n " << string(Db.SHA256Res.c_str()) << " " << St.st_size << " "
- << strippedName << "\n " << Tags.FindS("Checksums-Sha256");
-
+ addDscHash(ostreamSha256, DoHashes, Hashes::SHA256SUM, Tags, "Checksums-Sha256", Db.HashesList.find("SHA256"), St.st_size, strippedName);
std::ostringstream ostreamSha512;
- if (DoSHA512 == true && Tags.Exists("Checksums-Sha512"))
- ostreamSha512 << "\n " << string(Db.SHA512Res.c_str()) << " " << St.st_size << " "
- << strippedName << "\n " << Tags.FindS("Checksums-Sha512");
+ addDscHash(ostreamSha512, DoHashes, Hashes::SHA512SUM, Tags, "Checksums-Sha512", Db.HashesList.find("SHA512"), St.st_size, strippedName);
// Strip the DirStrip prefix from the FileName and add the PathPrefix
string NewFileName;
@@ -766,35 +768,54 @@ bool SourcesWriter::DoPackage(string FileName)
string OriginalPath = Directory + ParseJnk;
// Add missing hashes to source files
- if ((DoSHA1 == true && !Tags.Exists("Checksums-Sha1")) ||
- (DoSHA256 == true && !Tags.Exists("Checksums-Sha256")) ||
- (DoSHA512 == true && !Tags.Exists("Checksums-Sha512")))
+ if (((DoHashes & Hashes::SHA1SUM) == Hashes::SHA1SUM && !Tags.Exists("Checksums-Sha1")) ||
+ ((DoHashes & Hashes::SHA256SUM) == Hashes::SHA256SUM && !Tags.Exists("Checksums-Sha256")) ||
+ ((DoHashes & Hashes::SHA512SUM) == Hashes::SHA512SUM && !Tags.Exists("Checksums-Sha512")))
{
- if (Db.GetFileInfo(OriginalPath,
+ if (Db.GetFileInfo(OriginalPath,
false, /* DoControl */
false, /* DoContents */
false, /* GenContentsOnly */
false, /* DoSource */
- DoMD5, DoSHA1, DoSHA256, DoSHA512,
+ DoHashes,
DoAlwaysStat) == false)
{
return _error->Error("Error getting file info");
}
- if (DoSHA1 == true && !Tags.Exists("Checksums-Sha1"))
- ostreamSha1 << "\n " << string(Db.SHA1Res) << " "
- << Db.GetFileSize() << " " << ParseJnk;
-
- if (DoSHA256 == true && !Tags.Exists("Checksums-Sha256"))
- ostreamSha256 << "\n " << string(Db.SHA256Res) << " "
- << Db.GetFileSize() << " " << ParseJnk;
-
- if (DoSHA512 == true && !Tags.Exists("Checksums-Sha512"))
- ostreamSha512 << "\n " << string(Db.SHA512Res) << " "
- << Db.GetFileSize() << " " << ParseJnk;
+ for (HashStringList::const_iterator hs = Db.HashesList.begin(); hs != Db.HashesList.end(); ++hs)
+ {
+ if (hs->HashType() == "MD5Sum")
+ continue;
+ char const * fieldname;
+ std::ostream * out;
+ if (hs->HashType() == "SHA1")
+ {
+ fieldname = "Checksums-Sha1";
+ out = &ostreamSha1;
+ }
+ else if (hs->HashType() == "SHA256")
+ {
+ fieldname = "Checksums-Sha256";
+ out = &ostreamSha256;
+ }
+ else if (hs->HashType() == "SHA512")
+ {
+ fieldname = "Checksums-Sha512";
+ out = &ostreamSha512;
+ }
+ else
+ {
+ _error->Warning("Ignoring unknown Checksumtype %s in SourcesWriter::DoPackages", hs->HashType().c_str());
+ continue;
+ }
+ if (Tags.Exists(fieldname) == true)
+ continue;
+ (*out) << "\n " << hs->HashValue() << " " << Db.GetFileSize() << " " << ParseJnk;
+ }
- // write back the GetFileInfo() stats data
- Db.Finish();
+ // write back the GetFileInfo() stats data
+ Db.Finish();
}
// Perform the delinking operation
@@ -820,22 +841,21 @@ bool SourcesWriter::DoPackage(string FileName)
// This lists all the changes to the fields we are going to make.
// (5 hardcoded + checksums + maintainer + end marker)
- TFRewriteData Changes[5+2+1+SOverItem->FieldOverride.size()+1];
+ std::vector<TFRewriteData> Changes;
- unsigned int End = 0;
- SetTFRewriteData(Changes[End++],"Source",Package.c_str(),"Package");
+ Changes.push_back(SetTFRewriteData("Source",Package.c_str(),"Package"));
if (Files.empty() == false)
- SetTFRewriteData(Changes[End++],"Files",Files.c_str());
+ Changes.push_back(SetTFRewriteData("Files",Files.c_str()));
if (ChecksumsSha1.empty() == false)
- SetTFRewriteData(Changes[End++],"Checksums-Sha1",ChecksumsSha1.c_str());
+ Changes.push_back(SetTFRewriteData("Checksums-Sha1",ChecksumsSha1.c_str()));
if (ChecksumsSha256.empty() == false)
- SetTFRewriteData(Changes[End++],"Checksums-Sha256",ChecksumsSha256.c_str());
+ Changes.push_back(SetTFRewriteData("Checksums-Sha256",ChecksumsSha256.c_str()));
if (ChecksumsSha512.empty() == false)
- SetTFRewriteData(Changes[End++],"Checksums-Sha512",ChecksumsSha512.c_str());
+ Changes.push_back(SetTFRewriteData("Checksums-Sha512",ChecksumsSha512.c_str()));
if (Directory != "./")
- SetTFRewriteData(Changes[End++],"Directory",Directory.c_str());
- SetTFRewriteData(Changes[End++],"Priority",BestPrio.c_str());
- SetTFRewriteData(Changes[End++],"Status",0);
+ Changes.push_back(SetTFRewriteData("Directory",Directory.c_str()));
+ Changes.push_back(SetTFRewriteData("Priority",BestPrio.c_str()));
+ Changes.push_back(SetTFRewriteData("Status",0));
// Rewrite the maintainer field if necessary
bool MaintFailed;
@@ -850,16 +870,16 @@ bool SourcesWriter::DoPackage(string FileName)
}
}
if (NewMaint.empty() == false)
- SetTFRewriteData(Changes[End++], "Maintainer", NewMaint.c_str());
+ Changes.push_back(SetTFRewriteData("Maintainer", NewMaint.c_str()));
for (map<string,string>::const_iterator I = SOverItem->FieldOverride.begin();
I != SOverItem->FieldOverride.end(); ++I)
- SetTFRewriteData(Changes[End++],I->first.c_str(),I->second.c_str());
+ Changes.push_back(SetTFRewriteData(I->first.c_str(),I->second.c_str()));
- SetTFRewriteData(Changes[End++], 0, 0);
+ Changes.push_back(SetTFRewriteData(0, 0));
// Rewrite and store the fields.
- if (TFRewrite(Output,Tags,TFRewriteSourceOrder,Changes) == false)
+ if (TFRewrite(Output,Tags,TFRewriteSourceOrder,Changes.data()) == false)
return false;
fprintf(Output,"\n");
@@ -886,15 +906,13 @@ ContentsWriter::ContentsWriter(string const &DB, string const &Arch) :
determine what the package name is. */
bool ContentsWriter::DoPackage(string FileName, string Package)
{
- if (!Db.GetFileInfo(FileName,
- Package.empty(), /* DoControl */
- true, /* DoContents */
- false, /* GenContentsOnly */
- false, /* DoSource */
- false, /* DoMD5 */
- false, /* DoSHA1 */
- false, /* DoSHA256 */
- false)) /* DoSHA512 */
+ if (!Db.GetFileInfo(FileName,
+ Package.empty(), /* DoControl */
+ true, /* DoContents */
+ false, /* GenContentsOnly */
+ false, /* DoSource */
+ 0, /* DoHashes */
+ false /* checkMtime */))
{
return false;
}
@@ -1030,9 +1048,7 @@ ReleaseWriter::ReleaseWriter(string const &/*DB*/)
fprintf(Output, "%s: %s\n", (*I).first.c_str(), Value.c_str());
}
- DoMD5 = _config->FindB("APT::FTPArchive::Release::MD5",DoMD5);
- DoSHA1 = _config->FindB("APT::FTPArchive::Release::SHA1",DoSHA1);
- DoSHA256 = _config->FindB("APT::FTPArchive::Release::SHA256",DoSHA256);
+ ConfigToDoHashes(DoHashes, "APT::FTPArchive::Release");
}
/*}}}*/
// ReleaseWriter::DoPackage - Process a single package /*{{{*/
@@ -1066,15 +1082,8 @@ bool ReleaseWriter::DoPackage(string FileName)
CheckSums[NewFileName].size = fd.Size();
Hashes hs;
- hs.AddFD(fd, 0, DoMD5, DoSHA1, DoSHA256, DoSHA512);
- if (DoMD5 == true)
- CheckSums[NewFileName].MD5 = hs.MD5.Result();
- if (DoSHA1 == true)
- CheckSums[NewFileName].SHA1 = hs.SHA1.Result();
- if (DoSHA256 == true)
- CheckSums[NewFileName].SHA256 = hs.SHA256.Result();
- if (DoSHA512 == true)
- CheckSums[NewFileName].SHA512 = hs.SHA512.Result();
+ hs.AddFD(fd, 0, DoHashes);
+ CheckSums[NewFileName].Hashes = hs.GetHashStringList();
fd.Close();
return true;
@@ -1083,54 +1092,29 @@ bool ReleaseWriter::DoPackage(string FileName)
/*}}}*/
// ReleaseWriter::Finish - Output the checksums /*{{{*/
// ---------------------------------------------------------------------
-void ReleaseWriter::Finish()
+static void printChecksumTypeRecord(FILE * const Output, char const * const Type, map<string, ReleaseWriter::CheckSum> const &CheckSums)
{
- if (DoMD5 == true)
- {
- fprintf(Output, "MD5Sum:\n");
- for(map<string,struct CheckSum>::const_iterator I = CheckSums.begin();
- I != CheckSums.end(); ++I)
- {
- fprintf(Output, " %s %16llu %s\n",
- (*I).second.MD5.c_str(),
- (*I).second.size,
- (*I).first.c_str());
- }
- }
- if (DoSHA1 == true)
- {
- fprintf(Output, "SHA1:\n");
- for(map<string,struct CheckSum>::const_iterator I = CheckSums.begin();
- I != CheckSums.end(); ++I)
- {
- fprintf(Output, " %s %16llu %s\n",
- (*I).second.SHA1.c_str(),
- (*I).second.size,
- (*I).first.c_str());
- }
- }
- if (DoSHA256 == true)
- {
- fprintf(Output, "SHA256:\n");
- for(map<string,struct CheckSum>::const_iterator I = CheckSums.begin();
+ fprintf(Output, "%s:\n", Type);
+ for(map<string,ReleaseWriter::CheckSum>::const_iterator I = CheckSums.begin();
I != CheckSums.end(); ++I)
{
+ HashString const * const hs = I->second.Hashes.find(Type);
+ if (hs == NULL)
+ continue;
fprintf(Output, " %s %16llu %s\n",
- (*I).second.SHA256.c_str(),
+ hs->HashValue().c_str(),
(*I).second.size,
(*I).first.c_str());
}
- }
-
- fprintf(Output, "SHA512:\n");
- for(map<string,struct CheckSum>::const_iterator I = CheckSums.begin();
- I != CheckSums.end();
- ++I)
- {
- fprintf(Output, " %s %16llu %s\n",
- (*I).second.SHA512.c_str(),
- (*I).second.size,
- (*I).first.c_str());
- }
-
+}
+void ReleaseWriter::Finish()
+{
+ if ((DoHashes & Hashes::MD5SUM) == Hashes::MD5SUM)
+ printChecksumTypeRecord(Output, "MD5Sum", CheckSums);
+ if ((DoHashes & Hashes::SHA1SUM) == Hashes::SHA1SUM)
+ printChecksumTypeRecord(Output, "SHA1", CheckSums);
+ if ((DoHashes & Hashes::SHA256SUM) == Hashes::SHA256SUM)
+ printChecksumTypeRecord(Output, "SHA256", CheckSums);
+ if ((DoHashes & Hashes::SHA512SUM) == Hashes::SHA512SUM)
+ printChecksumTypeRecord(Output, "SHA512", CheckSums);
}
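
The writer.cc hunks above collapse the four per-algorithm switches (DoMD5, DoSHA1, DoSHA256, DoSHA512) into one DoHashes bit field and look results up in a HashStringList instead of separate result strings. A minimal sketch of that pattern, reusing the Hashes::SupportedHashes and HashStringList names from the apt-pkg/hashes.h changes earlier in this diff; the helper name printIfEnabled is made up for illustration:

#include <apt-pkg/hashes.h>
#include <cstdio>

// Emit one checksum line only if the hash type is both enabled in the
// DoHashes bit field and actually present in the computed hash list.
static void printIfEnabled(FILE * const out, unsigned int const DoHashes,
      Hashes::SupportedHashes const Which, char const * const Type,
      HashStringList const &List)
{
   if ((DoHashes & Which) != Which)
      return;                        // this hash type is disabled via config
   HashString const * const hs = List.find(Type);
   if (hs == NULL)
      return;                        // enabled, but not computed for this file
   fprintf(out, "%s: %s\n", Type, hs->HashValue().c_str());
}

// Usage mirroring the pattern in ReleaseWriter::Finish() above:
//   printIfEnabled(Output, DoHashes, Hashes::SHA256SUM, "SHA256", hashList);
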
diff --git a/ftparchive/writer.h b/ftparchive/writer.h
index d8a10e0bb..226996475 100644
--- a/ftparchive/writer.h
+++ b/ftparchive/writer.h
@@ -13,6 +13,8 @@
#ifndef WRITER_H
#define WRITER_H
+#include <apt-pkg/hashes.h>
+
#include <string>
#include <stdio.h>
#include <iostream>
@@ -61,10 +63,7 @@ class FTWScanner
}
public:
- bool DoMD5;
- bool DoSHA1;
- bool DoSHA256;
- bool DoSHA512;
+ unsigned int DoHashes;
unsigned long DeLinkLimit;
string InternalPrefix;
@@ -197,17 +196,14 @@ public:
string PathPrefix;
string DirStrip;
-protected:
struct CheckSum
{
- string MD5;
- string SHA1;
- string SHA256;
- string SHA512;
+ HashStringList Hashes;
// Limited by FileFd::Size()
unsigned long long size;
~CheckSum() {};
};
+protected:
map<string,struct CheckSum> CheckSums;
};
diff --git a/methods/copy.cc b/methods/copy.cc
index 40f8f85ec..a23c0316c 100644
--- a/methods/copy.cc
+++ b/methods/copy.cc
@@ -67,6 +67,14 @@ bool CopyMethod::Fetch(FetchItem *Itm)
Res.LastModified = Buf.st_mtime;
Res.IMSHit = false;
URIStart(Res);
+
+ // when the files are identical, just compute the hashes
+ if(File == Itm->DestFile)
+ {
+ CalculateHashes(Res);
+ URIDone(Res);
+ return true;
+ }
// just calc the hashes if the source and destination are identical
if (File == Itm->DestFile)
@@ -116,5 +124,6 @@ int main()
setlocale(LC_ALL, "");
CopyMethod Mth;
+
return Mth.Run();
}
diff --git a/methods/ftp.cc b/methods/ftp.cc
index 66787a7be..0504e5872 100644
--- a/methods/ftp.cc
+++ b/methods/ftp.cc
@@ -75,9 +75,10 @@ time_t FtpMethod::FailTime = 0;
// FTPConn::FTPConn - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
-FTPConn::FTPConn(URI Srv) : Len(0), ServerFd(-1), DataFd(-1),
+FTPConn::FTPConn(URI Srv) : Len(0), ServerFd(-1), DataFd(-1),
DataListenFd(-1), ServerName(Srv),
- ForceExtended(false), TryPassive(true)
+ ForceExtended(false), TryPassive(true),
+ PeerAddrLen(0), ServerAddrLen(0)
{
Debug = _config->FindB("Debug::Acquire::Ftp",false);
PasvAddr = 0;
@@ -848,7 +849,8 @@ bool FTPConn::Finalize()
/* This opens a data connection, sends REST and RETR and then
transfers the file over. */
bool FTPConn::Get(const char *Path,FileFd &To,unsigned long long Resume,
- Hashes &Hash,bool &Missing)
+ Hashes &Hash,bool &Missing, unsigned long long MaximumSize,
+ pkgAcqMethod *Owner)
{
Missing = false;
if (CreateDataFd() == false)
@@ -921,7 +923,14 @@ bool FTPConn::Get(const char *Path,FileFd &To,unsigned long long Resume,
{
Close();
return false;
- }
+ }
+
+ if (MaximumSize > 0 && To.Tell() > MaximumSize)
+ {
+ Owner->SetFailReason("MaximumSizeExceeded");
+ return _error->Error("Writing more data than expected (%llu > %llu)",
+ To.Tell(), MaximumSize);
+ }
}
// All done
@@ -979,6 +988,10 @@ bool FtpMethod::Configuration(string Message)
return false;
TimeOut = _config->FindI("Acquire::Ftp::Timeout",TimeOut);
+
+ // no more active ftp, sorry
+ DropPrivsOrDie();
+
return true;
}
/*}}}*/
@@ -1062,7 +1075,7 @@ bool FtpMethod::Fetch(FetchItem *Itm)
FailFd = Fd.Fd();
bool Missing;
- if (Server->Get(File,Fd,Res.ResumePoint,Hash,Missing) == false)
+ if (Server->Get(File,Fd,Res.ResumePoint,Hash,Missing,Itm->MaximumSize,this) == false)
{
Fd.Close();
@@ -1131,6 +1144,6 @@ int main(int, const char *argv[])
}
FtpMethod Mth;
-
+
return Mth.Run();
}
diff --git a/methods/ftp.h b/methods/ftp.h
index dd92f0086..2efd28ec6 100644
--- a/methods/ftp.h
+++ b/methods/ftp.h
@@ -62,7 +62,8 @@ class FTPConn
bool Size(const char *Path,unsigned long long &Size);
bool ModTime(const char *Path, time_t &Time);
bool Get(const char *Path,FileFd &To,unsigned long long Resume,
- Hashes &MD5,bool &Missing);
+ Hashes &MD5,bool &Missing, unsigned long long MaximumSize,
+ pkgAcqMethod *Owner);
FTPConn(URI Srv);
~FTPConn();
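
In ftp.cc the per-item MaximumSize is now threaded through FTPConn::Get() so the transfer is aborted as soon as more data arrives than the acquire system announced; the real code additionally calls Owner->SetFailReason("MaximumSizeExceeded") so the failure can be classified. The same guard shows up in the http and https methods further down. A reduced sketch of the check itself, assuming only FileFd and the global _error from apt-pkg:

#include <apt-pkg/error.h>
#include <apt-pkg/fileutl.h>

// Abort a transfer once the destination file grows beyond the size the
// acquire system expects for this item (0 means "no limit").
static bool CheckMaximumSize(FileFd &To, unsigned long long const MaximumSize)
{
   if (MaximumSize > 0 && To.Tell() > MaximumSize)
      return _error->Error("Writing more data than expected (%llu > %llu)",
                           To.Tell(), MaximumSize);
   return true;
}
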
diff --git a/methods/gpgv.cc b/methods/gpgv.cc
index ae521a2ed..488c16826 100644
--- a/methods/gpgv.cc
+++ b/methods/gpgv.cc
@@ -5,6 +5,7 @@
#include <apt-pkg/error.h>
#include <apt-pkg/gpgv.h>
#include <apt-pkg/strutl.h>
+#include <apt-pkg/fileutl.h>
#include <ctype.h>
#include <errno.h>
@@ -43,12 +44,22 @@ class GPGVMethod : public pkgAcqMethod
protected:
virtual bool Fetch(FetchItem *Itm);
-
+ virtual bool Configuration(string Message);
public:
GPGVMethod() : pkgAcqMethod("1.0",SingleInstance | SendConfig) {};
};
+bool GPGVMethod::Configuration(string Message)
+{
+ if (pkgAcqMethod::Configuration(Message) == false)
+ return false;
+
+ DropPrivsOrDie();
+
+ return true;
+}
+
string GPGVMethod::VerifyGetSigners(const char *file, const char *outfile,
vector<string> &GoodSigners,
vector<string> &BadSigners,
@@ -74,7 +85,7 @@ string GPGVMethod::VerifyGetSigners(const char *file, const char *outfile,
FILE *pipein = fdopen(fd[0], "r");
- // Loop over the output of gpgv, and check the signatures.
+ // Loop over the output of apt-key (which really is gnupg), and check the signatures.
size_t buffersize = 64;
char *buffer = (char *) malloc(buffersize);
size_t bufferoff = 0;
@@ -159,7 +170,7 @@ string GPGVMethod::VerifyGetSigners(const char *file, const char *outfile,
waitpid(pid, &status, 0);
if (Debug == true)
{
- std::clog << "gpgv exited\n";
+ ioprintf(std::clog, "gpgv exited with status %i\n", WEXITSTATUS(status));
}
if (WEXITSTATUS(status) == 0)
@@ -171,7 +182,7 @@ string GPGVMethod::VerifyGetSigners(const char *file, const char *outfile,
else if (WEXITSTATUS(status) == 1)
return _("At least one invalid signature was encountered.");
else if (WEXITSTATUS(status) == 111)
- return _("Could not execute 'gpgv' to verify signature (is gpgv installed?)");
+ return _("Could not execute 'apt-key' to verify signature (is gnupg installed?)");
else if (WEXITSTATUS(status) == 112)
{
// acquire system checks for "NODATA" to generate GPG errors (the others are only warnings)
@@ -181,7 +192,7 @@ string GPGVMethod::VerifyGetSigners(const char *file, const char *outfile,
return errmsg;
}
else
- return _("Unknown error executing gpgv");
+ return _("Unknown error executing apt-key");
}
bool GPGVMethod::Fetch(FetchItem *Itm)
@@ -199,7 +210,7 @@ bool GPGVMethod::Fetch(FetchItem *Itm)
Res.Filename = Itm->DestFile;
URIStart(Res);
- // Run gpgv on file, extract contents and get the key ID of the signer
+ // Run apt-key on file, extract contents and get the key ID of the signer
string msg = VerifyGetSigners(Path.c_str(), Itm->DestFile.c_str(),
GoodSigners, BadSigners, WorthlessSigners,
NoPubKeySigners);
@@ -251,7 +262,7 @@ bool GPGVMethod::Fetch(FetchItem *Itm)
if (_config->FindB("Debug::Acquire::gpgv", false))
{
- std::clog << "gpgv succeeded\n";
+ std::clog << "apt-key succeeded\n";
}
return true;
@@ -261,7 +272,7 @@ bool GPGVMethod::Fetch(FetchItem *Itm)
int main()
{
setlocale(LC_ALL, "");
-
+
GPGVMethod Mth;
return Mth.Run();
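
gpgv.cc, like the ftp, gzip, http and https methods elsewhere in this diff, now overrides Configuration() so the worker drops root right after the configuration handshake with the acquire system. The shape of that override, sketched with a hypothetical method class; DropPrivsOrDie() is the fileutl helper introduced by this patch series:

#include <apt-pkg/acquire-method.h>
#include <apt-pkg/fileutl.h>

class ExampleMethod : public pkgAcqMethod       // illustrative subclass only
{
   protected:
   virtual bool Configuration(std::string Message)
   {
      // let the base class parse the configuration blob sent by APT first
      if (pkgAcqMethod::Configuration(Message) == false)
         return false;
      // then give up root privileges before touching any remote data
      DropPrivsOrDie();
      return true;
   }

   public:
   ExampleMethod() : pkgAcqMethod("1.0", SingleInstance | SendConfig) {};
};
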
diff --git a/methods/gzip.cc b/methods/gzip.cc
index df3f8828f..387c05f2e 100644
--- a/methods/gzip.cc
+++ b/methods/gzip.cc
@@ -33,12 +33,22 @@ const char *Prog;
class GzipMethod : public pkgAcqMethod
{
virtual bool Fetch(FetchItem *Itm);
+ virtual bool Configuration(std::string Message);
public:
GzipMethod() : pkgAcqMethod("1.1",SingleInstance | SendConfig) {};
};
+bool GzipMethod::Configuration(std::string Message)
+{
+ if (pkgAcqMethod::Configuration(Message) == false)
+ return false;
+
+ DropPrivsOrDie();
+
+ return true;
+}
// GzipMethod::Fetch - Decompress the passed URI /*{{{*/
// ---------------------------------------------------------------------
@@ -139,5 +149,6 @@ int main(int, char *argv[])
++Prog;
GzipMethod Mth;
+
return Mth.Run();
}
diff --git a/methods/http.cc b/methods/http.cc
index f2a4a4db6..a5de13511 100644
--- a/methods/http.cc
+++ b/methods/http.cc
@@ -64,7 +64,8 @@ const unsigned int CircleBuf::BW_HZ=10;
// CircleBuf::CircleBuf - Circular input buffer /*{{{*/
// ---------------------------------------------------------------------
/* */
-CircleBuf::CircleBuf(unsigned long long Size) : Size(Size), Hash(0)
+CircleBuf::CircleBuf(unsigned long long Size)
+ : Size(Size), Hash(0), TotalWriten(0)
{
Buf = new unsigned char[Size];
Reset();
@@ -80,6 +81,7 @@ void CircleBuf::Reset()
InP = 0;
OutP = 0;
StrPos = 0;
+ TotalWriten = 0;
MaxGet = (unsigned long long)-1;
OutQueue = string();
if (Hash != 0)
@@ -217,6 +219,8 @@ bool CircleBuf::Write(int Fd)
return false;
}
+
+ TotalWriten += Res;
if (Hash != 0)
Hash->Add(Buf + (OutP%Size),Res);
@@ -651,6 +655,13 @@ bool HttpServerState::Go(bool ToFile, FileFd * const File)
return _error->Errno("write",_("Error writing to output file"));
}
+ if (MaximumSize > 0 && File && File->Tell() > MaximumSize)
+ {
+ Owner->SetFailReason("MaximumSizeExceeded");
+ return _error->Error("Writing more data than expected (%llu > %llu)",
+ File->Tell(), MaximumSize);
+ }
+
// Handle commands from APT
if (FD_ISSET(STDIN_FILENO,&rfds))
{
@@ -759,6 +770,8 @@ bool HttpMethod::Configuration(string Message)
if (ServerMethod::Configuration(Message) == false)
return false;
+ DropPrivsOrDie();
+
AllowRedirect = _config->FindB("Acquire::http::AllowRedirect",true);
PipelineDepth = _config->FindI("Acquire::http::Pipeline-Depth",
PipelineDepth);
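
The http.cc hunks make CircleBuf keep a running total of the bytes it has flushed and let HttpServerState::Go() compare the file position against the per-item MaximumSize. A simplified stand-in for the counting side, with a plain buffer instead of the real circular one:

#include <unistd.h>

// Flush pending bytes to a descriptor and remember how much was written in
// total, so the caller can enforce a size limit afterwards.
struct OutBuffer
{
   unsigned char Data[4096];
   unsigned long long Pending;
   unsigned long long TotalWritten;

   OutBuffer() : Pending(0), TotalWritten(0) {}

   bool Flush(int const Fd)
   {
      unsigned long long Done = 0;
      while (Done < Pending)
      {
         ssize_t const Res = write(Fd, Data + Done, Pending - Done);
         if (Res < 0)
            return false;
         Done += Res;
         TotalWritten += Res;         // running total, as CircleBuf now keeps
      }
      Pending = 0;
      return true;
   }
};
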
diff --git a/methods/http.h b/methods/http.h
index 1df9fa07d..40a88a7be 100644
--- a/methods/http.h
+++ b/methods/http.h
@@ -63,6 +63,8 @@ class CircleBuf
public:
Hashes *Hash;
+ // total amount of data that got written so far
+ unsigned long long TotalWriten;
// Read data in
bool Read(int Fd);
@@ -81,8 +83,8 @@ class CircleBuf
bool ReadSpace() const {return Size - (InP - OutP) > 0;};
bool WriteSpace() const {return InP - OutP > 0;};
- // Dump everything
void Reset();
+ // Dump everything
void Stats();
CircleBuf(unsigned long long Size);
diff --git a/methods/http_main.cc b/methods/http_main.cc
index 3b346a514..cd52c42e8 100644
--- a/methods/http_main.cc
+++ b/methods/http_main.cc
@@ -1,5 +1,6 @@
#include <config.h>
-
+#include <apt-pkg/fileutl.h>
+#include <apt-pkg/error.h>
#include <signal.h>
#include "http.h"
@@ -13,5 +14,6 @@ int main()
signal(SIGPIPE, SIG_IGN);
HttpMethod Mth;
+
return Mth.Loop();
}
diff --git a/methods/https.cc b/methods/https.cc
index 0499af0c5..366148e19 100644
--- a/methods/https.cc
+++ b/methods/https.cc
@@ -37,6 +37,16 @@
/*}}}*/
using namespace std;
+bool HttpsMethod::Configuration(std::string Message)
+{
+ if (pkgAcqMethod::Configuration(Message) == false)
+ return false;
+
+ DropPrivsOrDie();
+
+ return true;
+}
+
size_t
HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp)
{
@@ -82,6 +92,12 @@ HttpsMethod::write_data(void *buffer, size_t size, size_t nmemb, void *userp)
if(me->File->Write(buffer, size*nmemb) != true)
return false;
+ if(me->Queue->MaximumSize > 0 && me->File->Tell() > me->Queue->MaximumSize)
+ {
+ me->SetFailReason("MaximumSizeExceeded");
+ return _error->Error("Writing more data than expected (%llu > %llu)",
+ me->TotalWritten, me->Queue->MaximumSize);
+ }
return size*nmemb;
}
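
https.cc enforces the same limit inside the libcurl write callback. A CURLOPT_WRITEFUNCTION callback signals failure by returning a value different from size*nmemb, which makes curl stop the transfer with CURLE_WRITE_ERROR; a reduced sketch of such a size-bounded callback (the Transfer struct is hypothetical, not the apt type):

#include <cstdio>

struct Transfer                       // hypothetical per-download state
{
   FILE *File;
   unsigned long long Written;
   unsigned long long MaximumSize;    // 0 means unlimited
};

// Called by libcurl for every received body chunk.
static size_t write_data(void *buffer, size_t size, size_t nmemb, void *userp)
{
   Transfer * const me = static_cast<Transfer *>(userp);
   size_t const bytes = size * nmemb;
   if (fwrite(buffer, 1, bytes, me->File) != bytes)
      return 0;                       // short write: abort the transfer
   me->Written += bytes;
   if (me->MaximumSize > 0 && me->Written > me->MaximumSize)
      return 0;                       // exceeds the announced maximum size
   return bytes;
}
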
diff --git a/methods/https.h b/methods/https.h
index faac8a3cd..9df18e83a 100644
--- a/methods/https.h
+++ b/methods/https.h
@@ -58,6 +58,8 @@ class HttpsMethod : public pkgAcqMethod
static const int DL_MIN_SPEED = 10;
virtual bool Fetch(FetchItem *);
+ virtual bool Configuration(std::string Message);
+
static size_t parse_header(void *buffer, size_t size, size_t nmemb, void *userp);
static size_t write_data(void *buffer, size_t size, size_t nmemb, void *userp);
static int progress_callback(void *clientp, double dltotal, double dlnow,
@@ -66,13 +68,13 @@ class HttpsMethod : public pkgAcqMethod
CURL *curl;
FetchResult Res;
HttpsServerState *Server;
+ unsigned long long TotalWritten;
public:
FileFd *File;
- HttpsMethod() : pkgAcqMethod("1.2",Pipeline | SendConfig), File(NULL)
+ HttpsMethod() : pkgAcqMethod("1.2",Pipeline | SendConfig), Server(NULL), TotalWritten(0), File(NULL)
{
- File = 0;
curl = curl_easy_init();
};
diff --git a/methods/server.cc b/methods/server.cc
index 92d94e638..c4689ff12 100644
--- a/methods/server.cc
+++ b/methods/server.cc
@@ -324,10 +324,10 @@ ServerMethod::DealWithHeaders(FetchResult &Res)
failure */
if (Server->Result < 200 || Server->Result >= 300)
{
- char err[255];
- snprintf(err,sizeof(err)-1,"HttpError%i",Server->Result);
+ std::string err;
+ strprintf(err, "HttpError%u", Server->Result);
SetFailReason(err);
- _error->Error("%u %s",Server->Result,Server->Code);
+ _error->Error("%u %s", Server->Result, Server->Code);
if (Server->HaveContent == true)
return ERROR_WITH_CONTENT_PAGE;
return ERROR_UNRECOVERABLE;
@@ -393,9 +393,16 @@ bool ServerMethod::Fetch(FetchItem *)
for (FetchItem *I = Queue; I != 0 && Depth < (signed)PipelineDepth;
I = I->Next, Depth++)
{
- // If pipelining is disabled, we only queue 1 request
- if (Server->Pipeline == false && Depth >= 0)
- break;
+ if (Depth >= 0)
+ {
+ // If pipelining is disabled, we only queue 1 request
+ if (Server->Pipeline == false)
+ break;
+ // if we have no hashes, do at most one such request
+ // as we can't fix up pipelining misbehaviors otherwise
+ else if (I->ExpectedHashes.usable() == false)
+ break;
+ }
// Make sure we stick with the same server
if (Server->Comp(I->Uri) == false)
@@ -525,6 +532,13 @@ int ServerMethod::Loop()
// Run the data
bool Result = true;
+
+ // ensure we don't fetch too much
+ // we could do "Server->MaximumSize = Queue->MaximumSize" here
+ // but that would break the clever pipeline mess-up detection
+ // so instead we use the size of the biggest item in the queue
+ Server->MaximumSize = FindMaximumObjectSizeInQueue();
+
if (Server->HaveContent)
Result = Server->RunData(File);
@@ -547,7 +561,38 @@ int ServerMethod::Loop()
// Send status to APT
if (Result == true)
{
- Res.TakeHashes(*Server->GetHashes());
+ Hashes * const resultHashes = Server->GetHashes();
+ HashStringList const hashList = resultHashes->GetHashStringList();
+ if (PipelineDepth != 0 && Queue->ExpectedHashes.usable() == true && Queue->ExpectedHashes != hashList)
+ {
+ // we did not get the expected hash:
+ // could it be that the server/proxy messed up pipelining?
+ FetchItem * BeforeI = Queue;
+ for (FetchItem *I = Queue->Next; I != 0 && I != QueueBack; I = I->Next)
+ {
+ if (I->ExpectedHashes.usable() == true && I->ExpectedHashes == hashList)
+ {
+ // yes, it did! Disable pipelining and rewrite the queue
+ if (Server->Pipeline == true)
+ {
+ // FIXME: fake a warning message as we have no proper way of communicating here
+ std::string out;
+ strprintf(out, _("Automatically disabled %s due to incorrect response from server/proxy. (man 5 apt.conf)"), "Acquire::http::PipelineDepth");
+ std::cerr << "W: " << out << std::endl;
+ Server->Pipeline = false;
+ // we keep the PipelineDepth value so that the rest of the queue can be fixed up as well
+ }
+ Rename(Res.Filename, I->DestFile);
+ Res.Filename = I->DestFile;
+ BeforeI->Next = I->Next;
+ I->Next = Queue;
+ Queue = I;
+ break;
+ }
+ BeforeI = I;
+ }
+ }
+ Res.TakeHashes(*resultHashes);
URIDone(Res);
}
else
@@ -567,7 +612,10 @@ int ServerMethod::Loop()
QueueBack = Queue;
}
else
+ {
+ Server->Close();
Fail(true);
+ }
}
break;
}
@@ -662,3 +710,13 @@ int ServerMethod::Loop()
return 0;
}
/*}}}*/
+ /*{{{*/
+unsigned long long
+ServerMethod::FindMaximumObjectSizeInQueue() const
+{
+ unsigned long long MaxSizeInQueue = 0;
+ for (FetchItem *I = Queue; I != 0 && I != QueueBack; I = I->Next)
+ MaxSizeInQueue = std::max(MaxSizeInQueue, I->MaximumSize);
+ return MaxSizeInQueue;
+}
+ /*}}}*/
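
The Loop() change above compares the hashes of a finished download against the expected hashes of the items queued behind it: if the data matches a later item, the server or a proxy answered the pipelined requests out of order, so the file is renamed to that item's destination, the item is pulled to the front of the queue and pipelining is disabled for the rest of the connection. A conceptual sketch with plain stand-in types (Item and FindRealOwner are illustrative, not apt API):

#include <cstddef>
#include <list>
#include <string>

struct Item { std::string DestFile; std::string ExpectedHash; };

// If the hash of the received body matches a *later* item in the pipeline,
// the response was delivered out of order and really belongs to that item.
static Item * FindRealOwner(std::list<Item> &Queue, std::string const &GotHash)
{
   std::list<Item>::iterator I = Queue.begin();
   if (I == Queue.end())
      return NULL;
   for (++I; I != Queue.end(); ++I)
      if (I->ExpectedHash == GotHash)
         return &*I;
   return NULL;   // no mismatch: the response belongs to the front of the queue
}
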
diff --git a/methods/server.h b/methods/server.h
index f5e68d902..7d5198478 100644
--- a/methods/server.h
+++ b/methods/server.h
@@ -49,6 +49,8 @@ struct ServerState
URI Proxy;
unsigned long TimeOut;
+ unsigned long long MaximumSize;
+
protected:
ServerMethod *Owner;
@@ -73,7 +75,7 @@ struct ServerState
bool Comp(URI Other) const {return Other.Host == ServerName.Host && Other.Port == ServerName.Port;};
virtual void Reset() {Major = 0; Minor = 0; Result = 0; Code[0] = '\0'; Size = 0;
StartPos = 0; Encoding = Closes; time(&Date); HaveContent = false;
- State = Header; Persistent = false; Pipeline = true;};
+ State = Header; Persistent = false; Pipeline = true; MaximumSize = 0;};
virtual bool WriteResponse(std::string const &Data) = 0;
/** \brief Transfer the data from the socket */
@@ -104,6 +106,10 @@ class ServerMethod : public pkgAcqMethod
unsigned long PipelineDepth;
bool AllowRedirect;
+ // Find the biggest item in the fetch queue so transfers can be checked
+ // against the maximum allowed size
+ unsigned long long FindMaximumObjectSizeInQueue() const APT_PURE;
+
public:
bool Debug;
@@ -140,7 +146,7 @@ class ServerMethod : public pkgAcqMethod
virtual ServerState * CreateServerState(URI uri) = 0;
virtual void RotateDNS() = 0;
- ServerMethod(const char *Ver,unsigned long Flags = 0) : pkgAcqMethod(Ver, Flags), Server(NULL), File(NULL), PipelineDepth(0), AllowRedirect(false), Debug(false) {};
+ ServerMethod(const char *Ver,unsigned long Flags = 0) : pkgAcqMethod(Ver, Flags), Server(NULL), File(NULL), PipelineDepth(10), AllowRedirect(false), Debug(false) {};
virtual ~ServerMethod() {};
};
diff --git a/po/ChangeLog b/po/ChangeLog
deleted file mode 100644
index 373ef4ca6..000000000
--- a/po/ChangeLog
+++ /dev/null
@@ -1,1077 +0,0 @@
-2009-09-26 Christian Perrier <bubulle@debian.org>
-
- * LINGUAS: re-disabled Hebrew translation on translator's request.
-
-2009-06-05 Jordi Mallach <jordi@debian.org>
-
- * ca.po: Updated to 539t
-
-2009-06-04 Milo Casagrande <milo@ubuntu.com>
-
- * it.po: Updated to 539t
-
-2009-06-01 Deng Xiyue <manphiz-guest@users.alioth.debian.org>
-
- * zh_CN.po: Updated to 539t
-
-2009-05-21 Marcos <marcos.alvarez.costales@gmail.com>
-
- * ast.po: Updated to 539t
-
-2009-04-28 Ivan Masár <helix84@centrum.sk>
-
- * sk.po: Updated to 539t
-
-2009-04-23 Christian Perrier <bubulle@debian.org>
-
- * Update all PO files and apt-all.pot. 545 strings.
- Formerly complete PO files are now 539t1f6u
- * fr.po: updated to 545t.
-
-2009-03-19 Ivan Masár <helix84@centrum.sk>
-
- * sk.po: Updated to 539t
-
-2009-03-04 Daniel Nylander <po@danielnylander.se>
-
- * sv.po: Updated to 539t
-
-2009-02-23 Christian Perrier <bubulle@debian.org>
-
- * Update all PO files and apt-all.pot. 539 strings.
- Formerly complete PO files are now 538t1u
- * fr.po: updated to 539t.
-
-2009-02-01 Hans Fredrik Nordhaug <hans@nordhaug.priv.no>
-
- * nb.po: updated to 539t.
-
-2009-01-27 Damyan Ivanov <dmn@debian.org>
-
- * bg.po: updated to 539t.
-
-2008-12-11 Christian Perrier <bubulle@debian.org>
-
- * fr.po: fix spelling error to "défectueux"
-
-2009-01-04 Tetralet <tetralet@gmail.com>
-
- * zh_TW.po: Added as 538t1u.
-
-2008-12-22 Clytie Siddall <clytie@riverland.net.au>
-
- * vi.po: updated to 539t.
-
-2008-12-22 Jordi Mallach <jordi@debian.org>
-
- * ca.po: updated to 539t.
-
-2008-12-19 Marcelino Villarino <mvillarino@gmail.com>
-
- * gl.po: updated to 539t.
-
-2008-12-12 Tapio Lehtonen <tale@debian.org>
-
- * fi.po: updated to 539t.
-
-2008-12-06 Christian Perrier <bubulle@debian.org>
-
- * fr.po: dropped awful use of first person
-
-2008-11-23 Artem Bondarenko <artem.brz@gmail.com>
-
- * uk.po: updated to 477t55f7u
-
-2008-11-23 Sampada Nakhare <sampadanakhare@gmail.com>
-
- * mr.po: updated to 539t
-
-2008-11-21 Yuri Kozlov <kozlov.y@gmail.com>
-
- * ru.po: Update to 539t
-
-2008-11-18 Piarres Beobide <pi@beobide.net>
-
- * eu.po: updated to 539t.
-
-2008-11-17 Felipe Augusto van de Wiel (faw) <faw@debian.org>
-
- * pt_BR.po: updated to 539t.
-
-2008-11-17 Hans Fredrik Nordhaug <hans@nordhaug.priv.no>
-
- * nb.po: updated to 539t.
-
-2008-11-17 Miroslav Kure <kurem@upcase.inf.upol.cz>
-
- * cs.po: updated to 539t.
-
-2008-11-17 Miguel Figueiredo <elmig@debianpt.org>
-
- * pt.po: Updated to 539t
-
-2008-11-16 Javier Fernandez-Sanguino <jfs@debian.org>
-
- * es.po: updated to 539t
-
-2008-11-15 Deng Xiyue <manphiz-guest@users.alioth.debian.org>
-
- * zh_CN.po: updated to 539t.
-
-2008-11-15 Eddy Petrișor <eddy.petrisor@gmail.com>
-
- * ro.po: updated to 539t.
-
-2008-11-15 Javier Fernandez-Sanguino <jfs@debian.org>
-
- * es.po: updated to 536t3f2u
-
-2008-11-14 Holger Wansing <linux@wansing-online.de>
-
- * de.po: Updated to 539t
-
-2008-11-14 Wiktor Wandachowicz <siryes@gmail.com>
-
- * pl.po: Updated to 539t
-
-2008-11-14 Neil Williams <linux@codehelp.co.uk>
-
- * en_GB.po: Updated to 539t
-
-2008-11-14 Samuele Giovanni Tonon <samu@debian.org>
-
- * it.po: Updated to 539t
-
-2008-11-14 Jordi Mallach <jordi@debian.org>
-
- * ca.po: Updated to 539t
-
-2008-11-14 Changwoo Ryu <cwryu@debian.org>
-
- * ko.po: Updated to 539t
-
-2008-11-14 Kenshi Muto <kmuto@debian.org>
-
- * ja.po: Updated to 539t
-
-2008-11-14 Daniel Nylander <po@danielnylander.se>
-
- * sv.po: Updated to 539t
-
-2008-11-14 Ivan Masár <helix84@centrum.sk>
-
- * sk.po: Updated to 539t
-
-2008-11-13 Damyan Ivanov <dmn@debian.org>
-
- * bg.po: Updated to 538t1f
-
-2008-11-13 Christian Perrier <bubulle@debian.org>
-
- * Update all PO files and apt-all.pot. 538 strings.
- Formerly complete PO files are now 538t1f
- * French translation re-completed
-
-2008-11-09 Theppitak Karoonboonyanan <thep@linux.thai.net>
-
- * th.po: updated to 540t.
-
-2008-11-06 Christian Perrier <bubulle@debian.org>
-
- * Update all PO files and apt-all.pot. 540 strings.
- Formerly complete PO files are now 538t1f1u
-
-2008-09-19 Jordi Mallach <jordi@debian.org>
-
- * ca.po: Update to 538t
-
-2008-09-16 Wiktor Wandachowicz <siryes@gmail.com>
-
- * pl.po: Update to 538t
-
-2008-09-16 Yuri Kozlov <kozlov.y@gmail.com>
-
- * ru.po: Update to 538t
-
-2008-09-12 Emmanuel Galatoulas <galaxico@quad-nrg.net>
-
- * el.po: Update to 538t
-
-2008-09-10 Miguel Figueiredo <elmig@debianpt.org>
-
- * pt.po: Updated to 538t
-
-2008-09-05 Clytie Siddall <clytie@riverland.net.au>
-
- * vi.po: updated to 538t.
-
-2008-09-01 Hans Fredrik Nordhaug <hans@nordhaug.priv.no>
-
- * nb.po: updated to 538t.
-
-2008-08-31 Miroslav Kure <kurem@upcase.inf.upol.cz>
-
- * cs.po: updated to 538t.
-
-2008-08-28 Piarres Beobide <pi@beobide.net>
-
- * eu.po: updated to 538t.
-
-2008-08-26 Felipe Augusto van de Wiel (faw) <faw@debian.org>
-
- * pt_BR.po: updated to 538t.
-
-2008-08-18 Deng Xiyue <manphiz-guest@users.alioth.debian.org>
-
- * zh_CN.po: updated to 538t.
-
-2008-08-07 Serafeim Zanikolas <serzan@hellug.gr>
-
- * el.po: updated to 534t3f1u.
-
-2008-08-02 Gintautas Miliauskas <gintas@akl.lt>
-
- * lt.po: updated to 300t4f234u.
-
-2008-08-01 Kenshi Muto <kmuto@debian.org>
-
- * ja.po: updated to 538t.
-
-2008-07-28 Eddy Petrisor <eddy.petrisor@gmail.com>
-
- * ro.po: updated to 538t.
-
-2008-07-28 Jacobo Tarrio <jtarrio@trasno.net>
-
- * gl.po: updated to 538t.
-
-2008-07-27 Ivan Masár <helix84@centrum.sk>
-
- * sk.po: Updated to 538t
-
-2008-07-26 Damyan Ivanov <dmn@debian.org>
-
- * bg.po: Updated to 538t
-
-2008-07-26 Christian Perrier <bubulle@debian.org>
-
- * fr.po: Updated to 538t
-
-2008-07-25 Michael Vogt <mvo@debian.org>
-
- * Update all PO files and apt-all.pot. 538 strings.
- Formerly complete PO files are now 536t1f1u
-
-2008-07-21 Miguel Figueiredo <elmig@debianpt.org>
-
- * pt.po: Updated to 536t
-
-2008-07-19 Changwoo Ryu <cwryu@debian.org>
-
- * ko.po: Updated to 536t
-
-2008-07-12 Holger Wansing <linux@wansing-online.de>
-
- * de.po: corrected.
-
-2008-06-29 Asho Yeh <asho@debian.org.tw>
-
- * zh_TW.po: Updated to 536t
-
-2008-06-27 Eddy Petrisor <eddy.petrisor@gmail.com>
-
- * ro.po: updated to 536t.
-
-2008-05-14 Hans Fr. Nordhaug <hans@nordhaug.priv.no>
-
- * nb.po: updated to 536t.
-
-2008-05-11 SZERVÁC Attila <sas@321.hu>
-
- * hu.po: updated to 536t.
-
-2008-05-11 Felipe Augusto van de Wiel (faw) <faw@debian.org>
-
- * pt_BR.po: updated to 536t.
-
-2008-05-08 Erdal Ronahi <erdal dot ronahi at gmail dot com>
-
- * ku.po: updated to 136t25f343u
-
-2008-05-08 Bart Cornelis <cobaco@skolelinux.no>
-
- * nl.po: updated to 536t.
-
-2008-05-07 Jens Seidel <jensseidel@users.sf.net>
-
- * de.po: updated to 536t.
-
-2008-05-07 Peter Karlsson <peterk@debian.org>
-
- * sv.po: updated to 536t.
-
-2008-05-07 Miguel Figueiredo <elmig@debianpt.org>
-
- * pt.po: updated to 536t.
-
-2008-05-07 Jacobo Tarrio <jtarrio@trasno.net>
-
- * gl.po: updated to 536t.
-
-2008-05-07 Yuri Kozlov <kozlov.y@gmail.com>
-
- * ru.po: updated to 536t.
-
-2008-05-07 Clytie Siddall <clytie@riverland.net.au>
-
- * vi.po: updated to 536t.
-
-2008-05-06 Peter Mann <Peter.Mann@tuke.sk>
-
- * sk.po: updated to 536t.
-
-2008-05-06 Miroslav Kure <kurem@upcase.inf.upol.cz>
-
- * cs.po: updated to 536t.
-
-2008-05-06 Kenshi Muto <kmuto@debian.org>
-
- * ja.po: updated to 536t.
-
-2008-05-05 Piarres Beobide <pi@beobide.net>
-
- * eu.po: updated to 536t.
-
-2008-05-05 Sunjae Park <darehanl@gmail.com>
-
- * ko.po: updated to 536t.
-
-2008-05-05 Tapio Lehtonen <tale@debian.org>
-
- * fi.po: updated to 536t.
-
-2008-05-04 Damyan Ivanov <dmn@debiian.org>
-
- * bg.po: updated to 536t.
-
-2008-05-04 Samuele Giovanni Tonon <samu@debian.org>
-
- * it.po: updated to 536t.
-
-2008-05-04 Wiktor Wandachowicz <siryes@gmail.com>
-
- * pl.po: updated to 536t.
-
-2008-05-04 Theppitak Karoonboonyanan <thep@linux.thai.net>
-
- * th.po: updated to 536t.
-
-2008-05-04 Christian Perrier <bubulle@debian.org>
-
- * fr.po: updated to 536t.
-
-2008-05-04 Christian Perrier <bubulle@debian.org>
-
- * Update all PO files and apt-all.pot. 536 strings.
- Formerly complete PO files are now 535t1u (new string
- from dselect/install. See #322470
-
-2008-05-04 Deng Xiyue <manphiz-guest@users.alioth.debian.org>
-
- * zh_CN.po: updated to 535t. Closes: #473360
-
-2008-05-03 Clytie Siddall <clytie@riverland.net.au>
-
- * vi.po: updated to 535t. Closes: #479008
-
-2008-04-19 Jacobo Tarrío <jtarrio@debian.org>
-
- * gl.po: updated to 536t.
-
-2008-04-16 Damyan Ivanov <dmn@debian.org>
-
- * bg.po: updated to 536t.
-
-2008-04-16 Christian Perrier <bubulle@debian.org>
-
- * fr.po: updated to 536t.
-
-2008-03-19 Ivan Masár <helix84@centrum.sk>
-
- * sk.po: updated to 536t.
-
-2008-03-06 Wiktor Wandachowicz <siryes@gmail.com>\
-
- * pl.po: updated to 536t.
-
-2008-02-28 Peter Karlsson <peterk@debian.org>
-
- * sv.po: updated to 536t.
-
-2008-02-21 Jens Seidel <jensseidel@users.sf.net>
-
- * de.po: updated to 536t. Closes: #466842
-
-2008-02-16 Deng Xiyue <manphiz-guest@users.alioth.debian.org>
-
- * zh_CN.po: updated to 536t. Closes: #465866
-
-2008-02-13 Sunjae Park <darehanl@gmail.com>
-
- * ko.po: updated to 529t7f. Closes: #448430
-
-2008-02-07 Miguel Figueiredo <elmig@debianpt.org>
-
- * pt.po: updated to 536t. Closes: #464575
-
-2008-01-19 Christian Perrier <bubulle@debian.org>
-
- * Preventive unfuzzy files for a message aimed at fixing #452640
-
-2008-01-19 Jacobo Tarrio <jtarrio@trasno.net>
-
- * gl.po: updated to 536t. Closes: #461468
-
-2008-01-17 Piarres Beobide <pi@beobide.net>
-
- * eu.po: updated to 536t. Closes: #461166
-
-2008-01-13 Christian Perrier <bubulle@debian.org>
-
- * Update all PO files and apt-all.pot. 536 strings.
- Formerly complete PO files are now 534t2f but were
- unfuzzied
-
-2008-01-04 Clytie Siddall <clytie@riverland.net.au>
-
- * vi.po: updated to 536t. Closes: #459013
-
-2008-01-02 Hans Fredrik Nordhaug <hans@nordhaug.priv.no>
-
- * nb.po: Updated to 536t. Closes: #457917
-
-2007-12-29 Deng Xiyue <manphiz-guest@users.alioth.debian.org>
-
- * zh_CN.po: Updated to 536t. Closes: #458039
-
-2007-12-18 Kenshi Muto <kmuto@debian.org>
-
- * ja.po: Updated to 536t. Closes: #456909
-
-2007-12-17 Christian Perrier <bubulle@debian.org>
-
- * fr.po: completed to 536t.
-
-2007-12-17 Christian Perrier <bubulle@debian.org>
-
- * Update all PO files and apt-all.pot. 536 strings.
- Formerly complete PO files are now 530t6f
-
-2007-12-15 Christian Perrier <bubulle@debian.org>
-
- * fr.po: completed to 542t.
-
-2007-12-15 Christian Perrier <bubulle@debian.org>
-
- * Update all PO files and apt-all.pot. 542 strings.
- Formerly complete PO files are now 536t6f
-
-2007-12-01 Clytie Siddall <clytie@riverland.net.au>
-
- * vi.po: updated to 536t.
-
-2007-11-27 Piarres Beobide <pi@beobide.net>
-
- * eu.po: updated to 536t.
-
-2007-11-01 Christian Perrier <bubulle@debian.org>
-
- * *.po: preventive unfuzzy after removal of an extra space
- in a message "Stored label: %s\n"
-
-2007-10-30 Peter Karlsson <peterk@debian.org>
-
- * sv.po: updated to 536t.
-
-2007-10-29 Jacobo Tarrio <jtarrio@trasno.net>
-
- * gl.po: updated to 536t. Closes: #448497
-
-2007-10-29 Sunjae Park <darehanl@gmail.com>
-
- * ko.po: Updated to 536t. Closes: #448430
-
-2007-10-28 Christian Perrier <bubulle@debian.org>
-
- * Add a bunch of languages that were not listed in LINGUAS:
- Arabic, Dzongkha, Khmer, Marathi, Nepali, Thai
-
-2007-10-28 Christian Perrier <bubulle@debian.org>
-
- * fr.po: completed to 536t.
-
-2007-10-28 Christian Perrier <bubulle@debian.org>
-
- * Update all PO files and apt-all.pot. 536 strings.
- Formerly complete PO files are now 532t3f1u
-
-2007-10-14 Jacobo Tarrio <jtarrio@trasno.net>
-
- * gl.po: updated to 535t. Closes: #446626
-
-2007-10-12 Peter Karlsson <peterk@debian.org>
-
- * sv.po: updated to 535t.
-
-2007-09-17 Theppitak Karoonboonyanan <thep@linux.thai.net>
-
- * th.po: added with 535t. Closes: #442833
-
-2007-09-07 Claus Hindsgaul <claus.hindsgaul@gmail.com>
-
- * da.po: completed to 532t3f. Closes: #441102
-
-2007-09-03 Clytie Siddall <clytie@riverland.net.au>
-
- * vi.po: completed to 535t. Closes: #440611
-
-2007-08-07 Piarres Beobide <pi@beobide.net>
-
- * eu.po: completed to 535t. Closes: #436425
-
-2007-08-04 Christian Perrier <bubulle@debian.org>
-
- * fr.po: completed to 535t.
-
-2007-08-04 Christian Perrier <bubulle@debian.org>
-
- * Update all PO and the POT. Gives 529t6f for formerly
- complete translations
-
-2007-07-11 Piarres Beobide <pi@beobide.net>
-
- * eu.po: completed to 532t. Closes: #423766
-
-2007-07-06 Christian Perrier <bubulle@debian.org>
-
- * Update all PO and the POT. Gives 529t3f for formerly
- complete translations
- * Unfuzzy formerly complete translations (es, fr, gl, vi)
-
-2007-06-21 Javier Fernandez-Sanguino <jfs@debian.org>
-
- * es.po: completed to 532t, again. Closes: #429935
-
-2007-06-21 Clytie Siddall <clytie@riverland.net.au>
-
- * vi.po: completed to 532t, again. Closes: #429899
-
-2007-06-19 Jacobo Tarrío <jtarrio@debian.org>
-
- * gl.po: completed to 532t. Closes: #429506
-
-2007-06-13 Clytie Siddall <clytie@riverland.net.au>
-
- * vi.po: completed to 532t. Closes: #428672
-
-2007-06-12 Christian Perrier <bubulle@debian.org>
-
- * Update all PO and the POT. Gives 514t14f4u for formerly
- complete translations
- * fr.po: completed to 532t
-
-2007-06-12 Christian Perrier <bubulle@debian.org>
-
- * ku.po, uk.po, LINGUAS: reintegrate those translations
- which disappeared from the BZR repositories
-
-2007-06-01 Clytie Siddall <clytie@riverland.net.au>
-
- * vi.po: Updated to 515t. Closes: #426976
-
-2007-05-13 Piarres Beobide <pi@beobide.net>
-
- * eu.po: Updated to 515t. Closes: #423766
-
-2007-05-10 Miguel Figueiredo <elmig@debianpt.org>
-
- * pt.po: 515t. Closes: #423111
-
-2007-05-08 Christian Perrier <bubulle@debian.org>
-
- * fr.po: Updated by Christian Perrier
-
-2007-05-08 Christian Perrier <bubulle@debian.org>
-
- * Update all PO and the POT. Gives 513t2f for formerly
- complete translations
-
-2007-04-01 priti Patil <prithisd@gmail.com>
-
- * mr.po: New Marathi translation
- Closes: #416806
-
-2007-03-31 Kov Chai <tchaikov@sjtu.org>
-
- * zh_CN.po: Updated by Kov Chai
- Closes: #416822
-
-2007-03-29 eric pareja <xenos@upm.edu.ph>
-
- * tl.po: Updated by Eric Pareja
- Closes: #416638
-
-2007-02-28 Jacobo Tarrio <jtarrio@trasno.net>
-
- * gl.po: Updated by Jacobo Tarrio
- Closes: #412828
-
-2007-02-03 Claus Hindsgaul <claus.hindsgaul@gmail.com>
-
- * da.po: Updated by Claus Hindsgaul
- Closes: #409483
-
-2007-01-29 Christian Perrier <bubulle@debian.org>
-
- * fr.po: Remove a non-breakable space for usability
- issues. Closes: #408877
-
-2006-12-12 Yuri Kozlov <kozlov.y@gmail.com>
-
- * ru.po: Updated Russian translation. Closes: #405476
-
-2006-12-12 Christian Perrier <bubulle@debian.org>
-
- * *.po: Unfuzzy after upstream typo corrections
-
-2006-12-12 Eugeniy Meshcheryakov <eugen@debian.org>
-
- * uk.po: Updated Ukrainian translation: 495t16f3u
-
-2006-11-04 Artem Bondarenko <artem.brz@gmail.com>
-
- * uk.po: New Ukrainian translation: 483t28f3u
-
-2006-11-02 Emmanuel Galatoulas <galas@tee.gr>
-
- * el.po: Update to 503t9f2u
-
-2006-10-24 Michael Piefel <piefel@debian.org>
-
- * de.po: Updates and corrections.
-
-2006-10-22 Jordi Mallach <jordi@debian.org>
-
- * ca.po: Updated to 514t
-
-2006-10-22 Bart Cornelis <cobaco@linux.be>
-
- * be.po: Updated to 514t
-
-2006-10-21 Samuele Giovanni Tonon <samu@debian.org>
-
- * it.po: Updated to 514t
-
-2006-10-21 SZERVÁC Attila <sas@321.hu>
-
- * hu.po: Updated to 514t
-
-2006-10-21 Asho Yeh <asho@debian.org.tw>
-
- * zh_TW.po: Updated to 514t
-
-2006-10-21 Ossama M. Khayat <okhayat@yahoo.com>
-
- * ar.po: Updated to 293t221u.
-
-2006-10-16 Yuri Kozlov <kozlov.y@gmail.com>
-
- * ru.po: Updated to 514t. Closes: #392466
-
-2006-10-16 Hans Fredrik Nordhaug <hans@nordhaug.priv.no>
-
- * nb.po: Updated to 514t. Closes: #392466
-
-2006-10-15 Rui Az. <astronomy@mail.pt>
-
- * pt.po: Updated to 514t. Closes: #393199
-
-2006-10-14 Christian Perrier <bubulle@debian.org>
-
- * fr.po: One spelling error corrected: s/accèder/accéder
-
-2006-10-13 Khoem Sokhem <khoemsokhem@khmeros.info>
-
- * km.po: Updated to 514t.
-
-2006-10-13 Sunjae Park <darehanl@gmail.com>
-
- * ko.po: Updated to 514t.
-
-2006-10-12 Yavor Doganov <yavor@doganov.org>
-
- * bg.po: Updated to 514t.
-
-2006-10-12 Michael Piefel <piefel@debian.org>
-
- * de.po: Updated to 514t.
-
-2006-10-12 Neil Williams <linux@codehelp.co.uk>
-
- * en_GB.po: Updated to 514t.
-
-2006-10-08 Javier Fernández-Sanguino Peña <jfs@computer.org>
-
- * es.po: Updated to 514t. Closes: #391661
-
-2006-10-06 Claus Hindsgaul <claus.hindsgaul@gmail.com>
-
- * da.po: Updated to 514t. Closes: #391424
-
-2006-10-04 Miroslav Kure <kurem@upcase.inf.upol.cz>
-
- * cs.po: Updated. Closes: #391064
-
-2006-09-29 Tapio Lehtonen <tale@debian.org>
-
- * fi.po: Updated to 514t. Closes: #390149
-
-2006-09-27 Piarres Beobide <pi@beobide.net>
-
- * eu.po: Updated to 514t. Closes: #389725
-
-2006-09-21 Clytie Siddall <clytie@riverland.net.au>
-
- * vi.po: Updated to 514t. Closes: #388555
-
-2006-09-20 Sorin Batariuc <sorin@bonbon.net>
-
- * ro.po: Updated to 514t. Closes: #388402
-
-2006-09-18 Kinley Tshering <gasepkuenden2k3@hotmail.com>
-
- * dz.po: Updated to 514t. Closes: #388184
-
-2006-09-17 Davide Viti <zinosat@tiscali.it>
-
- * it.po: Fixed typos. Closes: #387812
-
-2006-09-17 Erdal Ronahi <erdal.ronahi@gmail.com>
-
- * ku.po: New kurdish translation. Closes: #387766
- 71t40f403u
-
-2006-09-10 Peter Mann <Peter.Mann@tuke.sk>
-
- * sk.po: Updated to 514t. Closes: #386851
-
-2006-09-08 Christian Perrier <bubulle@debian.org>
-
- * LINGUAS: re-enabled Hebrew translation on translator's request.
-
-2006-09-08 Kenshi Muto <kmuto@debian.org>
-
- * ja.po: Updated to 514t. Closes: #386537
-
-2006-09-06 Jacobo Tarrio <jtarrio@debian.org>
-
- * gl.po: Updated to 514t. Closes: #386397
-
-2006-09-02 Christian Perrier <bubulle@debian.org>
-
- * fr.po: Updated to 516t.
-
-2006-09-02 Christian Perrier <bubulle@debian.org>
-
- * fr.po: Updated to 516t.
-
-2006-08-20 Christian Perrier <bubulle@debian.org>
-
- * Update all PO and the POT. Gives 512t3f1uf for formerly
- complete translations
-
-2006-08-13 Tapio Lehtonen <tale@debian.org>
-
- * fi.po: Updated to 512t. Closes: #382702
-
-2006-07-19 Sunjae Park <darehanl@gmail.com>
-
- * ko.po: Updated to 512t. Closes: #378901
-
-2006-07-02 SZERVAC Attila <sas@321.hu>
-
- * hu.po: Updated to 512t. Closes: #376330
-
-2006-07-01 Leang Chumsoben <soben@khmeros.info>
-
- * km.po: New Khmer translation: 506t6f. Closes: #375068
-
-2006-07-01 Shiva Pokharel <pokharelshiva@hotmail.com>
-
- * ne.po: New Nepali translation: 512t. Closes: #373729
-
-2006-07-01 Clytie Siddall <clytie@riverland.net.au>
-
- * vi.po: Updated to 512t. Closes: #368038
-
-2006-07-01 Christian Perrier <bubulle@debian.org>
-
- * zh_TW.po: Remove an extra %s in one string. Closes: #370551
-
-2006-07-01 Kinley Tshering <gasepkuenden2k3@hotmail.com>
-
- * dz.po: New Dzongkha translation: 512t
-
-2006-06-25 Sorin Batariuc <sorin@bonbon.net>
-
- * ro.po: Updated to 512t
-
-2006-06-21 Piarres Beobide <pi@beobide.net>
-
- * eu.po: Updated
-
-2006-06-07 Piarres Beobide <pi@beobide.net>
-
- * eu.po: Updated
-
-2006-05-29 Peter Mann <Peter.Mann@tuke.sk>
-
- * sk.po: Completed to 512t
-
-2006-05-28 Piarres Beobide <pi@beobide.net>
-
- * eu.po: Completed to 512t
-
-2006-05-17 Christian Perrier <bubulle@debian.org>
-
- * fr.po: Completed to 512t
-
-2006-05-17 Daniel Nylander <yeager@lidkoping.net>
-
- * sv.po: Completed to 512t
-
-2006-05-16 Christian Perrier <bubulle@debian.org>
-
- * Update all PO and the POT. Gives 506t6f for formerly
- complete translations
-
-2006-04-01 Yavor Doganov <yavor@doganov.org>
-
- * bg.po: Added, complete to 512t. Closes: #360262
-
-2006-03-16 eric pareja <xenos@upm.edu.ph>
-
- * tl.po: Completed to 512t. Closes: #357215
-
-2006-03-13 Sorin Batariuc <sorin@bonbon.net>
-
- * ro.po: Completed to 512t. Closes: #355897
-
-2006-03-12 Miguel Figueiredo <elmig@debianpt.org>
-
- * pt.po: Completed to 512t. Closes: #355798
-
-2006-02-14 Carlos Z.F. Liu <carlosliu@users.sourceforge.net>
-
- * zh_CN.po: Completed to 512t. Closes: #353936
-
-2006-02-14 Samuele Giovanni Tonon <samu@debian.org>
-
- * it.po: Completed to 512t. Closes: #352803
-
-2006-02-13 Andre Luis Lopes <andrelop@debian.org>
-
- * ca.po: Completed to 512t. Closes: #352419
-
-2006-02-06 Jordi Mallach <jordi@debian.org>
-
- * ca.po: Completed to 512t. Closes: #351592
-
-2006-01-30 Piarres Beobide <pi@beobide.net>
-
- * eu.po: Completed to 512t. Closes: #350483
-
-2006-01-24 Kenshi Muto <kmuto@debian.org>
-
- * ja.po: Completed to 512t. Closes: #349806
-
-2006-01-23 Bartosz Fenski aka fEnIo <fenio@debian.org>
-
- * pl.po: Completed to 512t. Closes: #349514
-
-2006-01-23 Peter Mann <Peter.Mann@tuke.sk>
-
- * sk.po: Completed to 512t. Closes: #349474
-
-2006-01-23 Jacobo Tarrio <jtarrio@trasno.net>
-
- * gl.po: Completed to 512 strings
- Closes: #349407
-
-2006-01-22 Clytie Siddall <clytie@riverland.net.au>
-
- * vi.po: Completed to 512 strings
-
-2006-01-21 Daniel Nylander <yeager@lidkoping.net>
-
- * sv.po: Completed to 512 strings
- Closes: #349210
-
-2006-01-21 Yuri Kozlov <kozlov.y@gmail.com>
-
- * ru.po: Completed to 512 strings
- Closes: #349154
-
-2006-01-21 Claus Hindsgaul <claus_h@image.dk>
-
- * da.po: Completed to 512 strings
- Closes: #349084
-
-2006-01-20 Christian Perrier <bubulle@debian.org>
-
- * fr.po: Completed to 512 strings
- * LINGUAS: Add Welsh
-
-2006-01-20 Christian Perrier <bubulle@debian.org>
-
- * *.po: Updated from sources (512 strings)
-
-2006-01-20 Clytie Siddall <clytie@riverland.net.au>
-
- * vi.po: Completed to 511 strings
- Closes: #348968
-
-2006-01-18 Konstantinos Margaritis <markos@debian.org>
-
- * el.po: Completed to 511 strings
- Closes: #344642
-
-2005-11-07 Claus Hindsgaul <claus_h@image.dk>
-
- * da.po: Completed to 511 strings
- Closes: #348574
-
-2005-11-16 Andrew Deason <adeason@tjhsst.edu>
-
- * en_GB.po: Minor errors correction
-
-2005-11-12 Ruben Porras <nahoo82@telefonica.net>
-
- * es.po: Updated to 510t1f
- Closes: #348158
-
-2005-11-12 Jacobo Tarrio <jacobo@tarrio.org>
-
- * gl.po: Completed to 511 strings
- Closes: #347729
-
-2006-01-10 Samuele Giovanni Tonon <samu@mclink.it>
-
- * it.po: Yet another update
- Closes: #347435
-
-2006-01-09 Jonas Koelker <jonaskoelker@users.sourceforge.net>
-
- * en_GB.po, de.po: fix spaces errors in "Ign " translations
- Closes: #347258
-
-2006-01-09 Thomas Huriaux <thomas.huriaux@gmail.com>
-
- * makefile: make update-po a pre-requisite of clean target so
- that POT and PO files are always up-to-date
-
-2006-01-08 Daniel Nylander <yeager@lidkoping.net>
-
- * sv.po: Completed to 511t. Closes: #346450
-
-2006-01-06 Peter Mann <Peter.Mann@tuke.sk>
-
- * sk.po: Completed to 511t. Closes: #346369
-
-2006-01-06 Christian Perrier <bubulle@debian.org>
-
- * *.po: Updated from sources (511 strings)
- * fr.po: Completed to 511t
-
-2006-01-01 Samuele Giovanni Tonon <samu@mclink.it>
-
- * it.po: Completed to 510t
-
-2006-01-01 Neil Williams <linux@codehelp.co.uk>
-
- * en_GB.po: Completed to 510t
-
-2005-12-30 Miroslav Kure <kurem@upcase.inf.upol.cz>
-
- * cs.po: Completed to 510t
-
-2005-12-25 Ming Hua <minghua@rice.edu>
-
- * zh_CN.po: Completed to 510t
-
-2005-12-25 Konstantinos Margaritis <markos@debian.org>
-
- * el.po: Updated to 510t
-
-2005-12-19 Clytie Siddall <clytie@riverland.net.au>
-
- * vi.po: Updated to 383t93f34u
-
-2005-12-19 eric pareja <xenos@upm.edu.ph>
-
- * tl.po: Completed to 510 strings
- Closes: #344306
-
-2005-12-19 Daniel Nylander <yeager@lidkoping.net>
-
- * sv.po: Completed to 510 strings
- Closes: #344056
-
-2005-11-29 Christian Perrier <bubulle@debian.org>
-
- * LINGUAS: disabled Hebrew translation. Closes: #313283
-
-2005-12-05 Piarres Beobide <pi@beobide.net>
-
- * eu.po: Completed to 510 strings
- Closes: #342091
-
-2005-11-29 Christian Perrier <bubulle@debian.org>
-
- * fr.po: Completed to 510 strings
- * *.po : Synced with the POT files
-
-2005-11-14 Kov Tchai <tchaikov@sjtu.edu.cn>
-
- * zh_CN.po: Completed to 510 strings
- Definitely Closes: #338267
-
-2005-11-13 Kov Tchai <tchaikov@sjtu.edu.cn>
-
- * zh_CN.po: Completed to 507 strings
- Closes: #338267
-
-2005-11-09 Jacobo Tarrio <jacobo@tarrio.org>
-
- * gl.po: Completed to 510 strings
- Closes: #338356
-
-2005-11-08 Piarres Beobide <pi@beobide.net>
-
- * eu.po: Completed to 510 strings
- Closes: #338101
-
-2005-11-07 Claus Hindsgaul <claus_h@image.dk>
-
- * da.po: Completed to 510 strings
- Closes: #337949
-
-2005-11-04 Eric Pareja <xenos@upm.edu.ph>
-
- * tl.po: Completed to 510 strings
- Closes: #337306
-
-2005-11-04 Christian Perrier <bubulle@debian.org>
-
- * Changelog: added to better track down fixed issues
-
diff --git a/po/apt-all.pot b/po/apt-all.pot
index d2229a936..664965900 100644
--- a/po/apt-all.pot
+++ b/po/apt-all.pot
@@ -901,7 +901,7 @@ msgid "At least one invalid signature was encountered."
msgstr ""
#: methods/gpgv.cc:174
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
msgstr ""
#. TRANSLATORS: %s is a single techy word like 'NODATA'
@@ -913,7 +913,7 @@ msgid ""
msgstr ""
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
+msgid "Unknown error executing apt-key"
msgstr ""
#: methods/gpgv.cc:217 methods/gpgv.cc:224
diff --git a/po/ar.po b/po/ar.po
index c4069143e..1bea20877 100644
--- a/po/ar.po
+++ b/po/ar.po
@@ -914,7 +914,7 @@ msgid "At least one invalid signature was encountered."
msgstr ""
#: methods/gpgv.cc:174
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
msgstr ""
#. TRANSLATORS: %s is a single techy word like 'NODATA'
@@ -926,7 +926,7 @@ msgid ""
msgstr ""
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
+msgid "Unknown error executing apt-key"
msgstr ""
#: methods/gpgv.cc:217 methods/gpgv.cc:224
@@ -1578,8 +1578,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "حساب الترقية..."
+msgid "Calculating upgrade"
+msgstr "حساب الترقية"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/ast.po b/po/ast.po
index 629c8f2da..9f7d7b2d0 100644
--- a/po/ast.po
+++ b/po/ast.po
@@ -1021,8 +1021,8 @@ msgid "At least one invalid signature was encountered."
msgstr "Atopóse polo menos una robla mala."
#: methods/gpgv.cc:174
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
-msgstr "Nun pudo executase 'gpgv' pa verificar la robla (¿ta instaláu gpgv?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
+msgstr "Nun pudo executase 'apt-key' pa verificar la robla (¿ta instaláu gnupg?)"
#. TRANSLATORS: %s is a single techy word like 'NODATA'
#: methods/gpgv.cc:180
@@ -1033,8 +1033,8 @@ msgid ""
msgstr ""
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "Fallu desconocíu al executar gpgv"
+msgid "Unknown error executing apt-key"
+msgstr "Fallu desconocíu al executar apt-key"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1706,8 +1706,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "Calculando l'anovamientu... "
+msgid "Calculating upgrade"
+msgstr "Calculando l'anovamientu"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/bg.po b/po/bg.po
index ea7382513..e41b2bdd0 100644
--- a/po/bg.po
+++ b/po/bg.po
@@ -1051,10 +1051,10 @@ msgid "At least one invalid signature was encountered."
msgstr "Намерен е поне един невалиден подпис."
#: methods/gpgv.cc:174
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
msgstr ""
-"Неуспех при изпълнение на „gpgv“ за проверка на подписа (инсталиран ли е "
-"gpgv?)"
+"Неуспех при изпълнение на „apt-key“ за проверка на подписа (инсталиран ли е "
+"gnupg?)"
#. TRANSLATORS: %s is a single techy word like 'NODATA'
#: methods/gpgv.cc:180
@@ -1065,8 +1065,8 @@ msgid ""
msgstr ""
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "Неизвестна грешка при изпълнението на gpgv"
+msgid "Unknown error executing apt-key"
+msgstr "Неизвестна грешка при изпълнението на apt-key"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1743,8 +1743,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "Изчисляване на актуализацията..."
+msgid "Calculating upgrade"
+msgstr "Изчисляване на актуализацията"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/bs.po b/po/bs.po
index 3cf69f0b1..81294d8c8 100644
--- a/po/bs.po
+++ b/po/bs.po
@@ -920,7 +920,7 @@ msgid "At least one invalid signature was encountered."
msgstr ""
#: methods/gpgv.cc:174
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
msgstr ""
#. TRANSLATORS: %s is a single techy word like 'NODATA'
@@ -932,7 +932,7 @@ msgid ""
msgstr ""
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
+msgid "Unknown error executing apt-key"
msgstr ""
#: methods/gpgv.cc:217 methods/gpgv.cc:224
@@ -1575,8 +1575,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "Računam nadogradnju..."
+msgid "Calculating upgrade"
+msgstr "Računam nadogradnju"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/ca.po b/po/ca.po
index 31961ecac..db6d12e77 100644
--- a/po/ca.po
+++ b/po/ca.po
@@ -1034,10 +1034,10 @@ msgid "At least one invalid signature was encountered."
msgstr "S'ha trobat almenys una signatura invàlida."
#: methods/gpgv.cc:174
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
msgstr ""
-"No s'ha pogut executar el «gpgv» per a verificar la signatura (està "
-"instaŀlat el gpgv?)"
+"No s'ha pogut executar el «apt-key» per a verificar la signatura (està "
+"instaŀlat el gnupg?)"
#. TRANSLATORS: %s is a single techy word like 'NODATA'
#: methods/gpgv.cc:180
@@ -1048,8 +1048,8 @@ msgid ""
msgstr ""
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "S'ha produït un error desconegut en executar el gpgv"
+msgid "Unknown error executing apt-key"
+msgstr "S'ha produït un error desconegut en executar el apt-key"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1731,8 +1731,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "S'està calculant l'actualització… "
+msgid "Calculating upgrade"
+msgstr "S'està calculant l'actualització"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/cs.po b/po/cs.po
index 5bb6a79b7..be22ecd09 100644
--- a/po/cs.po
+++ b/po/cs.po
@@ -8,7 +8,7 @@ msgstr ""
"Project-Id-Version: apt\n"
"Report-Msgid-Bugs-To: APT Development Team <deity@lists.debian.org>\n"
"POT-Creation-Date: 2014-09-09 20:35+0200\n"
-"PO-Revision-Date: 2014-08-15 13:30+0200\n"
+"PO-Revision-Date: 2014-10-05 06:09+0200\n"
"Last-Translator: Miroslav Kure <kurem@debian.cz>\n"
"Language-Team: Czech <debian-l10n-czech@lists.debian.org>\n"
"Language: cs\n"
@@ -122,7 +122,7 @@ msgstr "Soubory balíku:"
#: cmdline/apt-cache.cc:1553 cmdline/apt-cache.cc:1644
msgid "Cache is out of sync, can't x-ref a package file"
-msgstr "Cache není synchronizovaná, nemohu se odkázat na soubor balíku"
+msgstr "Cache není synchronizovaná, nelze se odkázat na soubor balíku"
#. Show any packages have explicit pins
#: cmdline/apt-cache.cc:1567
@@ -622,7 +622,7 @@ msgstr ""
#: cmdline/apt-helper.cc:36
msgid "Need one URL as argument"
-msgstr ""
+msgstr "Jako argument vyžaduje jedno URL"
#: cmdline/apt-helper.cc:49
msgid "Must specify at least one pair url/filename"
@@ -633,7 +633,6 @@ msgid "Download Failed"
msgstr "Stažení selhalo"
#: cmdline/apt-helper.cc:80
-#, fuzzy
msgid ""
"Usage: apt-helper [options] command\n"
" apt-helper [options] download-file uri target-path\n"
@@ -653,6 +652,7 @@ msgstr ""
"\n"
"Příkazy:\n"
" download-file - stáhne zadané uri do cílové cesty\n"
+" auto-detect-proxy - detekuje proxy pomocí apt.conf\n"
"\n"
" Tento APT pomocník má schopnosti svatého čehokoliv.\n"
@@ -1055,7 +1055,7 @@ msgstr "Nelze se připojit k %s:%s:"
#: methods/gpgv.cc:168
msgid ""
"Internal error: Good signature, but could not determine key fingerprint?!"
-msgstr "Vnitřní chyba: Dobrý podpis, ale nemohu zjistit otisk klíče?!"
+msgstr "Vnitřní chyba: Dobrý podpis, ale nelze zjistit otisk klíče?!"
#: methods/gpgv.cc:172
msgid "At least one invalid signature was encountered."
@@ -1383,7 +1383,7 @@ msgstr "Poznámka: Toto má svůj důvod a děje se automaticky v dpkg."
#: apt-private/private-install.cc:391
msgid "We are not supposed to delete stuff, can't start AutoRemover"
-msgstr "Neměli bychom mazat věci, nemůžu spustit AutoRemover"
+msgstr "Neměli bychom mazat věci, nelze spustit AutoRemover"
#: apt-private/private-install.cc:499
msgid ""
@@ -2406,7 +2406,7 @@ msgstr "Toto není platný DEB archiv, chybí část „%s“"
#: apt-inst/deb/debfile.cc:132
#, c-format
msgid "Internal error, could not locate member %s"
-msgstr "Vnitřní chyba, nemohu najít část %s"
+msgstr "Vnitřní chyba, nelze najít část %s"
#: apt-inst/deb/debfile.cc:227
msgid "Unparsable control file"
@@ -2735,7 +2735,7 @@ msgstr "Generování závislostí"
#: apt-pkg/depcache.cc:188 apt-pkg/depcache.cc:221 apt-pkg/depcache.cc:225
msgid "Reading state information"
-msgstr "Čtu stavové informace"
+msgstr "Načítají se stavové informace"
#: apt-pkg/depcache.cc:250
#, c-format
@@ -2994,7 +2994,7 @@ msgstr "Nešlo vyhodnotit seznam zdrojových balíků %s"
#: apt-pkg/pkgcachegen.cc:1299 apt-pkg/pkgcachegen.cc:1403
#: apt-pkg/pkgcachegen.cc:1409 apt-pkg/pkgcachegen.cc:1566
msgid "Reading package lists"
-msgstr "Čtu seznamy balíků"
+msgstr "Načítají se seznamy balíků"
#: apt-pkg/pkgcachegen.cc:1316
msgid "Collecting File Provides"
@@ -3080,7 +3080,7 @@ msgstr "Zkomolený řádek %lu v seznamu zdrojů %s (zpracování URI)"
#: apt-pkg/sourcelist.cc:217
#, c-format
msgid "Malformed line %lu in source list %s (absolute dist)"
-msgstr "Zkomolený řádek %lu v seznamu zdrojů %s (Absolutní dist)"
+msgstr "Zkomolený řádek %lu v seznamu zdrojů %s (absolutní dist)"
#: apt-pkg/sourcelist.cc:224
#, c-format
diff --git a/po/cy.po b/po/cy.po
index dbf4bfba9..dd6feb3d4 100644
--- a/po/cy.po
+++ b/po/cy.po
@@ -1042,7 +1042,7 @@ msgid "At least one invalid signature was encountered."
msgstr ""
#: methods/gpgv.cc:174
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
msgstr ""
#. TRANSLATORS: %s is a single techy word like 'NODATA'
@@ -1054,7 +1054,7 @@ msgid ""
msgstr ""
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
+msgid "Unknown error executing apt-key"
msgstr ""
#: methods/gpgv.cc:217 methods/gpgv.cc:224
@@ -1731,8 +1731,8 @@ msgstr ""
#: apt-private/private-upgrade.cc:25
#, fuzzy
-msgid "Calculating upgrade... "
-msgstr "Yn Cyfrifo'r Uwchraddiad... "
+msgid "Calculating upgrade"
+msgstr "Yn Cyfrifo'r Uwchraddiad"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/da.po b/po/da.po
index 11ef78a5e..e6c853fd3 100644
--- a/po/da.po
+++ b/po/da.po
@@ -1075,9 +1075,9 @@ msgid "At least one invalid signature was encountered."
msgstr "Stødte på mindst én ugyldig signatur."
#: methods/gpgv.cc:174
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
msgstr ""
-"Kunne ikke køre »gpgv« for at verificere signaturen (er gpgv installeret?)"
+"Kunne ikke køre »apt-key« for at verificere signaturen (er gnupg installeret?)"
#. TRANSLATORS: %s is a single techy word like 'NODATA'
#: methods/gpgv.cc:180
@@ -1090,8 +1090,8 @@ msgstr ""
"autentificering?)"
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "Ukendt fejl ved kørsel af gpgv"
+msgid "Unknown error executing apt-key"
+msgstr "Ukendt fejl ved kørsel af apt-key"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1771,8 +1771,8 @@ msgid "All packages are up to date."
msgstr "Alle pakker er opdateret."
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "Beregner opgraderingen ... "
+msgid "Calculating upgrade"
+msgstr "Beregner opgraderingen"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/de.po b/po/de.po
index ae3b15d41..13b291043 100644
--- a/po/de.po
+++ b/po/de.po
@@ -1113,9 +1113,9 @@ msgid "At least one invalid signature was encountered."
msgstr "Mindestens eine ungültige Signatur wurde entdeckt."
#: methods/gpgv.cc:174
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
msgstr ""
-"»gpgv« konnte zur Überprüfung der Signatur nicht ausgeführt werden (ist gpgv "
+"»apt-key« konnte zur Überprüfung der Signatur nicht ausgeführt werden (ist gnupg "
"installiert?)"
#. TRANSLATORS: %s is a single techy word like 'NODATA'
@@ -1129,8 +1129,8 @@ msgstr ""
"das Netzwerk eine Authentifizierung?)"
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "Unbekannter Fehler beim Ausführen von gpgv"
+msgid "Unknown error executing apt-key"
+msgstr "Unbekannter Fehler beim Ausführen von apt-key"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1833,8 +1833,8 @@ msgid "All packages are up to date."
msgstr "Alle Pakete sind aktuell."
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "Paketaktualisierung (Upgrade) wird berechnet... "
+msgid "Calculating upgrade"
+msgstr "Paketaktualisierung (Upgrade) wird berechnet"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/dz.po b/po/dz.po
index 3ed665f94..8ee4d15da 100644
--- a/po/dz.po
+++ b/po/dz.po
@@ -1016,7 +1016,7 @@ msgstr "ཉུང་མཐའ་རང་ནུས་མེད་ཀྱི་མ
#: methods/gpgv.cc:174
#, fuzzy
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
msgstr ""
"མིང་རྟགས་བདེན་སྦྱོར་འབད་ནི་ལུ་'%s'འདི་ལག་ལེན་འཐབ་མ་ཚུགས། (gpgv་དེ་ཁཞི་བཙུགས་འབད་ཡོདཔ་ཨིན་ན།?)"
@@ -1029,8 +1029,8 @@ msgid ""
msgstr ""
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "gpgv་ལག་ལེན་འཐབ་ནི་ལུ་མ་ཤེས་པའི་འཛོལ་བ་།"
+msgid "Unknown error executing apt-key"
+msgstr "apt-key་ལག་ལེན་འཐབ་ནི་ལུ་མ་ཤེས་པའི་འཛོལ་བ་།"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1696,8 +1696,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "ཡར་བསྐྱེད་རྩིས་བཏོན་དོ་... "
+msgid "Calculating upgrade"
+msgstr "ཡར་བསྐྱེད་རྩིས་བཏོན་དོ་"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/el.po b/po/el.po
index 92f178b0d..1b19f54da 100644
--- a/po/el.po
+++ b/po/el.po
@@ -1028,10 +1028,10 @@ msgstr "Βρέθηκε τουλάχιστον μια μη έγκυρη υπογ
#: methods/gpgv.cc:174
#, fuzzy
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
msgstr ""
"Αδυναμία εκτέλεσης του '%s' για την επαλήθευση της υπογραφής (είναι "
-"εγκατεστημένο το gpgv;)"
+"εγκατεστημένο το gnupg;)"
#. TRANSLATORS: %s is a single techy word like 'NODATA'
#: methods/gpgv.cc:180
@@ -1042,8 +1042,8 @@ msgid ""
msgstr ""
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "Άγνωστο σφάλμα κατά την εκτέλεση του gpgv"
+msgid "Unknown error executing apt-key"
+msgstr "Άγνωστο σφάλμα κατά την εκτέλεση του apt-key"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1718,8 +1718,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "Υπολογισμός της αναβάθμισης... "
+msgid "Calculating upgrade"
+msgstr "Υπολογισμός της αναβάθμισης"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/es.po b/po/es.po
index 471774590..f808761df 100644
--- a/po/es.po
+++ b/po/es.po
@@ -1086,9 +1086,9 @@ msgid "At least one invalid signature was encountered."
msgstr "Se encontró al menos una firma inválida."
#: methods/gpgv.cc:174
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
msgstr ""
-"No se pudo ejecutar «gpgv» para verificar la firma (¿está instalado gpgv?)"
+"No se pudo ejecutar «apt-key» para verificar la firma (¿está instalado gnupg?)"
#. TRANSLATORS: %s is a single techy word like 'NODATA'
#: methods/gpgv.cc:180
@@ -1099,8 +1099,8 @@ msgid ""
msgstr ""
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "Error desconocido ejecutando gpgv"
+msgid "Unknown error executing apt-key"
+msgstr "Error desconocido ejecutando apt-key"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1780,8 +1780,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "Calculando la actualización... "
+msgid "Calculating upgrade"
+msgstr "Calculando la actualización"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/eu.po b/po/eu.po
index 9379abad0..8b3dd9025 100644
--- a/po/eu.po
+++ b/po/eu.po
@@ -1016,9 +1016,8 @@ msgid "At least one invalid signature was encountered."
msgstr "Beintza sinadura baliogabe bat aurkitu da."
#: methods/gpgv.cc:174
-#, fuzzy
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
-msgstr "Ezin da %s abiarazi sinadura egiaztatzeko (gpgv instalaturik al dago?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
+msgstr "Ezin da apt-key abiarazi sinadura egiaztatzeko (gnupg instalaturik al dago?)"
#. TRANSLATORS: %s is a single techy word like 'NODATA'
#: methods/gpgv.cc:180
@@ -1029,8 +1028,8 @@ msgid ""
msgstr ""
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "Errore ezezaguna gpgv exekutatzean"
+msgid "Unknown error executing apt-key"
+msgstr "Errore ezezaguna apt-key exekutatzean"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1700,8 +1699,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "Berriketak kalkulatzen... "
+msgid "Calculating upgrade"
+msgstr "Berriketak kalkulatzen"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/fi.po b/po/fi.po
index cb23ace35..b4333d57f 100644
--- a/po/fi.po
+++ b/po/fi.po
@@ -1007,10 +1007,9 @@ msgid "At least one invalid signature was encountered."
msgstr "LÖytyi ainakin yksi kelvoton allekirjoitus."
#: methods/gpgv.cc:174
-#, fuzzy
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
msgstr ""
-"Ei käynnistynyt \"%s\" allekirjoitusta tarkistamaan (onko gpgv asennettu?)"
+"Ei käynnistynyt \"apt-key\" allekirjoitusta tarkistamaan (onko gnupg asennettu?)"
#. TRANSLATORS: %s is a single techy word like 'NODATA'
#: methods/gpgv.cc:180
@@ -1021,8 +1020,8 @@ msgid ""
msgstr ""
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "Tapahtui tuntematon virhe suoritettaessa gpgv"
+msgid "Unknown error executing apt-key"
+msgstr "Tapahtui tuntematon virhe suoritettaessa apt-key"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1692,8 +1691,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "Käsitellään päivitystä ... "
+msgid "Calculating upgrade"
+msgstr "Käsitellään päivitystä"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/fr.po b/po/fr.po
index f3b69a800..2325fc0bf 100644
--- a/po/fr.po
+++ b/po/fr.po
@@ -1077,10 +1077,10 @@ msgid "At least one invalid signature was encountered."
msgstr "Au moins une signature non valable a été rencontrée."
#: methods/gpgv.cc:174
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
msgstr ""
-"Impossible d'exécuter « gpgv » pour contrôler la signature (veuillez "
-"vérifier si gpgv est installé)."
+"Impossible d'exécuter « apt-key » pour contrôler la signature (veuillez "
+"vérifier si gnupg est installé)."
#. TRANSLATORS: %s is a single techy word like 'NODATA'
#: methods/gpgv.cc:180
@@ -1093,8 +1093,8 @@ msgstr ""
"Peut-être le réseau nécessite-t-il une authentification."
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "Erreur inconnue à l'exécution de gpgv"
+msgid "Unknown error executing apt-key"
+msgstr "Erreur inconnue à l'exécution de apt-key"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1788,8 +1788,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "Calcul de la mise à jour... "
+msgid "Calculating upgrade"
+msgstr "Calcul de la mise à jour"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/gl.po b/po/gl.po
index e7aa78713..23d0f1c6b 100644
--- a/po/gl.po
+++ b/po/gl.po
@@ -1032,10 +1032,10 @@ msgid "At least one invalid signature was encountered."
msgstr "Atopouse polo menos unha sinatura incorrecta."
#: methods/gpgv.cc:174
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
msgstr ""
-"Non é posíbel executar «gpgv» para verificar a sinatura (Está instalado "
-"gpgv?)"
+"Non é posíbel executar «apt-key» para verificar a sinatura (Está instalado "
+"gnupg?)"
#. TRANSLATORS: %s is a single techy word like 'NODATA'
#: methods/gpgv.cc:180
@@ -1046,8 +1046,8 @@ msgid ""
msgstr ""
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "Produciuse un erro descoñecido ao executar gpgv"
+msgid "Unknown error executing apt-key"
+msgstr "Produciuse un erro descoñecido ao executar apt-key"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1729,8 +1729,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "Calculando a anovación... "
+msgid "Calculating upgrade"
+msgstr "Calculando a anovación"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/he.po b/po/he.po
index adc738bec..8175d0ebb 100644
--- a/po/he.po
+++ b/po/he.po
@@ -1006,7 +1006,7 @@ msgid "Recommended packages:"
msgstr ""
#: cmdline/apt-get.cc:1965
-msgid "Calculating upgrade... "
+msgid "Calculating upgrade"
msgstr ""
#: cmdline/apt-get.cc:1968 methods/ftp.cc:708 methods/connect.cc:112
@@ -1810,11 +1810,11 @@ msgstr ""
#: methods/gpgv.cc:232
#, c-format
-msgid "Could not execute '%s' to verify signature (is gpgv installed?)"
+msgid "Could not execute '%s' to verify signature (is gnupg installed?)"
msgstr ""
#: methods/gpgv.cc:237
-msgid "Unknown error executing gpgv"
+msgid "Unknown error executing apt-key"
msgstr ""
#: methods/gpgv.cc:271 methods/gpgv.cc:278
diff --git a/po/hu.po b/po/hu.po
index 9457b384c..9acff5c86 100644
--- a/po/hu.po
+++ b/po/hu.po
@@ -1050,9 +1050,9 @@ msgid "At least one invalid signature was encountered."
msgstr "Legalább egy aláírás érvénytelen."
#: methods/gpgv.cc:174
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
msgstr ""
-"Nem indítható el a „gpgv” az aláírás ellenőrzéséhez (telepítve van a gpgv?)"
+"Nem indítható el a „apt-key” az aláírás ellenőrzéséhez (telepítve van a gnupg?)"
#. TRANSLATORS: %s is a single techy word like 'NODATA'
#: methods/gpgv.cc:180
@@ -1063,8 +1063,8 @@ msgid ""
msgstr ""
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "Ismeretlen gpgv futtatási hiba"
+msgid "Unknown error executing apt-key"
+msgstr "Ismeretlen apt-key futtatási hiba"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1736,8 +1736,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "Frissítés kiszámítása... "
+msgid "Calculating upgrade"
+msgstr "Frissítés kiszámítása"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/it.po b/po/it.po
index 5348110c2..050b56111 100644
--- a/po/it.po
+++ b/po/it.po
@@ -1093,9 +1093,9 @@ msgid "At least one invalid signature was encountered."
msgstr "È stata trovata almeno una firma non valida."
#: methods/gpgv.cc:174
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
msgstr ""
-"Impossibile eseguire \"gpgv\" per verificare la firma (forse gpgv non è "
+"Impossibile eseguire \"apt-key\" per verificare la firma (forse gnupg non è "
"installato)"
#. TRANSLATORS: %s is a single techy word like 'NODATA'
@@ -1109,8 +1109,8 @@ msgstr ""
"richiede autenticazione?)"
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "Errore sconosciuto durante l'esecuzione di gpgv"
+msgid "Unknown error executing apt-key"
+msgstr "Errore sconosciuto durante l'esecuzione di apt-key"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1796,8 +1796,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "Calcolo dell'aggiornamento... "
+msgid "Calculating upgrade"
+msgstr "Calcolo dell'aggiornamento"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/ja.po b/po/ja.po
index c79bfbf28..7a5f87a4a 100644
--- a/po/ja.po
+++ b/po/ja.po
@@ -6,10 +6,10 @@
# Debian Project, Kenshi Muto <kmuto@debian.org>, 2004-2012
msgid ""
msgstr ""
-"Project-Id-Version: apt 0.9.16.1\n"
+"Project-Id-Version: apt 1.0.9.1\n"
"Report-Msgid-Bugs-To: APT Development Team <deity@lists.debian.org>\n"
"POT-Creation-Date: 2014-09-09 20:35+0200\n"
-"PO-Revision-Date: 2014-03-21 19:53+0900\n"
+"PO-Revision-Date: 2014-09-27 19:32+0900\n"
"Last-Translator: Kenshi Muto <kmuto@debian.org>\n"
"Language-Team: Debian Japanese List <debian-japanese@lists.debian.org>\n"
"Language: ja\n"
@@ -640,7 +640,7 @@ msgstr ""
#: cmdline/apt-helper.cc:36
msgid "Need one URL as argument"
-msgstr ""
+msgstr "引数として URL が 1 つ必要です"
#: cmdline/apt-helper.cc:49
msgid "Must specify at least one pair url/filename"
@@ -651,7 +651,6 @@ msgid "Download Failed"
msgstr "ダウンロード失敗"
#: cmdline/apt-helper.cc:80
-#, fuzzy
msgid ""
"Usage: apt-helper [options] command\n"
" apt-helper [options] download-file uri target-path\n"
@@ -671,6 +670,7 @@ msgstr ""
"\n"
"コマンド:\n"
" download-file - 指定した uri を目標パスにダウンロードする\n"
+" auto-detect-proxy - apt.conf を使ってプロキシを検出する\n"
"\n"
" この APT helper は Super Meep Powers 化されています。\n"
@@ -1084,9 +1084,9 @@ msgid "At least one invalid signature was encountered."
msgstr "少なくとも 1 つの不正な署名が発見されました。"
#: methods/gpgv.cc:174
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
msgstr ""
-"署名を検証するための 'gpgv' の実行ができませんでした (gpgv はインストールされ"
+"署名を検証するための 'apt-key' の実行ができませんでした (gnupg はインストールされ"
"ていますか?)"
#. TRANSLATORS: %s is a single techy word like 'NODATA'
@@ -1100,8 +1100,8 @@ msgstr ""
"が必要?)"
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "gpgv の実行中に未知のエラーが発生"
+msgid "Unknown error executing apt-key"
+msgstr "apt-key の実行中に未知のエラーが発生"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1496,57 +1496,56 @@ msgid "Recommended packages:"
msgstr "推奨パッケージ:"
#: apt-private/private-install.cc:825
-#, fuzzy, c-format
+#, c-format
msgid "Skipping %s, it is already installed and upgrade is not set.\n"
-msgstr "すでに %s に展開されたソースがあるため、展開をスキップします\n"
+msgstr "%s はすでにインストール済みで upgrade がセットされていないため、インストールをスキップします。\n"
#: apt-private/private-install.cc:829
#, c-format
msgid "Skipping %s, it is not installed and only upgrades are requested.\n"
-msgstr ""
+msgstr "%s はインストールされておらず、アップグレードだけの要求なので、インストールをスキップします。\n"
#: apt-private/private-install.cc:841
#, c-format
msgid "Reinstallation of %s is not possible, it cannot be downloaded.\n"
-msgstr ""
+msgstr "%s はダウンロードできないため、再インストールは不可能です。\n"
#: apt-private/private-install.cc:846
-#, fuzzy, c-format
+#, c-format
msgid "%s is already the newest version.\n"
-msgstr "%s はすでに保留に設定されています。\n"
+msgstr "%s はすでに最新版です。\n"
#: apt-private/private-install.cc:894
#, c-format
msgid "Selected version '%s' (%s) for '%s'\n"
-msgstr ""
+msgstr "'%3$s' のバージョン '%1$s' (%2$s) を選択しました\n"
#: apt-private/private-install.cc:899
#, c-format
msgid "Selected version '%s' (%s) for '%s' because of '%s'\n"
-msgstr ""
+msgstr "'%4$s' のために '%3$s' のバージョン '%1$s' (%2$s) を選択しました\n"
#. TRANSLATORS: Note, this is not an interactive question
#: apt-private/private-install.cc:941
#, c-format
msgid "Package '%s' is not installed, so not removed. Did you mean '%s'?\n"
-msgstr ""
+msgstr "パッケージ '%s' はインストールされていないため削除もされません。削除したかったのは '%s' でしょうか?\n"
#: apt-private/private-install.cc:947
-#, fuzzy, c-format
+#, c-format
msgid "Package '%s' is not installed, so not removed\n"
-msgstr "%lu 個のパッケージが完全にインストールまたは削除されていません。\n"
+msgstr "パッケージ '%s' はインストールされていないため、削除もされません\n"
#: apt-private/private-list.cc:129
msgid "Listing"
msgstr "一覧表示"
#: apt-private/private-list.cc:159
-#, fuzzy, c-format
+#, c-format
msgid "There is %i additional version. Please use the '-a' switch to see it"
msgid_plural ""
"There are %i additional versions. Please use the '-a' switch to see them."
-msgstr[0] ""
-"追加レコードが %i 件あります。表示するには '-a' スイッチを付けてください。"
+msgstr[0] "追加バージョンが %i 件あります。表示するには '-a' スイッチを付けてください。"
#: apt-private/private-main.cc:32
msgid ""
@@ -1758,11 +1757,11 @@ msgstr "update コマンドは引数をとりません"
msgid "%i package can be upgraded. Run 'apt list --upgradable' to see it.\n"
msgid_plural ""
"%i packages can be upgraded. Run 'apt list --upgradable' to see them.\n"
-msgstr[0] ""
+msgstr[0] "アップグレードできるパッケージが %i 個あります。表示するには 'apt list --upgradable' を実行してください。\n"
#: apt-private/private-update.cc:94
msgid "All packages are up to date."
-msgstr ""
+msgstr "パッケージはすべて最新です。"
#: apt-private/private-upgrade.cc:25
msgid "Calculating upgrade... "
@@ -1882,9 +1881,9 @@ msgstr ""
" -o=? 指定した設定オプションを適用する (例: -o dir::cache=/tmp)\n"
#: cmdline/apt-extracttemplates.cc:254
-#, fuzzy, c-format
+#, c-format
msgid "Unable to mkstemp %s"
-msgstr "%s の状態を取得できません"
+msgstr "mkstemp %s を実行できません"
#: cmdline/apt-extracttemplates.cc:259 apt-pkg/pkgcachegen.cc:1400
#, c-format
@@ -2040,9 +2039,8 @@ msgid "Failed to stat %s"
msgstr "%s の状態を取得するのに失敗しました"
#: ftparchive/cachedb.cc:332
-#, fuzzy
msgid "Failed to read .dsc"
-msgstr "%s のリンク読み取りに失敗しました"
+msgstr ".dsc の読み取りに失敗しました"
#: ftparchive/cachedb.cc:365
msgid "Archive has no control record"
@@ -2441,9 +2439,9 @@ msgid "Unable to lock directory %s"
msgstr "ディレクトリ %s をロックできません"
#: apt-pkg/acquire.cc:490 apt-pkg/clean.cc:39
-#, fuzzy, c-format
+#, c-format
msgid "Clean of %s is not supported"
-msgstr "インデックスファイルのタイプ '%s' はサポートされていません"
+msgstr "%s の消去はサポートされていません"
#. only show the ETA if it makes sense
#. two days
diff --git a/po/km.po b/po/km.po
index 9202b6072..ae00856f0 100644
--- a/po/km.po
+++ b/po/km.po
@@ -1005,8 +1005,8 @@ msgstr "​បានជួប​ប្រទះ​​​​ហត្ថលេខ
#: methods/gpgv.cc:174
#, fuzzy
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
-msgstr "មិន​អាច​ប្រតិបត្តិ '%s' ដើម្បី​ផ្ទៀងផ្ទាត់​ហត្ថលេខា (តើ gpgv ត្រូវ​បាន​ដំឡើង​ឬនៅ ?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
+msgstr "មិន​អាច​ប្រតិបត្តិ 'apt-key' ដើម្បី​ផ្ទៀងផ្ទាត់​ហត្ថលេខា (តើ gnupg ត្រូវ​បាន​ដំឡើង​ឬនៅ ?)"
#. TRANSLATORS: %s is a single techy word like 'NODATA'
#: methods/gpgv.cc:180
@@ -1017,8 +1017,8 @@ msgid ""
msgstr ""
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "មិនស្គាល់កំហុស ក្នុងការប្រតិបត្តិ gpgv"
+msgid "Unknown error executing apt-key"
+msgstr "មិនស្គាល់កំហុស ក្នុងការប្រតិបត្តិ apt-key"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1675,8 +1675,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "កំពុង​គណនា​ការ​ធ្វើ​ឲ្យ​ប្រសើរ... "
+msgid "Calculating upgrade"
+msgstr "កំពុង​គណនា​ការ​ធ្វើ​ឲ្យ​ប្រសើរ"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/ko.po b/po/ko.po
index c1b756cf7..661da2bde 100644
--- a/po/ko.po
+++ b/po/ko.po
@@ -1011,9 +1011,9 @@ msgid "At least one invalid signature was encountered."
msgstr "최소한 하나 이상의 서명이 잘못되었습니다."
#: methods/gpgv.cc:174
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
msgstr ""
-"서명을 확인하는 'gpgv' 프로그램을 실행할 수 없습니다. (gpgv를 설치했습니까?)"
+"서명을 확인하는 'apt-key' 프로그램을 실행할 수 없습니다. (gnupg를 설치했습니까?)"
#. TRANSLATORS: %s is a single techy word like 'NODATA'
#: methods/gpgv.cc:180
@@ -1024,8 +1024,8 @@ msgid ""
msgstr ""
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "gpgv 실행 도중 알 수 없는 오류 발생"
+msgid "Unknown error executing apt-key"
+msgstr "apt-key 실행 도중 알 수 없는 오류 발생"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1688,8 +1688,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "업그레이드를 계산하는 중입니다... "
+msgid "Calculating upgrade"
+msgstr "업그레이드를 계산하는 중입니다"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/ku.po b/po/ku.po
index 5f5512065..0aaa66c33 100644
--- a/po/ku.po
+++ b/po/ku.po
@@ -923,7 +923,7 @@ msgid "At least one invalid signature was encountered."
msgstr ""
#: methods/gpgv.cc:174
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
msgstr ""
#. TRANSLATORS: %s is a single techy word like 'NODATA'
@@ -935,8 +935,8 @@ msgid ""
msgstr ""
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "Di xebitandina gpgv de çewtiya nenas"
+msgid "Unknown error executing apt-key"
+msgstr "Di xebitandina apt-key de çewtiya nenas"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
#, fuzzy
@@ -1578,8 +1578,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "Bilindkirin tê hesibandin..."
+msgid "Calculating upgrade"
+msgstr "Bilindkirin tê hesibandin"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/lt.po b/po/lt.po
index 2f6d71365..e66708cdc 100644
--- a/po/lt.po
+++ b/po/lt.po
@@ -928,7 +928,7 @@ msgid "At least one invalid signature was encountered."
msgstr ""
#: methods/gpgv.cc:174
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
msgstr ""
#. TRANSLATORS: %s is a single techy word like 'NODATA'
@@ -940,8 +940,8 @@ msgid ""
msgstr ""
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "Nežinoma klaida kviečiant gpgv"
+msgid "Unknown error executing apt-key"
+msgstr "Nežinoma klaida kviečiant apt-key"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1600,8 +1600,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "Skaičiuojami atnaujinimai... "
+msgid "Calculating upgrade"
+msgstr "Skaičiuojami atnaujinimai"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/mr.po b/po/mr.po
index 25134d605..3031722b7 100644
--- a/po/mr.po
+++ b/po/mr.po
@@ -1002,9 +1002,9 @@ msgstr "किमान एक अवैध सही सापडली."
#: methods/gpgv.cc:174
#, fuzzy
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
msgstr ""
-"सहीची खात्री करण्यासाठी '%s' कार्यान्वित करू शकत नाही (gpgv संस्थापित केले आहे का?)"
+"सहीची खात्री करण्यासाठी 'apt-key' कार्यान्वित करू शकत नाही (gnupg संस्थापित केले आहे का?)"
#. TRANSLATORS: %s is a single techy word like 'NODATA'
#: methods/gpgv.cc:180
@@ -1015,8 +1015,8 @@ msgid ""
msgstr ""
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "gpgv कार्यान्वित होत असताना अपरिचित त्रुटी"
+msgid "Unknown error executing apt-key"
+msgstr "apt-key कार्यान्वित होत असताना अपरिचित त्रुटी"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1679,8 +1679,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "पुढिल आवृत्तीची गणती करीत आहे..."
+msgid "Calculating upgrade"
+msgstr "पुढिल आवृत्तीची गणती करीत आहे"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/nb.po b/po/nb.po
index 445f1cca5..12da02f47 100644
--- a/po/nb.po
+++ b/po/nb.po
@@ -1017,9 +1017,9 @@ msgid "At least one invalid signature was encountered."
msgstr "Minst en ugyldig signatur ble funnet."
#: methods/gpgv.cc:174
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
msgstr ""
-"Klarte ikke kjøre «gpgv» for å verifisere signaturen (er gpgv installert?)"
+"Klarte ikke kjøre «apt-key» for å verifisere signaturen (er gnupg installert?)"
#. TRANSLATORS: %s is a single techy word like 'NODATA'
#: methods/gpgv.cc:180
@@ -1030,8 +1030,8 @@ msgid ""
msgstr ""
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "Ukjent feil ved kjøring av gpgv"
+msgid "Unknown error executing apt-key"
+msgstr "Ukjent feil ved kjøring av apt-key"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1706,8 +1706,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "Beregner oppgradering... "
+msgid "Calculating upgrade"
+msgstr "Beregner oppgradering"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/ne.po b/po/ne.po
index 08de0f8ef..0d61a52be 100644
--- a/po/ne.po
+++ b/po/ne.po
@@ -1002,9 +1002,8 @@ msgid "At least one invalid signature was encountered."
msgstr "कम्तिमा एउटा अवैध हस्ताक्षर विरोध भयो ।"
#: methods/gpgv.cc:174
-#, fuzzy
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
-msgstr "हस्ताक्षर रूजू गर्न '%s' कार्यन्वयन गर्न सकिएन (के gpgv स्थापना भयो?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
+msgstr "हस्ताक्षर रूजू गर्न 'apt-key' कार्यन्वयन गर्न सकिएन (के gnupg स्थापना भयो?)"
#. TRANSLATORS: %s is a single techy word like 'NODATA'
#: methods/gpgv.cc:180
@@ -1015,8 +1014,8 @@ msgid ""
msgstr ""
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "gpgv कार्यन्वयन गर्दा अज्ञात त्रुटि"
+msgid "Unknown error executing apt-key"
+msgstr "apt-key कार्यन्वयन गर्दा अज्ञात त्रुटि"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1675,8 +1674,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "स्तर वृद्धि गणना गरिदैछ..."
+msgid "Calculating upgrade"
+msgstr "स्तर वृद्धि गणना गरिदैछ."
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/nl.po b/po/nl.po
index c410baa46..c333ae9f9 100644
--- a/po/nl.po
+++ b/po/nl.po
@@ -1032,9 +1032,9 @@ msgid "At least one invalid signature was encountered."
msgstr "Er is tenminste één ongeldige ondertekening gevonden."
#: methods/gpgv.cc:174
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
msgstr ""
-"Kon 'gpgv' niet uitvoeren om ondertekening te verifiëren (is gpgv "
+"Kon 'apt-key' niet uitvoeren om ondertekening te verifiëren (is gnupg "
"geïnstalleerd?)"
#. TRANSLATORS: %s is a single techy word like 'NODATA'
@@ -1046,8 +1046,8 @@ msgid ""
msgstr ""
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "Onbekende fout bij het uitvoeren van gpgv"
+msgid "Unknown error executing apt-key"
+msgstr "Onbekende fout bij het uitvoeren van apt-key"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1730,8 +1730,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "Opwaardering wordt doorgerekend... "
+msgid "Calculating upgrade"
+msgstr "Opwaardering wordt doorgerekend"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/nn.po b/po/nn.po
index 7c5f77759..40f77e8be 100644
--- a/po/nn.po
+++ b/po/nn.po
@@ -1012,7 +1012,7 @@ msgid "At least one invalid signature was encountered."
msgstr ""
#: methods/gpgv.cc:174
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
msgstr ""
#. TRANSLATORS: %s is a single techy word like 'NODATA'
@@ -1024,7 +1024,7 @@ msgid ""
msgstr ""
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
+msgid "Unknown error executing apt-key"
msgstr ""
#: methods/gpgv.cc:217 methods/gpgv.cc:224
@@ -1691,8 +1691,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "Reknar ut oppgradering ... "
+msgid "Calculating upgrade"
+msgstr "Reknar ut oppgradering"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/pl.po b/po/pl.po
index 95785904a..1646a47e8 100644
--- a/po/pl.po
+++ b/po/pl.po
@@ -1060,9 +1060,9 @@ msgid "At least one invalid signature was encountered."
msgstr "Napotkano przynajmniej jeden nieprawidłowy podpis."
#: methods/gpgv.cc:174
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
msgstr ""
-"Nie udało się uruchomić gpgv by zweryfikować podpis (czy gpgv jest "
+"Nie udało się uruchomić apt-key by zweryfikować podpis (czy gnupg jest "
"zainstalowane?)"
#. TRANSLATORS: %s is a single techy word like 'NODATA'
@@ -1074,8 +1074,8 @@ msgid ""
msgstr ""
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "Nieznany błąd podczas uruchamiania gpgv"
+msgid "Unknown error executing apt-key"
+msgstr "Nieznany błąd podczas uruchamiania apt-key"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1774,8 +1774,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "Obliczanie aktualizacji..."
+msgid "Calculating upgrade"
+msgstr "Obliczanie aktualizacji"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/pt.po b/po/pt.po
index 5dcd74bb5..b17fb0380 100644
--- a/po/pt.po
+++ b/po/pt.po
@@ -1050,9 +1050,9 @@ msgid "At least one invalid signature was encountered."
msgstr "Pelo menos uma assinatura inválida foi encontrada."
#: methods/gpgv.cc:174
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
msgstr ""
-"Não foi possível executar 'gpgv' para verificar a assinatura (o gpgv está "
+"Não foi possível executar 'apt-key' para verificar a assinatura (o gnupg está "
"instalado?)"
#. TRANSLATORS: %s is a single techy word like 'NODATA'
@@ -1064,8 +1064,8 @@ msgid ""
msgstr ""
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "Erro desconhecido ao executar gpgv"
+msgid "Unknown error executing apt-key"
+msgstr "Erro desconhecido ao executar apt-key"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1741,8 +1741,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "A calcular a actualização... "
+msgid "Calculating upgrade"
+msgstr "A calcular a actualização"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/pt_BR.po b/po/pt_BR.po
index c23a42275..0be44e8af 100644
--- a/po/pt_BR.po
+++ b/po/pt_BR.po
@@ -1022,10 +1022,9 @@ msgid "At least one invalid signature was encountered."
msgstr "Ao menos uma assinatura inválida foi encontrada."
#: methods/gpgv.cc:174
-#, fuzzy
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
msgstr ""
-"Não foi possível executar '%s' para verificar a assinatura (o gpgv está "
+"Não foi possível executar 'apt-key' para verificar a assinatura (o gnupg está "
"instalado?)"
#. TRANSLATORS: %s is a single techy word like 'NODATA'
@@ -1037,8 +1036,8 @@ msgid ""
msgstr ""
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "Erro desconhecido executando gpgv"
+msgid "Unknown error executing apt-key"
+msgstr "Erro desconhecido executando apt-key"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1710,8 +1709,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "Calculando atualização... "
+msgid "Calculating upgrade"
+msgstr "Calculando atualização"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/ro.po b/po/ro.po
index 2e556ef62..191cd5a44 100644
--- a/po/ro.po
+++ b/po/ro.po
@@ -1023,10 +1023,9 @@ msgid "At least one invalid signature was encountered."
msgstr "Cel puțin o semnătură nevalidă a fost întâlnită."
#: methods/gpgv.cc:174
-#, fuzzy
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
msgstr ""
-"Nu s-a putut executa „%s” pentru verificarea semnăturii (gpgv este instalat?)"
+"Nu s-a putut executa „apt-key” pentru verificarea semnăturii (gnupg este instalat?)"
#. TRANSLATORS: %s is a single techy word like 'NODATA'
#: methods/gpgv.cc:180
@@ -1037,8 +1036,8 @@ msgid ""
msgstr ""
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "Eroare necunoscută în timp ce se execută gpgv"
+msgid "Unknown error executing apt-key"
+msgstr "Eroare necunoscută în timp ce se execută apt-key"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1717,8 +1716,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "Calculez înnoirea... "
+msgid "Calculating upgrade"
+msgstr "Calculez înnoirea"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/ru.po b/po/ru.po
index 93d5163fb..35dc160e1 100644
--- a/po/ru.po
+++ b/po/ru.po
@@ -1059,8 +1059,8 @@ msgid "At least one invalid signature was encountered."
msgstr "Найдена как минимум одна неправильная подпись."
#: methods/gpgv.cc:174
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
-msgstr "Не удалось выполнить «gpgv» для проверки подписи (gpgv установлена?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
+msgstr "Не удалось выполнить «apt-key» для проверки подписи (gnupg установлена?)"
#. TRANSLATORS: %s is a single techy word like 'NODATA'
#: methods/gpgv.cc:180
@@ -1071,8 +1071,8 @@ msgid ""
msgstr ""
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "Неизвестная ошибка при выполнении gpgv"
+msgid "Unknown error executing apt-key"
+msgstr "Неизвестная ошибка при выполнении apt-key"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1764,8 +1764,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "Расчёт обновлений…"
+msgid "Calculating upgrade"
+msgstr "Расчёт обновлений"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/sk.po b/po/sk.po
index acfd4cf5b..855f309d2 100644
--- a/po/sk.po
+++ b/po/sk.po
@@ -1039,8 +1039,8 @@ msgid "At least one invalid signature was encountered."
msgstr "Bola zistená aspoň jedna nesprávna signatúra."
#: methods/gpgv.cc:174
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
-msgstr "Nedá sa spustiť „gpgv“ kvôli overeniu podpisu (je nainštalované gpgv?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
+msgstr "Nedá sa spustiť „apt-key“ kvôli overeniu podpisu (je nainštalované gnupg?)"
#. TRANSLATORS: %s is a single techy word like 'NODATA'
#: methods/gpgv.cc:180
@@ -1051,8 +1051,8 @@ msgid ""
msgstr ""
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "Neznáma chyba pri spustení gpgv"
+msgid "Unknown error executing apt-key"
+msgstr "Neznáma chyba pri spustení apt-key"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1738,8 +1738,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "Prepočítava sa aktualizácia... "
+msgid "Calculating upgrade"
+msgstr "Prepočítava sa aktualizácia"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/sl.po b/po/sl.po
index 69bf678d0..536d99465 100644
--- a/po/sl.po
+++ b/po/sl.po
@@ -1036,8 +1036,8 @@ msgid "At least one invalid signature was encountered."
msgstr "Najden je bil vsaj en neveljaven podpis."
#: methods/gpgv.cc:174
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
-msgstr "Ni mogoče izvesti 'gpgv' za preverjanje podpisa (je gpgv nameščen?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
+msgstr "Ni mogoče izvesti 'apt-key' za preverjanje podpisa (je gnupg nameščen?)"
#. TRANSLATORS: %s is a single techy word like 'NODATA'
#: methods/gpgv.cc:180
@@ -1048,8 +1048,8 @@ msgid ""
msgstr ""
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "Neznana napaka med izvajanjem gpgv"
+msgid "Unknown error executing apt-key"
+msgstr "Neznana napaka med izvajanjem apt-key"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1739,8 +1739,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "Preračunavanje nadgradnje ... "
+msgid "Calculating upgrade"
+msgstr "Preračunavanje nadgradnje"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/sv.po b/po/sv.po
index 5d11f06c2..d761c5c38 100644
--- a/po/sv.po
+++ b/po/sv.po
@@ -1025,9 +1025,9 @@ msgid "At least one invalid signature was encountered."
msgstr "Minst en ogiltig signatur träffades på."
#: methods/gpgv.cc:174
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
msgstr ""
-"Kunde inte köra \"gpgv\" för att verifiera signatur (är gpgv installerad?)"
+"Kunde inte köra \"apt-key\" för att verifiera signatur (är gnupg installerad?)"
#. TRANSLATORS: %s is a single techy word like 'NODATA'
#: methods/gpgv.cc:180
@@ -1038,8 +1038,8 @@ msgid ""
msgstr ""
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "Okänt fel vid körning av gpgv"
+msgid "Unknown error executing apt-key"
+msgstr "Okänt fel vid körning av apt-key"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1723,8 +1723,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "Beräknar uppgradering... "
+msgid "Calculating upgrade"
+msgstr "Beräknar uppgradering"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/th.po b/po/th.po
index 5d04c5cda..d241c1314 100644
--- a/po/th.po
+++ b/po/th.po
@@ -1052,8 +1052,8 @@ msgid "At least one invalid signature was encountered."
msgstr "พบลายเซ็นที่ใช้การไม่ได้อย่างน้อยหนึ่งรายการ"
#: methods/gpgv.cc:174
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
-msgstr "ไม่สามารถเรียก 'gpgv' เพื่อตรวจสอบลายเซ็น (ได้ติดตั้ง gpgv ไว้หรือไม่?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
+msgstr "ไม่สามารถเรียก 'apt-key' เพื่อตรวจสอบลายเซ็น (ได้ติดตั้ง gnupg ไว้หรือไม่?)"
#. TRANSLATORS: %s is a single techy word like 'NODATA'
#: methods/gpgv.cc:180
@@ -1066,8 +1066,8 @@ msgstr ""
"'%s' (เครือข่ายต้องยืนยันตัวบุคคลหรือไม่?)"
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "เกิดข้อผิดพลาดไม่ทราบสาเหตุขณะเรียก gpgv"
+msgid "Unknown error executing apt-key"
+msgstr "เกิดข้อผิดพลาดไม่ทราบสาเหตุขณะเรียก apt-key"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1718,8 +1718,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "กำลังคำนวณการปรับรุ่น... "
+msgid "Calculating upgrade"
+msgstr "กำลังคำนวณการปรับรุ่น"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/tl.po b/po/tl.po
index 49a96a24e..e1186c424 100644
--- a/po/tl.po
+++ b/po/tl.po
@@ -1017,10 +1017,9 @@ msgid "At least one invalid signature was encountered."
msgstr "Hindi kukulang sa isang hindi tanggap na lagda ang na-enkwentro."
#: methods/gpgv.cc:174
-#, fuzzy
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
msgstr ""
-"Hindi maitakbo ang '%s' upang maberipika ang lagda (nakaluklok ba ang gpgv?)"
+"Hindi maitakbo ang 'apt-key' upang maberipika ang lagda (nakaluklok ba ang gnupg?)"
#. TRANSLATORS: %s is a single techy word like 'NODATA'
#: methods/gpgv.cc:180
@@ -1031,8 +1030,8 @@ msgid ""
msgstr ""
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "Hindi kilalang error sa pag-execute ng gpgv"
+msgid "Unknown error executing apt-key"
+msgstr "Hindi kilalang error sa pag-execute ng apt-key"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1700,8 +1699,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "Sinusuri ang pag-upgrade... "
+msgid "Calculating upgrade"
+msgstr "Sinusuri ang pag-upgrade"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/tr.po b/po/tr.po
index a3cd7077f..3dd0568e0 100644
--- a/po/tr.po
+++ b/po/tr.po
@@ -9,7 +9,7 @@ msgstr ""
"Project-Id-Version: apt\n"
"Report-Msgid-Bugs-To: APT Development Team <deity@lists.debian.org>\n"
"POT-Creation-Date: 2014-09-09 20:35+0200\n"
-"PO-Revision-Date: 2014-09-11 02:47+0200\n"
+"PO-Revision-Date: 2014-09-29 22:08+0200\n"
"Last-Translator: Mert Dirik <mertdirik@gmail.com>\n"
"Language-Team: Debian l10n Turkish <debian-l10n-turkish@lists.debian.org>\n"
"Language: tr\n"
@@ -83,7 +83,7 @@ msgstr "Toplam birikmiş dizgiler: "
#: cmdline/apt-cache.cc:362
msgid "Total dependency version space: "
-msgstr "Toplam bağlımlık sürümü alanı: "
+msgstr "Toplam bağımlılık sürümü alanı: "
#: cmdline/apt-cache.cc:367
msgid "Total slack space: "
@@ -127,7 +127,7 @@ msgstr "Paket dosyaları:"
#: cmdline/apt-cache.cc:1553 cmdline/apt-cache.cc:1644
msgid "Cache is out of sync, can't x-ref a package file"
-msgstr "Önbellek eşzamanlı değil, paket dosyası 'x-ref' yapılamıyor."
+msgstr "Önbellek eşzamanlı değil, paket dosyası 'x-ref' yapılamıyor"
#. Show any packages have explicit pins
#: cmdline/apt-cache.cc:1567
@@ -205,8 +205,8 @@ msgid ""
"See the apt-cache(8) and apt.conf(5) manual pages for more information.\n"
msgstr ""
"Kullanım: apt-cache [seçenekler] komut\n"
-" apt-cache [seçenekler] showpkg paket1 [paket2 ...]\n"
-" apt-cache [seçenekler] showsrc paket1 [paket2 ...]\n"
+" apt-cache [seçenekler] showpkg paket1 [paket2 ...]\n"
+" apt-cache [seçenekler] showsrc paket1 [paket2 ...]\n"
"\n"
"apt-cache APT'nin ikili paket önbelleğindeki dosyaları\n"
"sorgulamakta kullanılan alt seviye bir araçtır.\n"
@@ -233,7 +233,7 @@ msgstr ""
" -p=? Paket önbelleği.\n"
" -s=? Kaynak önbelleği.\n"
" -q İlerleme göstergesini kapat.\n"
-" -i unmet komutunda yalnızca önemli bağımlılıkları görüntüle.\n"
+" -i unmet komutunda sadece önemli bağımlılıkları görüntüle.\n"
" -c=? Belirtilen yapılandırma dosyasını kullan\n"
" -o=? Herhangi bir yapılandırma seçeneğini ayarla, örneğin -o dir::cache=/"
"tmp\n"
@@ -242,7 +242,7 @@ msgstr ""
#: cmdline/apt-cdrom.cc:76
msgid "Please provide a name for this Disc, such as 'Debian 5.0.3 Disk 1'"
-msgstr "Lütfen bu CD/DVD'ye bir isim verin, örneğin 'Debian 5.0.3 Disk 1'"
+msgstr "Lütfen bu CD/DVD'ye bir ad verin, örneğin 'Debian 5.0.3 Disk 1'"
#: cmdline/apt-cdrom.cc:91
msgid "Please insert a Disc in the drive and press enter"
@@ -272,7 +272,7 @@ msgstr "Kalan CD'leriniz için bu işlemi yineleyin."
#: cmdline/apt-config.cc:48
msgid "Arguments not in pairs"
-msgstr "Değişkenler (argüman) çiftler halinde değil"
+msgstr "Argümanlar çiftler halinde değil"
#: cmdline/apt-config.cc:89
msgid ""
@@ -337,7 +337,7 @@ msgstr "%s paketi bulunamadı"
#: apt-private/private-install.cc:865
#, c-format
msgid "%s set to manually installed.\n"
-msgstr "%s elle kurulmuş olarak ayarlı.\n"
+msgstr "%s elle kurulmuş olarak ayarlandı.\n"
#: cmdline/apt-get.cc:461 cmdline/apt-mark.cc:83
#, c-format
@@ -437,7 +437,7 @@ msgstr "İndirme işlemi tamamlandı ve sadece indirme kipinde"
#: cmdline/apt-get.cc:950
#, c-format
msgid "Skipping unpack of already unpacked source in %s\n"
-msgstr "%s için zaten açılmış bazı paketlerin açılması atlanıyor.\n"
+msgstr "%s için zaten açılmış bazı paketlerin açılması atlanıyor\n"
#: cmdline/apt-get.cc:963
#, c-format
@@ -460,7 +460,7 @@ msgstr "Alt süreç başarısız"
#: cmdline/apt-get.cc:1030
msgid "Must specify at least one package to check builddeps for"
-msgstr "İnşa bağımlılıklarının denetleneceği en az bir paket belirtilmedilir"
+msgstr "İnşa bağımlılıklarının denetleneceği en az bir paket belirtilmelidir"
#: cmdline/apt-get.cc:1055
#, c-format
@@ -469,7 +469,7 @@ msgid ""
"Architectures for setup"
msgstr ""
"%s mimarisine uygun mimari bilgileri mevcut değil. Kurulumu için apt.conf(5) "
-"rehber sayfasındaki APT::Architectures kısmına göz atın."
+"rehber sayfasındaki APT::Architectures kısmına göz atın"
#: cmdline/apt-get.cc:1079 cmdline/apt-get.cc:1082
#, c-format
@@ -488,7 +488,7 @@ msgid ""
"packages"
msgstr ""
"'%4$s' paketlerinde %3$s paketine izin verilmediği için %2$s kaynağının %1$s "
-"bağımlılığı karşılanamıyor."
+"bağımlılığı karşılanamıyor"
#: cmdline/apt-get.cc:1290
#, c-format
@@ -496,12 +496,12 @@ msgid ""
"%s dependency for %s cannot be satisfied because the package %s cannot be "
"found"
msgstr ""
-"%2$s için %1$s bağımlılığı, %3$s paketi bulunamadığı için karşılanamadı."
+"%2$s için %1$s bağımlılığı, %3$s paketi bulunamadığı için karşılanamadı"
#: cmdline/apt-get.cc:1313
#, c-format
msgid "Failed to satisfy %s dependency for %s: Installed package %s is too new"
-msgstr "%2$s için %1$s bağımlılığı karşılanamadı: Kurulu %3$s paketi çok yeni."
+msgstr "%2$s için %1$s bağımlılığı karşılanamadı: Kurulu %3$s paketi çok yeni"
#: cmdline/apt-get.cc:1352
#, c-format
@@ -589,17 +589,17 @@ msgid ""
" This APT has Super Cow Powers.\n"
msgstr ""
"Kullanım: apt-get [seçenekler] komut\n"
-" apt-get [seçenekler] install|remove paket1 [paket2 ...]\n"
-" apt-get [seçenekler] kaynak paket1 [paket2 ...]\n"
+" apt-get [seçenekler] install|remove paket1 [paket2 ...]\n"
+" apt-get [seçenekler] kaynak paket1 [paket2 ...]\n"
"\n"
-"apt-get, paket indirmek ve kurmakta kullanılan basit bir komut satırı\n"
+"apt-get, paket indirme ve kurmada kullanılan basit bir komut satırı\n"
"arayüzüdür. En sık kullanılan komutlar güncelleme (update) ve kurma\n"
"(install) komutlarıdır.\n"
"\n"
"Komutlar:\n"
" update - Paket listelerini yenile\n"
" upgrade - Yükseltme işlemini gerçekleştir\n"
-" install - Yeni paket kur (paket adı libc6.deb değil libc6 şeklinde "
+" install - Yeni paket kur (paket libc6.deb değil libc6 şeklinde "
"olmalıdır)\n"
" remove - Paket(leri) kaldır\n"
" autoremove - Kullanılmayan tüm paketleri otomatik olarak kaldır\n"
@@ -619,7 +619,7 @@ msgstr ""
" -h Bu yardım metni.\n"
" -q Günlük tutmaya uygun çıktı - İlerleme göstergesi yok\n"
" -qq Hata olmadığı müddetçe çıktıya bir şey yazma\n"
-" -d Yalnızca indir - Paketleri açmaz ve kurmaz\n"
+" -d Sadece indir - Paketleri açmaz ve kurmaz\n"
" -s Bir şey yapma. Simülasyon kipinde çalış\n"
" -y Tüm sorulara Evet yanıtını ver ve soru sorma\n"
" -f Eksik bağımlılıklara sahip bir sistemi onarmaya çalış\n"
@@ -639,7 +639,7 @@ msgstr "Argüman olarak bir adet URL'ye ihtiyaç vardır"
#: cmdline/apt-helper.cc:49
msgid "Must specify at least one pair url/filename"
-msgstr "En az bir adet url/dosyaadı çifti belirtilmelidir"
+msgstr "En az bir adet url/dosya-adı çifti belirtilmelidir"
#: cmdline/apt-helper.cc:67
msgid "Download Failed"
@@ -661,7 +661,7 @@ msgstr ""
"Usage: apt-helper [seçenekler] komut\n"
" apt-helper [seçenekler] download-file uri hedef-konum\n"
"\n"
-"apt-helper apt'nin dahili yardımcı aracıdır\n"
+"apt-helper apt'nin dâhilî yardımcı aracıdır\n"
"\n"
"Komutlar:\n"
" download-file - verilen adresi hedef yola kaydet\n"
@@ -759,7 +759,7 @@ msgstr ""
" -h Bu yardım metni.\n"
" -q Günlük tutmaya uygun çıktı - İlerleme göstergesi yok\n"
" -qq Hata olmadığı müddetçe çıktıya bir şey yazma\n"
-" -s Bir şey yapma. Yalnızca ne yapılacağını söyler.\n"
+" -s Bir şey yapma. Sadece ne yapılacağını söyler.\n"
" -f read/write auto/manual marking in the given file\n"
" -c=? Belirtilen yapılandırma dosyası kullan\n"
" -o=? Yapılandırma seçeneği ayarla, örneğin -o dir::cache=/tmp\n"
@@ -817,7 +817,7 @@ msgid ""
"cannot be used to add new CD-ROMs"
msgstr ""
"Lütfen bu CD-ROM'un APT tarafından tanınması için apt-cdrom aracını "
-"kullanın. apt-get update yeni CD-ROM'lar eklemek için kullanılamaz."
+"kullanın. apt-get update yeni CD-ROM'lar eklemek için kullanılamaz"
#: methods/cdrom.cc:222
msgid "Wrong CD-ROM"
@@ -826,7 +826,7 @@ msgstr "Yanlış CD-ROM"
#: methods/cdrom.cc:249
#, c-format
msgid "Unable to unmount the CD-ROM in %s, it may still be in use."
-msgstr "%s konumundaki CD-ROM çıkarılamıyor, hala kullanımda olabilir."
+msgstr "%s konumundaki CD-ROM çıkarılamıyor, hâlâ kullanımda olabilir."
#: methods/cdrom.cc:254
msgid "Disk not found."
@@ -1065,7 +1065,7 @@ msgstr "'%s:%s' (%i - %s) adresi çözümlenirken bir şeyler kötü gitti"
#: methods/connect.cc:258
#, c-format
msgid "Unable to connect to %s:%s:"
-msgstr "Bağlanılamıyor %s:%s:"
+msgstr "Bağlanılamadı %s:%s:"
#: methods/gpgv.cc:168
msgid ""
@@ -1077,8 +1077,8 @@ msgid "At least one invalid signature was encountered."
msgstr "En az bir geçersiz imza ile karşılaşıldı."
#: methods/gpgv.cc:174
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
-msgstr "İmza doğrulama için 'gpgv' çalıştırılamadı (gpgv kurulu mu?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
+msgstr "İmza doğrulama için 'apt-key' çalıştırılamadı (gnupg kurulu mu?)"
#. TRANSLATORS: %s is a single techy word like 'NODATA'
#: methods/gpgv.cc:180
@@ -1091,8 +1091,8 @@ msgstr ""
"gerektiriyor mu?)"
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "gpgv çalıştırılırken bilinmeyen hata"
+msgid "Unknown error executing apt-key"
+msgstr "apt-key çalıştırılırken bilinmeyen hata"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1211,7 +1211,7 @@ msgid ""
msgstr ""
"Ortam değişimi: Lütfen '%2$s' sürücüsüne\n"
" '%1$s'\n"
-"olarak etiketlenmiş diski takın ve enter tuşuna basın.\n"
+"olarak etiketlenmiş diski takın ve enter tuşuna basın\n"
#: apt-private/private-cachefile.cc:93
msgid "Correcting dependencies..."
@@ -1290,7 +1290,7 @@ msgstr "İç hata, Sıralama tamamlanamadı"
msgid "How odd... The sizes didn't match, email apt@packages.debian.org"
msgstr ""
"Ne kadar ilginç... Boyutlar eşleşmedi, apt@packages.debian.org adresine "
-"eposta atın."
+"eposta atın"
#. TRANSLATOR: The required space between number and unit is already included
#. in the replacement strings, so %sB will be correctly translate in e.g. 1,5 MB
@@ -1327,8 +1327,7 @@ msgstr "%s içinde yeterli boş alanınız yok."
#: apt-private/private-install.cc:216 apt-private/private-install.cc:238
msgid "Trivial Only specified but this is not a trivial operation."
-msgstr ""
-"Yalnızca Önemsiz seçeneği ayarlandı, fakat bu önemsiz bir işlem bir değil."
+msgstr "Sadece Önemsiz seçeneği ayarlandı, ama bu önemsiz bir işlem değil."
#. TRANSLATOR: This string needs to be typed by the user as a confirmation, so be
#. careful with hard to type or special characters (like non-breaking spaces)
@@ -1510,8 +1509,7 @@ msgstr ""
#, c-format
msgid "Reinstallation of %s is not possible, it cannot be downloaded.\n"
msgstr ""
-"%s paketinin yeniden kurulumu mümkün değil, çünkü paket internetten "
-"indirilemedi.\n"
+"%s paketinin yeniden kurulumu mümkün değil, çünkü paket indirilemedi.\n"
#: apt-private/private-install.cc:846
#, c-format
@@ -1539,7 +1537,7 @@ msgstr ""
#: apt-private/private-install.cc:947
#, c-format
msgid "Package '%s' is not installed, so not removed\n"
-msgstr "'%s' kurulu değildi, dolayısıyla kaldırılmadı.\n"
+msgstr "'%s' kurulu değildi, dolayısıyla kaldırılmadı\n"
#: apt-private/private-list.cc:129
msgid "Listing"
@@ -1550,11 +1548,10 @@ msgstr "Listeleme"
msgid "There is %i additional version. Please use the '-a' switch to see it"
msgid_plural ""
"There are %i additional versions. Please use the '-a' switch to see them."
-msgstr[0] ""
-"Fazladan %i sürüm daha var. Görmek için '-a' anahtarını kullanabilirsiniz."
+msgstr[0] "Fazladan %i sürüm daha var. Görmek için '-a' anahtarını kullanın."
msgstr[1] ""
"Fazladan %i sürüm daha var. Bu sürümleri görmek için '-a' anahtarını "
-"kullanabilirsiniz."
+"kullanın."
#: apt-private/private-main.cc:32
msgid ""
@@ -1563,10 +1560,10 @@ msgid ""
" Keep also in mind that locking is deactivated,\n"
" so don't depend on the relevance to the real current situation!"
msgstr ""
-"NOT: Bu yalnızca bir benzetimdir!\n"
+"NOT: Bu sadece bir benzetimdir!\n"
" apt-get'i gerçekten çalıştırmak için root haklarına ihtiyaç vardır.\n"
" Unutmayın ki benzetim kipinde kilitleme yapılmaz, bu nedenle\n"
-" bu benzetimin gerçekteki durumla birebir aynı olacağına güvenmeyin."
+" bu benzetimin gerçekteki durumla birebir aynı olacağına güvenmeyin!"
#: apt-private/private-output.cc:103 apt-private/private-show.cc:84
#: apt-private/private-show.cc:89
@@ -1739,10 +1736,10 @@ msgstr "Tam Metin Arama"
msgid "There is %i additional record. Please use the '-a' switch to see it"
msgid_plural ""
"There are %i additional records. Please use the '-a' switch to see them."
-msgstr[0] ""
-"Fazladan %i kayıt daha var. Görmek için '-a' anahtarını kullanabilirsiniz."
+msgstr[0] "Fazladan %i kayıt daha var. Görmek için '-a' anahtarını kullanın."
msgstr[1] ""
-"Fazladan %i kayıt daha var. Görmek için '-a' anahtarını kullanabilirsiniz."
+"Fazladan %i kayıt daha var. Bu kayıtları görmek için '-a' anahtarını "
+"kullanın. kullanabilirsiniz."
#: apt-private/private-show.cc:163
msgid "not a real package (virtual)"
@@ -1751,7 +1748,7 @@ msgstr "gerçek bir paket değil (sanal)"
#: apt-private/private-sources.cc:58
#, c-format
msgid "Failed to parse %s. Edit again? "
-msgstr "%s ayrıştırılamadı. Tekrar düzenlemek ister misiniz?"
+msgstr "%s ayrıştırılamadı. Tekrar düzenlemek ister misiniz? "
#: apt-private/private-sources.cc:70
#, c-format
@@ -1760,7 +1757,7 @@ msgstr "'%s' dosyası değişti, lütfen 'apt-get update' komutunu çalıştır
#: apt-private/private-update.cc:31
msgid "The update command takes no arguments"
-msgstr "'update' komutu bağımsız değişken almamaktadır"
+msgstr "'update' komutu argüman almaz"
#: apt-private/private-update.cc:90
#, c-format
@@ -1862,7 +1859,7 @@ msgstr "Bu durum, çift hata iletilerine ya da eksik bağımlılıkların neden"
#: dselect/install:104
msgid "or errors caused by missing dependencies. This is OK, only the errors"
msgstr ""
-"olduğu hatalara yol açabilir. Bu durum bir sorun teşkil etmez, yalnızca bu "
+"olduğu hatalara yol açabilir. Bu durum bir sorun teşkil etmez, sadece bu "
"iletinin"
#: dselect/install:105
@@ -1931,7 +1928,7 @@ msgstr "Kaynak uzantı listesi çok uzun"
#: ftparchive/apt-ftparchive.cc:401
msgid "Error writing header to contents file"
-msgstr "İçindekiler dosyasına üstbilgi yazmada hata"
+msgstr "İçindekiler dosyasına başlık yazmada hata"
#: ftparchive/apt-ftparchive.cc:431
#, c-format
@@ -1980,12 +1977,12 @@ msgid ""
" -o=? Set an arbitrary configuration option"
msgstr ""
"Kullanım: apt-ftparchive [seçenekler] komut\n"
-"Komutlar: packages ikilikonumu [geçersizkılmadosyası [konumöneki]]\n"
-" sources kaynakkonumu [geçersizkılmadosyası [konumöneki]]\n"
-" contents konum\n"
-" release konum\n"
-" generate yapılandırma [gruplar]\n"
-" clean yapılandırma\n"
+"Komutlar: packages ikilikonumu [geçersizkılmadosyası [konumöneki]]\n"
+" sources kaynakkonumu [geçersizkılmadosyası [konumöneki]]\n"
+" contents konum\n"
+" release konum\n"
+" generate yapılandırma [gruplar]\n"
+" clean yapılandırma\n"
"\n"
"apt-ftparchive Debian arşivleri için indeks dosyaları üretir. \n"
"dpkg-scanpackages ve dpkg-scansources için tamamen otomatikten\n"
@@ -2016,7 +2013,7 @@ msgstr ""
" -s=? Kaynak geçersiz kılma dosyası\n"
" -q Sessiz\n"
" -d=? Seçimlik önbellek veritabanını seç\n"
-" --no-delink Bağlantılanmamış hata ayıklama kipini etkinleştir\n"
+" --no-delink Bağ kurulmamış hata ayıklama kipini etkinleştir\n"
" --contents İçerik dosyası üretimini denetle\n"
" -c=? Belirtilen yapılandırma dosyası kullan\n"
" -o=? Yapılandırma seçeneği ayarla"
@@ -2074,24 +2071,24 @@ msgstr "İmleç alınamıyor"
#: ftparchive/writer.cc:91
#, c-format
msgid "W: Unable to read directory %s\n"
-msgstr "W: %s dizini okunamıyor\n"
+msgstr "U: %s dizini okunamıyor\n"
#: ftparchive/writer.cc:96
#, c-format
msgid "W: Unable to stat %s\n"
-msgstr "W: %s durum bilgisi alınamıyor\n"
+msgstr "U: %s durum bilgisi alınamıyor\n"
#: ftparchive/writer.cc:152
msgid "E: "
-msgstr "E: "
+msgstr "H: "
#: ftparchive/writer.cc:154
msgid "W: "
-msgstr "W: "
+msgstr "U: "
#: ftparchive/writer.cc:161
msgid "E: Errors apply to file "
-msgstr "E: Hatalar şu dosya için geçerli: "
+msgstr "H: Hatalar şu dosya için geçerli: "
#: ftparchive/writer.cc:179 ftparchive/writer.cc:211
#, c-format
@@ -2115,12 +2112,12 @@ msgstr " DeLink %s [%s]\n"
#: ftparchive/writer.cc:286
#, c-format
msgid "Failed to readlink %s"
-msgstr "%s bağlantı okuması başarılamadı"
+msgstr "%s readlink çağrısı başarısız oldu"
#: ftparchive/writer.cc:290
#, c-format
msgid "Failed to unlink %s"
-msgstr "%s bağlantı koparma başarılamadı"
+msgstr "%s bağı koparılamadı"
#: ftparchive/writer.cc:298
#, c-format
@@ -2130,7 +2127,7 @@ msgstr "*** %s, %s konumuna bağlanamadı"
#: ftparchive/writer.cc:308
#, c-format
msgid " DeLink limit of %sB hit.\n"
-msgstr " %sB'lik bağlantı koparma (DeLink) sınırına ulaşıldı.\n"
+msgstr " %sB'lik bağ koparma (DeLink) sınırına ulaşıldı.\n"
#: ftparchive/writer.cc:417
msgid "Archive had no package field"
@@ -2200,7 +2197,7 @@ msgstr "Bilinmeyen sıkıştırma algoritması '%s'"
#: ftparchive/multicompress.cc:103
#, c-format
msgid "Compressed output %s needs a compression set"
-msgstr "Sıkıştırılmış %s çıktısı bir sıkıştırma kümesine ihtiyaç duymaktadır."
+msgstr "Sıkıştırılmış %s çıktısı bir sıkıştırma kümesine ihtiyaç duymaktadır"
#: ftparchive/multicompress.cc:192
msgid "Failed to create FILE*"
@@ -2252,7 +2249,7 @@ msgid ""
msgstr ""
"Kullanım: apt-internal-solver\n"
"\n"
-"apt-internal-solver mevcut dahili çözücüyü (hata ayıklama\n"
+"apt-internal-solver mevcut dâhilî çözücüyü (hata ayıklama\n"
"gibi sebeplerle) harici çözücü gibi kullanmaya yarayan bir\n"
"arayüzdür.\n"
"\n"
@@ -2355,11 +2352,11 @@ msgstr "%s durum bilgisi alınamadı"
#: apt-inst/filelist.cc:380
msgid "DropNode called on still linked node"
-msgstr "DropNode hala bağlı olan düğüm üzerinde çağrıldı"
+msgstr "DropNode hâlâ bağlı olan düğüm üzerinde çağrıldı"
#: apt-inst/filelist.cc:412
msgid "Failed to locate the hash element!"
-msgstr "Sağlama elementi bulunamadı"
+msgstr "Sağlama elementi bulunamadı!"
#: apt-inst/filelist.cc:459
msgid "Failed to allocate diversion"
@@ -2395,7 +2392,7 @@ msgstr "Arşiv üyesi başlığı okuma hatası"
#: apt-inst/contrib/arfile.cc:96
#, c-format
msgid "Invalid archive member header %s"
-msgstr "Geçerşiz arşiv üyesi başlığı %s"
+msgstr "Geçersiz arşiv üyesi başlığı %s"
#: apt-inst/contrib/arfile.cc:108
msgid "Invalid archive member header"
@@ -2610,8 +2607,7 @@ msgstr "Paket listeleri ya da durum dosyası ayrıştırılamadı ya da açılam
#: apt-pkg/cachefile.cc:98
msgid "You may want to run apt-get update to correct these problems"
-msgstr ""
-"Bu sorunları gidermek için apt-get update komutunu çalıştırabilirsiniz."
+msgstr "Bu sorunları gidermek için apt-get update komutunu çalıştırabilirsiniz"
#: apt-pkg/cachefile.cc:116
msgid "The list of sources could not be read."
@@ -2743,7 +2739,7 @@ msgstr ""
#: apt-pkg/cdrom.cc:819
msgid "Copying package lists..."
-msgstr "Paket listeleri kopyalanıyor.."
+msgstr "Paket listeleri kopyalanıyor..."
#: apt-pkg/cdrom.cc:863
msgid "Writing new source list\n"
@@ -2777,7 +2773,7 @@ msgstr "Durum bilgisi okunuyor"
#: apt-pkg/depcache.cc:250
#, c-format
msgid "Failed to open StateFile %s"
-msgstr "Durum dosyası (StateFile) %s açılamadı."
+msgstr "Durum dosyası (StateFile) %s açılamadı"
#: apt-pkg/depcache.cc:256
#, c-format
@@ -2827,7 +2823,7 @@ msgstr "%2$i eksik dosya ve %3$i eşleşmeyen dosyayla %1$i kayıt yazıldı\n"
#: apt-pkg/indexcopy.cc:515
#, c-format
msgid "Can't find authentication record for: %s"
-msgstr "%s için kimlik doğrulama kaydı bulunamadı."
+msgstr "%s için kimlik doğrulama kaydı bulunamadı"
#: apt-pkg/indexcopy.cc:521
#, c-format
@@ -2905,7 +2901,7 @@ msgstr ""
#: apt-pkg/pkgcache.cc:155
msgid "Empty package cache"
-msgstr "Boş paket önbelleği"
+msgstr "Paket önbelleği boş"
#: apt-pkg/pkgcache.cc:161
msgid "The package cache file is corrupted"
@@ -2922,7 +2918,7 @@ msgstr "Paket önbellek dosyası bozulmuş, çok küçük"
#: apt-pkg/pkgcache.cc:174
#, c-format
msgid "This APT does not support the versioning system '%s'"
-msgstr "Bu APT '%s' sürümleme sistemini desteklemiyor."
+msgstr "Bu APT '%s' sürümleme sistemini desteklemiyor"
#: apt-pkg/pkgcache.cc:179
msgid "The package cache was built for a different architecture"
@@ -3054,7 +3050,7 @@ msgid ""
"available in the sources"
msgstr ""
"APT::Default-Release için '%s' değeri geçersizdir, çünkü kaynaklarda böyle "
-"bir sürüm yok."
+"bir sürüm yok"
#: apt-pkg/policy.cc:422
#, c-format
@@ -3159,7 +3155,7 @@ msgstr "'%s' türü bilinmiyor (girdi: %u, kaynak listesi: %s)"
#: apt-pkg/srcrecords.cc:52
msgid "You must put some 'source' URIs in your sources.list"
-msgstr "'sources.list' dosyası içine bazı 'source' adresleri koymalısınız."
+msgstr "'sources.list' dosyası içine bazı 'source' adresleri koymalısınız"
#: apt-pkg/tagfile.cc:140
#, c-format
@@ -3212,7 +3208,7 @@ msgstr "Komut satırı seçeneği %s mantıksal değer değil"
#: apt-pkg/contrib/cmndline.cc:209 apt-pkg/contrib/cmndline.cc:230
#, c-format
msgid "Option %s requires an argument."
-msgstr "%s seçeneği bir bağımsız değişkene gerek duyar."
+msgstr "%s seçeneği bir argüman kullanımını gerektirir."
#: apt-pkg/contrib/cmndline.cc:243 apt-pkg/contrib/cmndline.cc:249
#, c-format
@@ -3224,7 +3220,8 @@ msgstr ""
#: apt-pkg/contrib/cmndline.cc:278
#, c-format
msgid "Option %s requires an integer argument, not '%s'"
-msgstr "%s seçeneği bir tam sayı bağımsız değişkene gerek duyar, '%s' değil"
+msgstr ""
+"%s seçeneği bir tam sayı argümanının kullanımını gerektirir, '%s' değil"
#: apt-pkg/contrib/cmndline.cc:309
#, c-format
@@ -3254,49 +3251,49 @@ msgstr "Yapılandırma dosyası (%s) açılıyor"
#: apt-pkg/contrib/configuration.cc:801
#, c-format
msgid "Syntax error %s:%u: Block starts with no name."
-msgstr "Sözdizim hatası %s:%u: Blok ad olmadan başlıyor."
+msgstr "Sözdizimi hatası %s:%u: Blok ad olmadan başlıyor."
#: apt-pkg/contrib/configuration.cc:820
#, c-format
msgid "Syntax error %s:%u: Malformed tag"
-msgstr "Sözdizim hatası %s:%u: Kötü biçimlendirilmiş etiket"
+msgstr "Sözdizimi hatası %s:%u: Kötü biçimlendirilmiş etiket"
#: apt-pkg/contrib/configuration.cc:837
#, c-format
msgid "Syntax error %s:%u: Extra junk after value"
-msgstr "Sözdizim hatası %s:%u: Değerden sonra ilave gereksiz"
+msgstr "Sözdizimi hatası %s:%u: Değerden sonra ilave gereksiz"
#: apt-pkg/contrib/configuration.cc:877
#, c-format
msgid "Syntax error %s:%u: Directives can only be done at the top level"
-msgstr "Sözdizim hatası %s:%u: Yönergeler yalnızca en üst düzeyde bitebilir"
+msgstr "Sözdizimi hatası %s:%u: Yönergeler sadece en üst düzeyde bitebilir"
#: apt-pkg/contrib/configuration.cc:884
#, c-format
msgid "Syntax error %s:%u: Too many nested includes"
-msgstr "Sözdizim hatası %s:%u: Çok fazla yuvalanmış 'include'"
+msgstr "Sözdizimi hatası %s:%u: Çok fazla yuvalanmış 'include'"
#: apt-pkg/contrib/configuration.cc:888 apt-pkg/contrib/configuration.cc:893
#, c-format
msgid "Syntax error %s:%u: Included from here"
-msgstr "Sözdizim hatası %s:%u: Buradan 'include' edilmiş"
+msgstr "Sözdizimi hatası %s:%u: Buradan 'include' edilmiş"
#: apt-pkg/contrib/configuration.cc:897
#, c-format
msgid "Syntax error %s:%u: Unsupported directive '%s'"
-msgstr "Sözdizim hatası %s:%u: Desteklenmeyen yönerge '%s'"
+msgstr "Sözdizimi hatası %s:%u: Desteklenmeyen yönerge '%s'"
#: apt-pkg/contrib/configuration.cc:900
#, c-format
msgid "Syntax error %s:%u: clear directive requires an option tree as argument"
msgstr ""
-"Sözdizim hatası %s:%u: clear yönergesi argüman olarak bir seçenek ağacı "
-"gerektirir."
+"Sözdizimi hatası %s:%u: clear yönergesi bir seçenek ağacı argümanını "
+"gerektirir"
#: apt-pkg/contrib/configuration.cc:950
#, c-format
msgid "Syntax error %s:%u: Extra junk at end of file"
-msgstr "Sözdizim hatası %s:%u: Dosya sonunda ilave gereksiz"
+msgstr "Sözdizimi hatası %s:%u: Dosya sonunda ilave gereksiz"
#: apt-pkg/contrib/fileutl.cc:190
#, c-format
@@ -3327,13 +3324,13 @@ msgstr "'%s' dizin olmadığı için dosya listeli oluşturulamıyor"
#, c-format
msgid "Ignoring '%s' in directory '%s' as it is not a regular file"
msgstr ""
-"'%2$s' dizinindeki '%1$s' normal bir dosya olmadığı için görmezden geliniyor."
+"'%2$s' dizinindeki '%1$s' normal bir dosya olmadığı için görmezden geliniyor"
#: apt-pkg/contrib/fileutl.cc:412
#, c-format
msgid "Ignoring file '%s' in directory '%s' as it has no filename extension"
msgstr ""
-"'%2$s' dizinindeki '%1$s' dosyası uzantısı olmadığı için görmezden geliniyor."
+"'%2$s' dizinindeki '%1$s' dosyası uzantısı olmadığı için görmezden geliniyor"
#: apt-pkg/contrib/fileutl.cc:421
#, c-format
@@ -3341,7 +3338,7 @@ msgid ""
"Ignoring file '%s' in directory '%s' as it has an invalid filename extension"
msgstr ""
"'%2$s' dizinindeki '%1$s' dosyası geçersiz bir dosya uzantısı olduğu için "
-"yok sayılıyor."
+"yok sayılıyor"
#: apt-pkg/contrib/fileutl.cc:824
#, c-format
@@ -3351,7 +3348,7 @@ msgstr "%s altsüreci bir bölümleme hatası aldı (segmentation fault)."
#: apt-pkg/contrib/fileutl.cc:826
#, c-format
msgid "Sub-process %s received signal %u."
-msgstr "%s altsüreci %u sinyali aldı"
+msgstr "%s altsüreci %u sinyali aldı."
#: apt-pkg/contrib/fileutl.cc:830 apt-pkg/contrib/gpgv.cc:239
#, c-format
@@ -3389,7 +3386,7 @@ msgstr "Sıkıştırma programı çalıştırılamadı "
#: apt-pkg/contrib/fileutl.cc:1514
#, c-format
msgid "read, still have %llu to read but none left"
-msgstr "read, %llu bayt okunması gerekli fakat hiç kalmamış"
+msgstr "read, %llu bayt okunması gerekli ama hiç kalmamış"
#: apt-pkg/contrib/fileutl.cc:1627 apt-pkg/contrib/fileutl.cc:1649
#, c-format
diff --git a/po/uk.po b/po/uk.po
index 4f7b34a37..910e48dbf 100644
--- a/po/uk.po
+++ b/po/uk.po
@@ -1057,8 +1057,8 @@ msgid "At least one invalid signature was encountered."
msgstr "Знайдено як мінімум один невірний підпис."
#: methods/gpgv.cc:174
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
-msgstr "Неможливо виконати 'gpgv' для перевірки підпису (чи встановлено gpgv?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
+msgstr "Неможливо виконати 'apt-key' для перевірки підпису (чи встановлено gnupg?)"
#. TRANSLATORS: %s is a single techy word like 'NODATA'
#: methods/gpgv.cc:180
@@ -1069,8 +1069,8 @@ msgid ""
msgstr ""
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "Невідома помилка виконання gpgv"
+msgid "Unknown error executing apt-key"
+msgstr "Невідома помилка виконання apt-key"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1759,8 +1759,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "Обчислення оновлень... "
+msgid "Calculating upgrade"
+msgstr "Обчислення оновлень"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/vi.po b/po/vi.po
index 71dcfa553..2532d68bf 100644
--- a/po/vi.po
+++ b/po/vi.po
@@ -1092,9 +1092,9 @@ msgid "At least one invalid signature was encountered."
msgstr "Gặp ít nhất một chữ ký không hợp lệ."
#: methods/gpgv.cc:174
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
msgstr ""
-"Không thể thực hiện “gpgv” để thẩm tra chữ ký (gpgv đã được cài đặt chưa?)"
+"Không thể thực hiện “apt-key” để thẩm tra chữ ký (gnupg đã được cài đặt chưa?)"
#. TRANSLATORS: %s is a single techy word like 'NODATA'
#: methods/gpgv.cc:180
@@ -1107,8 +1107,8 @@ msgstr ""
"không?)"
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "Gặp lỗi không rõ khi thực hiện gpgv"
+msgid "Unknown error executing apt-key"
+msgstr "Gặp lỗi không rõ khi thực hiện apt-key"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1774,8 +1774,8 @@ msgid "All packages are up to date."
msgstr "Mọi gói đã được cập nhật."
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "Đang tính toán nâng cấp... "
+msgid "Calculating upgrade"
+msgstr "Đang tính toán nâng cấp"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/zh_CN.po b/po/zh_CN.po
index 67e69f716..79e433f3d 100644
--- a/po/zh_CN.po
+++ b/po/zh_CN.po
@@ -1006,8 +1006,8 @@ msgid "At least one invalid signature was encountered."
msgstr "至少发现一个无效的签名。"
#: methods/gpgv.cc:174
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
-msgstr "无法运行 gpgv 以验证签名(您安装了 gpgv 吗?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
+msgstr "无法运行 apt-key 以验证签名(您安装了 gnupg 吗?)"
#. TRANSLATORS: %s is a single techy word like 'NODATA'
#: methods/gpgv.cc:180
@@ -1018,8 +1018,8 @@ msgid ""
msgstr ""
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "运行 gpgv 时发生未知错误"
+msgid "Unknown error executing apt-key"
+msgstr "运行 apt-key 时发生未知错误"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1669,8 +1669,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "正在对升级进行计算... "
+msgid "Calculating upgrade"
+msgstr "正在对升级进行计算"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/po/zh_TW.po b/po/zh_TW.po
index 59a8dcac7..c39e039af 100644
--- a/po/zh_TW.po
+++ b/po/zh_TW.po
@@ -998,9 +998,8 @@ msgid "At least one invalid signature was encountered."
msgstr "至少發現一個無效的簽章。"
#: methods/gpgv.cc:174
-#, fuzzy
-msgid "Could not execute 'gpgv' to verify signature (is gpgv installed?)"
-msgstr "無法執行 '%s' 來驗證簽章(gpgv 是否安裝了?)"
+msgid "Could not execute 'apt-key' to verify signature (is gnupg installed?)"
+msgstr "無法執行 'apt-key' 來驗證簽章(gnupg 是否安裝了?)"
#. TRANSLATORS: %s is a single techy word like 'NODATA'
#: methods/gpgv.cc:180
@@ -1011,8 +1010,8 @@ msgid ""
msgstr ""
#: methods/gpgv.cc:184
-msgid "Unknown error executing gpgv"
-msgstr "在執行 gpgv 時發生未知的錯誤"
+msgid "Unknown error executing apt-key"
+msgstr "在執行 apt-key 時發生未知的錯誤"
#: methods/gpgv.cc:217 methods/gpgv.cc:224
msgid "The following signatures were invalid:\n"
@@ -1669,8 +1668,8 @@ msgid "All packages are up to date."
msgstr ""
#: apt-private/private-upgrade.cc:25
-msgid "Calculating upgrade... "
-msgstr "籌備升級中... "
+msgid "Calculating upgrade"
+msgstr "籌備升級中"
#: apt-private/private-upgrade.cc:28
msgid "Done"
diff --git a/test/integration/framework b/test/integration/framework
index 7923e23d9..a8d6bf3d0 100644
--- a/test/integration/framework
+++ b/test/integration/framework
@@ -43,10 +43,16 @@ msgtest() {
printf "…${CNORMAL} "
}
msgpass() { printf "${CPASS}PASS${CNORMAL}\n"; }
-msgskip() { printf "${CWARNING}SKIP${CNORMAL}\n" >&2; }
+msgskip() {
+ if [ $# -gt 0 ]; then printf "${CWARNING}SKIP: $*${CNORMAL}\n" >&2;
+ else printf "${CWARNING}SKIP${CNORMAL}\n" >&2; fi
+}
msgfail() {
if [ $# -gt 0 ]; then printf "${CFAIL}FAIL: $*${CNORMAL}\n" >&2;
else printf "${CFAIL}FAIL${CNORMAL}\n" >&2; fi
+ if [ -n "$APT_DEBUG_TESTS" ]; then
+ bash
+ fi
EXIT_CODE=$((EXIT_CODE+1));
}
@@ -102,10 +108,10 @@ runapt() {
local CMD="$1"
shift
case $CMD in
- sh|aptitude|*/*) ;;
+ sh|aptitude|*/*|command) ;;
*) CMD="${BUILDDIRECTORY}/$CMD";;
esac
- MALLOC_PERTURB_=21 MALLOC_CHECK_=2 APT_CONFIG="$(getaptconfig)" LD_LIBRARY_PATH=${BUILDDIRECTORY} $CMD "$@"
+ MALLOC_PERTURB_=21 MALLOC_CHECK_=2 APT_CONFIG="$(getaptconfig)" LD_LIBRARY_PATH=${LIBRARYPATH} $CMD "$@"
}
aptconfig() { runapt apt-config "$@"; }
aptcache() { runapt apt-cache "$@"; }
@@ -129,18 +135,9 @@ dpkgcheckbuilddeps() {
command dpkg-checkbuilddeps --admindir=${TMPWORKINGDIRECTORY}/rootdir/var/lib/dpkg "$@"
}
gdb() {
- echo "gdb: run »$*«"
- CMD="$1"
+ local CMD="$1"
shift
-
- APT_CONFIG=aptconfig.conf LD_LIBRARY_PATH=${LIBRARYPATH} command gdb ${BUILDDIRECTORY}/$CMD --args ${BUILDDIRECTORY}/$CMD "$@"
-}
-gpg() {
- # see apt-key for the whole trickery. Setup is done in setupenvironment
- command gpg --ignore-time-conflict --no-options --no-default-keyring \
- --homedir "${TMPWORKINGDIRECTORY}/gnupghome" \
- --no-auto-check-trustdb --trust-model always \
- "$@"
+ runapt command gdb --quiet -ex run "${BUILDDIRECTORY}/$CMD" --args "${BUILDDIRECTORY}/$CMD" "$@"
}
exitwithstatus() {
@@ -173,9 +170,16 @@ addtrap() {
setupenvironment() {
TMPWORKINGDIRECTORY=$(mktemp -d)
- TESTDIRECTORY=$(readlink -f $(dirname $0))
+ addtrap "cd /; rm -rf $TMPWORKINGDIRECTORY;"
msgninfo "Preparing environment for ${CCMD}$(basename $0)${CINFO} in ${TMPWORKINGDIRECTORY}… "
+ if [ "$(id -u)" = '0' ]; then
+ # relax permissions so that running as root with user switching works
+ umask 022
+ chmod o+rx "$TMPWORKINGDIRECTORY"
+ fi
+
+ TESTDIRECTORY=$(readlink -f $(dirname $0))
# allow overriding the default BUILDDIR location
BUILDDIRECTORY=${APT_INTEGRATION_TESTS_BUILD_DIR:-"${TESTDIRECTORY}/../../build/bin"}
LIBRARYPATH=${APT_INTEGRATION_TESTS_LIBRARY_PATH:-"${BUILDDIRECTORY}"}
@@ -186,12 +190,11 @@ setupenvironment() {
test -x "${BUILDDIRECTORY}/apt-get" || msgdie "You need to build tree first"
# -----
- addtrap "cd /; rm -rf $TMPWORKINGDIRECTORY;"
cd $TMPWORKINGDIRECTORY
mkdir rootdir aptarchive keys
cd rootdir
mkdir -p etc/apt/apt.conf.d etc/apt/sources.list.d etc/apt/trusted.gpg.d etc/apt/preferences.d
- mkdir -p var/cache var/lib/apt var/log tmp
+ mkdir -p usr/bin var/cache var/lib/apt var/log tmp
mkdir -p var/lib/dpkg/info var/lib/dpkg/updates var/lib/dpkg/triggers
touch var/lib/dpkg/available
mkdir -p usr/lib/apt
@@ -219,12 +222,23 @@ setupenvironment() {
cp "${TESTDIRECTORY}/${SOURCESSFILE}" aptarchive/Sources
fi
cp $(find $TESTDIRECTORY -name '*.pub' -o -name '*.sec') keys/
+ chmod 644 $(find keys -name '*.pub' -o -name '*.sec')
ln -s ${TMPWORKINGDIRECTORY}/keys/joesixpack.pub rootdir/etc/apt/trusted.gpg.d/joesixpack.gpg
+
echo "Dir \"${TMPWORKINGDIRECTORY}/rootdir\";" > aptconfig.conf
echo "Dir::state::status \"${TMPWORKINGDIRECTORY}/rootdir/var/lib/dpkg/status\";" >> aptconfig.conf
echo "Debug::NoLocking \"true\";" >> aptconfig.conf
echo "APT::Get::Show-User-Simulation-Note \"false\";" >> aptconfig.conf
echo "Dir::Bin::Methods \"${METHODSDIR}\";" >> aptconfig.conf
+ # store apt-key where we can access it, even if we run it as a different user
+ # destroys coverage reporting though, so just do it for root for now
+ if [ "$(id -u)" = '0' ]; then
+ cp "${BUILDDIRECTORY}/apt-key" "${TMPWORKINGDIRECTORY}/rootdir/usr/bin/"
+ chmod o+rx "${TMPWORKINGDIRECTORY}/rootdir/usr/bin/apt-key"
+ echo "Dir::Bin::apt-key \"${TMPWORKINGDIRECTORY}/rootdir/usr/bin/apt-key\";" >> aptconfig.conf
+ else
+ echo "Dir::Bin::apt-key \"${BUILDDIRECTORY}/apt-key\";" >> aptconfig.conf
+ fi
echo "Dir::Bin::dpkg \"fakeroot\";" >> aptconfig.conf
echo "DPKG::options:: \"dpkg\";" >> aptconfig.conf
echo "DPKG::options:: \"--root=${TMPWORKINGDIRECTORY}/rootdir\";" >> aptconfig.conf
@@ -235,25 +249,28 @@ setupenvironment() {
fi
echo "DPKG::options:: \"--log=${TMPWORKINGDIRECTORY}/rootdir/var/log/dpkg.log\";" >> aptconfig.conf
echo 'quiet::NoUpdate "true";' >> aptconfig.conf
- echo "Acquire::https::CaInfo \"${TESTDIR}/apt.pem\";" > rootdir/etc/apt/apt.conf.d/99https
- echo "Apt::Cmd::Disable-Script-Warning \"1\";" > rootdir/etc/apt/apt.conf.d/apt-binary
+ echo 'quiet::NoStatistic "true";' >> aptconfig.conf
+ # too distracting for users, but helpful to detect changes
+ echo 'Acquire::Progress::Ignore::ShowErrorText "true";' >> aptconfig.conf
+ # in testcases, it can appear as if localhost has a rotation setup,
+ # hide this as we can't really deal with it properly
+ echo 'Acquire::Failure::ShowIP "false";' >> aptconfig.conf
+
+ cp "${TESTDIRECTORY}/apt.pem" "${TMPWORKINGDIRECTORY}/rootdir/etc/webserver.pem"
+ if [ "$(id -u)" = '0' ]; then
+ chown _apt:root "${TMPWORKINGDIRECTORY}/rootdir/etc/webserver.pem"
+ fi
+ echo "Acquire::https::CaInfo \"${TMPWORKINGDIRECTORY}/rootdir/etc/webserver.pem\";" > rootdir/etc/apt/apt.conf.d/99https
+ echo "Apt::Cmd::Disable-Script-Warning \"1\";" > rootdir/etc/apt/apt.conf.d/apt-binary
configcompression '.' 'gz' #'bz2' 'lzma' 'xz'
- # gpg needs a trustdb to function, but it can't be invalid (not even empty)
- # see also apt-key where this trickery comes from:
- local TRUSTDBDIR="${TMPWORKINGDIRECTORY}/gnupghome"
- mkdir "$TRUSTDBDIR"
- chmod 700 "$TRUSTDBDIR"
- # We also don't use a secret keyring, of course, but gpg panics and
- # implodes if there isn't one available - and writeable for imports
- local SECRETKEYRING="${TRUSTDBDIR}/secring.gpg"
- touch $SECRETKEYRING
- # now create the trustdb with an (empty) dummy keyring
- # newer gpg versions are fine without it, but play it safe for now
- gpg --quiet --check-trustdb --secret-keyring $SECRETKEYRING --keyring $SECRETKEYRING >/dev/null 2>&1
+ # Acquire::AllowInsecureRepositories=false is not yet the default
+ # but we want it to be the default soon
+ configallowinsecurerepositories "false";
# cleanup the environment a bit
- export PATH="${PATH}:/usr/local/sbin:/usr/sbin:/sbin"
+ # prefer our apt binaries over the system apt binaries
+ export PATH="${BUILDDIRECTORY}:${PATH}:/usr/local/sbin:/usr/sbin:/sbin"
export LC_ALL=C.UTF-8
unset LANGUAGE APT_CONFIG
unset GREP_OPTIONS DEB_BUILD_PROFILES
@@ -325,6 +342,11 @@ configdpkg() {
fi
}
+configallowinsecurerepositories() {
+ echo "Acquire::AllowInsecureRepositories \"$1\";" > rootdir/etc/apt/apt.conf.d/allow-insecure-repositories.conf
+
+}
+
configcompression() {
while [ -n "$1" ]; do
case "$1" in
@@ -478,8 +500,8 @@ Package: $NAME" >> ${BUILDDIR}/debian/control
| while read SRC; do
echo "pool/${SRC}" >> ${BUILDDIR}/../${RELEASE}.${DISTSECTION}.srclist
# if expr match "${SRC}" '.*\.dsc' >/dev/null 2>&1; then
-# gpg --yes --secret-keyring ./keys/joesixpack.sec \
-# --keyring ./keys/joesixpack.pub --default-key 'Joe Sixpack' \
+# aptkey --keyring ./keys/joesixpack.pub --secret-keyring ./keys/joesixpack.sec --quiet --readonly \
+# adv --yes --default-key 'Joe Sixpack' \
# --clearsign -o "${BUILDDIR}/../${SRC}.sign" "${BUILDDIR}/../$SRC"
# mv "${BUILDDIR}/../${SRC}.sign" "${BUILDDIR}/../$SRC"
# fi
@@ -733,7 +755,7 @@ buildaptarchivefromincoming() {
aptftparchive -qq generate ftparchive.conf
cd - > /dev/null
msgdone "info"
- generatereleasefiles
+ generatereleasefiles "$@"
}
buildaptarchivefromfiles() {
@@ -848,24 +870,29 @@ setupflataptarchive() {
}
setupaptarchive() {
- buildaptarchive
+ local NOUPDATE=0
+ if [ "$1" = '--no-update' ]; then
+ NOUPDATE=1
+ shift
+ fi
+ buildaptarchive "$@"
if [ -e aptarchive/dists ]; then
setupdistsaptarchive
else
setupflataptarchive
fi
- signreleasefiles
- if [ "$1" != '--no-update' ]; then
- msgninfo "\tSync APT's cache with the archive… "
- aptget update -qq
- msgdone "info"
+ signreleasefiles 'Joe Sixpack'
+ if [ "1" != "$NOUPDATE" ]; then
+ testsuccess aptget update -o Debug::pkgAcquire::Worker=true -o Debug::Acquire::gpgv=true
fi
}
signreleasefiles() {
local SIGNER="${1:-Joe Sixpack}"
- local GPG="gpg --batch --yes"
- msgninfo "\tSign archive with $SIGNER key… "
+ local REPODIR="${2:-aptarchive}"
+ local KEY="keys/$(echo "$SIGNER" | tr 'A-Z' 'a-z' | sed 's# ##g')"
+ local GPG="aptkey --quiet --keyring ${KEY}.pub --secret-keyring ${KEY}.sec --readonly adv --batch --yes"
+ msgninfo "\tSign archive with $SIGNER key $KEY… "
local REXKEY='keys/rexexpired'
local SECEXPIREBAK="${REXKEY}.sec.bak"
local PUBEXPIREBAK="${REXKEY}.pub.bak"
@@ -881,18 +908,15 @@ signreleasefiles() {
cp $SECUNEXPIRED ${REXKEY}.sec
cp $PUBUNEXPIRED ${REXKEY}.pub
else
- printf "expire\n1w\nsave\n" | $GPG --keyring ${REXKEY}.pub --secret-keyring ${REXKEY}.sec --command-fd 0 --edit-key "${SIGNER}" >/dev/null 2>&1 || true
+ if ! printf "expire\n1w\nsave\n" | $GPG --default-key "$SIGNER" --command-fd 0 --edit-key "${SIGNER}" >setexpire.gpg 2>&1; then
+ cat setexpire.gpg
+ exit 1
+ fi
cp ${REXKEY}.sec $SECUNEXPIRED
cp ${REXKEY}.pub $PUBUNEXPIRED
fi
fi
- for KEY in $(find keys/ -name '*.sec'); do
- GPG="$GPG --secret-keyring $KEY"
- done
- for KEY in $(find keys/ -name '*.pub'); do
- GPG="$GPG --keyring $KEY"
- done
- for RELEASE in $(find aptarchive/ -name Release); do
+ for RELEASE in $(find ${REPODIR}/ -name Release); do
$GPG --default-key "$SIGNER" --armor --detach-sign --sign --output ${RELEASE}.gpg ${RELEASE}
local INRELEASE="$(echo "${RELEASE}" | sed 's#/Release$#/InRelease#')"
$GPG --default-key "$SIGNER" --clearsign --output $INRELEASE $RELEASE
@@ -974,7 +998,7 @@ changetohttpswebserver() {
changetowebserver --no-rewrite "$@"
fi
echo "pid = ${TMPWORKINGDIRECTORY}/aptarchive/stunnel.pid
-cert = ${TESTDIRECTORY}/apt.pem
+cert = ${TMPWORKINGDIRECTORY}/rootdir/etc/webserver.pem
output = /dev/null
[https]
@@ -1174,9 +1198,10 @@ testsuccess() {
if $@ >${OUTPUT} 2>&1; then
msgpass
else
+ local EXITCODE=$?
echo >&2
cat >&2 $OUTPUT
- msgfail
+ msgfail "exitcode $EXITCODE"
fi
}
@@ -1188,14 +1213,28 @@ testfailure() {
fi
local OUTPUT="${TMPWORKINGDIRECTORY}/rootdir/tmp/testfailure.output"
if $@ >${OUTPUT} 2>&1; then
+ local EXITCODE=$?
echo >&2
cat >&2 $OUTPUT
- msgfail
+ msgfail "exitcode $EXITCODE"
else
msgpass
fi
}
+testaccessrights() {
+ msgtest "Test that file $1 has access rights set to" "$2"
+ if [ "$2" = "$(stat --format '%a' "$1")" ]; then
+ msgpass
+ else
+ echo >&2
+ ls -l >&2 "$1"
+ echo -n >&2 "stat(1) reports access rights: "
+ stat --format '%a' "$1" >&2
+ msgfail
+ fi
+}
+
testwebserverlaststatuscode() {
local DOWNLOG='rootdir/tmp/webserverstatus-testfile.log'
local STATUS='rootdir/tmp/webserverstatus-statusfile.log'
diff --git a/test/integration/skip-aptwebserver b/test/integration/skip-aptwebserver
new file mode 100755
index 000000000..0622941ce
--- /dev/null
+++ b/test/integration/skip-aptwebserver
@@ -0,0 +1,25 @@
+#!/bin/sh
+set -e
+
+TESTDIR=$(readlink -f $(dirname $0))
+. $TESTDIR/framework
+
+setupenvironment
+configarchitecture 'amd64'
+
+buildsimplenativepackage 'apt' 'all' '1.0' 'stable'
+
+setupaptarchive
+changetowebserver
+
+rm -rf rootdir/var/lib/apt/lists
+aptget update -qq
+testequal 'Hit http://localhost stable InRelease
+Hit http://localhost stable/main Sources
+Hit http://localhost stable/main amd64 Packages
+Hit http://localhost stable/main Translation-en
+Reading package lists...' aptget update
+
+mv rootdir/var/lib/apt/lists/localhost* rootdir/var/lib/apt/lists/partial
+aptget update
+
diff --git a/test/integration/test-allow-scores-for-all-dependency-types b/test/integration/test-allow-scores-for-all-dependency-types
index a5c98f3d6..d1bcf1130 100755
--- a/test/integration/test-allow-scores-for-all-dependency-types
+++ b/test/integration/test-allow-scores-for-all-dependency-types
@@ -39,6 +39,7 @@ insertinstalledpackage 'libdb-dev' 'amd64' '5.1.7' 'Depends: libdb5.1-dev'
insertinstalledpackage 'libdb5.1-dev' 'amd64' '5.1.29-7'
testequal 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following packages will be REMOVED:
libdb5.1-dev
The following NEW packages will be installed:
@@ -53,6 +54,7 @@ Conf libdb5.3-dev (5.3.28-3 unversioned [amd64])
Conf libdb-dev (5.3.0 unversioned [amd64])' aptget dist-upgrade -st unversioned
testequal 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following packages will be REMOVED:
libdb5.1-dev
The following NEW packages will be installed:
@@ -71,21 +73,25 @@ insertinstalledpackage 'foo' 'amd64' '1'
insertinstalledpackage 'bar' 'amd64' '1'
testequal 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following packages have been kept back:
bar foo
0 upgraded, 0 newly installed, 0 to remove and 2 not upgraded.' aptget dist-upgrade -st unversioned
testequal 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following packages have been kept back:
bar foo
0 upgraded, 0 newly installed, 0 to remove and 2 not upgraded.' aptget dist-upgrade -st versioned
testequal 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following packages have been kept back:
bar foo
0 upgraded, 0 newly installed, 0 to remove and 2 not upgraded.' aptget dist-upgrade -st multipleno
testequal 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following packages will be REMOVED:
foo
The following packages will be upgraded:
diff --git a/test/integration/test-apt-by-hash-update b/test/integration/test-apt-by-hash-update
new file mode 100755
index 000000000..6e1ecdaff
--- /dev/null
+++ b/test/integration/test-apt-by-hash-update
@@ -0,0 +1,49 @@
+#!/bin/sh
+set -e
+
+TESTDIR=$(readlink -f $(dirname $0))
+. $TESTDIR/framework
+
+setupenvironment
+configarchitecture "i386"
+
+insertpackage 'unstable' 'foo' 'all' '1.0'
+
+setupaptarchive --no-update
+
+APTARCHIVE=$(readlink -f ./aptarchive)
+
+# make Packages *only* accessable by-hash for this test
+mkdir -p aptarchive/dists/unstable/main/binary-i386/by-hash/SHA512
+(cd aptarchive/dists/unstable/main/binary-i386/by-hash/SHA512 &&
+ mv ../../Packages* . &&
+ ln -s Packages.gz $(sha512sum Packages.gz|cut -f1 -d' ') )
+
+# add sources
+mkdir -p aptarchive/dists/unstable/main/source/by-hash/SHA512
+(cd aptarchive/dists/unstable/main/source/by-hash/SHA512 &&
+ ln -s ../../Sources.gz $(sha512sum ../../Sources.gz|cut -f1 -d' ')
+)
+
+# we moved the Packages file away, normal update won't work
+testfailure aptget update
+
+# ensure we do not know about "foo"
+testequal "Reading package lists...
+Building dependency tree...
+E: Unable to locate package foo" aptget install -q -s foo
+
+# ensure we can apt-get update by hash
+testsuccess aptget update -o APT::Acquire::By-Hash=1
+
+# ensure it works
+testequal "Inst foo (1.0 unstable [all])
+Conf foo (1.0 unstable [all])" aptget install -qq -s foo
+
+# add magic string to Release file ...
+MAGIC="Acquire-By-Hash: true"
+sed -i "s#Suite: unstable#Suite: unstable\n$MAGIC#" aptarchive/dists/unstable/Release
+signreleasefiles
+# ... and verify that it fetches by hash now
+testsuccess aptget update
+
diff --git a/test/integration/test-apt-cdrom b/test/integration/test-apt-cdrom
index 44eccb7bf..2220a290c 100755
--- a/test/integration/test-apt-cdrom
+++ b/test/integration/test-apt-cdrom
@@ -29,7 +29,7 @@ aptcdromlog() {
test ! -e rootdir/media/cdrom || echo "CD-ROM is mounted, but shouldn't be!"
test -e rootdir/media/cdrom-unmounted || echo "Unmounted CD-ROM doesn't exist, but it should!"
aptcdrom "$@" -o quiet=1 >rootdir/tmp/apt-cdrom.log 2>&1 </dev/null
- sed -e '/gpgv/ d' -e '/^Identifying/ d' -e '/Reading / d' rootdir/tmp/apt-cdrom.log
+ sed -e '/gpgv\?: Signature made/ d' -e '/gpgv\?: Good signature/ d' -e '/^Identifying/ d' -e '/Reading / d' rootdir/tmp/apt-cdrom.log
test ! -e rootdir/media/cdrom || echo "CD-ROM is mounted, but shouldn't be!"
test -e rootdir/media/cdrom-unmounted || echo "Unmounted CD-ROM doesn't exist, but it should!"
}
diff --git a/test/integration/test-apt-ftparchive-src-cachedb b/test/integration/test-apt-ftparchive-src-cachedb
index adcca6217..e7b148530 100755
--- a/test/integration/test-apt-ftparchive-src-cachedb
+++ b/test/integration/test-apt-ftparchive-src-cachedb
@@ -177,6 +177,11 @@ assert_correct_sources_file
mkdir aptarchive/pool/invalid
printf "meep" > aptarchive/pool/invalid/invalid_1.0.dsc
testequal "
+E: Could not find a record in the DSC 'aptarchive/pool/invalid/invalid_1.0.dsc'" aptftparchive sources aptarchive/pool/invalid
+rm -f aptarchive/pool/invalid/invalid_1.0.dsc
+
+printf "meep: yes" > aptarchive/pool/invalid/invalid_1.0.dsc
+testequal "
E: Could not find a Source entry in the DSC 'aptarchive/pool/invalid/invalid_1.0.dsc'" aptftparchive sources aptarchive/pool/invalid
rm -f aptarchive/pool/invalid/invalid_1.0.dsc
diff --git a/test/integration/test-apt-get-build-dep b/test/integration/test-apt-get-build-dep
new file mode 100755
index 000000000..87ec6e54d
--- /dev/null
+++ b/test/integration/test-apt-get-build-dep
@@ -0,0 +1,129 @@
+#!/bin/sh
+set -e
+
+TESTDIR=$(readlink -f $(dirname $0))
+. $TESTDIR/framework
+
+setupenvironment
+configarchitecture "i386"
+
+buildsimplenativepackage 'debhelper' 'i386' '7' 'stable'
+buildsimplenativepackage 'build-essential' 'i386' '1' 'stable'
+
+setupaptarchive
+cat > 2vcard_0.5-3.dsc <<EOF
+Format: 1.0
+Source: 2vcard
+Binary: 2vcard
+Architecture: all
+Version: 0.5-3
+Maintainer: Martin Albisetti <argentina@gmail.com>
+Uploaders: Marcela Tiznado <mlt@debian.org>
+Standards-Version: 3.8.0
+Build-Depends: debhelper (>= 5.0.37)
+Checksums-Sha1:
+ b7f1ce31ec856414a3f0f1090689f91aa7456d56 9398 2vcard_0.5.orig.tar.gz
+ 5f9acd07ebda6ab00fa6b4fe3198c13e94090862 2036 2vcard_0.5-3.diff.gz
+Checksums-Sha256:
+ efdc22859ac2f8f030d038dc4faa9020082ebae34212498c288968ffd45c9764 9398 2vcard_0.5.orig.tar.gz
+ 82673ff3456af571094066c89bcea87b25c23c87cf1d0050b731e5222563626b 2036 2vcard_0.5-3.diff.gz
+Files:
+ f73a69c170f772f3f6e75f2d11bbb792 9398 2vcard_0.5.orig.tar.gz
+ 1e806d32233af87437258d86b1561f57 2036 2vcard_0.5-3.diff.gz
+EOF
+
+testequal "Reading package lists...
+Building dependency tree...
+Note, using file '2vcard_0.5-3.dsc' to get the build dependencies
+The following NEW packages will be installed:
+ build-essential debhelper
+0 upgraded, 2 newly installed, 0 to remove and 0 not upgraded.
+Inst build-essential (1 stable [i386])
+Inst debhelper (7 stable [i386])
+Conf build-essential (1 stable [i386])
+Conf debhelper (7 stable [i386])" aptget build-dep -s 2vcard_0.5-3.dsc
+
+cat > 2vcard_0.5-3.dsc <<EOF
+-----BEGIN PGP SIGNED MESSAGE-----
+Hash: SHA1
+
+Format: 1.0
+Source: 2vcard
+Binary: 2vcard
+Architecture: all
+Version: 0.5-3
+Maintainer: Martin Albisetti <argentina@gmail.com>
+Uploaders: Marcela Tiznado <mlt@debian.org>
+Standards-Version: 3.8.0
+Build-Depends: debhelper (>= 5.0.37)
+Checksums-Sha1:
+ b7f1ce31ec856414a3f0f1090689f91aa7456d56 9398 2vcard_0.5.orig.tar.gz
+ 5f9acd07ebda6ab00fa6b4fe3198c13e94090862 2036 2vcard_0.5-3.diff.gz
+Checksums-Sha256:
+ efdc22859ac2f8f030d038dc4faa9020082ebae34212498c288968ffd45c9764 9398 2vcard_0.5.orig.tar.gz
+ 82673ff3456af571094066c89bcea87b25c23c87cf1d0050b731e5222563626b 2036 2vcard_0.5-3.diff.gz
+Files:
+ f73a69c170f772f3f6e75f2d11bbb792 9398 2vcard_0.5.orig.tar.gz
+ 1e806d32233af87437258d86b1561f57 2036 2vcard_0.5-3.diff.gz
+
+-----BEGIN PGP SIGNATURE-----
+Version: GnuPG v1.4.9 (GNU/Linux)
+
+iEYEARECAAYFAkijKhsACgkQsrBfRdYmq7aA2gCfaOW9riTYVQMx5ajKQVAcctlC
+z2UAn1oXgTai6opwhVfkxrlmJ+iRxzuc
+=4eRd
+-----END PGP SIGNATURE-----
+EOF
+
+testequal "Reading package lists...
+Building dependency tree...
+Note, using file '2vcard_0.5-3.dsc' to get the build dependencies
+The following NEW packages will be installed:
+ build-essential debhelper
+0 upgraded, 2 newly installed, 0 to remove and 0 not upgraded.
+Inst build-essential (1 stable [i386])
+Inst debhelper (7 stable [i386])
+Conf build-essential (1 stable [i386])
+Conf debhelper (7 stable [i386])" aptget build-dep --simulate 2vcard_0.5-3.dsc
+
+
+# unpacked source dir
+mkdir -p foo-1.0/debian
+cat > foo-1.0/debian/control <<'EOF'
+Source: apturl
+Section: admin
+Priority: optional
+Maintainer: Michael Vogt <mvo@ubuntu.com>
+Build-Depends: debhelper (>= 7)
+X-Python3-Version: >= 3.2
+Standards-Version: 3.9.3
+
+Package: apturl-common
+Architecture: any
+Depends: ${python3:Depends},
+ ${shlibs:Depends},
+ ${misc:Depends},
+ python3-apt,
+ python3-update-manager
+Replaces: apturl (<< 0.3.6ubuntu2)
+Description: install packages using the apt protocol - common data
+ AptUrl is a simple graphical application that takes an URL (which follows the
+ apt-protocol) as a command line option, parses it and carries out the
+ operations that the URL describes (that is, it asks the user if he wants the
+ indicated packages to be installed and if the answer is positive does so for
+ him).
+ .
+ This package contains the common data shared between the frontends.
+
+EOF
+
+testequal "Reading package lists...
+Building dependency tree...
+Note, using directory './foo-1.0' to get the build dependencies
+The following NEW packages will be installed:
+ build-essential debhelper
+0 upgraded, 2 newly installed, 0 to remove and 0 not upgraded.
+Inst build-essential (1 stable [i386])
+Inst debhelper (7 stable [i386])
+Conf build-essential (1 stable [i386])
+Conf debhelper (7 stable [i386])" aptget build-dep --simulate ./foo-1.0
diff --git a/test/integration/test-apt-get-changelog b/test/integration/test-apt-get-changelog
index 4ee113482..76a32a122 100755
--- a/test/integration/test-apt-get-changelog
+++ b/test/integration/test-apt-get-changelog
@@ -13,6 +13,12 @@ setupaptarchive --no-update
changetowebserver
testsuccess aptget update
+# simulate normal user with non-existent root-owned directories
+rm -rf rootdir/var/cache/apt/archives/
+mkdir rootdir/var/cache/apt/archives/
+addtrap 'prefix' "chmod -f -R +w $PWD/rootdir/var/cache/apt/archives || true;"
+chmod -R -w rootdir/var/cache/apt/archives
+
echo 'Apt::Changelogs::Server "http://localhost:8080/";' > rootdir/etc/apt/apt.conf.d/changelog.conf
testequal "'http://localhost:8080//pool/apt_1.0/changelog'" aptget changelog apt --print-uris
@@ -20,19 +26,18 @@ testequal "'http://localhost:8080//pool/apt_1.0/changelog'" aptget changelog apt
testequal "'http://localhost:8080//pool/apt_1.0/changelog'
'http://localhost:8080//pool/apt_1.0/changelog'" aptget changelog apt apt --print-uris
-aptget changelog apt -qq > apt.changelog
-testfileequal 'apt.changelog' "$(cat aptarchive/pool/apt_1.0/changelog)"
-rm apt.changelog
+testsuccess aptget changelog apt -qq
+testfileequal 'rootdir/tmp/testsuccess.output' "$(cat aptarchive/pool/apt_1.0/changelog)"
testsuccess aptget changelog apt -d
testfileequal 'apt.changelog' "$(cat aptarchive/pool/apt_1.0/changelog)"
-rm apt.changelog aptarchive/pool/apt_1.0/changelog
+rm -f apt.changelog aptarchive/pool/apt_1.0/changelog
testequal "$(cat aptarchive/pool/apt_1.0.changelog)" aptget changelog apt \
-qq -o APT::Changelogs::Server='http://not-on-the-main-server:8080/'
testsuccess aptget changelog apt -d
testfileequal 'apt.changelog' "$(cat aptarchive/pool/apt_1.0.changelog)"
-rm apt.changelog aptarchive/pool/apt_1.0.changelog
+rm -f apt.changelog aptarchive/pool/apt_1.0.changelog
testequal 'E: changelog download failed' aptget changelog apt -qq -o APT::Changelogs::Server='http://not-on-the-main-server:8080/'
diff --git a/test/integration/test-apt-get-clean b/test/integration/test-apt-get-clean
new file mode 100755
index 000000000..98f7c84d0
--- /dev/null
+++ b/test/integration/test-apt-get-clean
@@ -0,0 +1,35 @@
+#!/bin/sh
+set -e
+
+TESTDIR=$(readlink -f $(dirname $0))
+. $TESTDIR/framework
+
+setupenvironment
+configarchitecture 'amd64'
+
+insertpackage 'testing' 'foo' 'all' '1'
+insertpackage 'unstable' 'foo' 'all' '2'
+insertinstalledpackage 'foo' 'all' '3'
+
+setupaptarchive
+
+# nothing to do always works
+testsuccess aptget clean
+
+# generate some dirt and clean it up
+touch rootdir/var/lib/apt/lists/partial/http.debian.net_debian_dists_sid_main_i18n_Translation-en
+mkdir -p rootdir/var/cache/apt/archives
+touch rootdir/var/cache/apt/archives/foo_1_all.deb
+touch rootdir/var/cache/apt/archives/foo_2_all.deb
+touch rootdir/var/cache/apt/archives/foo_3_all.deb
+touch rootdir/var/cache/apt/archives/foo_4_all.deb
+
+testsuccess aptget clean
+
+testsuccess test ! -e rootdir/var/lib/apt/lists/partial/http.debian.net_debian_dists_sid_main_i18n_Translation-en
+testsuccess test ! -e rootdir/var/cache/apt/archives/foo_1_all.deb
+testsuccess test ! -e rootdir/var/cache/apt/archives/foo_2_all.deb
+testsuccess test ! -e rootdir/var/cache/apt/archives/foo_3_all.deb
+testsuccess test ! -e rootdir/var/cache/apt/archives/foo_4_all.deb
+
+
diff --git a/test/integration/test-apt-get-download b/test/integration/test-apt-get-download
index be3144e1f..9a154e5fb 100755
--- a/test/integration/test-apt-get-download
+++ b/test/integration/test-apt-get-download
@@ -11,7 +11,31 @@ buildsimplenativepackage 'apt' 'all' '1.0' 'stable'
buildsimplenativepackage 'apt' 'all' '2.0' 'unstable'
insertinstalledpackage 'vrms' 'all' '1.0'
-setupaptarchive
+OLD_UMASK="$(umask)"
+umask 0027
+setupaptarchive --no-update
+umask "$OLD_UMASK"
+
+# directories should be readable by everyone
+find aptarchive/dists -type d | while read dir; do
+ chmod o+rx "$dir"
+done
+# apt-ftparchive knows how to chmod files
+find aptarchive/dists -name '*Packages*' -type f | while read file; do
+ testaccessrights "$file" '644'
+ chmod 640 "$file"
+done
+# created by the framework without special care
+find aptarchive/dists -name '*Release*' -type f | while read file; do
+ testaccessrights "$file" '640'
+done
+
+testsuccess aptget update
+
+# all copied files are properly chmodded
+find rootdir/var/lib/apt/lists -type f | while read file; do
+ testaccessrights "$file" '644'
+done
testdownload() {
local APT="$2"
@@ -20,10 +44,19 @@ testdownload() {
fi
msgtest "Test download of package file $1 with" "$APT"
testsuccess --nomsg aptget download ${APT}
- testsuccess test -f $1
- rm $1
+ testsuccess test -f "$1"
+ rm -f "$1"
}
+# normal case as "root"
+testdownload apt_2.0_all.deb apt
+
+# simulate normal user with non-existent root-owned directories
+rm -rf rootdir/var/cache/apt/archives/
+mkdir rootdir/var/cache/apt/archives/
+addtrap 'prefix' "chmod -f -R +w $PWD/rootdir/var/cache/apt/archives || true;"
+chmod -R -w rootdir/var/cache/apt/archives
+
# normal case(es)
testdownload apt_1.0_all.deb apt stable
testdownload apt_2.0_all.deb apt
@@ -45,3 +78,18 @@ rm -f apt_1.0_all.deb apt_2.0_all.deb
testsuccess aptget download apt apt apt/unstable apt=2.0
testsuccess test -s apt_2.0_all.deb
+# restore "root" rights
+chmod -f -R +w $PWD/rootdir/var/cache/apt/archives
+rm -rf rootdir/var/cache/apt/archives/
+
+# file: debs aren't copied to archives, so change to http which obviously are
+changetowebserver
+testsuccess aptget update
+
+# test with already stored deb
+testsuccess aptget install -d apt
+testsuccess test -s rootdir/var/cache/apt/archives/apt_2.0_all.deb
+testaccessrights 'aptarchive/pool/apt_2.0_all.deb' '644'
+mv aptarchive/pool/apt_2.0_all.deb aptarchive/pool/apt_2.0_all.deb.gone
+testdownload apt_2.0_all.deb apt
+mv aptarchive/pool/apt_2.0_all.deb.gone aptarchive/pool/apt_2.0_all.deb
diff --git a/test/integration/test-apt-get-install-deb b/test/integration/test-apt-get-install-deb
new file mode 100755
index 000000000..700009da5
--- /dev/null
+++ b/test/integration/test-apt-get-install-deb
@@ -0,0 +1,30 @@
+#!/bin/sh
+set -e
+
+TESTDIR=$(readlink -f $(dirname $0))
+. $TESTDIR/framework
+
+setupenvironment
+configarchitecture "i386"
+
+# regression test for #754904
+testequal 'E: Unable to locate package /dev/null' aptget install -qq /dev/null
+
+# and ensure we fail for invalid debs
+cat > foo.deb <<EOF
+I'm not a deb, I'm a teapot.
+EOF
+testequal 'E: Sub-process Popen returned an error code (100)
+E: Encountered a section with no Package: header
+E: Problem with MergeLister for ./foo.deb
+E: The package lists or status file could not be parsed or opened.' aptget install -qq ./foo.deb
+
+# fakeroot is currently not found, framework needs updating
+#
+#buildsimplenativepackage 'foo' 'all' '1.0'
+#
+#testequal 'Selecting previously unselected package foo.
+#(Reading database ... 0 files and directories currently installed.)
+# Preparing to unpack .../incoming/foo_1.0_all.deb ...
+#Unpacking foo (1.0) ...
+#Setting up foo (1.0) ...' aptget install -qq ./incoming/foo_1.0_all.deb
diff --git a/test/integration/test-apt-get-source-authenticated b/test/integration/test-apt-get-source-authenticated
index 2cee13923..d833ddd85 100755
--- a/test/integration/test-apt-get-source-authenticated
+++ b/test/integration/test-apt-get-source-authenticated
@@ -21,7 +21,7 @@ APTARCHIVE=$(readlink -f ./aptarchive)
rm -f $APTARCHIVE/dists/unstable/*Release*
# update without authenticated InRelease file
-testsuccess aptget update
+testsuccess aptget update --allow-insecure-repositories
# this all should fail
testfailure aptget install -y foo
diff --git a/test/integration/test-apt-get-update-unauth-warning b/test/integration/test-apt-get-update-unauth-warning
new file mode 100755
index 000000000..0389415c1
--- /dev/null
+++ b/test/integration/test-apt-get-update-unauth-warning
@@ -0,0 +1,43 @@
+#!/bin/sh
+#
+# ensure we print warnings for unauthenticated repositories
+#
+set -e
+
+TESTDIR=$(readlink -f $(dirname $0))
+. $TESTDIR/framework
+
+setupenvironment
+configarchitecture "i386"
+
+# a "normal" package with source and binary
+buildsimplenativepackage 'foo' 'all' '2.0'
+
+setupaptarchive --no-update
+
+APTARCHIVE=$(readlink -f ./aptarchive)
+rm -f $APTARCHIVE/dists/unstable/*Release*
+
+# update without authenticated files leads to warning
+testequal "Ign file: unstable InRelease
+ File not found
+Err file: unstable Release
+ File not found
+W: The repository 'file: unstable Release' does not have a Release file. This is deprecated, please contact the owner of the repository.
+E: Use --allow-insecure-repositories to force the update" aptget update --no-allow-insecure-repositories
+
+# no package foo
+testequal "Listing..." apt list foo
+testequal "partial" ls rootdir/var/lib/apt/lists
+
+# allow override
+testequal "Ign file: unstable InRelease
+ File not found
+Ign file: unstable Release
+ File not found
+Reading package lists...
+W: The repository 'file: unstable Release' does not have a Release file. This is deprecated, please contact the owner of the repository." aptget update --allow-insecure-repositories
+# ensure we can not install the package
+testequal "WARNING: The following packages cannot be authenticated!
+ foo
+E: There are problems and -y was used without --force-yes" aptget install -qq -y foo
diff --git a/test/integration/test-apt-get-upgrade b/test/integration/test-apt-get-upgrade
index 23446299c..5335c243a 100755
--- a/test/integration/test-apt-get-upgrade
+++ b/test/integration/test-apt-get-upgrade
@@ -31,6 +31,7 @@ setupaptarchive
# Test if normal upgrade works as expected
testequal 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following packages have been kept back:
upgrade-with-conflict upgrade-with-new-dep
The following packages will be upgraded:
@@ -42,6 +43,7 @@ Conf upgrade-simple (2.0 unstable [all])' aptget -s upgrade
# Test if apt-get upgrade --with-new-pkgs works
testequal 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following NEW packages will be installed:
new-dep
The following packages have been kept back:
@@ -59,6 +61,7 @@ Conf upgrade-with-new-dep (2.0 unstable [all])' aptget -s upgrade --with-new-pkg
# Test if apt-get dist-upgrade works
testequal 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following packages will be REMOVED:
conflicting-dep
The following NEW packages will be installed:
diff --git a/test/integration/test-apt-helper b/test/integration/test-apt-helper
index c749224ca..42c40bb9e 100755
--- a/test/integration/test-apt-helper
+++ b/test/integration/test-apt-helper
@@ -5,30 +5,30 @@ TESTDIR=$(readlink -f $(dirname $0))
. $TESTDIR/framework
setupenvironment
-configarchitecture "i386"
+configarchitecture 'i386'
changetohttpswebserver
test_apt_helper_download() {
- echo "foo" > aptarchive/foo
+ echo 'foo' > aptarchive/foo
- msgtest 'apt-file download-file md5sum'
+ msgtest 'apt-file download-file' 'md5sum'
apthelper -qq download-file http://localhost:8080/foo foo2 MD5Sum:d3b07384d113edec49eaa6238ad5ff00 && msgpass || msgfail
testfileequal foo2 'foo'
- msgtest 'apt-file download-file sha1'
+ msgtest 'apt-file download-file' 'sha1'
apthelper -qq download-file http://localhost:8080/foo foo1 SHA1:f1d2d2f924e986ac86fdf7b36c94bcdf32beec15 && msgpass || msgfail
testfileequal foo1 'foo'
- msgtest 'apt-file download-file sha256'
+ msgtest 'apt-file download-file' 'sha256'
apthelper -qq download-file http://localhost:8080/foo foo3 SHA256:b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c && msgpass || msgfail
testfileequal foo3 'foo'
- msgtest 'apt-file download-file no-hash'
+ msgtest 'apt-file download-file' 'no-hash'
apthelper -qq download-file http://localhost:8080/foo foo4 && msgpass || msgfail
testfileequal foo4 'foo'
- msgtest 'apt-file download-file wrong hash'
+ msgtest 'apt-file download-file' 'wrong hash'
if ! apthelper -qq download-file http://localhost:8080/foo foo5 MD5Sum:aabbcc 2>&1 2> download.stderr; then
msgpass
else
diff --git a/test/integration/test-apt-key b/test/integration/test-apt-key
index 68b3f9710..e6ac530a6 100755
--- a/test/integration/test-apt-key
+++ b/test/integration/test-apt-key
@@ -7,101 +7,177 @@ TESTDIR=$(readlink -f $(dirname $0))
setupenvironment
configarchitecture 'amd64'
-msgtest 'Check that paths in list output are not' 'double-slashed'
-aptkey list 2>&1 | grep -q '//' && msgfail || msgpass
+# start from a clean plate again
+cleanplate() {
+ rm -rf rootdir/etc/apt/trusted.gpg.d/ rootdir/etc/apt/trusted.gpg
+ mkdir rootdir/etc/apt/trusted.gpg.d/
+}
-msgtest 'Check that paths in finger output are not' 'double-slashed'
-aptkey finger 2>&1 | grep -q '//' && msgfail || msgpass
+testaptkeys() {
+ if ! aptkey list | grep '^pub' > aptkey.list; then
+ echo -n > aptkey.list
+ fi
+ testequal "$1" cat ./aptkey.list
+}
echo 'APT::Key::ArchiveKeyring "./keys/joesixpack.pub";
APT::Key::RemovedKeys "./keys/rexexpired.pub";' > rootdir/etc/apt/apt.conf.d/aptkey.conf
-aptkey list | grep '^pub' > aptkey.list
-testfileequal ./aptkey.list 'pub 2048R/DBAC8DAE 2010-08-18'
+testrun() {
+ cleanplate
+ ln -sf ${TMPWORKINGDIRECTORY}/keys/joesixpack.pub rootdir/etc/apt/trusted.gpg.d/joesixpack.gpg
-testequal 'gpg: key DBAC8DAE: "Joe Sixpack (APT Testcases Dummy) <joe@example.org>" not changed
+ msgtest 'Check that paths in list output are not' 'double-slashed'
+ aptkey list 2>&1 | grep -q '//' && msgfail || msgpass
+
+ msgtest 'Check that paths in finger output are not' 'double-slashed'
+ aptkey finger 2>&1 | grep -q '//' && msgfail || msgpass
+
+ testaptkeys 'pub 2048R/DBAC8DAE 2010-08-18'
+
+ testequal 'gpg: key DBAC8DAE: "Joe Sixpack (APT Testcases Dummy) <joe@example.org>" not changed
gpg: Total number processed: 1
gpg: unchanged: 1' aptkey --fakeroot update
-aptkey list | grep '^pub' > aptkey.list
-testfileequal ./aptkey.list 'pub 2048R/DBAC8DAE 2010-08-18'
+ testaptkeys 'pub 2048R/DBAC8DAE 2010-08-18'
-testsuccess aptkey --fakeroot add ./keys/rexexpired.pub
+ testsuccess test ! -e rootdir/etc/apt/trusted.gpg
+ testsuccess aptkey --fakeroot add ./keys/rexexpired.pub
+ msgtest 'Check if trusted.gpg is created with permissions set to' '0644'
+ if [ "$(stat -c '%a' rootdir/etc/apt/trusted.gpg )" = '644' ]; then
+ msgpass
+ else
+ msgfail
+ fi
-aptkey list | grep '^pub' > aptkey.list
-testfileequal ./aptkey.list 'pub 2048R/27CE74F9 2013-07-12 [expired: 2013-07-13]
+ testaptkeys 'pub 2048R/27CE74F9 2013-07-12 [expired: 2013-07-13]
pub 2048R/DBAC8DAE 2010-08-18'
-msgtest 'Execute update again to trigger removal of' 'Rex Expired key'
-testsuccess --nomsg aptkey --fakeroot update
-
-aptkey list | grep '^pub' > aptkey.list
-testfileequal ./aptkey.list 'pub 2048R/DBAC8DAE 2010-08-18'
-
-msgtest "Try to remove a key which exists, but isn't in the" 'forced keyring'
-testsuccess --nomsg aptkey --fakeroot --keyring rootdir/etc/apt/trusted.gpg del DBAC8DAE
+ msgtest 'Check that Sixpack key can be' 'exported'
+ aptkey export 'Sixpack' > aptkey.export
+ aptkey --keyring rootdir/etc/apt/trusted.gpg.d/joesixpack.gpg exportall > aptkey.exportall
+ testsuccess --nomsg cmp aptkey.export aptkey.exportall
+ testsuccess test -s aptkey.export
+ testsuccess test -s aptkey.exportall
+
+ msgtest 'Execute update again to trigger removal of' 'Rex Expired key'
+ testsuccess --nomsg aptkey --fakeroot update
+
+ testaptkeys 'pub 2048R/DBAC8DAE 2010-08-18'
+
+ msgtest "Try to remove a key which exists, but isn't in the" 'forced keyring'
+ testsuccess --nomsg aptkey --fakeroot --keyring rootdir/etc/apt/trusted.gpg del DBAC8DAE
+
+ testaptkeys 'pub 2048R/DBAC8DAE 2010-08-18'
+
+ testsuccess aptkey --fakeroot del DBAC8DAE
+ testempty aptkey list
+
+ msgtest 'Test key removal with' 'single key in real file'
+ cleanplate
+ cp -a keys/joesixpack.pub rootdir/etc/apt/trusted.gpg.d/joesixpack.gpg
+ testsuccess --nomsg aptkey --fakeroot del DBAC8DAE
+ testempty aptkey list
+ testsuccess test ! -e rootdir/etc/apt/trusted.gpg.d/joesixpack.gpg
+ testsuccess cmp keys/joesixpack.pub rootdir/etc/apt/trusted.gpg.d/joesixpack.gpg~
+
+ msgtest 'Test key removal with' 'long key ID'
+ cleanplate
+ cp -a keys/joesixpack.pub rootdir/etc/apt/trusted.gpg.d/joesixpack.gpg
+ testsuccess --nomsg aptkey --fakeroot del 5A90D141DBAC8DAE
+ testempty aptkey list
+ testsuccess test ! -e rootdir/etc/apt/trusted.gpg.d/joesixpack.gpg
+ testsuccess cmp keys/joesixpack.pub rootdir/etc/apt/trusted.gpg.d/joesixpack.gpg~
+
+ msgtest 'Test key removal with' 'fingerprint'
+ cleanplate
+ cp -a keys/joesixpack.pub rootdir/etc/apt/trusted.gpg.d/joesixpack.gpg
+ testsuccess --nomsg aptkey --fakeroot del 34A8E9D18DB320F367E8EAA05A90D141DBAC8DAE
+ testempty aptkey list
+ testsuccess test ! -e rootdir/etc/apt/trusted.gpg.d/joesixpack.gpg
+ testsuccess cmp keys/joesixpack.pub rootdir/etc/apt/trusted.gpg.d/joesixpack.gpg~
+
+ msgtest 'Test key removal with' 'single key in softlink'
+ cleanplate
+ ln -s $(readlink -f ./keys/joesixpack.pub) rootdir/etc/apt/trusted.gpg.d/joesixpack.gpg
+ testsuccess --nomsg aptkey --fakeroot del DBAC8DAE
+ testempty aptkey list
+ testsuccess test ! -e rootdir/etc/apt/trusted.gpg.d/joesixpack.gpg
+ testsuccess test -L rootdir/etc/apt/trusted.gpg.d/joesixpack.gpg~
+
+ cleanplate
+ testsuccess aptkey --fakeroot add ./keys/joesixpack.pub
+ testsuccess aptkey --fakeroot add ./keys/marvinparanoid.pub
+ testaptkeys 'pub 2048R/DBAC8DAE 2010-08-18
+pub 2048R/528144E2 2011-01-16'
+ cp -a rootdir/etc/apt/trusted.gpg keys/testcase-multikey.pub # store for reuse
+
+ msgtest 'Test key removal with' 'multi key in real file'
+ cleanplate
+ cp -a keys/testcase-multikey.pub rootdir/etc/apt/trusted.gpg.d/multikey.gpg
+ testsuccess --nomsg aptkey --fakeroot del DBAC8DAE
+ testaptkeys 'pub 2048R/528144E2 2011-01-16'
+ testsuccess cmp keys/testcase-multikey.pub rootdir/etc/apt/trusted.gpg.d/multikey.gpg~
+
+ msgtest 'Test key removal with' 'multi key in softlink'
+ cleanplate
+ ln -s $(readlink -f ./keys/testcase-multikey.pub) rootdir/etc/apt/trusted.gpg.d/multikey.gpg
+ testsuccess --nomsg aptkey --fakeroot del DBAC8DAE
+ testaptkeys 'pub 2048R/528144E2 2011-01-16'
+ testsuccess cmp keys/testcase-multikey.pub rootdir/etc/apt/trusted.gpg.d/multikey.gpg~
+ testsuccess test ! -L rootdir/etc/apt/trusted.gpg.d/multikey.gpg
+ testsuccess test -L rootdir/etc/apt/trusted.gpg.d/multikey.gpg~
+
+ msgtest 'Test key removal with' 'multiple files including key'
+ cleanplate
+ cp -a keys/joesixpack.pub rootdir/etc/apt/trusted.gpg.d/joesixpack.gpg
+ cp -a keys/testcase-multikey.pub rootdir/etc/apt/trusted.gpg.d/multikey.gpg
+ testsuccess --nomsg aptkey --fakeroot del DBAC8DAE
+ testaptkeys 'pub 2048R/528144E2 2011-01-16'
+ testsuccess test ! -e rootdir/etc/apt/trusted.gpg.d/joesixpack.gpg
+ testsuccess cmp keys/joesixpack.pub rootdir/etc/apt/trusted.gpg.d/joesixpack.gpg~
+ testsuccess cmp keys/testcase-multikey.pub rootdir/etc/apt/trusted.gpg.d/multikey.gpg~
+
+ cleanplate
+ cp -a keys/joesixpack.pub rootdir/etc/apt/trusted.gpg.d/joesixpack.gpg
+ cp -a keys/testcase-multikey.pub rootdir/etc/apt/trusted.gpg.d/multikey.gpg
+ testaptkeys 'pub 2048R/DBAC8DAE 2010-08-18
+pub 2048R/DBAC8DAE 2010-08-18
+pub 2048R/528144E2 2011-01-16'
+ msgtest 'Test merge-back of' 'added keys'
+ testsuccess --nomsg aptkey adv --batch --yes --import keys/rexexpired.pub
+ testaptkeys 'pub 2048R/27CE74F9 2013-07-12 [expired: 2013-07-13]
+pub 2048R/DBAC8DAE 2010-08-18
+pub 2048R/DBAC8DAE 2010-08-18
+pub 2048R/528144E2 2011-01-16'
-aptkey list | grep '^pub' > aptkey.list
-testfileequal ./aptkey.list 'pub 2048R/DBAC8DAE 2010-08-18'
+ msgtest 'Test merge-back of' 'removed keys'
+ testsuccess --nomsg aptkey adv --batch --yes --delete-keys 27CE74F9
+ testaptkeys 'pub 2048R/DBAC8DAE 2010-08-18
+pub 2048R/DBAC8DAE 2010-08-18
+pub 2048R/528144E2 2011-01-16'
-testsuccess aptkey --fakeroot del DBAC8DAE
-testempty aptkey list
+ msgtest 'Test merge-back of' 'removed duplicate keys'
+ testsuccess --nomsg aptkey adv --batch --yes --delete-keys DBAC8DAE
+ testaptkeys 'pub 2048R/528144E2 2011-01-16'
+}
-# start from a clean plate again
-cleanplate() {
- rm -rf rootdir/etc/apt/trusted.gpg.d/ rootdir/etc/apt/trusted.gpg
- mkdir rootdir/etc/apt/trusted.gpg.d/
+setupgpgcommand() {
+ echo "APT::Key::GPGCommand \"$1\";" > rootdir/etc/apt/apt.conf.d/00gpgcmd
+ msgtest 'Test that apt-key uses for the following tests command' "$1"
+ aptkey adv --version >aptkey.version 2>&1
+ if grep -q "^Executing: $1 --" aptkey.version; then
+ msgpass
+ else
+ cat aptkey.version
+ msgfail
+ fi
}
-msgtest 'Test key removal with' 'single key in real file'
-cleanplate
-cp -a keys/joesixpack.pub rootdir/etc/apt/trusted.gpg.d/joesixpack.gpg
-testsuccess --nomsg aptkey --fakeroot del DBAC8DAE
-testempty aptkey list
-testsuccess test ! -e rootdir/etc/apt/trusted.gpg.d/joesixpack.gpg
-testsuccess cmp keys/joesixpack.pub rootdir/etc/apt/trusted.gpg.d/joesixpack.gpg~
-
-msgtest 'Test key removal with' 'single key in softlink'
-cleanplate
-ln -s $(readlink -f ./keys/joesixpack.pub) rootdir/etc/apt/trusted.gpg.d/joesixpack.gpg
-testsuccess --nomsg aptkey --fakeroot del DBAC8DAE
-testempty aptkey list
-testsuccess test ! -e rootdir/etc/apt/trusted.gpg.d/joesixpack.gpg
-testsuccess test -L rootdir/etc/apt/trusted.gpg.d/joesixpack.gpg~
-
-cleanplate
-testsuccess aptkey --fakeroot add ./keys/joesixpack.pub
-testsuccess aptkey --fakeroot add ./keys/marvinparanoid.pub
-aptkey list | grep '^pub' > aptkey.list
-testfileequal ./aptkey.list 'pub 2048R/DBAC8DAE 2010-08-18
-pub 2048R/528144E2 2011-01-16'
-cp -a rootdir/etc/apt/trusted.gpg keys/testcase-multikey.pub # store for reuse
-
-msgtest 'Test key removal with' 'multi key in real file'
-cleanplate
-cp -a keys/testcase-multikey.pub rootdir/etc/apt/trusted.gpg.d/multikey.gpg
-testsuccess --nomsg aptkey --fakeroot del DBAC8DAE
-aptkey list | grep '^pub' > aptkey.list
-testfileequal ./aptkey.list 'pub 2048R/528144E2 2011-01-16'
-testsuccess cmp keys/testcase-multikey.pub rootdir/etc/apt/trusted.gpg.d/multikey.gpg~
-
-msgtest 'Test key removal with' 'multi key in softlink'
-cleanplate
-ln -s $(readlink -f ./keys/testcase-multikey.pub) rootdir/etc/apt/trusted.gpg.d/multikey.gpg
-testsuccess --nomsg aptkey --fakeroot del DBAC8DAE
-aptkey list | grep '^pub' > aptkey.list
-testfileequal ./aptkey.list 'pub 2048R/528144E2 2011-01-16'
-testsuccess cmp keys/testcase-multikey.pub rootdir/etc/apt/trusted.gpg.d/multikey.gpg~
-testsuccess test ! -L rootdir/etc/apt/trusted.gpg.d/multikey.gpg
-testsuccess test -L rootdir/etc/apt/trusted.gpg.d/multikey.gpg~
-
-msgtest 'Test key removal with' 'multiple files including key'
-cleanplate
-cp -a keys/joesixpack.pub rootdir/etc/apt/trusted.gpg.d/joesixpack.gpg
-cp -a keys/testcase-multikey.pub rootdir/etc/apt/trusted.gpg.d/multikey.gpg
-testsuccess --nomsg aptkey --fakeroot del DBAC8DAE
-aptkey list | grep '^pub' > aptkey.list
-testfileequal ./aptkey.list 'pub 2048R/528144E2 2011-01-16'
-testsuccess test ! -e rootdir/etc/apt/trusted.gpg.d/joesixpack.gpg
-testsuccess cmp keys/joesixpack.pub rootdir/etc/apt/trusted.gpg.d/joesixpack.gpg~
-testsuccess cmp keys/testcase-multikey.pub rootdir/etc/apt/trusted.gpg.d/multikey.gpg~
+# run with the default gpg command (whichever one apt-key picks up)
+testrun
+# run with …
+setupgpgcommand 'gpg'
+testrun
+setupgpgcommand 'gpg2'
+testrun
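A minimal sketch (not part of the patch, assuming the rootdir layout used by the test framework) of how the APT::Key::GPGCommand override exercised by setupgpgcommand above can be checked by hand:

echo 'APT::Key::GPGCommand "gpg2";' > rootdir/etc/apt/apt.conf.d/00gpgcmd
# apt-key prints the command it executes when asked for the gpg version
aptkey adv --version >aptkey.version 2>&1
grep -q '^Executing: gpg2 --' aptkey.version && echo 'gpg2 is in use'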
diff --git a/test/integration/test-apt-key-net-update b/test/integration/test-apt-key-net-update
index 4b38cd9b5..b3c118555 100755
--- a/test/integration/test-apt-key-net-update
+++ b/test/integration/test-apt-key-net-update
@@ -28,6 +28,9 @@ gpg: key F68C85A3: public key "Test Automatic Archive Signing Key <ftpmaster@exa
gpg: Total number processed: 1
gpg: imported: 1 (RSA: 1)' aptkey --fakeroot net-update
+aptkey list | grep '^pub' > aptkey.list
+testfileequal ./aptkey.list 'pub 1024R/F68C85A3 2013-12-19
+pub 2048R/DBAC8DAE 2010-08-18'
# now try a different one
# setup archive-keyring
@@ -38,6 +41,8 @@ echo 'APT::Key::Net-Update-Enabled "1";' >> ./aptconfig.conf
# test against the "real" webserver
testequal "Checking for new archive signing keys now
-Key 'E8525D47528144E2' not added. It is not signed with a master key" aptkey --fakeroot net-update
-
+Key 'DE66AECA9151AFA1877EC31DE8525D47528144E2' not added. It is not signed with a master key" aptkey --fakeroot net-update
+aptkey list | grep '^pub' > aptkey.list
+testfileequal ./aptkey.list 'pub 1024R/F68C85A3 2013-12-19
+pub 2048R/DBAC8DAE 2010-08-18'
diff --git a/test/integration/test-apt-progress-fd b/test/integration/test-apt-progress-fd
index d72e7e72d..af022f582 100755
--- a/test/integration/test-apt-progress-fd
+++ b/test/integration/test-apt-progress-fd
@@ -16,7 +16,7 @@ setupaptarchive
exec 3> apt-progress.log
testsuccess aptget install testing=0.1 -y -o APT::Status-Fd=3
testequal "dlstatus:1:0:Retrieving file 1 of 1
-dlstatus:1:0:Retrieving file 1 of 1
+dlstatus:1:20:Retrieving file 1 of 1
pmstatus:dpkg-exec:0:Running dpkg
pmstatus:testing:0:Installing testing (amd64)
pmstatus:testing:20:Preparing testing (amd64)
@@ -31,7 +31,7 @@ pmstatus:testing:100:Installed testing (amd64)" cat apt-progress.log
exec 3> apt-progress.log
testsuccess aptget install testing=0.8.15 -y -o APT::Status-Fd=3
testequal "dlstatus:1:0:Retrieving file 1 of 1
-dlstatus:1:0:Retrieving file 1 of 1
+dlstatus:1:20:Retrieving file 1 of 1
pmstatus:dpkg-exec:0:Running dpkg
pmstatus:testing:0:Installing testing (amd64)
pmstatus:testing:20:Preparing testing (amd64)
@@ -46,7 +46,7 @@ pmstatus:testing:100:Installed testing (amd64)" cat apt-progress.log
exec 3> apt-progress.log
testsuccess aptget install testing=0.8.15 --reinstall -y -o APT::Status-Fd=3
testequal "dlstatus:1:0:Retrieving file 1 of 1
-dlstatus:1:0:Retrieving file 1 of 1
+dlstatus:1:20:Retrieving file 1 of 1
pmstatus:dpkg-exec:0:Running dpkg
pmstatus:testing:0:Installing testing (amd64)
pmstatus:testing:20:Preparing testing (amd64)
@@ -72,7 +72,7 @@ testsuccess aptget install testing2:i386 -y -o APT::Status-Fd=3
# and compare
testequal "dlstatus:1:0:Retrieving file 1 of 1
-dlstatus:1:0:Retrieving file 1 of 1
+dlstatus:1:20:Retrieving file 1 of 1
pmstatus:dpkg-exec:0:Running dpkg
pmstatus:testing2:0:Installing testing2 (i386)
pmstatus:testing2:20:Preparing testing2 (i386)
diff --git a/test/integration/test-apt-sources-deb822 b/test/integration/test-apt-sources-deb822
index 5f54b7531..d8b2334ad 100755
--- a/test/integration/test-apt-sources-deb822
+++ b/test/integration/test-apt-sources-deb822
@@ -23,14 +23,14 @@ Description: summay
msgtest 'Test sources.list' 'old style'
echo "deb http://ftp.debian.org/debian stable main" > $SOURCES
-testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-i386_Packages 0 :
-'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0 :
+testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-i386_Packages 0
+'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0
'http://ftp.debian.org/debian/dists/stable/InRelease' ftp.debian.org_debian_dists_stable_InRelease 0 " aptget update --print-uris
msgtest 'Test sources.list' 'simple deb822'
echo "$BASE" > $SOURCES
-testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-i386_Packages 0 :
-'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0 :
+testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-i386_Packages 0
+'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0
'http://ftp.debian.org/debian/dists/stable/InRelease' ftp.debian.org_debian_dists_stable_InRelease 0 " aptget update --print-uris
@@ -39,29 +39,29 @@ msgtest 'Test deb822 with' 'two entries'
echo "$BASE" > $SOURCES
echo "" >> $SOURCES
echo "$BASE" | sed s/stable/unstable/ >> $SOURCES
-testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-i386_Packages 0 :
-'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0 :
+testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-i386_Packages 0
+'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0
'http://ftp.debian.org/debian/dists/stable/InRelease' ftp.debian.org_debian_dists_stable_InRelease 0
-'http://ftp.debian.org/debian/dists/unstable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_unstable_main_binary-i386_Packages 0 :
-'http://ftp.debian.org/debian/dists/unstable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_unstable_main_i18n_Translation-en 0 :
+'http://ftp.debian.org/debian/dists/unstable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_unstable_main_binary-i386_Packages 0
+'http://ftp.debian.org/debian/dists/unstable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_unstable_main_i18n_Translation-en 0
'http://ftp.debian.org/debian/dists/unstable/InRelease' ftp.debian.org_debian_dists_unstable_InRelease 0 " aptget update --print-uris
# two suite entries
msgtest 'Test deb822 with' 'two Suite entries'
echo "$BASE" | sed -e "s/stable/stable unstable/" > $SOURCES
-testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-i386_Packages 0 :
-'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0 :
+testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-i386_Packages 0
+'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0
'http://ftp.debian.org/debian/dists/stable/InRelease' ftp.debian.org_debian_dists_stable_InRelease 0
-'http://ftp.debian.org/debian/dists/unstable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_unstable_main_binary-i386_Packages 0 :
-'http://ftp.debian.org/debian/dists/unstable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_unstable_main_i18n_Translation-en 0 :
+'http://ftp.debian.org/debian/dists/unstable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_unstable_main_binary-i386_Packages 0
+'http://ftp.debian.org/debian/dists/unstable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_unstable_main_i18n_Translation-en 0
'http://ftp.debian.org/debian/dists/unstable/InRelease' ftp.debian.org_debian_dists_unstable_InRelease 0 " aptget update --print-uris
msgtest 'Test deb822' 'architecture option'
echo "$BASE" > $SOURCES
echo "Architectures: amd64 armel" >> $SOURCES
-testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/main/binary-amd64/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-amd64_Packages 0 :
-'http://ftp.debian.org/debian/dists/stable/main/binary-armel/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-armel_Packages 0 :
-'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0 :
+testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/main/binary-amd64/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-amd64_Packages 0
+'http://ftp.debian.org/debian/dists/stable/main/binary-armel/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-armel_Packages 0
+'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0
'http://ftp.debian.org/debian/dists/stable/InRelease' ftp.debian.org_debian_dists_stable_InRelease 0 " aptget update --print-uris
@@ -85,19 +85,19 @@ testempty aptget update --print-uris
# multiple URIs
msgtest 'Test deb822 sources.list file which has' 'Multiple URIs work'
echo "$BASE" | sed -e 's#http://ftp.debian.org/debian#http://ftp.debian.org/debian http://ftp.de.debian.org/debian#' > $SOURCES
-testequal --nomsg "'http://ftp.de.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.de.debian.org_debian_dists_stable_main_binary-i386_Packages 0 :
-'http://ftp.de.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.de.debian.org_debian_dists_stable_main_i18n_Translation-en 0 :
+testequal --nomsg "'http://ftp.de.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.de.debian.org_debian_dists_stable_main_binary-i386_Packages 0
+'http://ftp.de.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.de.debian.org_debian_dists_stable_main_i18n_Translation-en 0
'http://ftp.de.debian.org/debian/dists/stable/InRelease' ftp.de.debian.org_debian_dists_stable_InRelease 0
-'http://ftp.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-i386_Packages 0 :
-'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0 :
+'http://ftp.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-i386_Packages 0
+'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0
'http://ftp.debian.org/debian/dists/stable/InRelease' ftp.debian.org_debian_dists_stable_InRelease 0 " aptget update --print-uris
# multiple Type in one field
msgtest 'Test deb822 sources.list file which has' 'Multiple Types work'
echo "$BASE" | sed -e 's#Types: deb#Types: deb deb-src#' > $SOURCES
-testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/main/source/Sources.bz2' ftp.debian.org_debian_dists_stable_main_source_Sources 0 :
-'http://ftp.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-i386_Packages 0 :
-'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0 :
+testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/main/source/Sources.bz2' ftp.debian.org_debian_dists_stable_main_source_Sources 0
+'http://ftp.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-i386_Packages 0
+'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0
'http://ftp.debian.org/debian/dists/stable/InRelease' ftp.debian.org_debian_dists_stable_InRelease 0 " aptget update --print-uris
# a Suite
@@ -107,6 +107,6 @@ Types: deb
URIs: http://emacs.naquadah.org
Suites: stable/
EOF
-testequal --nomsg "'http://emacs.naquadah.org/stable/Packages.bz2' emacs.naquadah.org_stable_Packages 0 :
-'http://emacs.naquadah.org/stable/en.bz2' emacs.naquadah.org_stable_en 0 :
+testequal --nomsg "'http://emacs.naquadah.org/stable/Packages.bz2' emacs.naquadah.org_stable_Packages 0
+'http://emacs.naquadah.org/stable/en.bz2' emacs.naquadah.org_stable_en 0
'http://emacs.naquadah.org/stable/InRelease' emacs.naquadah.org_stable_InRelease 0 " aptget update --print-uris
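A sketch (not part of the patch) of the deb822 stanza shape these expectations exercise; the file path and the Components line are assumptions for illustration, the other fields mirror the Types/URIs/Suites values visible above:

cat > rootdir/etc/apt/sources.list.d/example.sources <<EOF
Types: deb
URIs: http://ftp.debian.org/debian
Suites: stable
Components: main
EOF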
diff --git a/test/integration/test-apt-update-expected-size b/test/integration/test-apt-update-expected-size
new file mode 100755
index 000000000..a039e9e1c
--- /dev/null
+++ b/test/integration/test-apt-update-expected-size
@@ -0,0 +1,44 @@
+#!/bin/sh
+set -e
+
+TESTDIR=$(readlink -f $(dirname $0))
+. $TESTDIR/framework
+
+setupenvironment
+configarchitecture "i386"
+
+insertpackage 'unstable' 'apt' 'all' '1.0'
+
+setupaptarchive --no-update
+changetowebserver
+
+# normal update works fine
+testsuccess aptget update
+
+# make InRelease really big
+mv aptarchive/dists/unstable/InRelease aptarchive/dists/unstable/InRelease.good
+dd if=/dev/zero of=aptarchive/dists/unstable/InRelease bs=1M count=2 2>/dev/null
+touch -d '+1hour' aptarchive/dists/unstable/InRelease
+aptget update -o Apt::Get::List-Cleanup=0 -o acquire::MaxReleaseFileSize=$((1*1000*1000)) -o Debug::pkgAcquire::worker=0 > output.log
+msgtest 'Check that the max write warning is triggered'
+if grep -q "Writing more data than expected" output.log; then
+ msgpass
+else
+ cat output.log
+ msgfail
+fi
+# ensure the failed InRelease file got renamed
+testsuccess ls rootdir/var/lib/apt/lists/partial/*InRelease.FAILED
+mv aptarchive/dists/unstable/InRelease.good aptarchive/dists/unstable/InRelease
+
+
+# append junk at the end of the Packages.gz/Packages
+SIZE="$(stat --printf=%s aptarchive/dists/unstable/main/binary-i386/Packages)"
+echo "1234567890" >> aptarchive/dists/unstable/main/binary-i386/Packages.gz
+echo "1234567890" >> aptarchive/dists/unstable/main/binary-i386/Packages
+NEW_SIZE="$(stat --printf=%s aptarchive/dists/unstable/main/binary-i386/Packages)"
+rm -f rootdir/var/lib/apt/lists/localhost*
+testequal "W: Failed to fetch http://localhost:8080/dists/unstable/main/binary-i386/Packages Writing more data than expected ($NEW_SIZE > $SIZE)
+
+E: Some index files failed to download. They have been ignored, or old ones used instead." aptget update -qq
+
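A sketch (not part of the patch): the same size guard can be triggered for the Release file alone via the acquire limit used above; the option name is taken from the test, the 1024 byte value is an arbitrary illustration:

aptget update -o Acquire::MaxReleaseFileSize=1024 || echo 'oversized Release file was rejected'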
diff --git a/test/integration/test-apt-update-file b/test/integration/test-apt-update-file
index fbcd473cc..e6332dc3b 100755
--- a/test/integration/test-apt-update-file
+++ b/test/integration/test-apt-update-file
@@ -14,7 +14,6 @@ configcompression 'bz2' 'gz'
insertpackage 'unstable' 'foo' 'all' '1.0'
-umask 022
setupaptarchive --no-update
# ensure the archive is not writable
@@ -27,10 +26,12 @@ aptget update -qq -o Debug::pkgAcquire::Auth=1 2> output.log
# ensure that the hash of the uncompressed file was verified even on a local
# ims hit
canary="SHA512:$(bzcat aptarchive/dists/unstable/main/binary-amd64/Packages.bz2 | sha512sum |cut -f1 -d' ')"
-grep -q "RecivedHash: $canary" output.log
+grep -q -- "- $canary" output.log
# foo is still available
testsuccess aptget install -s foo
# the cleanup should still work
chmod 750 aptarchive/dists/unstable/main/binary-amd64
+
+
diff --git a/test/integration/test-apt-update-ims b/test/integration/test-apt-update-ims
new file mode 100755
index 000000000..c74058c5d
--- /dev/null
+++ b/test/integration/test-apt-update-ims
@@ -0,0 +1,88 @@
+#!/bin/sh
+set -e
+
+TESTDIR=$(readlink -f $(dirname $0))
+. $TESTDIR/framework
+setupenvironment
+configarchitecture 'amd64'
+
+buildsimplenativepackage 'unrelated' 'all' '0.5~squeeze1' 'unstable'
+
+setupaptarchive
+changetowebserver
+
+runtest() {
+ rm -f rootdir/var/lib/apt/lists/localhost*
+
+ testsuccess aptget update
+
+ # ensure no leftovers in partial
+ testfailure ls "rootdir/var/lib/apt/lists/partial/*"
+
+ # check that I-M-S header is kept in redirections
+ testequal "$EXPECT" aptget update -o Debug::pkgAcquire::Worker=0 -o Debug::Acquire::http=0
+
+ # ensure that we still do a hash check on ims hit
+ msgtest 'Test I-M-S reverify'
+ aptget update -o Debug::pkgAcquire::Auth=1 2>&1 | grep -A1 'RecivedHash:' | grep -q -- '- SHA' && msgpass || msgfail
+
+ # ensure no leftovers in partial
+ testfailure ls "rootdir/var/lib/apt/lists/partial/*"
+}
+
+msgmsg "InRelease"
+EXPECT="Hit http://localhost:8080 unstable InRelease
+Hit http://localhost:8080 unstable/main Sources
+Hit http://localhost:8080 unstable/main amd64 Packages
+Hit http://localhost:8080 unstable/main Translation-en
+Reading package lists..."
+# with InRelease
+runtest
+
+# with gzip
+echo 'Acquire::GzipIndexes "1";' > rootdir/etc/apt/apt.conf.d/02compressindex
+runtest
+
+msgmsg "Release/Release.gpg"
+# with Release/Release.gpg
+EXPECT="Ign http://localhost:8080 unstable InRelease
+ 404 Not Found
+Hit http://localhost:8080 unstable Release
+Hit http://localhost:8080 unstable Release.gpg
+Hit http://localhost:8080 unstable/main Sources
+Hit http://localhost:8080 unstable/main amd64 Packages
+Hit http://localhost:8080 unstable/main Translation-en
+Reading package lists..."
+
+find aptarchive -name "InRelease" | xargs rm -f
+
+echo 'Acquire::GzipIndexes "0";' > rootdir/etc/apt/apt.conf.d/02compressindex
+runtest
+
+echo 'Acquire::GzipIndexes "1";' > rootdir/etc/apt/apt.conf.d/02compressindex
+runtest
+
+
+# no Release.gpg or InRelease
+configallowinsecurerepositories "true"
+
+msgmsg "Release only"
+EXPECT="Ign http://localhost:8080 unstable InRelease
+ 404 Not Found
+Hit http://localhost:8080 unstable Release
+Ign http://localhost:8080 unstable Release.gpg
+ 404 Not Found
+Hit http://localhost:8080 unstable/main Sources
+Hit http://localhost:8080 unstable/main amd64 Packages
+Hit http://localhost:8080 unstable/main Translation-en
+Reading package lists...
+W: The data from 'http://localhost:8080 unstable Release.gpg' is not signed. Packages from that repository can not be authenticated."
+
+find aptarchive -name "Release.gpg" | xargs rm -f
+
+echo 'Acquire::AllowInsecureRepositories "1";' > rootdir/etc/apt/apt.conf.d/insecure.conf
+echo 'Acquire::GzipIndexes "0";' > rootdir/etc/apt/apt.conf.d/02compressindex
+runtest
+
+echo 'Acquire::GzipIndexes "1";' > rootdir/etc/apt/apt.conf.d/02compressindex
+runtest
diff --git a/test/integration/test-apt-update-nofallback b/test/integration/test-apt-update-nofallback
new file mode 100755
index 000000000..321472c2e
--- /dev/null
+++ b/test/integration/test-apt-update-nofallback
@@ -0,0 +1,234 @@
+#!/bin/sh
+#
+# ensure we never fall back from a signed to an unsigned repo
+#
+# hash checks are done in
+#
+set -e
+
+simulate_mitm_and_inject_evil_package()
+{
+ rm -f $APTARCHIVE/dists/unstable/InRelease
+ rm -f $APTARCHIVE/dists/unstable/Release.gpg
+ inject_evil_package
+}
+
+inject_evil_package()
+{
+ cat > $APTARCHIVE/dists/unstable/main/binary-i386/Packages <<EOF
+Package: evil
+Installed-Size: 29
+Maintainer: Joe Sixpack <joe@example.org>
+Architecture: all
+Version: 1.0
+Filename: pool/evil_1.0_all.deb
+Size: 1270
+Description: an autogenerated evil package
+EOF
+ # avoid ims hit
+ touch -d '+1hour' aptarchive/dists/unstable/main/binary-i386/Packages
+}
+
+assert_update_is_refused_and_last_good_state_used()
+{
+ testequal "E: The repository 'file: unstable Release.gpg' is no longer signed." aptget update -qq
+
+ assert_repo_is_intact
+}
+
+assert_repo_is_intact()
+{
+ testequal "foo/unstable 2.0 all" apt list -q
+ testsuccess "" aptget install -y -s foo
+ testfailure "" aptget install -y evil
+
+ LISTDIR=rootdir/var/lib/apt/lists
+ if ! ( ls $LISTDIR/*InRelease >/dev/null 2>&1 ||
+ ls $LISTDIR/*Release.gpg >/dev/null 2>&1 ); then
+ echo "Can not find InRelease/Release.gpg in $(ls $LISTDIR)"
+ msgfail
+ fi
+}
+
+setupaptarchive_with_lists_clean()
+{
+ setupaptarchive --no-update
+ rm -f rootdir/var/lib/apt/lists/_*
+ #rm -rf rootdir/var/lib/apt/lists
+}
+
+test_from_inrelease_to_unsigned()
+{
+ # setup archive with InRelease file
+ setupaptarchive_with_lists_clean
+ testsuccess aptget update
+
+ simulate_mitm_and_inject_evil_package
+ assert_update_is_refused_and_last_good_state_used
+}
+
+test_from_release_gpg_to_unsigned()
+{
+ # setup archive with Release/Release.gpg (but no InRelease)
+ setupaptarchive_with_lists_clean
+ rm $APTARCHIVE/dists/unstable/InRelease
+ testsuccess aptget update
+
+ simulate_mitm_and_inject_evil_package
+ assert_update_is_refused_and_last_good_state_used
+}
+
+test_from_inrelease_to_unsigned_with_override()
+{
+ # setup archive with InRelease file
+ setupaptarchive_with_lists_clean
+ testsuccess aptget update
+
+    # simulate moving to an unsigned but otherwise valid repo
+ simulate_mitm_and_inject_evil_package
+ generatereleasefiles
+
+ # and ensure we can update to it (with enough force)
+ testsuccess aptget update --allow-insecure-repositories \
+ -o Acquire::AllowDowngradeToInsecureRepositories=1
+ # but that the individual packages are still considered untrusted
+ testequal "WARNING: The following packages cannot be authenticated!
+ evil
+E: There are problems and -y was used without --force-yes" aptget install -qq -y evil
+}
+
+test_cve_2012_0214()
+{
+ # see https://bugs.launchpad.net/ubuntu/+source/apt/+bug/947108
+ #
+ # it was possible to MITM the download so that InRelease/Release.gpg
+	# are not delivered (404) and an altered Release file was sent
+ #
+ # apt left the old InRelease file in /var/lib/apt/lists and downloaded
+ # the unauthenticated Release file too giving the false impression that
+ # Release was authenticated
+ #
+	# Note that this is pretty much impossible nowadays because:
+ # a) InRelease is left as is, not split to InRelease/Release as it was
+ # in the old days
+ # b) we refuse to go from signed->unsigned
+ #
+	# Still worth having a regression test that simulates the condition
+
+ # setup archive with InRelease
+ setupaptarchive_with_lists_clean
+ testsuccess aptget update
+
+ # do what CVE-2012-0214 did
+ rm $APTARCHIVE/dists/unstable/InRelease
+ rm $APTARCHIVE/dists/unstable/Release.gpg
+ inject_evil_package
+ # build valid Release file
+ aptftparchive -qq release ./aptarchive > aptarchive/dists/unstable/Release
+
+ assert_update_is_refused_and_last_good_state_used
+
+ # ensure there is no _Release file downloaded
+ testfailure ls rootdir/var/lib/apt/lists/*_Release
+}
+
+test_subvert_inrelease()
+{
+ # setup archive with InRelease
+ setupaptarchive_with_lists_clean
+ testsuccess aptget update
+
+ # replace InRelease with something else
+ mv $APTARCHIVE/dists/unstable/Release $APTARCHIVE/dists/unstable/InRelease
+
+ testequal "W: Failed to fetch file:${APTARCHIVE}/dists/unstable/InRelease Does not start with a cleartext signature
+
+E: Some index files failed to download. They have been ignored, or old ones used instead." aptget update -qq
+
+ # ensure we keep the repo
+ assert_repo_is_intact
+}
+
+test_inrelease_to_invalid_inrelease()
+{
+ # setup archive with InRelease
+ setupaptarchive_with_lists_clean
+ testsuccess aptget update
+
+    # now subvert InRelease so it no longer verifies
+    sed -i 's/Codename.*/Codename: evil!/' $APTARCHIVE/dists/unstable/InRelease
+ inject_evil_package
+
+ testequal "W: An error occurred during the signature verification. The repository is not updated and the previous index files will be used. GPG error: file: unstable InRelease: The following signatures were invalid: BADSIG 5A90D141DBAC8DAE Joe Sixpack (APT Testcases Dummy) <joe@example.org>
+
+W: Failed to fetch file:${APTARCHIVE}/dists/unstable/InRelease The following signatures were invalid: BADSIG 5A90D141DBAC8DAE Joe Sixpack (APT Testcases Dummy) <joe@example.org>
+
+W: Some index files failed to download. They have been ignored, or old ones used instead." aptget update -qq
+
+ # ensure we keep the repo
+ assert_repo_is_intact
+ testfailure grep "evil" rootdir/var/lib/apt/lists/*InRelease
+}
+
+test_release_gpg_to_invalid_release_release_gpg()
+{
+ # setup archive with InRelease
+ setupaptarchive_with_lists_clean
+ rm $APTARCHIVE/dists/unstable/InRelease
+ testsuccess aptget update
+
+    # now subvert Release so it no longer verifies
+ echo "Some evil data" >> $APTARCHIVE/dists/unstable/Release
+ inject_evil_package
+
+ testequal "W: An error occurred during the signature verification. The repository is not updated and the previous index files will be used. GPG error: file: unstable Release.gpg: The following signatures were invalid: BADSIG 5A90D141DBAC8DAE Joe Sixpack (APT Testcases Dummy) <joe@example.org>
+
+W: Failed to fetch file:${APTARCHIVE}/dists/unstable/Release.gpg
+
+W: Some index files failed to download. They have been ignored, or old ones used instead." aptget update -qq
+
+ assert_repo_is_intact
+ testfailure grep "evil" rootdir/var/lib/apt/lists/*Release
+}
+
+
+TESTDIR=$(readlink -f $(dirname $0))
+. $TESTDIR/framework
+
+setupenvironment
+configarchitecture "i386"
+
+# a "normal" package with source and binary
+buildsimplenativepackage 'foo' 'all' '2.0'
+
+# setup the archive and ensure we have a single package that installs fine
+setupaptarchive
+APTARCHIVE=$(readlink -f ./aptarchive)
+assert_repo_is_intact
+
+# test the various cases where a repo may go from signed->unsigned
+msgmsg "test_from_inrelease_to_unsigned"
+test_from_inrelease_to_unsigned
+
+msgmsg "test_from_release_gpg_to_unsigned"
+test_from_release_gpg_to_unsigned
+
+# ensure we do not regress on CVE-2012-0214
+msgmsg "test_cve_2012_0214"
+test_cve_2012_0214
+
+# ensure InRelease cannot be subverted
+msgmsg "test_subvert_inrelease"
+test_subvert_inrelease
+
+# ensure we revert to last good state if InRelease does not verify
+msgmsg "test_inrelease_to_invalid_inrelease"
+test_inrelease_to_invalid_inrelease
+
+# ensure we revert to last good state if Release/Release.gpg does not verify
+msgmsg "test_release_gpg_to_invalid_release_release_gpg"
+test_release_gpg_to_invalid_release_release_gpg
+
+# ensure we can override the downgrade error
+msgmsg "test_from_inrelease_to_unsigned"
+test_from_inrelease_to_unsigned_with_override
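A sketch (not part of the patch) of the two switches the override test above relies on; both must be given to accept a repository that went from signed to unsigned, and even then the packages stay unauthenticated:

aptget update --allow-insecure-repositories \
	-o Acquire::AllowDowngradeToInsecureRepositories=1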
diff --git a/test/integration/test-apt-update-rollback b/test/integration/test-apt-update-rollback
new file mode 100755
index 000000000..220c3052b
--- /dev/null
+++ b/test/integration/test-apt-update-rollback
@@ -0,0 +1,195 @@
+#!/bin/sh
+#
+# test that apt-get update is transactional
+#
+set -e
+
+avoid_ims_hit() {
+ touch -d '+1hour' aptarchive/dists/unstable/main/binary-i386/Packages*
+ touch -d '+1hour' aptarchive/dists/unstable/main/source/Sources*
+ touch -d '+1hour' aptarchive/dists/unstable/*Release*
+
+ touch -d '-1hour' rootdir/var/lib/apt/lists/*
+}
+
+create_fresh_archive()
+{
+ rm -rf aptarchive/*
+ rm -f rootdir/var/lib/apt/lists/_* rootdir/var/lib/apt/lists/partial/*
+
+ insertpackage 'unstable' 'old' 'all' '1.0'
+
+ setupaptarchive --no-update
+}
+
+add_new_package() {
+ insertpackage "unstable" "new" "all" "1.0"
+ insertsource "unstable" "new" "all" "1.0"
+
+ setupaptarchive --no-update "$@"
+}
+
+break_repository_sources_index() {
+ printf 'xxx' > $APTARCHIVE/dists/unstable/main/source/Sources
+ compressfile "$APTARCHIVE/dists/unstable/main/source/Sources" "$@"
+}
+
+start_with_good_inrelease() {
+ create_fresh_archive
+ testsuccess aptget update
+ testequal "old/unstable 1.0 all" apt list -q
+}
+
+test_inrelease_to_new_inrelease() {
+ msgmsg 'Test InRelease to new InRelease works fine'
+ start_with_good_inrelease
+
+ add_new_package '+1hour'
+ testsuccess aptget update -o Debug::Acquire::Transaction=1
+ testequal "new/unstable 1.0 all
+old/unstable 1.0 all" apt list -q
+}
+
+test_inrelease_to_broken_hash_reverts_all() {
+ msgmsg 'Test InRelease to broken InRelease reverts everything'
+ start_with_good_inrelease
+
+ add_new_package '+1hour'
+ # break the Sources file
+ break_repository_sources_index '+1hour'
+
+ # test the error condition
+ testequal "W: Failed to fetch file:${APTARCHIVE}/dists/unstable/main/source/Sources Hash Sum mismatch
+
+E: Some index files failed to download. They have been ignored, or old ones used instead." aptget update -qq
+ # ensure that the Packages file is also rolled back
+ testequal "E: Unable to locate package new" aptget install new -s -qq
+}
+
+test_inrelease_to_valid_release() {
+ msgmsg 'Test InRelease to valid Release'
+ start_with_good_inrelease
+
+ add_new_package '+1hour'
+    # switch to an unsigned repo now
+ rm $APTARCHIVE/dists/unstable/InRelease
+ rm $APTARCHIVE/dists/unstable/Release.gpg
+
+ # update fails
+ testequal "E: The repository 'file: unstable Release.gpg' is no longer signed." aptget update -qq
+
+ # test that we can install the new packages but do no longer have a sig
+ testsuccess aptget install old -s
+ testfailure aptget install new -s
+ testsuccess ls $ROOTDIR/var/lib/apt/lists/*_InRelease
+ testfailure ls $ROOTDIR/var/lib/apt/lists/*_Release
+}
+
+test_inrelease_to_release_reverts_all() {
+ msgmsg 'Test InRelease to broken Release reverts everything'
+ start_with_good_inrelease
+
+    # switch to an unsigned repo now
+ add_new_package '+1hour'
+ rm $APTARCHIVE/dists/unstable/InRelease
+ rm $APTARCHIVE/dists/unstable/Release.gpg
+
+ # break it
+ break_repository_sources_index '+1hour'
+
+ # ensure error
+ testequal "E: The repository 'file: unstable Release.gpg' is no longer signed." aptget update -qq # -o Debug::acquire::transaction=1
+
+ # ensure that the Packages file is also rolled back
+ testsuccess aptget install old -s
+ testfailure aptget install new -s
+ testsuccess ls $ROOTDIR/var/lib/apt/lists/*_InRelease
+ testfailure ls $ROOTDIR/var/lib/apt/lists/*_Release
+}
+
+test_unauthenticated_to_invalid_inrelease() {
+ msgmsg 'Test UnAuthenticated to invalid InRelease reverts everything'
+ create_fresh_archive
+ rm $APTARCHIVE/dists/unstable/InRelease
+ rm $APTARCHIVE/dists/unstable/Release.gpg
+
+ testsuccess aptget update --allow-insecure-repositories
+ testequal "WARNING: The following packages cannot be authenticated!
+ old
+E: There are problems and -y was used without --force-yes" aptget install -qq -y old
+
+ # go to authenticated but not correct
+ add_new_package '+1hour'
+ break_repository_sources_index '+1hour'
+
+ testequal "W: Failed to fetch file:$APTARCHIVE/dists/unstable/main/source/Sources Hash Sum mismatch
+
+E: Some index files failed to download. They have been ignored, or old ones used instead." aptget update -qq
+
+ testfailure ls rootdir/var/lib/apt/lists/*_InRelease
+ testequal "WARNING: The following packages cannot be authenticated!
+ old
+E: There are problems and -y was used without --force-yes" aptget install -qq -y old
+}
+
+test_inrelease_to_unauth_inrelease() {
+ msgmsg 'Test InRelease to InRelease without good sig'
+ start_with_good_inrelease
+
+ signreleasefiles 'Marvin Paranoid'
+
+ testequal "W: An error occurred during the signature verification. The repository is not updated and the previous index files will be used. GPG error: file: unstable InRelease: The following signatures couldn't be verified because the public key is not available: NO_PUBKEY E8525D47528144E2
+
+W: Failed to fetch file:$APTARCHIVE/dists/unstable/InRelease The following signatures couldn't be verified because the public key is not available: NO_PUBKEY E8525D47528144E2
+
+W: Some index files failed to download. They have been ignored, or old ones used instead." aptget update -qq
+
+ testsuccess ls rootdir/var/lib/apt/lists/*_InRelease
+}
+
+test_inrelease_to_broken_gzip() {
+ msgmsg "Test InRelease to broken gzip"
+ start_with_good_inrelease
+
+ # append junk at the end of the compressed file
+ echo "lala" >> $APTARCHIVE/dists/unstable/main/source/Sources.gz
+ touch -d '+2min' $APTARCHIVE/dists/unstable/main/source/Sources.gz
+ # remove uncompressed file to avoid fallback
+ rm $APTARCHIVE/dists/unstable/main/source/Sources
+
+ testfailure aptget update
+}
+
+TESTDIR=$(readlink -f $(dirname $0))
+. $TESTDIR/framework
+
+setupenvironment
+configarchitecture "i386"
+
+# setup the archive and ensure we have a single package that installs fine
+setupaptarchive
+APTARCHIVE=$(readlink -f ./aptarchive)
+ROOTDIR=${TMPWORKINGDIRECTORY}/rootdir
+APTARCHIVE_LISTS="$(echo $APTARCHIVE | tr "/" "_" )"
+
+# test the following cases:
+# - InRelease -> broken InRelease revert to previous state
+# - empty lists dir and broken remote leaves nothing on the system
+# - InRelease -> hashsum mismatch for one file reverts all files to previous state
+# - Release/Release.gpg -> hashsum mismatch
+# - InRelease -> Release with hashsum mismatch revert entire state and kills Release
+# - Release -> InRelease with broken Sig/Hash removes InRelease
+# going from Release/Release.gpg -> InRelease and vice versa
+# - unauthenticated -> invalid InRelease
+
+# stuff to do:
+# - ims-hit
+# - gzip-index tests
+
+test_inrelease_to_new_inrelease
+test_inrelease_to_broken_hash_reverts_all
+test_inrelease_to_valid_release
+test_inrelease_to_release_reverts_all
+test_unauthenticated_to_invalid_inrelease
+test_inrelease_to_unauth_inrelease
+test_inrelease_to_broken_gzip
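A sketch (not part of the patch) of how the transactional "all or nothing" behaviour exercised above can be spot-checked by hand; the paths follow the rootdir layout used throughout these tests:

ls rootdir/var/lib/apt/lists > lists.before
aptget update || true    # a failing update must not change the lists state
ls rootdir/var/lib/apt/lists > lists.after
cmp lists.before lists.after && echo 'previous lists state was preserved'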
diff --git a/test/integration/test-apt-update-transactions b/test/integration/test-apt-update-transactions
new file mode 100755
index 000000000..247334991
--- /dev/null
+++ b/test/integration/test-apt-update-transactions
@@ -0,0 +1,24 @@
+#!/bin/sh
+set -e
+
+TESTDIR=$(readlink -f $(dirname $0))
+. $TESTDIR/framework
+
+setupenvironment
+configarchitecture "i386"
+
+insertpackage 'unstable' 'foo' 'all' '1.0'
+
+setupaptarchive --no-update
+changetowebserver
+
+# break package file
+cat > aptarchive/dists/unstable/main/binary-i386/Packages <<EOF
+Package: bar
+EOF
+compressfile aptarchive/dists/unstable/main/binary-i386/Packages '+1hour'
+
+# ensure that an update either succeeds entirely or not at all
+testfailure aptget update
+testequal "partial" ls rootdir/var/lib/apt/lists
+
diff --git a/test/integration/test-apt-update-unauth b/test/integration/test-apt-update-unauth
index 13487603c..b7ccd6cf3 100755
--- a/test/integration/test-apt-update-unauth
+++ b/test/integration/test-apt-update-unauth
@@ -8,6 +8,8 @@ set -e
TESTDIR=$(readlink -f $(dirname $0))
. $TESTDIR/framework
+umask 022
+
setupenvironment
configarchitecture "i386"
@@ -17,23 +19,53 @@ insertsource 'unstable' 'foo' 'all' '1.0'
setupaptarchive
changetowebserver
+# FIXME:
+# - also check the unauth -> auth success case, i.e. that all files are
+# reverified
runtest() {
# start unauthenticated
find rootdir/var/lib/apt/lists/ -type f | xargs rm -f
rm -f aptarchive/dists/unstable/*Release*
- aptget update -qq
+
+ testsuccess aptget update -qq --allow-insecure-repositories
+
+ # FIXME: this really shouldn't be needed
+ rm -f rootdir/var/lib/apt/lists/partial/*
# become authenticated
generatereleasefiles
signreleasefiles
- # and ensure we do download the data again
- msgtest "Check that the data is check when going to authenticated"
- if aptget update |grep -q Hit; then
- msgfail
- else
+ # move uncompressed away
+ mv aptarchive/dists/unstable/main/binary-i386/Packages \
+ aptarchive/dists/unstable/main/binary-i386/Packages.uncompressed
+
+ # and ensure we re-check the downloaded data
+
+ # change the local packages file
+ PKGS=$(ls rootdir/var/lib/apt/lists/*Packages*)
+ echo "meep" > $PKGS
+ ls rootdir/var/lib/apt/lists/ > lists.before
+
+ # update and ensure all is reverted on the hashsum failure
+ testfailure aptget update -o Debug::Acquire::Transaction=0 -o Debug::pkgAcquire::Auth=1 -o Debug::pkgAcquire::worker=0 -o Debug::acquire::http=0
+
+ # ensure we have before what we have after
+ msgtest 'Check rollback on going from' 'unauth -> auth'
+ ls rootdir/var/lib/apt/lists/ > lists.after
+ if cmp lists.before lists.after; then
msgpass
+ else
+ echo >&2 '### Output of previous apt-get update ###'
+ cat >&2 rootdir/tmp/testfailure.output
+ echo >&2 '### Changes in the lists-directory: ###'
+ diff -u >&2 lists.before lists.after
+ msgfail
fi
+
+ # move uncompressed back for release file
+ mv aptarchive/dists/unstable/main/binary-i386/Packages.uncompressed \
+ aptarchive/dists/unstable/main/binary-i386/Packages
}
for COMPRESSEDINDEXES in 'false' 'true'; do
@@ -43,6 +75,5 @@ for COMPRESSEDINDEXES in 'false' 'true'; do
else
msgmsg 'Run tests with GzipIndexes disabled'
fi
-
- runtest
+ runtest
done
diff --git a/test/integration/test-bug-254770-segfault-if-cache-not-buildable b/test/integration/test-bug-254770-segfault-if-cache-not-buildable
index 59102ddc9..6ae8944b2 100755
--- a/test/integration/test-bug-254770-segfault-if-cache-not-buildable
+++ b/test/integration/test-bug-254770-segfault-if-cache-not-buildable
@@ -3,17 +3,25 @@ set -e
TESTDIR=$(readlink -f $(dirname $0))
. $TESTDIR/framework
+
+msgtest 'Test run as' 'non-root'
+if [ "$(id -u)" = '0' ]; then
+ msgskip 'root has by definition no problems accessing files'
+ exit 0
+else
+ msgpass
+fi
+
setupenvironment
configarchitecture "i386"
setupaptarchive
-CURRENTTRAP="chmod a+x rootdir/var/lib/dpkg; $CURRENTTRAP"
-trap "$CURRENTTRAP" 0 HUP INT QUIT ILL ABRT FPE SEGV PIPE TERM
+addtrap 'prefix' 'chmod a+x rootdir/var/lib/dpkg;'
chmod a-x rootdir/var/lib/dpkg
testsegfault() {
msgtest "No segfault in" "$*"
- local TEST="$($* 2>&1 | grep -v 'E:')"
+ local TEST="$("$@" 2>&1 | grep -v 'E:')"
if [ -z "$TEST" ]; then
msgpass
else
diff --git a/test/integration/test-bug-507998-dist-upgrade-recommends b/test/integration/test-bug-507998-dist-upgrade-recommends
index 513421a94..f3b4e04fb 100755
--- a/test/integration/test-bug-507998-dist-upgrade-recommends
+++ b/test/integration/test-bug-507998-dist-upgrade-recommends
@@ -16,6 +16,7 @@ setupaptarchive
testequal 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following packages will be upgraded:
tshark wireshark-common
2 upgraded, 0 newly installed, 0 to remove and 0 not upgraded.
diff --git a/test/integration/test-bug-591882-conkeror b/test/integration/test-bug-591882-conkeror
index e1c0b42d1..891ddb8b7 100755
--- a/test/integration/test-bug-591882-conkeror
+++ b/test/integration/test-bug-591882-conkeror
@@ -9,6 +9,7 @@ setupaptarchive
UPGRADEFAIL="Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following packages will be REMOVED:
conkeror libdatrie0 libkrb53 libxcb-xlib0 xulrunner-1.9
The following NEW packages will be installed:
@@ -40,6 +41,7 @@ E: Trivial Only specified but this is not a trivial operation."
UPGRADESUCCESS="Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following packages will be REMOVED:
libdatrie0 libkrb53 libxcb-xlib0 xulrunner-1.9
The following NEW packages will be installed:
diff --git a/test/integration/test-bug-595691-empty-and-broken-archive-files b/test/integration/test-bug-595691-empty-and-broken-archive-files
index aea340203..fedf82c92 100755
--- a/test/integration/test-bug-595691-empty-and-broken-archive-files
+++ b/test/integration/test-bug-595691-empty-and-broken-archive-files
@@ -13,7 +13,7 @@ setupflataptarchive
testaptgetupdate() {
rm -rf rootdir/var/lib/apt
aptget update 2>> testaptgetupdate.diff >> testaptgetupdate.diff || true
- sed -i -e '/^Fetched / d' -e '/Ign / d' -e '/Release/ d' -e 's#Get:[0-9]\+ #Get: #' -e 's#\[[0-9]* [kMGTPY]*B\]#\[\]#' testaptgetupdate.diff
+ sed -i -e '/Ign /,+1d' -e '/Release/ d' -e 's#Get:[0-9]\+ #Get: #' -e 's#\[[0-9]* [kMGTPY]*B\]#\[\]#' testaptgetupdate.diff
GIVEN="$1"
shift
msgtest "Test for correctness of" "apt-get update with $*"
diff --git a/test/integration/test-bug-596498-trusted-unsigned-repo b/test/integration/test-bug-596498-trusted-unsigned-repo
index 06c9c8285..a08c153f8 100755
--- a/test/integration/test-bug-596498-trusted-unsigned-repo
+++ b/test/integration/test-bug-596498-trusted-unsigned-repo
@@ -12,7 +12,7 @@ setupaptarchive
aptgetupdate() {
rm -rf rootdir/var/lib/apt/ rootdir/var/cache/apt/*.bin
- aptget update -qq
+ testsuccess aptget update --allow-insecure-repositories
}
PKGTEXT="$(aptget install cool --assume-no -d | head -n 7)"
diff --git a/test/integration/test-bug-605394-versioned-or-groups b/test/integration/test-bug-605394-versioned-or-groups
index 0f09d2927..bb72d59e3 100755
--- a/test/integration/test-bug-605394-versioned-or-groups
+++ b/test/integration/test-bug-605394-versioned-or-groups
@@ -9,6 +9,7 @@ setupaptarchive
testequal "Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following packages will be upgraded:
php5 php5-cgi
2 upgraded, 0 newly installed, 0 to remove and 0 not upgraded.
diff --git a/test/integration/test-bug-612099-multiarch-conflicts b/test/integration/test-bug-612099-multiarch-conflicts
index 20dc3a7e5..c32600037 100755
--- a/test/integration/test-bug-612099-multiarch-conflicts
+++ b/test/integration/test-bug-612099-multiarch-conflicts
@@ -70,6 +70,7 @@ Conf foobar (1.0 stable [i386])' aptget install foobar/stable libc6 -st testing
testequal 'Reading package lists...
Building dependency tree...
Reading state information...
+Calculating upgrade...
The following packages will be upgraded:
libc6
1 upgraded, 0 newly installed, 0 to remove and 0 not upgraded.
@@ -168,6 +169,7 @@ Conf libc6-same:amd64 (1.0 stable [amd64])' aptget install libc6-same:amd64 -s -
testequal 'Reading package lists...
Building dependency tree...
Reading state information...
+Calculating upgrade...
The following packages will be upgraded:
libc6-same
1 upgraded, 0 newly installed, 0 to remove and 0 not upgraded.
diff --git a/test/integration/test-bug-617690-allow-unauthenticated-makes-all-untrusted b/test/integration/test-bug-617690-allow-unauthenticated-makes-all-untrusted
index f93510fd7..0736bb6dc 100755
--- a/test/integration/test-bug-617690-allow-unauthenticated-makes-all-untrusted
+++ b/test/integration/test-bug-617690-allow-unauthenticated-makes-all-untrusted
@@ -24,15 +24,18 @@ testfilemissing() {
testrun() {
rm -rf rootdir/var/lib/apt
- testsuccess aptget update
if [ "$1" = 'trusted' ]; then
+ testsuccess aptget update
+
testsuccess aptget download cool
testfileexists 'cool_1.0_i386.deb'
testsuccess aptget download cool --allow-unauthenticated
testfileexists 'cool_1.0_i386.deb'
else
+ testsuccess aptget update --allow-insecure-repositories
+
testfailure aptget download cool
testfilemissing 'cool_1.0_i386.deb'
diff --git a/test/integration/test-bug-64141-install-dependencies-for-on-hold b/test/integration/test-bug-64141-install-dependencies-for-on-hold
index 9a9e7be10..9e6c223a8 100755
--- a/test/integration/test-bug-64141-install-dependencies-for-on-hold
+++ b/test/integration/test-bug-64141-install-dependencies-for-on-hold
@@ -21,6 +21,7 @@ setupaptarchive
testequal 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following packages will be REMOVED:
oldcrap
The following NEW packages will be installed:
@@ -35,6 +36,7 @@ testsuccess aptmark hold apt
testequal 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following packages have been kept back:
apt
The following packages will be upgraded:
diff --git a/test/integration/test-bug-657695-resolver-breaks-on-virtuals b/test/integration/test-bug-657695-resolver-breaks-on-virtuals
index e9b27cfcd..1b92a04fe 100755
--- a/test/integration/test-bug-657695-resolver-breaks-on-virtuals
+++ b/test/integration/test-bug-657695-resolver-breaks-on-virtuals
@@ -18,6 +18,7 @@ setupaptarchive
testequal 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following packages will be REMOVED:
xserver-xorg-video-driver1 xserver-xorg-video-driver10
xserver-xorg-video-driver11 xserver-xorg-video-driver12
diff --git a/test/integration/test-bug-675449-essential-are-protected b/test/integration/test-bug-675449-essential-are-protected
index 7d8cc3484..2a27c62b1 100755
--- a/test/integration/test-bug-675449-essential-are-protected
+++ b/test/integration/test-bug-675449-essential-are-protected
@@ -69,6 +69,7 @@ Purg pkg-none-foreign:i386 [1]' aptget purge pkg-none-foreign:i386 -s
testequal 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following NEW packages will be installed:
pkg-depends-new:i386 pkg-none-new
The following packages will be upgraded:
diff --git a/test/integration/test-bug-680041-apt-mark-holds-correctly b/test/integration/test-bug-680041-apt-mark-holds-correctly
index 2e5e39c8e..3f40c23dc 100755
--- a/test/integration/test-bug-680041-apt-mark-holds-correctly
+++ b/test/integration/test-bug-680041-apt-mark-holds-correctly
@@ -19,6 +19,7 @@ runtests() {
testequal 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following packages will be upgraded:
pkgall pkgarch
2 upgraded, 0 newly installed, 0 to remove and 0 not upgraded.
@@ -30,6 +31,7 @@ E: Trivial Only specified but this is not a trivial operation.' aptget dist-upgr
testequal 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following packages have been kept back:
pkgarch
The following packages will be upgraded:
@@ -43,6 +45,7 @@ E: Trivial Only specified but this is not a trivial operation.' aptget dist-upgr
testequal 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following packages will be upgraded:
pkgall pkgarch
2 upgraded, 0 newly installed, 0 to remove and 0 not upgraded.
@@ -54,6 +57,7 @@ E: Trivial Only specified but this is not a trivial operation.' aptget dist-upgr
testequal 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following packages have been kept back:
pkgall
The following packages will be upgraded:
diff --git a/test/integration/test-bug-683786-build-dep-on-virtual-packages b/test/integration/test-bug-683786-build-dep-on-virtual-packages
index 879d6a3bc..65862c572 100755
--- a/test/integration/test-bug-683786-build-dep-on-virtual-packages
+++ b/test/integration/test-bug-683786-build-dep-on-virtual-packages
@@ -38,8 +38,8 @@ Building dependency tree...
The following NEW packages will be installed:
po-debconf
0 upgraded, 1 newly installed, 0 to remove and 0 not upgraded.
-Inst po-debconf (1 unstable, unstable [all])
-Conf po-debconf (1 unstable, unstable [all])' aptget build-dep dash -s
+Inst po-debconf (1 unstable [all])
+Conf po-debconf (1 unstable [all])' aptget build-dep dash -s
testequal 'Reading package lists...
Building dependency tree...
diff --git a/test/integration/test-bug-686346-package-missing-architecture b/test/integration/test-bug-686346-package-missing-architecture
index dc51861ab..8024f81da 100755
--- a/test/integration/test-bug-686346-package-missing-architecture
+++ b/test/integration/test-bug-686346-package-missing-architecture
@@ -53,6 +53,7 @@ testnopackage pkge:*
# this difference seems so important that it has to be maintained …
testequal 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
0 upgraded, 0 newly installed, 0 to remove and 0 not upgraded.' aptget dist-upgrade -s
# pkgd has no update with an architecture
diff --git a/test/integration/test-bug-712435-missing-descriptions b/test/integration/test-bug-712435-missing-descriptions
index 53ecbbeb3..7a3518745 100755
--- a/test/integration/test-bug-712435-missing-descriptions
+++ b/test/integration/test-bug-712435-missing-descriptions
@@ -87,13 +87,10 @@ $DESCRIPTION
Description-md5: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
" aptcache show apt-normal
-# displaying the translated Description would be equally valid,
-# but we assume only one description is in a Packages file and
-# so we prefer "Description" over "Description-*" currently.
for variant in 'below' 'middle' 'top'; do
testequal "Package: apt-both-$variant
$PACKAGESTANZA
-$DESCRIPTION
+$TRANSDESCRIPTION
Description-md5: bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
" aptcache show apt-both-$variant
done
@@ -122,7 +119,7 @@ X-Some-Flag: yes
testequal "Package: apt-intermixed2
$PACKAGESTANZA
-$DESCRIPTION
+$TRANSDESCRIPTION
Description-md5: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
X-Some-Flag: yes
X-Foo-Flag: Something with a Description
@@ -131,7 +128,7 @@ X-Bar-Flag: no
testequal "Package: apt-intermixed3
$PACKAGESTANZA
-$DESCRIPTION
+$TRANSDESCRIPTION
Description-md5: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
X-Some-Flag: yes
X-Foo-Flag: Something with a Description
diff --git a/test/integration/test-bug-717891-abolute-uris-for-proxies b/test/integration/test-bug-717891-abolute-uris-for-proxies
index ac1d6ec11..54a616686 100755
--- a/test/integration/test-bug-717891-abolute-uris-for-proxies
+++ b/test/integration/test-bug-717891-abolute-uris-for-proxies
@@ -12,7 +12,7 @@ setupaptarchive
changetowebserver --request-absolute='uri'
msgtest 'Check that absolute paths are' 'not accepted'
-testfailure --nomsg aptget update
+testfailure --nomsg aptget update --allow-insecure-repositories
echo 'Acquire::http::Proxy "http://localhost:8080";' > rootdir/etc/apt/apt.conf.d/99proxy
diff --git a/test/integration/test-bug-722207-print-uris-even-if-very-quiet b/test/integration/test-bug-722207-print-uris-even-if-very-quiet
index f2d95da19..9a5685703 100755
--- a/test/integration/test-bug-722207-print-uris-even-if-very-quiet
+++ b/test/integration/test-bug-722207-print-uris-even-if-very-quiet
@@ -16,10 +16,10 @@ setupaptarchive
APTARCHIVE=$(readlink -f ./aptarchive)
-testequal "'file://${APTARCHIVE}/pool/main/apt/apt_2_all.deb' apt_2_all.deb 0 MD5Sum:" aptget upgrade -qq --print-uris
-testequal "'file://${APTARCHIVE}/pool/main/apt/apt_2_all.deb' apt_2_all.deb 0 MD5Sum:" aptget dist-upgrade -qq --print-uris
-testequal "'file://${APTARCHIVE}/pool/main/apt/apt_2_all.deb' apt_2_all.deb 0 MD5Sum:" aptget install apt -qq --print-uris
-testequal "'file://${APTARCHIVE}/pool/main/apt/apt_2_all.deb' apt_2_all.deb 0 MD5Sum:" aptget download apt -qq --print-uris
+testequal "'file://${APTARCHIVE}/pool/main/apt/apt_2_all.deb' apt_2_all.deb 0 " aptget upgrade -qq --print-uris
+testequal "'file://${APTARCHIVE}/pool/main/apt/apt_2_all.deb' apt_2_all.deb 0 " aptget dist-upgrade -qq --print-uris
+testequal "'file://${APTARCHIVE}/pool/main/apt/apt_2_all.deb' apt_2_all.deb 0 " aptget install apt -qq --print-uris
+testequal "'file://${APTARCHIVE}/pool/main/apt/apt_2_all.deb' apt_2_all.deb 0 " aptget download apt -qq --print-uris
testequal "'file://${APTARCHIVE}/apt_2.dsc' apt_2.dsc 0 MD5Sum:d41d8cd98f00b204e9800998ecf8427e
'file://${APTARCHIVE}/apt_2.tar.gz' apt_2.tar.gz 0 MD5Sum:d41d8cd98f00b204e9800998ecf8427e" aptget source apt -qq --print-uris
testequal "'http://packages.debian.org/changelogs/pool/main/apt/apt_2/changelog'" aptget changelog apt -qq --print-uris
diff --git a/test/integration/test-bug-728500-tempdir b/test/integration/test-bug-728500-tempdir
index 0451fc1ed..37e5a013e 100755
--- a/test/integration/test-bug-728500-tempdir
+++ b/test/integration/test-bug-728500-tempdir
@@ -17,7 +17,7 @@ msgtest 'Test apt-get update with incorrect' 'TMPDIR'
OUTPUT=$(mktemp)
addtrap "rm $OUTPUT;"
export TMPDIR=/does-not-exists
-if aptget update >${OUTPUT} 2>&1; then
+if aptget update -o Debug::Acquire::gpg=1 >${OUTPUT} 2>&1; then
msgpass
else
echo
@@ -27,3 +27,4 @@ fi
unset TMPDIR
testequal 'coolstuff' aptcache pkgnames
+testsuccess ls rootdir/var/lib/apt/lists/*InRelease
diff --git a/test/integration/test-bug-733028-gpg-resource-limit b/test/integration/test-bug-733028-gpg-resource-limit
new file mode 100755
index 000000000..f9c804963
--- /dev/null
+++ b/test/integration/test-bug-733028-gpg-resource-limit
@@ -0,0 +1,27 @@
+#!/bin/sh
+set -e
+
+TESTDIR=$(readlink -f $(dirname $0))
+. $TESTDIR/framework
+setupenvironment
+configarchitecture 'i386'
+
+insertpackage 'unstable' 'foobar' 'all' '1'
+
+setupaptarchive --no-update
+
+for i in $(seq 1 50); do
+ touch rootdir/etc/apt/trusted.gpg.d/emptykey-${i}.gpg
+done
+
+aptkey list | grep '^pub' > aptkey.list
+testfileequal ./aptkey.list 'pub 2048R/DBAC8DAE 2010-08-18'
+
+msgtest 'Test for no gpg errors/warnings in' 'apt-get update'
+aptget update > update.log 2>&1
+if grep -iq 'GPG' update.log; then
+ msgfail
+ cat update.log
+else
+ msgpass
+fi
diff --git a/test/integration/test-bug-735967-lib32-to-i386-unavailable b/test/integration/test-bug-735967-lib32-to-i386-unavailable
index e9f3bf96d..826931fe4 100755
--- a/test/integration/test-bug-735967-lib32-to-i386-unavailable
+++ b/test/integration/test-bug-735967-lib32-to-i386-unavailable
@@ -33,6 +33,7 @@ testsuccess aptget update
testequal 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following packages will be REMOVED:
lib32nss-mdns
The following packages will be upgraded:
@@ -60,6 +61,7 @@ testsuccess aptget update
testequal 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following NEW packages will be installed:
libnss-mdns:i386 libnss-mdns-i386:i386
The following packages will be upgraded:
diff --git a/test/integration/test-bug-738785-switch-protocol b/test/integration/test-bug-738785-switch-protocol
index 1e5748eae..f450e5e5a 100755
--- a/test/integration/test-bug-738785-switch-protocol
+++ b/test/integration/test-bug-738785-switch-protocol
@@ -49,8 +49,14 @@ rm https
cd - >/dev/null
echo "Dir::Bin::Methods \"${COPYMETHODS}\";" >> aptconfig.conf
-testequal "E: The method driver $(pwd)/rootdir/usr/lib/apt/methods/https could not be found.
+if [ "$(id -u)" = '0' ]; then
+ testequal "Can't drop privileges for downloading as file '$(pwd)/apt_1.0_all.deb' couldn't be accessed by user '_apt'.
+E: The method driver $(pwd)/rootdir/usr/lib/apt/methods/https could not be found.
N: Is the package apt-transport-https installed?" aptget download apt -q=0
+else
+ testequal "E: The method driver $(pwd)/rootdir/usr/lib/apt/methods/https could not be found.
+N: Is the package apt-transport-https installed?" aptget download apt -q=0
+fi
testsuccess test ! -e apt_1.0_all.deb
# revert to all methods
@@ -60,4 +66,4 @@ mv rootdir/${COPYMETHODS}.bak rootdir/${COPYMETHODS}
# check that downgrades from https to http are not allowed
webserverconfig 'aptwebserver::support::http' 'true'
sed -i -e 's#:8080/redirectme#:4433/downgrademe#' -e 's# http:# https:#' rootdir/etc/apt/sources.list.d/*
-testfailure aptget update
+testfailure aptget update --allow-insecure-repositories
diff --git a/test/integration/test-bug-740843-versioned-up-down-breaks b/test/integration/test-bug-740843-versioned-up-down-breaks
index cb035a71f..9426ffad1 100755
--- a/test/integration/test-bug-740843-versioned-up-down-breaks
+++ b/test/integration/test-bug-740843-versioned-up-down-breaks
@@ -24,6 +24,7 @@ setupaptarchive
testequalor2 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following packages will be upgraded:
foo-driver libfoo libfoo:i386 libgl1-foo-glx libgl1-foo-glx:i386
5 upgraded, 0 newly installed, 0 to remove and 0 not upgraded.
@@ -38,6 +39,7 @@ Conf libgl1-foo-glx:i386 (2 stable [i386])
Conf libgl1-foo-glx (2 stable [amd64])
Conf foo-driver (2 stable [amd64])' 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following packages will be upgraded:
foo-driver libfoo libfoo:i386 libgl1-foo-glx libgl1-foo-glx:i386
5 upgraded, 0 newly installed, 0 to remove and 0 not upgraded.
diff --git a/test/integration/test-bug-745036-new-foreign-invalidates-cache b/test/integration/test-bug-745036-new-foreign-invalidates-cache
new file mode 100755
index 000000000..490cbecdd
--- /dev/null
+++ b/test/integration/test-bug-745036-new-foreign-invalidates-cache
@@ -0,0 +1,29 @@
+#!/bin/sh
+set -e
+
+TESTDIR=$(readlink -f $(dirname $0))
+. $TESTDIR/framework
+setupenvironment
+configarchitecture 'amd64'
+
+insertpackage 'unstable' 'cool-foo' 'amd64' '1.0' 'Depends: foo'
+insertpackage 'unstable' 'foo' 'amd64' '1.0' 'Multi-Arch: foreign'
+insertinstalledpackage 'cool-foo' 'amd64' '1.0' 'Depends: foo'
+insertinstalledpackage 'foo' 'amd64' '1.0' 'Multi-Arch: foreign'
+
+setupaptarchive
+
+testsuccess aptget check -s
+
+configarchitecture 'amd64' 'i386'
+testequal 'E: The package cache was built for different architectures: amd64 vs amd64,i386' aptget check -s -o pkgCacheFile::Generate=false
+
+testsuccess aptget check -s
+
+insertinstalledpackage 'awesome-foo' 'i386' '1.0' 'Depends: foo'
+
+testsuccess aptget check -s
+
+testsuccess aptget update --no-download
+
+testsuccess aptget check -s
diff --git a/test/integration/test-bug-758153-versioned-provides-support b/test/integration/test-bug-758153-versioned-provides-support
index 2904ae5a1..21f9123c9 100755
--- a/test/integration/test-bug-758153-versioned-provides-support
+++ b/test/integration/test-bug-758153-versioned-provides-support
@@ -28,6 +28,7 @@ setupaptarchive
testequal 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following packages will be upgraded:
webapp webserver
2 upgraded, 0 newly installed, 0 to remove and 0 not upgraded.
diff --git a/test/integration/test-compressed-indexes b/test/integration/test-compressed-indexes
index 805ed5964..f67077973 100755
--- a/test/integration/test-compressed-indexes
+++ b/test/integration/test-compressed-indexes
@@ -39,10 +39,10 @@ testrun() {
test -e rootdir/var/lib/apt/lists/*_Translation-en.${COMPRESS} || F=1
# there is no point in trying pdiff if we have compressed indexes
# as we can't patch compressed files (well, we can, but what is the point?)
- ! test -e rootdir/var/lib/apt/lists/*.IndexDiff || F=1
+ ! test -e rootdir/var/lib/apt/lists/*diff_Index || F=1
else
# clear the faked pdiff indexes so the glob below works
- rm -f rootdir/var/lib/apt/lists/*.IndexDiff
+ rm -f rootdir/var/lib/apt/lists/*diff_Index
test -e rootdir/var/lib/apt/lists/*_Packages || F=1
test -e rootdir/var/lib/apt/lists/*_Sources || F=1
test -e rootdir/var/lib/apt/lists/*_Translation-en || F=1
@@ -51,6 +51,7 @@ testrun() {
! test -e rootdir/var/lib/apt/lists/*_Translation-en.* || F=1
fi
if [ -n "$F" ]; then
+ cat rootdir/tmp/testsuccess.output
ls -laR rootdir/var/lib/apt/lists/
msgfail
else
@@ -103,15 +104,15 @@ testovermethod() {
INDCOMP='compressed'
fi
- testsuccess aptget update
+ testsuccess aptget update -o Debug::Acquire::http=1
msgmsg "${1}: ${COMPRESSOR}: Test with $INDCOMP indexes"
testrun "$INDCOMP"
- testsuccess aptget update -o Acquire::Pdiffs=1
+ testsuccess aptget update -o Acquire::Pdiffs=1 -o Debug::Acquire::http=1
msgmsg "${1}: ${COMPRESSOR}: Test with $INDCOMP indexes (update unchanged with pdiffs)"
testrun "$INDCOMP"
- testsuccess aptget update -o Acquire::Pdiffs=0
+ testsuccess aptget update -o Acquire::Pdiffs=0 -o Debug::Acquire::http=1
msgmsg "${1}: ${COMPRESSOR}: Test with $INDCOMP indexes (update unchanged without pdiffs)"
testrun "$INDCOMP"
diff --git a/test/integration/test-conflicts-loop b/test/integration/test-conflicts-loop
index a2c411aaf..0906ef8fa 100755
--- a/test/integration/test-conflicts-loop
+++ b/test/integration/test-conflicts-loop
@@ -17,6 +17,7 @@ setupaptarchive
testequal 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following packages will be upgraded:
openjdk-6-jre openjdk-6-jre-headless openjdk-6-jre-lib
3 upgraded, 0 newly installed, 0 to remove and 0 not upgraded.
diff --git a/test/integration/test-cve-2013-1051-InRelease-parsing b/test/integration/test-cve-2013-1051-InRelease-parsing
index 41b27f691..8f9803991 100755
--- a/test/integration/test-cve-2013-1051-InRelease-parsing
+++ b/test/integration/test-cve-2013-1051-InRelease-parsing
@@ -42,7 +42,7 @@ touch -d '+1hour' aptarchive/dists/stable/InRelease
# ensure the update fails
# useful for debugging to add "-o Debug::pkgAcquire::auth=true"
msgtest 'apt-get update should fail with the modified' 'InRelease'
-aptget update 2>&1 | grep -q 'Hash Sum mismatch' > /dev/null && msgpass || msgfail
+aptget update 2>&1 | grep -E -q '(Writing more data than expected|Hash Sum mismatch)' > /dev/null && msgpass || msgfail
# ensure there is no package
testequal 'Reading package lists...
diff --git a/test/integration/test-hashsum-verification b/test/integration/test-hashsum-verification
index 2a400dcb4..5f88110b3 100755
--- a/test/integration/test-hashsum-verification
+++ b/test/integration/test-hashsum-verification
@@ -70,9 +70,13 @@ runtest() {
rm -rf rootdir/var/lib/apt/lists
rm aptarchive/InRelease aptarchive/Release.gpg
msgtest 'unsigned apt-get update gets the expected hashsum mismatch'
- aptget update 2>&1 | grep "Hash Sum mismatch" > /dev/null && msgpass || msgfail
-
-
+ aptget update --allow-insecure-repositories >output.log 2>&1 || true
+ if grep -q "Hash Sum mismatch" output.log; then
+ msgpass
+ else
+ cat output.log
+ msgfail
+ fi
}
for COMPRESSEDINDEXES in 'false' 'true'; do
diff --git a/test/integration/test-http-pipeline-messup b/test/integration/test-http-pipeline-messup
new file mode 100755
index 000000000..405574e8a
--- /dev/null
+++ b/test/integration/test-http-pipeline-messup
@@ -0,0 +1,47 @@
+#!/bin/sh
+set -e
+
+TESTDIR=$(readlink -f $(dirname $0))
+. $TESTDIR/framework
+
+setupenvironment
+configarchitecture "i386"
+
+# try a little harder to create a size mismatch
+buildsimplenativepackage 'pkga' 'all' '1.0' 'stable' "Depends: foo" '' '' '' '' 'none'
+buildsimplenativepackage 'pkgb' 'all' '1.0' 'stable' "Depends: foo" '' '' '' '' 'none'
+buildsimplenativepackage 'pkgc' 'all' '1.0' 'stable' "Depends: f$(for i in $(seq 0 1000); do printf 'o'; done)" '' '' '' '' 'none'
+buildsimplenativepackage 'pkgd' 'all' '1.0' 'stable' "Depends: f$(for i in $(seq 0 1000); do printf 'o'; done)" '' '' '' '' 'none'
+
+setupaptarchive --no-update
+
+# simulate a (predictable) pipeline mess-up by the server/proxy
+changetowebserver \
+ -o 'aptwebserver::overwrite::.*pkga.*::filename=/pool/pkgd_1.0_all.deb' \
+ -o 'aptwebserver::overwrite::.*pkgc.*::filename=/pool/pkgb_1.0_all.deb' \
+ -o 'aptwebserver::overwrite::.*pkgb.*::filename=/pool/pkgc_1.0_all.deb' \
+ -o 'aptwebserver::overwrite::.*pkgd.*::filename=/pool/pkga_1.0_all.deb'
+
+echo 'Debug::Acquire::http "true";
+Debug::pkgAcquire::Worker "true";' > rootdir/etc/apt/apt.conf.d/99debug
+
+testsuccess aptget update
+
+# mess-up is bigger than the pipeline: checks that the fixup isn't trying too hard
+testfailure aptget download pkga pkgb pkgc pkgd -o Acquire::http::Pipeline-Depth=2
+testfailure test -f pkga_1.0_all.deb
+
+# ensure that pipelining is enabled for the rest of this test
+echo 'Acquire::http::Pipeline-Depth 10;' > rootdir/etc/apt/apt.conf.d/99enable-pipeline
+
+# the output is a bit strange: it looks like it has downloaded pkga 4 times
+testsuccess aptget download pkga pkgb pkgc pkgd
+for pkg in 'pkga' 'pkgb' 'pkgc' 'pkgd'; do
+ testsuccess test -f ${pkg}_1.0_all.deb
+ testsuccess cmp incoming/${pkg}_1.0_all.deb ${pkg}_1.0_all.deb
+ rm -f ${pkg}_1.0_all.deb
+done
+
+# while hashes will pass (as none are available), sizes will not match, so this fails;
+# it checks that without hashes the pipeline depth is ignored as we can't fix up
+testfailure aptget download pkga pkgb pkgc pkgd --allow-unauthenticated -o Acquire::ForceHash=ROT26
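A minimal apt.conf sketch of the knobs this new test relies on; the option names are exactly the ones set in the test above, while the comments are only illustrative and not part of the test:

Acquire::http::Pipeline-Depth "10";   // allow several requests in flight on one connection
Debug::Acquire::http "true";          // log the HTTP conversation
Debug::pkgAcquire::Worker "true";     // log how fetched responses are (re)assigned to queue items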
diff --git a/test/integration/test-kernel-helper-autoremove b/test/integration/test-kernel-helper-autoremove
index c51caa758..22c36890b 100755
--- a/test/integration/test-kernel-helper-autoremove
+++ b/test/integration/test-kernel-helper-autoremove
@@ -9,7 +9,7 @@ configarchitecture 'amd64'
# the executed script would use the installed apt-config,
# which is outside of our control
msgtest 'Check that the installed apt-config supports' '--no-empty'
-if apt-config dump --no-empty >/dev/null 2>&1; then
+if /usr/bin/apt-config dump --no-empty >/dev/null 2>&1; then
msgpass
else
msgskip
diff --git a/test/integration/test-pdiff-usage b/test/integration/test-pdiff-usage
index afe1ad443..5bad90214 100755
--- a/test/integration/test-pdiff-usage
+++ b/test/integration/test-pdiff-usage
@@ -13,12 +13,6 @@ changetowebserver
PKGFILE="${TESTDIR}/$(echo "$(basename $0)" | sed 's#^test-#Packages-#')"
-echo '#!/bin/sh
-touch merge-was-used
-/usr/bin/diffindex-rred "$@"' > extrred
-chmod +x extrred
-echo 'Dir::Bin::rred "./extrred";' > rootdir/etc/apt/apt.conf.d/99rred
-
wasmergeused() {
msgtest 'Test for successful execution of' "$*"
local OUTPUT=$(mktemp)
@@ -82,8 +76,15 @@ SHA1-History:
9f4148e06d7faa37062994ff10d0c842d7017513 33053002 2010-08-18-2013.28
$(sha1sum $PKGFILE | cut -d' ' -f 1) $(stat -c%s $PKGFILE) $(basename $PATCHFILE)
SHA1-Patches:
- 7651fc0ac57cd83d41c63195a9342e2db5650257 19722 2010-08-18-0814.28
- $(sha1sum $PATCHFILE | cut -d' ' -f 1) $(stat -c%s $PATCHFILE) $(basename $PATCHFILE)" > $PATCHINDEX
+ 7651fc0ac57cd83d41c63195a9342e2db5650257 19722 2010-08-18-2013.28
+ $(sha1sum $PATCHFILE | cut -d' ' -f 1) $(stat -c%s $PATCHFILE) $(basename $PATCHFILE)
+SHA256-Current: $(sha256sum ${PKGFILE}-new | cut -d' ' -f 1) $(stat -c%s ${PKGFILE}-new)
+SHA256-History:
+ 01ba4719c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b 33053002 2010-08-18-2013.28
+ $(sha256sum $PKGFILE | cut -d' ' -f 1) $(stat -c%s $PKGFILE) $(basename $PATCHFILE)
+SHA256-Patches:
+ e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 19722 2010-08-18-2013.28
+ $(sha256sum $PATCHFILE | cut -d' ' -f 1) $(stat -c%s $PATCHFILE) $(basename $PATCHFILE)" > $PATCHINDEX
generatereleasefiles '+1hour'
signreleasefiles
find aptarchive -name 'Packages*' -type f -delete
@@ -93,7 +94,7 @@ SHA1-Patches:
" aptcache show apt newstuff
msgmsg "Testcase: index is already up-to-date: $*"
- find rootdir/var/lib/apt/lists -name '*.IndexDiff' -type f -delete
+ find rootdir/var/lib/apt/lists -name '*diff_Index' -type f -delete
testsuccess aptget update "$@"
testequal "$(cat ${PKGFILE}-new)
" aptcache show apt newstuff
@@ -125,9 +126,18 @@ SHA1-History:
$(sha1sum ${PKGFILE} | cut -d' ' -f 1) $(stat -c%s ${PKGFILE}) $(basename ${PATCHFILE})
$(sha1sum ${PKGFILE}-new | cut -d' ' -f 1) $(stat -c%s ${PKGFILE}-new) $(basename ${PATCHFILE2})
SHA1-Patches:
- 7651fc0ac57cd83d41c63195a9342e2db5650257 19722 2010-08-18-0814.28
+ 7651fc0ac57cd83d41c63195a9342e2db5650257 19722 2010-08-18-2013.28
$(sha1sum $PATCHFILE | cut -d' ' -f 1) $(stat -c%s $PATCHFILE) $(basename $PATCHFILE)
- $(sha1sum ${PATCHFILE2} | cut -d' ' -f 1) $(stat -c%s ${PATCHFILE2}) $(basename ${PATCHFILE2})" > $PATCHINDEX
+ $(sha1sum ${PATCHFILE2} | cut -d' ' -f 1) $(stat -c%s ${PATCHFILE2}) $(basename ${PATCHFILE2})
+SHA256-Current: $(sha256sum aptarchive/Packages | cut -d' ' -f 1) $(stat -c%s aptarchive/Packages)
+SHA256-History:
+ 01ba4719c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b 33053002 2010-08-18-2013.28
+ $(sha256sum $PKGFILE | cut -d' ' -f 1) $(stat -c%s $PKGFILE) $(basename $PATCHFILE)
+ $(sha256sum ${PKGFILE}-new | cut -d' ' -f 1) $(stat -c%s ${PKGFILE}-new) $(basename ${PATCHFILE2})
+SHA256-Patches:
+ e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 19722 2010-08-18-2013.28
+ $(sha256sum $PATCHFILE | cut -d' ' -f 1) $(stat -c%s $PATCHFILE) $(basename $PATCHFILE)
+ $(sha256sum ${PATCHFILE2} | cut -d' ' -f 1) $(stat -c%s ${PATCHFILE2}) $(basename ${PATCHFILE2})" > $PATCHINDEX
generatereleasefiles '+2hour'
signreleasefiles
cp -a aptarchive/Packages Packages-future
@@ -153,8 +163,15 @@ SHA1-History:
9f4148e06d7faa37062994ff10d0c842d7017513 33053002 2010-08-18-2013.28
$(sha1sum $PKGFILE | cut -d' ' -f 1) $(stat -c%s $PKGFILE) $(basename $PATCHFILE)
SHA1-Patches:
- 7651fc0ac57cd83d41c63195a9342e2db5650257 19722 2010-08-18-0814.28
- $(sha1sum $PATCHFILE | cut -d' ' -f 1) $(stat -c%s $PATCHFILE) $(basename $PATCHFILE)" > $PATCHINDEX
+ 7651fc0ac57cd83d41c63195a9342e2db5650257 19722 2010-08-18-2013.28
+ $(sha1sum $PATCHFILE | cut -d' ' -f 1) $(stat -c%s $PATCHFILE) $(basename $PATCHFILE)
+SHA256-Current: $(sha256sum ${PKGFILE}-new | cut -d' ' -f 1) $(stat -c%s ${PKGFILE}-new)
+SHA256-History:
+ 01ba4719c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b 33053002 2010-08-18-2013.28
+ $(sha256sum $PKGFILE | cut -d' ' -f 1) $(stat -c%s $PKGFILE) $(basename $PATCHFILE)
+SHA256-Patches:
+ e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 19722 2010-08-18-2013.28
+ $(sha256sum $PATCHFILE | cut -d' ' -f 1) $(stat -c%s $PATCHFILE) $(basename $PATCHFILE)" > $PATCHINDEX
echo 'I am Mallory and I change files' >> $PATCHFILE
cat $PATCHFILE | gzip > ${PATCHFILE}.gz
generatereleasefiles '+1hour'
@@ -165,9 +182,23 @@ SHA1-Patches:
" aptcache show apt newstuff
}
echo 'Debug::pkgAcquire::Diffs "true";
+Debug::Acquire::Transaction "true";
Debug::pkgAcquire::rred "true";' > rootdir/etc/apt/apt.conf.d/rreddebug.conf
testrun -o Acquire::PDiffs::Merge=0 -o APT::Get::List-Cleanup=1
testrun -o Acquire::PDiffs::Merge=1 -o APT::Get::List-Cleanup=1
testrun -o Acquire::PDiffs::Merge=0 -o APT::Get::List-Cleanup=0
testrun -o Acquire::PDiffs::Merge=1 -o APT::Get::List-Cleanup=0
+
+sha256sum() {
+ echo '01ba4719c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b -'
+}
+testrun -o Acquire::PDiffs::Merge=0 -o Acquire::ForceHash=SHA1
+testrun -o Acquire::PDiffs::Merge=1 -o Acquire::ForceHash=SHA1
+
+unset -f sha256sum
+sha1sum() {
+ echo 'adc83b19e793491b1c6ea0fd8b46cd9f32e592fc -'
+}
+testrun -o Acquire::PDiffs::Merge=0 -o Acquire::ForceHash=SHA256
+testrun -o Acquire::PDiffs::Merge=1 -o Acquire::ForceHash=SHA256
diff --git a/test/integration/test-pin-non-existent-package b/test/integration/test-pin-non-existent-package
index 35de22115..c567e5285 100755
--- a/test/integration/test-pin-non-existent-package
+++ b/test/integration/test-pin-non-existent-package
@@ -26,6 +26,7 @@ testcandidate rapt '0.8.15'
testequal 'N: Unable to locate package doesntexist' aptcache policy doesntexist -q=0
testequal 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
0 upgraded, 0 newly installed, 0 to remove and 0 not upgraded.' aptget dist-upgrade --trivial-only
echo 'Package: rapt
@@ -36,6 +37,7 @@ testcandidate rapt '(none)'
testequal 'N: Unable to locate package doesntexist' aptcache policy doesntexist -q=0
testequal 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
0 upgraded, 0 newly installed, 0 to remove and 0 not upgraded.' aptget dist-upgrade --trivial-only
echo '
@@ -55,6 +57,7 @@ testequal 'N: Unable to locate package doesntexist' aptcache policy doesntexist
testequal 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
0 upgraded, 0 newly installed, 0 to remove and 0 not upgraded.' aptget dist-upgrade --trivial-only
echo 'Package: arch:amd64
diff --git a/test/integration/test-policy-pinning b/test/integration/test-policy-pinning
index 8eb4bcbad..c08a2f103 100755
--- a/test/integration/test-policy-pinning
+++ b/test/integration/test-policy-pinning
@@ -28,7 +28,7 @@ Pinned packages:" aptcache policy $*
aptgetupdate() {
# just to be sure that no old files are used
rm -rf rootdir/var/lib/apt
- if aptget update -qq 2>&1 | grep '^E: '; then
+ if aptget update --allow-insecure-repositories -qq 2>&1 | grep '^E: '; then
msgwarn 'apt-get update failed with an error'
fi
}
@@ -36,6 +36,7 @@ aptgetupdate() {
### not signed archive
aptgetupdate
+
testequalpolicy 100 500
testequalpolicy 990 500 -t now
diff --git a/test/integration/test-prevent-markinstall-multiarch-same-versionscrew b/test/integration/test-prevent-markinstall-multiarch-same-versionscrew
index d647856cb..9d2ea2d5d 100755
--- a/test/integration/test-prevent-markinstall-multiarch-same-versionscrew
+++ b/test/integration/test-prevent-markinstall-multiarch-same-versionscrew
@@ -43,6 +43,7 @@ setupaptarchive
testequal 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following packages will be REMOVED:
out-of-sync-gone-foreign:i386 out-of-sync-gone-native
The following packages have been kept back:
diff --git a/test/integration/test-provides-gone-with-upgrade b/test/integration/test-provides-gone-with-upgrade
index 70384ce29..3b4bc2d04 100755
--- a/test/integration/test-provides-gone-with-upgrade
+++ b/test/integration/test-provides-gone-with-upgrade
@@ -15,6 +15,7 @@ setupaptarchive
testequal 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following NEW packages will be installed:
libapt-pkg4.10
The following packages will be upgraded:
diff --git a/test/integration/test-releasefile-verification b/test/integration/test-releasefile-verification
index e558b83e8..3765a4b1f 100755
--- a/test/integration/test-releasefile-verification
+++ b/test/integration/test-releasefile-verification
@@ -235,10 +235,21 @@ runtest2() {
" aptcache show apt
failaptnew
}
-runtest2
+# disable some protection by default and ensure we still do the verification
+# correctly
+cat > rootdir/etc/apt/apt.conf.d/weaken-security <<EOF
+Acquire::AllowInsecureRepositories "1";
+Acquire::AllowDowngradeToInsecureRepositories "1";
+EOF
+
+msgmsg "Runing base test"
+runtest2
DELETEFILE="InRelease"
+msgmsg "Running test with deletion of $DELETEFILE"
runtest
+
DELETEFILE="Release.gpg"
+msgmsg "Running test with deletion of $DELETEFILE"
runtest
diff --git a/test/integration/test-resolve-by-keep-new-recommends b/test/integration/test-resolve-by-keep-new-recommends
index 8134b76aa..6b1772877 100755
--- a/test/integration/test-resolve-by-keep-new-recommends
+++ b/test/integration/test-resolve-by-keep-new-recommends
@@ -13,6 +13,7 @@ setupaptarchive
UPGRADE_KEEP="Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following packages have been kept back:
foo
0 upgraded, 0 newly installed, 0 to remove and 1 not upgraded."
diff --git a/test/integration/test-sourceslist-trusted-options b/test/integration/test-sourceslist-trusted-options
new file mode 100755
index 000000000..c954f2f4f
--- /dev/null
+++ b/test/integration/test-sourceslist-trusted-options
@@ -0,0 +1,195 @@
+#!/bin/sh
+set -e
+
+TESTDIR=$(readlink -f $(dirname $0))
+. $TESTDIR/framework
+
+setupenvironment
+configarchitecture 'amd64'
+
+buildsimplenativepackage 'foo' 'amd64' '1' 'stable'
+buildsimplenativepackage 'foo' 'amd64' '2' 'testing'
+
+setupaptarchive --no-update
+
+APTARCHIVE=$(readlink -f ./aptarchive)
+
+everythingsucceeds() {
+ testequal 'Listing...
+foo/testing 2 amd64
+foo/stable 1 amd64
+' apt list foo -a
+
+ rm -f foo_1_amd64.deb foo_2_amd64.deb
+ testsuccess aptget download foo "$@"
+ testsuccess test -s foo_1_amd64.deb -o -s foo_2_amd64.deb
+
+ rm -f foo_1.dsc foo_2.dsc
+ testsuccess aptget source foo --dsc-only -d "$@"
+ testsuccess test -s foo_1.dsc -o -s foo_2.dsc
+}
+
+everythingfails() {
+ testequal 'Listing...
+foo/testing 2 amd64
+foo/stable 1 amd64
+' apt list foo -a
+
+ local WARNING='WARNING: The following packages cannot be authenticated!
+ foo
+E: Some packages could not be authenticated'
+
+ rm -f foo_1_amd64.deb foo_2_amd64.deb
+ testfailure aptget download foo "$@"
+ testequal "$WARNING" tail -n 3 rootdir/tmp/testfailure.output
+ testfailure test -s foo_1_amd64.deb -o -s foo_2_amd64.deb
+
+ rm -f foo_1.dsc foo_2.dsc
+ testfailure aptget source foo --dsc-only -d "$@"
+ testequal "$WARNING" tail -n 3 rootdir/tmp/testfailure.output
+ testfailure test -s foo_1.dsc -o -s foo_2.dsc
+}
+
+cp -a rootdir/etc/apt/sources.list.d/ rootdir/etc/apt/sources.list.d.bak/
+
+aptgetupdate() {
+ rm -rf rootdir/var/lib/apt/lists
+	# note that insecure repositories with trusted=yes are allowed,
+	# as trusted=yes indicates that security is provided by
+	# something beyond apt's understanding
+ testsuccess aptget update --no-allow-insecure-repositories
+}
+
+insecureaptgetupdate() {
+ rm -rf rootdir/var/lib/apt/lists
+ testfailure aptget update --no-allow-insecure-repositories
+ rm -rf rootdir/var/lib/apt/lists
+ testsuccess aptget update --allow-insecure-repositories
+}
+
+msgmsg 'Test without trusted option and good sources'
+cp -a rootdir/etc/apt/sources.list.d.bak/* rootdir/etc/apt/sources.list.d/
+aptgetupdate
+everythingsucceeds
+everythingsucceeds -t stable
+everythingsucceeds -t testing
+
+msgmsg 'Test with trusted=yes option and good sources'
+cp -a rootdir/etc/apt/sources.list.d.bak/* rootdir/etc/apt/sources.list.d/
+sed -i 's#^deb\(-src\)\? #deb\1 [trusted=yes] #' rootdir/etc/apt/sources.list.d/*
+aptgetupdate
+everythingsucceeds
+everythingsucceeds -t stable
+everythingsucceeds -t testing
+
+msgmsg 'Test with trusted=no option and good sources'
+cp -a rootdir/etc/apt/sources.list.d.bak/* rootdir/etc/apt/sources.list.d/
+sed -i 's#^deb\(-src\)\? #deb\1 [trusted=no] #' rootdir/etc/apt/sources.list.d/*
+# we want the warnings on the actions, but for 'update' everything is fine
+aptgetupdate
+everythingfails
+everythingfails -t stable
+everythingfails -t testing
+
+find aptarchive/dists/stable \( -name 'InRelease' -o -name 'Release.gpg' \) -delete
+
+msgmsg 'Test without trusted option and good and unsigned sources'
+cp -a rootdir/etc/apt/sources.list.d.bak/* rootdir/etc/apt/sources.list.d/
+insecureaptgetupdate
+everythingsucceeds
+everythingfails -t stable
+everythingsucceeds -t testing
+
+msgmsg 'Test with trusted=yes option and good and unsigned sources'
+cp -a rootdir/etc/apt/sources.list.d.bak/* rootdir/etc/apt/sources.list.d/
+sed -i 's#^deb\(-src\)\? #deb\1 [trusted=yes] #' rootdir/etc/apt/sources.list.d/*
+aptgetupdate
+everythingsucceeds
+everythingsucceeds -t stable
+everythingsucceeds -t testing
+
+msgmsg 'Test with trusted=no option and good and unsigned sources'
+cp -a rootdir/etc/apt/sources.list.d.bak/* rootdir/etc/apt/sources.list.d/
+sed -i 's#^deb\(-src\)\? #deb\1 [trusted=no] #' rootdir/etc/apt/sources.list.d/*
+insecureaptgetupdate
+everythingfails
+everythingfails -t stable
+everythingfails -t testing
+
+signreleasefiles 'Marvin Paranoid' 'aptarchive/dists/stable'
+
+msgmsg 'Test without trusted option and good and unknown sources'
+cp -a rootdir/etc/apt/sources.list.d.bak/* rootdir/etc/apt/sources.list.d/
+insecureaptgetupdate
+everythingsucceeds
+everythingfails -t stable
+everythingsucceeds -t testing
+
+msgmsg 'Test with trusted=yes option and good and unknown sources'
+cp -a rootdir/etc/apt/sources.list.d.bak/* rootdir/etc/apt/sources.list.d/
+sed -i 's#^deb\(-src\)\? #deb\1 [trusted=yes] #' rootdir/etc/apt/sources.list.d/*
+aptgetupdate
+everythingsucceeds
+everythingsucceeds -t stable
+everythingsucceeds -t testing
+
+msgmsg 'Test with trusted=no option and good and unknown sources'
+cp -a rootdir/etc/apt/sources.list.d.bak/* rootdir/etc/apt/sources.list.d/
+sed -i 's#^deb\(-src\)\? #deb\1 [trusted=no] #' rootdir/etc/apt/sources.list.d/*
+insecureaptgetupdate
+everythingfails
+everythingfails -t stable
+everythingfails -t testing
+
+signreleasefiles 'Rex Expired' 'aptarchive/dists/stable'
+cp -a keys/rexexpired.pub rootdir/etc/apt/trusted.gpg.d/rexexpired.gpg
+
+msgmsg 'Test without trusted option and good and expired sources'
+cp -a rootdir/etc/apt/sources.list.d.bak/* rootdir/etc/apt/sources.list.d/
+insecureaptgetupdate
+everythingsucceeds
+everythingfails -t stable
+everythingsucceeds -t testing
+
+msgmsg 'Test with trusted=yes option and good and expired sources'
+cp -a rootdir/etc/apt/sources.list.d.bak/* rootdir/etc/apt/sources.list.d/
+sed -i 's#^deb\(-src\)\? #deb\1 [trusted=yes] #' rootdir/etc/apt/sources.list.d/*
+aptgetupdate
+everythingsucceeds
+everythingsucceeds -t stable
+everythingsucceeds -t testing
+
+msgmsg 'Test with trusted=no option and good and expired sources'
+cp -a rootdir/etc/apt/sources.list.d.bak/* rootdir/etc/apt/sources.list.d/
+sed -i 's#^deb\(-src\)\? #deb\1 [trusted=no] #' rootdir/etc/apt/sources.list.d/*
+insecureaptgetupdate
+everythingfails
+everythingfails -t stable
+everythingfails -t testing
+
+# same as the one further above, but this time testing is unsigned
+find aptarchive/ \( -name 'InRelease' -o -name 'Release.gpg' \) -delete
+signreleasefiles 'Joe Sixpack' 'aptarchive/dists/stable'
+
+msgmsg 'Test without trusted option and unsigned and good sources'
+cp -a rootdir/etc/apt/sources.list.d.bak/* rootdir/etc/apt/sources.list.d/
+insecureaptgetupdate
+everythingfails
+everythingsucceeds -t stable
+everythingfails -t testing
+
+msgmsg 'Test with trusted=yes option and unsigned and good sources'
+cp -a rootdir/etc/apt/sources.list.d.bak/* rootdir/etc/apt/sources.list.d/
+sed -i 's#^deb\(-src\)\? #deb\1 [trusted=yes] #' rootdir/etc/apt/sources.list.d/*
+aptgetupdate
+everythingsucceeds
+everythingsucceeds -t stable
+everythingsucceeds -t testing
+
+msgmsg 'Test with trusted=no option and unsigned and good sources'
+cp -a rootdir/etc/apt/sources.list.d.bak/* rootdir/etc/apt/sources.list.d/
+sed -i 's#^deb\(-src\)\? #deb\1 [trusted=no] #' rootdir/etc/apt/sources.list.d/*
+insecureaptgetupdate
+everythingfails
+everythingfails -t stable
+everythingfails -t testing
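For orientation, a sketch of the sources.list.d entries the sed rewrites in this new test produce; the URI and suite below are placeholders, only the [trusted=...] option itself is taken from the test:

# treat as authenticated even without a valid repository signature
deb [trusted=yes] http://example.org/debian stable main
# always treat as unauthenticated, even when properly signed
deb-src [trusted=no] http://example.org/debian stable main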
diff --git a/test/integration/test-ubuntu-bug-1098738-apt-get-source-md5sum b/test/integration/test-ubuntu-bug-1098738-apt-get-source-md5sum
new file mode 100755
index 000000000..8c9c9c767
--- /dev/null
+++ b/test/integration/test-ubuntu-bug-1098738-apt-get-source-md5sum
@@ -0,0 +1,262 @@
+#!/bin/sh
+set -e
+
+TESTDIR=$(readlink -f $(dirname $0))
+. $TESTDIR/framework
+
+setupenvironment
+configarchitecture 'native'
+
+cat > aptarchive/Sources <<EOF
+Package: pkg-md5-ok
+Binary: pkg-md5-ok
+Version: 1.0
+Maintainer: Joe Sixpack <joe@example.org>
+Architecture: all
+Files:
+ 9604ba9427a280db542279d9ed78400b 3 pkg-md5-ok_1.0.dsc
+ db5570bf61464b46e2bde31ed61a7dc6 3 pkg-md5-ok_1.0.tar.gz
+
+Package: pkg-sha256-ok
+Binary: pkg-sha256-ok
+Version: 1.0
+Maintainer: Joe Sixpack <joe@example.org>
+Architecture: all
+Files:
+ 9604ba9427a280db542279d9ed78400b 3 pkg-sha256-ok_1.0.dsc
+ db5570bf61464b46e2bde31ed61a7dc6 3 pkg-sha256-ok_1.0.tar.gz
+Checksums-Sha1:
+ 324f464e6151a92cf57b26ef95dcfcf2059a8c44 3 pkg-sha256-ok_1.0.dsc
+ 680254bad1d7ca0d65ec46aaa315d363abf6a50a 3 pkg-sha256-ok_1.0.tar.gz
+Checksums-Sha256:
+ 943d3bf22ac661fb0f59bc4ff68cc12b04ff17a838dfcc2537008eb9c7f3770a 3 pkg-sha256-ok_1.0.dsc
+ 90aebae315675cbf04612de4f7d5874850f48e0b8dd82becbeaa47ca93f5ebfb 3 pkg-sha256-ok_1.0.tar.gz
+
+Package: pkg-sha256-bad
+Binary: pkg-sha256-bad
+Version: 1.0
+Maintainer: Joe Sixpack <joe@example.org>
+Architecture: all
+Files:
+ 9604ba9427a280db542279d9ed78400b 3 pkg-sha256-bad_1.0.dsc
+ db5570bf61464b46e2bde31ed61a7dc6 3 pkg-sha256-bad_1.0.tar.gz
+Checksums-Sha1:
+ 324f464e6151a92cf57b26ef95dcfcf2059a8c44 3 pkg-sha256-bad_1.0.dsc
+ 680254bad1d7ca0d65ec46aaa315d363abf6a50a 3 pkg-sha256-bad_1.0.tar.gz
+Checksums-Sha256:
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa 3 pkg-sha256-bad_1.0.dsc
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb 3 pkg-sha256-bad_1.0.tar.gz
+
+Package: pkg-no-md5
+Binary: pkg-no-md5
+Version: 1.0
+Maintainer: Joe Sixpack <joe@example.org>
+Architecture: all
+Checksums-Sha1:
+ 324f464e6151a92cf57b26ef95dcfcf2059a8c44 3 pkg-no-md5_1.0.dsc
+ 680254bad1d7ca0d65ec46aaa315d363abf6a50a 3 pkg-no-md5_1.0.tar.gz
+Checksums-Sha256:
+ 943d3bf22ac661fb0f59bc4ff68cc12b04ff17a838dfcc2537008eb9c7f3770a 3 pkg-no-md5_1.0.dsc
+ 90aebae315675cbf04612de4f7d5874850f48e0b8dd82becbeaa47ca93f5ebfb 3 pkg-no-md5_1.0.tar.gz
+
+Package: pkg-mixed-ok
+Binary: pkg-mixed-ok
+Version: 1.0
+Maintainer: Joe Sixpack <joe@example.org>
+Architecture: all
+Checksums-Sha1:
+ 680254bad1d7ca0d65ec46aaa315d363abf6a50a 3 pkg-mixed-ok_1.0.tar.gz
+Checksums-Sha256:
+ 943d3bf22ac661fb0f59bc4ff68cc12b04ff17a838dfcc2537008eb9c7f3770a 3 pkg-mixed-ok_1.0.dsc
+
+Package: pkg-mixed-sha1-bad
+Binary: pkg-mixed-sha1-bad
+Version: 1.0
+Maintainer: Joe Sixpack <joe@example.org>
+Architecture: all
+Checksums-Sha1:
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa 3 pkg-mixed-sha1-bad_1.0.dsc
+Checksums-Sha256:
+ 90aebae315675cbf04612de4f7d5874850f48e0b8dd82becbeaa47ca93f5ebfb 3 pkg-mixed-sha1-bad_1.0.tar.gz
+
+Package: pkg-mixed-sha2-bad
+Binary: pkg-mixed-sha2-bad
+Version: 1.0
+Maintainer: Joe Sixpack <joe@example.org>
+Architecture: all
+Checksums-Sha1:
+ 324f464e6151a92cf57b26ef95dcfcf2059a8c44 3 pkg-mixed-sha2-bad_1.0.dsc
+Checksums-Sha256:
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb 3 pkg-mixed-sha2-bad_1.0.tar.gz
+
+Package: pkg-md5-disagree
+Binary: pkg-md5-disagree
+Version: 1.0
+Maintainer: Joe Sixpack <joe@example.org>
+Architecture: all
+Files:
+ 9604ba9427a280db542279d9ed78400b 3 pkg-md5-disagree_1.0.dsc
+ db5570bf61464b46e2bde31ed61a7dc6 3 pkg-md5-disagree_1.0.tar.gz
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa 3 pkg-md5-disagree_1.0.dsc
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb 3 pkg-md5-disagree_1.0.tar.gz
+
+Package: pkg-md5-agree
+Binary: pkg-md5-agree
+Version: 1.0
+Maintainer: Joe Sixpack <joe@example.org>
+Architecture: all
+Files:
+ 9604ba9427a280db542279d9ed78400b 3 pkg-md5-agree_1.0.dsc
+ db5570bf61464b46e2bde31ed61a7dc6 3 pkg-md5-agree_1.0.tar.gz
+ db5570bf61464b46e2bde31ed61a7dc6 3 pkg-md5-agree_1.0.tar.gz
+ 9604ba9427a280db542279d9ed78400b 3 pkg-md5-agree_1.0.dsc
+
+Package: pkg-sha256-disagree
+Binary: pkg-sha256-disagree
+Version: 1.0
+Maintainer: Joe Sixpack <joe@example.org>
+Architecture: all
+Files:
+ 9604ba9427a280db542279d9ed78400b 3 pkg-sha256-disagree_1.0.dsc
+ db5570bf61464b46e2bde31ed61a7dc6 3 pkg-sha256-disagree_1.0.tar.gz
+Checksums-Sha1:
+ 324f464e6151a92cf57b26ef95dcfcf2059a8c44 3 pkg-sha256-disagree_1.0.dsc
+ 680254bad1d7ca0d65ec46aaa315d363abf6a50a 3 pkg-sha256-disagree_1.0.tar.gz
+Checksums-Sha256:
+ 943d3bf22ac661fb0f59bc4ff68cc12b04ff17a838dfcc2537008eb9c7f3770a 3 pkg-sha256-disagree_1.0.dsc
+ 90aebae315675cbf04612de4f7d5874850f48e0b8dd82becbeaa47ca93f5ebfb 3 pkg-sha256-disagree_1.0.tar.gz
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa 3 pkg-sha256-disagree_1.0.dsc
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb 3 pkg-sha256-disagree_1.0.tar.gz
+EOF
+
+# create fetchable files
+for x in 'pkg-md5-ok' 'pkg-sha256-ok' 'pkg-sha256-bad' 'pkg-no-md5' \
+ 'pkg-mixed-ok' 'pkg-mixed-sha1-bad' 'pkg-mixed-sha2-bad' \
+ 'pkg-md5-agree' 'pkg-md5-disagree' 'pkg-sha256-disagree'; do
+ echo -n 'dsc' > aptarchive/${x}_1.0.dsc
+ echo -n 'tar' > aptarchive/${x}_1.0.tar.gz
+done
+
+setupaptarchive
+changetowebserver
+testsuccess aptget update
+
+testok() {
+ rm -f ${1}_1.0.dsc ${1}_1.0.tar.gz
+ testequal "Reading package lists...
+Building dependency tree...
+Need to get 6 B of source archives.
+Get:1 http://localhost:8080/ $1 1.0 (dsc) [3 B]
+Get:2 http://localhost:8080/ $1 1.0 (tar) [3 B]
+Download complete and in download only mode" aptget source -d "$@"
+ msgtest 'Files were successfully downloaded for' "$1"
+ testsuccess --nomsg test -e ${1}_1.0.dsc -a -e ${1}_1.0.tar.gz
+ rm -f ${1}_1.0.dsc ${1}_1.0.tar.gz
+}
+
+testkeep() {
+ echo -n 'dsc' > ${1}_1.0.dsc
+ echo -n 'tar' > ${1}_1.0.tar.gz
+ testequal "Reading package lists...
+Building dependency tree...
+Skipping already downloaded file '${1}_1.0.dsc'
+Skipping already downloaded file '${1}_1.0.tar.gz'
+Need to get 0 B of source archives.
+Download complete and in download only mode" aptget source -d "$@"
+ msgtest 'Files already downloaded are kept for' "$1"
+ testsuccess --nomsg test -e ${1}_1.0.dsc -a -e ${1}_1.0.tar.gz
+ rm -f ${1}_1.0.dsc ${1}_1.0.tar.gz
+}
+
+testmismatch() {
+ rm -f ${1}_1.0.dsc ${1}_1.0.tar.gz
+ testequal "Reading package lists...
+Building dependency tree...
+Need to get 6 B of source archives.
+Get:1 http://localhost:8080/ $1 1.0 (dsc) [3 B]
+Get:2 http://localhost:8080/ $1 1.0 (tar) [3 B]
+E: Failed to fetch http://localhost:8080/${1}_1.0.dsc Hash Sum mismatch
+
+E: Failed to fetch http://localhost:8080/${1}_1.0.tar.gz Hash Sum mismatch
+
+E: Failed to fetch some archives." aptget source -d "$@"
+	msgtest 'Files were not downloaded as they have hashsum mismatches for' "$1"
+ testfailure --nomsg test -e ${1}_1.0.dsc -a -e ${1}_1.0.tar.gz
+
+ rm -f ${1}_1.0.dsc ${1}_1.0.tar.gz
+ testequal "Reading package lists...
+Building dependency tree...
+Skipping download of file 'pkg-sha256-bad_1.0.dsc' as requested hashsum is not available for authentication
+Skipping download of file 'pkg-sha256-bad_1.0.tar.gz' as requested hashsum is not available for authentication
+Need to get 0 B of source archives.
+Download complete and in download only mode" aptget source -d "$@" -o Acquire::ForceHash=ROT26
+	msgtest 'Files were not downloaded as hash is unavailable for' "$1"
+ testfailure --nomsg test -e ${1}_1.0.dsc -a -e ${1}_1.0.tar.gz
+
+ rm -f ${1}_1.0.dsc ${1}_1.0.tar.gz
+ testequal "Reading package lists...
+Building dependency tree...
+Need to get 6 B of source archives.
+Get:1 http://localhost:8080/ $1 1.0 (dsc) [3 B]
+Get:2 http://localhost:8080/ $1 1.0 (tar) [3 B]
+Download complete and in download only mode" aptget source --allow-unauthenticated -d "$@" -o Acquire::ForceHash=ROT26
+ msgtest 'Files were downloaded unauthenticated as user allowed it' "$1"
+ testsuccess --nomsg test -e ${1}_1.0.dsc -a -e ${1}_1.0.tar.gz
+}
+
+testok pkg-md5-ok
+testkeep pkg-md5-ok
+testok pkg-sha256-ok
+testkeep pkg-sha256-ok
+
+# pkg-sha256-bad has a bad SHA sum, but good MD5 sum. If apt is
+# checking the best available hash (as it should), this will trigger
+# a hash mismatch.
+testmismatch pkg-sha256-bad
+testmismatch pkg-sha256-bad
+testok pkg-sha256-bad -o Acquire::ForceHash=MD5Sum
+
+# not having an MD5 sum doesn't mean the file doesn't exist at all …
+testok pkg-no-md5
+testok pkg-no-md5 -o Acquire::ForceHash=SHA256
+testequal "Reading package lists...
+Building dependency tree...
+Skipping download of file 'pkg-no-md5_1.0.dsc' as requested hashsum is not available for authentication
+Skipping download of file 'pkg-no-md5_1.0.tar.gz' as requested hashsum is not available for authentication
+Need to get 0 B of source archives.
+Download complete and in download only mode" aptget source -d pkg-no-md5 -o Acquire::ForceHash=MD5Sum
+msgtest 'Files were not downloaded as MD5 is not available for this package' 'pkg-no-md5'
+testfailure --nomsg test -e pkg-no-md5_1.0.dsc -a -e pkg-no-md5_1.0.tar.gz
+
+# deal with cases in which we don't have the same checksum type for all files
+# mostly pathological as this shouldn't happen, but just to be sure
+testok pkg-mixed-ok
+testequal 'Reading package lists...
+Building dependency tree...
+Need to get 6 B of source archives.
+Get:1 http://localhost:8080/ pkg-mixed-sha1-bad 1.0 (tar) [3 B]
+Get:2 http://localhost:8080/ pkg-mixed-sha1-bad 1.0 (dsc) [3 B]
+E: Failed to fetch http://localhost:8080/pkg-mixed-sha1-bad_1.0.dsc Hash Sum mismatch
+
+E: Failed to fetch some archives.' aptget source -d pkg-mixed-sha1-bad
+msgtest 'Only tar file is downloaded as the dsc has hashsum mismatch' 'pkg-mixed-sha1-bad'
+testsuccess --nomsg test ! -e pkg-mixed-sha1-bad_1.0.dsc -a -e pkg-mixed-sha1-bad_1.0.tar.gz
+testequal 'Reading package lists...
+Building dependency tree...
+Need to get 6 B of source archives.
+Get:1 http://localhost:8080/ pkg-mixed-sha2-bad 1.0 (tar) [3 B]
+Get:2 http://localhost:8080/ pkg-mixed-sha2-bad 1.0 (dsc) [3 B]
+E: Failed to fetch http://localhost:8080/pkg-mixed-sha2-bad_1.0.tar.gz Hash Sum mismatch
+
+E: Failed to fetch some archives.' aptget source -d pkg-mixed-sha2-bad
+msgtest 'Only dsc file is downloaded as the tar has hashsum mismatch' 'pkg-mixed-sha2-bad'
+testsuccess --nomsg test -e pkg-mixed-sha2-bad_1.0.dsc -a ! -e pkg-mixed-sha2-bad_1.0.tar.gz
+
+# it gets even more pathological: multiple entries for one file, some even disagreeing!
+testok pkg-md5-agree
+testequal 'Reading package lists...
+Building dependency tree...
+E: Error parsing checksum in Files of source package pkg-md5-disagree' aptget source -d pkg-md5-disagree
+testequal 'Reading package lists...
+Building dependency tree...
+E: Error parsing checksum in Checksums-SHA256 of source package pkg-sha256-disagree' aptget source -d pkg-sha256-disagree
diff --git a/test/integration/test-ubuntu-bug-1304403-obsolete-priority-standard b/test/integration/test-ubuntu-bug-1304403-obsolete-priority-standard
index 2f2d384e1..45f70a898 100755
--- a/test/integration/test-ubuntu-bug-1304403-obsolete-priority-standard
+++ b/test/integration/test-ubuntu-bug-1304403-obsolete-priority-standard
@@ -27,6 +27,7 @@ setupaptarchive
# discourage keeping obsolete high-priority packages …
testequal 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following packages will be REMOVED:
not-downloadable
The following packages will be upgraded:
@@ -43,6 +44,7 @@ done
testequal 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following packages have been kept back:
upgradable
0 upgraded, 0 newly installed, 0 to remove and 1 not upgraded.' aptget -s dist-upgrade
diff --git a/test/integration/test-ubuntu-bug-346386-apt-get-update-paywall b/test/integration/test-ubuntu-bug-346386-apt-get-update-paywall
index a773660d2..df2c69cf6 100755
--- a/test/integration/test-ubuntu-bug-346386-apt-get-update-paywall
+++ b/test/integration/test-ubuntu-bug-346386-apt-get-update-paywall
@@ -21,6 +21,7 @@ if downloadfile http://localhost:8080/holygrail ./knights-talking >/dev/null; th
else
msgfail
fi
+
testfileequal knights-talking 'ni ni ni'
ensure_n_canary_strings_in_dir() {
@@ -35,8 +36,8 @@ ensure_n_canary_strings_in_dir() {
LISTS='rootdir/var/lib/apt/lists'
rm -rf rootdir/var/lib/apt/lists
-msgtest 'Got expected NODATA failure in' 'apt-get update'
-aptget update -qq 2>&1 | grep -q 'E: GPG error.*NODATA' && msgpass || msgfail
+msgtest 'Got expected failure message in' 'apt-get update'
+aptget update -qq 2>&1 | grep -q 'W:.*Does not start with a cleartext signature' && msgpass || msgfail
ensure_n_canary_strings_in_dir $LISTS 'ni ni ni' 0
testequal 'partial' ls $LISTS
@@ -46,8 +47,8 @@ for f in Release Release.gpg main_binary-amd64_Packages main_source_Sources; do
echo 'peng neee-wom' > $LISTS/localhost:8080_dists_stable_${f}
done
-msgtest 'Got expected NODATA failure in' 'apt-get update'
-aptget update -qq 2>&1 | grep -q 'E: GPG error.*NODATA' && msgpass || msgfail
+msgtest 'Got expected failure message in' 'apt-get update'
+aptget update -qq 2>&1 | grep -q 'W:.*Does not start with a cleartext signature' && msgpass || msgfail
ensure_n_canary_strings_in_dir $LISTS 'peng neee-wom' 4
ensure_n_canary_strings_in_dir $LISTS 'ni ni ni' 0
@@ -56,7 +57,7 @@ ensure_n_canary_strings_in_dir $LISTS 'ni ni ni' 0
echo 'peng neee-wom' > $LISTS/localhost:8080_dists_stable_InRelease
rm -f $LISTS/localhost:8080_dists_stable_Release $LISTS/localhost:8080_dists_stable_Release.gpg
msgtest 'expected failure of' 'apt-get update'
-aptget update -qq 2>&1 | grep -q 'E: GPG error.*NODATA' && msgpass || msgfail
+aptget update -qq 2>&1 | grep -q 'W:.*Does not start with a cleartext signature' && msgpass || msgfail
ensure_n_canary_strings_in_dir $LISTS 'peng neee-wom' 3
ensure_n_canary_strings_in_dir $LISTS 'ni ni ni' 0
diff --git a/test/integration/test-ubuntu-bug-784473-InRelease-one-message-only b/test/integration/test-ubuntu-bug-784473-InRelease-one-message-only
index 50ca2bf57..09315868b 100755
--- a/test/integration/test-ubuntu-bug-784473-InRelease-one-message-only
+++ b/test/integration/test-ubuntu-bug-784473-InRelease-one-message-only
@@ -28,12 +28,10 @@ MD5Sum:
done
msgtest 'The unsigned garbage before signed block is' 'ignored'
-testsuccess --nomsg aptget update
+aptget update -qq 2>&1 | grep -q 'W:.*Does not start with a cleartext signature' && msgpass || msgfail
ROOTDIR="$(readlink -f .)"
testequal "Package files:
100 ${ROOTDIR}/rootdir/var/lib/dpkg/status
release a=now
- 500 file:${ROOTDIR}/aptarchive/ unstable/main i386 Packages
- release a=unstable,n=sid,c=main
Pinned packages:" aptcache policy
diff --git a/test/integration/test-ubuntu-bug-985852-pre-depends-or-group-ordering b/test/integration/test-ubuntu-bug-985852-pre-depends-or-group-ordering
index 462acad00..d2b6b9bad 100755
--- a/test/integration/test-ubuntu-bug-985852-pre-depends-or-group-ordering
+++ b/test/integration/test-ubuntu-bug-985852-pre-depends-or-group-ordering
@@ -14,6 +14,7 @@ setupaptarchive
testequal 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following packages will be upgraded:
custom
1 upgraded, 0 newly installed, 0 to remove and 0 not upgraded.
diff --git a/test/integration/test-very-tight-loop-configure-with-unpacking-new-packages b/test/integration/test-very-tight-loop-configure-with-unpacking-new-packages
index c1d454f88..409d1212c 100755
--- a/test/integration/test-very-tight-loop-configure-with-unpacking-new-packages
+++ b/test/integration/test-very-tight-loop-configure-with-unpacking-new-packages
@@ -28,6 +28,7 @@ setupaptarchive
testequalor2 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following NEW packages will be installed:
ure
The following packages will be upgraded:
@@ -44,6 +45,7 @@ Conf libreoffice-core (4 sid [amd64])
Conf libreoffice-style-galaxy (4 sid [amd64])
Conf libreoffice (4 sid [amd64])' 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following NEW packages will be installed:
ure
The following packages will be upgraded:
diff --git a/test/integration/test-xorg-break-providers b/test/integration/test-xorg-break-providers
index 139d2c915..0be57d979 100755
--- a/test/integration/test-xorg-break-providers
+++ b/test/integration/test-xorg-break-providers
@@ -26,6 +26,7 @@ E: Trivial Only specified but this is not a trivial operation.' aptget install x
testequal 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following packages will be upgraded:
xserver-xorg-core xserver-xorg-video-intel
2 upgraded, 0 newly installed, 0 to remove and 0 not upgraded.
@@ -35,6 +36,7 @@ E: Trivial Only specified but this is not a trivial operation.' aptget upgrade -
testequal 'Reading package lists...
Building dependency tree...
+Calculating upgrade...
The following packages will be upgraded:
xserver-xorg-core xserver-xorg-video-intel
2 upgraded, 0 newly installed, 0 to remove and 0 not upgraded.
diff --git a/test/interactive-helper/makefile b/test/interactive-helper/makefile
index 8dc014b98..4633b78ce 100644
--- a/test/interactive-helper/makefile
+++ b/test/interactive-helper/makefile
@@ -39,7 +39,7 @@ include $(PROGRAM_H)
#SOURCE = rpmver.cc
#include $(PROGRAM_H)
-# Program for testing udevcdrom
+# very simple webserver for APT testing
PROGRAM=aptwebserver
SLIBS = -lapt-pkg -lpthread
LIB_MAKES = apt-pkg/makefile
diff --git a/test/libapt/commandline_test.cc b/test/libapt/commandline_test.cc
index e403a28c8..627f1b486 100644
--- a/test/libapt/commandline_test.cc
+++ b/test/libapt/commandline_test.cc
@@ -2,6 +2,7 @@
#include <apt-pkg/cmndline.h>
#include <apt-pkg/configuration.h>
+#include <apt-private/private-cmndline.h>
#include <gtest/gtest.h>
@@ -85,3 +86,70 @@ TEST(CommandLineTest, BoolParsing)
}
}
+
+bool DoVoid(CommandLine &) { return false; }
+
+TEST(CommandLineTest,GetCommand)
+{
+ CommandLine::Dispatch Cmds[] = { {"install",&DoVoid}, {"remove", &DoVoid}, {0,0} };
+ {
+ char const * argv[] = { "apt-get", "-t", "unstable", "remove", "-d", "foo" };
+ char const * com = CommandLine::GetCommand(Cmds, sizeof(argv)/sizeof(argv[0]), argv);
+ EXPECT_STREQ("remove", com);
+ std::vector<CommandLine::Args> Args = getCommandArgs("apt-get", com);
+ ::Configuration c;
+ CommandLine CmdL(Args.data(), &c);
+ ASSERT_TRUE(CmdL.Parse(sizeof(argv)/sizeof(argv[0]), argv));
+ EXPECT_EQ(c.Find("APT::Default-Release"), "unstable");
+ EXPECT_TRUE(c.FindB("APT::Get::Download-Only"));
+ ASSERT_EQ(2, CmdL.FileSize());
+ EXPECT_EQ(std::string(CmdL.FileList[0]), "remove");
+ EXPECT_EQ(std::string(CmdL.FileList[1]), "foo");
+ }
+ {
+ char const * argv[] = {"apt-get", "-t", "unstable", "remove", "--", "-d", "foo" };
+ char const * com = CommandLine::GetCommand(Cmds, sizeof(argv)/sizeof(argv[0]), argv);
+ EXPECT_STREQ("remove", com);
+ std::vector<CommandLine::Args> Args = getCommandArgs("apt-get", com);
+ ::Configuration c;
+ CommandLine CmdL(Args.data(), &c);
+ ASSERT_TRUE(CmdL.Parse(sizeof(argv)/sizeof(argv[0]), argv));
+ EXPECT_EQ(c.Find("APT::Default-Release"), "unstable");
+ EXPECT_FALSE(c.FindB("APT::Get::Download-Only"));
+ ASSERT_EQ(3, CmdL.FileSize());
+ EXPECT_EQ(std::string(CmdL.FileList[0]), "remove");
+ EXPECT_EQ(std::string(CmdL.FileList[1]), "-d");
+ EXPECT_EQ(std::string(CmdL.FileList[2]), "foo");
+ }
+ {
+ char const * argv[] = {"apt-get", "-t", "unstable", "--", "remove", "-d", "foo" };
+ char const * com = CommandLine::GetCommand(Cmds, sizeof(argv)/sizeof(argv[0]), argv);
+ EXPECT_STREQ("remove", com);
+ std::vector<CommandLine::Args> Args = getCommandArgs("apt-get", com);
+ ::Configuration c;
+ CommandLine CmdL(Args.data(), &c);
+ ASSERT_TRUE(CmdL.Parse(sizeof(argv)/sizeof(argv[0]), argv));
+ EXPECT_EQ(c.Find("APT::Default-Release"), "unstable");
+ EXPECT_FALSE(c.FindB("APT::Get::Download-Only"));
+ ASSERT_EQ(CmdL.FileSize(), 3);
+ EXPECT_EQ(std::string(CmdL.FileList[0]), "remove");
+ EXPECT_EQ(std::string(CmdL.FileList[1]), "-d");
+ EXPECT_EQ(std::string(CmdL.FileList[2]), "foo");
+ }
+ {
+ char const * argv[] = {"apt-get", "install", "-t", "unstable", "--", "remove", "-d", "foo" };
+ char const * com = CommandLine::GetCommand(Cmds, sizeof(argv)/sizeof(argv[0]), argv);
+ EXPECT_STREQ("install", com);
+ std::vector<CommandLine::Args> Args = getCommandArgs("apt-get", com);
+ ::Configuration c;
+ CommandLine CmdL(Args.data(), &c);
+ ASSERT_TRUE(CmdL.Parse(sizeof(argv)/sizeof(argv[0]), argv));
+ EXPECT_EQ(c.Find("APT::Default-Release"), "unstable");
+ EXPECT_FALSE(c.FindB("APT::Get::Download-Only"));
+ ASSERT_EQ(CmdL.FileSize(), 4);
+ EXPECT_EQ(std::string(CmdL.FileList[0]), "install");
+ EXPECT_EQ(std::string(CmdL.FileList[1]), "remove");
+ EXPECT_EQ(std::string(CmdL.FileList[2]), "-d");
+ EXPECT_EQ(std::string(CmdL.FileList[3]), "foo");
+ }
+}
diff --git a/test/libapt/fileutl_test.cc b/test/libapt/fileutl_test.cc
index 643c02297..cdf7ea479 100644
--- a/test/libapt/fileutl_test.cc
+++ b/test/libapt/fileutl_test.cc
@@ -224,3 +224,61 @@ TEST(FileUtlTest, GetTempDir)
if (old_tmpdir.empty() == false)
setenv("TMPDIR", old_tmpdir.c_str(), 1);
}
+TEST(FileUtlTest, Popen)
+{
+ FileFd Fd;
+ pid_t Child;
+ char buf[1024];
+ std::string s;
+ unsigned long long n = 0;
+ std::vector<std::string> OpenFds;
+
+ // count Fds to ensure we don't have a resource leak
+ if(FileExists("/proc/self/fd"))
+ OpenFds = Glob("/proc/self/fd/*");
+
+ // output something
+ const char* Args[10] = {"/bin/echo", "meepmeep", NULL};
+ bool res = Popen(Args, Fd, Child, FileFd::ReadOnly);
+ Fd.Read(buf, sizeof(buf)-1, &n);
+ buf[n] = 0;
+ EXPECT_NE(n, 0);
+ EXPECT_EQ(res, true);
+ EXPECT_STREQ(buf, "meepmeep\n");
+
+ // wait for the child to exit and cleanup
+ ExecWait(Child, "PopenRead");
+ Fd.Close();
+
+ // ensure that after a close all is good again
+ if(FileExists("/proc/self/fd"))
+ EXPECT_EQ(Glob("/proc/self/fd/*").size(), OpenFds.size());
+
+
+ // ReadWrite is not supported
+ res = Popen(Args, Fd, Child, FileFd::ReadWrite);
+ EXPECT_EQ(res, false);
+ _error->Discard();
+
+ // write something
+ Args[0] = "/bin/bash";
+ Args[1] = "-c";
+ Args[2] = "read";
+ Args[3] = NULL;
+ res = Popen(Args, Fd, Child, FileFd::WriteOnly);
+ s = "\n";
+ Fd.Write(s.c_str(), s.size());
+ Fd.Close();
+ ExecWait(Child, "PopenWrite");
+}
+TEST(FileUtlTest, flAbsPath)
+{
+ std::string cwd = SafeGetCWD();
+ int res = chdir("/bin/");
+ EXPECT_EQ(res, 0);
+ std::string p = flAbsPath("ls");
+ EXPECT_EQ(p, "/bin/ls");
+
+ res = chdir(cwd.c_str());
+ EXPECT_EQ(res, 0);
+}
diff --git a/test/libapt/hashsums_test.cc b/test/libapt/hashsums_test.cc
index c06d85e03..2159996ff 100644
--- a/test/libapt/hashsums_test.cc
+++ b/test/libapt/hashsums_test.cc
@@ -1,5 +1,6 @@
#include <config.h>
+#include <apt-pkg/configuration.h>
#include <apt-pkg/md5.h>
#include <apt-pkg/sha1.h>
#include <apt-pkg/sha2.h>
@@ -166,20 +167,26 @@ TEST(HashSumsTest, FileBased)
{
Hashes hashes;
hashes.AddFD(fd.Fd());
- EXPECT_EQ(md5.Value(), hashes.MD5.Result().Value());
- EXPECT_EQ(sha1.Value(), hashes.SHA1.Result().Value());
- EXPECT_EQ(sha256.Value(), hashes.SHA256.Result().Value());
- EXPECT_EQ(sha512.Value(), hashes.SHA512.Result().Value());
+ HashStringList list = hashes.GetHashStringList();
+ EXPECT_FALSE(list.empty());
+ EXPECT_EQ(4, list.size());
+ EXPECT_EQ(md5.Value(), list.find("MD5Sum")->HashValue());
+ EXPECT_EQ(sha1.Value(), list.find("SHA1")->HashValue());
+ EXPECT_EQ(sha256.Value(), list.find("SHA256")->HashValue());
+ EXPECT_EQ(sha512.Value(), list.find("SHA512")->HashValue());
}
unsigned long sz = fd.FileSize();
fd.Seek(0);
{
Hashes hashes;
hashes.AddFD(fd.Fd(), sz);
- EXPECT_EQ(md5.Value(), hashes.MD5.Result().Value());
- EXPECT_EQ(sha1.Value(), hashes.SHA1.Result().Value());
- EXPECT_EQ(sha256.Value(), hashes.SHA256.Result().Value());
- EXPECT_EQ(sha512.Value(), hashes.SHA512.Result().Value());
+ HashStringList list = hashes.GetHashStringList();
+ EXPECT_FALSE(list.empty());
+ EXPECT_EQ(4, list.size());
+ EXPECT_EQ(md5.Value(), list.find("MD5Sum")->HashValue());
+ EXPECT_EQ(sha1.Value(), list.find("SHA1")->HashValue());
+ EXPECT_EQ(sha256.Value(), list.find("SHA256")->HashValue());
+ EXPECT_EQ(sha512.Value(), list.find("SHA512")->HashValue());
}
fd.Seek(0);
{
@@ -207,16 +214,118 @@ TEST(HashSumsTest, FileBased)
}
fd.Close();
- {
- HashString sha2("SHA256", sha256.Value());
- EXPECT_TRUE(sha2.VerifyFile(__FILE__));
- }
- {
- HashString sha2("SHA512", sha512.Value());
- EXPECT_TRUE(sha2.VerifyFile(__FILE__));
- }
- {
- HashString sha2("SHA256:" + sha256.Value());
- EXPECT_TRUE(sha2.VerifyFile(__FILE__));
- }
+ HashString sha2file("SHA512", sha512.Value());
+ EXPECT_TRUE(sha2file.VerifyFile(__FILE__));
+ HashString sha2wrong("SHA512", "00000000000");
+ EXPECT_FALSE(sha2wrong.VerifyFile(__FILE__));
+ EXPECT_EQ(sha2file, sha2file);
+ EXPECT_TRUE(sha2file == sha2file);
+ EXPECT_NE(sha2file, sha2wrong);
+ EXPECT_TRUE(sha2file != sha2wrong);
+
+ HashString sha2big("SHA256", sha256.Value());
+ EXPECT_TRUE(sha2big.VerifyFile(__FILE__));
+ HashString sha2small("sha256:" + sha256.Value());
+ EXPECT_TRUE(sha2small.VerifyFile(__FILE__));
+ EXPECT_EQ(sha2big, sha2small);
+ EXPECT_TRUE(sha2big == sha2small);
+ EXPECT_FALSE(sha2big != sha2small);
+
+ HashStringList hashes;
+ EXPECT_TRUE(hashes.empty());
+ EXPECT_TRUE(hashes.push_back(sha2file));
+ EXPECT_FALSE(hashes.empty());
+ EXPECT_EQ(1, hashes.size());
+
+ HashStringList wrong;
+ EXPECT_TRUE(wrong.push_back(sha2wrong));
+ EXPECT_NE(wrong, hashes);
+ EXPECT_FALSE(wrong == hashes);
+ EXPECT_TRUE(wrong != hashes);
+
+ HashStringList similar;
+ EXPECT_TRUE(similar.push_back(sha2big));
+ EXPECT_NE(similar, hashes);
+ EXPECT_FALSE(similar == hashes);
+ EXPECT_TRUE(similar != hashes);
+
+ EXPECT_TRUE(hashes.push_back(sha2big));
+ EXPECT_EQ(2, hashes.size());
+ EXPECT_TRUE(hashes.push_back(sha2small));
+ EXPECT_EQ(2, hashes.size());
+ EXPECT_FALSE(hashes.push_back(sha2wrong));
+ EXPECT_EQ(2, hashes.size());
+ EXPECT_TRUE(hashes.VerifyFile(__FILE__));
+
+ EXPECT_EQ(similar, hashes);
+ EXPECT_TRUE(similar == hashes);
+ EXPECT_FALSE(similar != hashes);
+ similar.clear();
+ EXPECT_TRUE(similar.empty());
+ EXPECT_EQ(0, similar.size());
+ EXPECT_NE(similar, hashes);
+ EXPECT_FALSE(similar == hashes);
+ EXPECT_TRUE(similar != hashes);
+}
+TEST(HashSumsTest, HashStringList)
+{
+ _config->Clear("Acquire::ForceHash");
+
+ HashStringList list;
+ EXPECT_TRUE(list.empty());
+ EXPECT_FALSE(list.usable());
+ EXPECT_EQ(0, list.size());
+ EXPECT_EQ(NULL, list.find(NULL));
+ EXPECT_EQ(NULL, list.find(""));
+ EXPECT_EQ(NULL, list.find("MD5Sum"));
+
+ HashStringList list2;
+ EXPECT_FALSE(list == list2);
+ EXPECT_TRUE(list != list2);
+
+ Hashes hashes;
+ hashes.Add("The quick brown fox jumps over the lazy dog");
+ list = hashes.GetHashStringList();
+ EXPECT_FALSE(list.empty());
+ EXPECT_TRUE(list.usable());
+ EXPECT_EQ(4, list.size());
+ EXPECT_TRUE(NULL != list.find(NULL));
+ EXPECT_TRUE(NULL != list.find(""));
+ EXPECT_TRUE(NULL != list.find("MD5Sum"));
+ EXPECT_TRUE(NULL == list.find("ROT26"));
+
+ _config->Set("Acquire::ForceHash", "MD5Sum");
+ EXPECT_FALSE(list.empty());
+ EXPECT_TRUE(list.usable());
+ EXPECT_EQ(4, list.size());
+ EXPECT_TRUE(NULL != list.find(NULL));
+ EXPECT_TRUE(NULL != list.find(""));
+ EXPECT_TRUE(NULL != list.find("MD5Sum"));
+ EXPECT_TRUE(NULL == list.find("ROT26"));
+
+ _config->Set("Acquire::ForceHash", "ROT26");
+ EXPECT_FALSE(list.empty());
+ EXPECT_FALSE(list.usable());
+ EXPECT_EQ(4, list.size());
+ EXPECT_TRUE(NULL == list.find(NULL));
+ EXPECT_TRUE(NULL == list.find(""));
+ EXPECT_TRUE(NULL != list.find("MD5Sum"));
+ EXPECT_TRUE(NULL == list.find("ROT26"));
+
+ _config->Clear("Acquire::ForceHash");
+
+ list2.push_back(*list.find("MD5Sum"));
+ EXPECT_TRUE(list == list2);
+ EXPECT_FALSE(list != list2);
+
+ // introduce a mismatch to the list
+ list2.push_back(HashString("SHA1", "cacecbd74968bc90ea3342767e6b94f46ddbcafc"));
+ EXPECT_FALSE(list == list2);
+ EXPECT_TRUE(list != list2);
+
+ _config->Set("Acquire::ForceHash", "MD5Sum");
+ EXPECT_TRUE(list == list2);
+ EXPECT_FALSE(list != list2);
+
+ _config->Clear("Acquire::ForceHash");
}
diff --git a/test/libapt/makefile b/test/libapt/makefile
index 69a13fd92..7f23ace46 100644
--- a/test/libapt/makefile
+++ b/test/libapt/makefile
@@ -14,8 +14,8 @@ test: $(BIN)/gtest$(BASENAME)
$(BIN)/gtest$(BASENAME): $(LIB)/gtest.a
PROGRAM = gtest${BASENAME}
-SLIBS = -lapt-pkg -pthread $(LIB)/gtest.a
-LIB_MAKES = apt-pkg/makefile
+SLIBS = -lapt-pkg -lapt-private -pthread $(LIB)/gtest.a
+LIB_MAKES = apt-pkg/makefile apt-private/makefile
SOURCE = gtest_runner.cc $(wildcard *-helpers.cc *_test.cc)
include $(PROGRAM_H)
diff --git a/test/libapt/strutil_test.cc b/test/libapt/strutil_test.cc
index 194c9c074..8dd9114ec 100644
--- a/test/libapt/strutil_test.cc
+++ b/test/libapt/strutil_test.cc
@@ -85,6 +85,15 @@ TEST(StrUtilTest,EndsWith)
EXPECT_FALSE(Endswith("abcd", "x"));
EXPECT_FALSE(Endswith("abcd", "abcndefg"));
}
+TEST(StrUtilTest,StartsWith)
+{
+ using APT::String::Startswith;
+ EXPECT_TRUE(Startswith("abcd", "a"));
+ EXPECT_TRUE(Startswith("abcd", "ab"));
+ EXPECT_TRUE(Startswith("abcd", "abcd"));
+ EXPECT_FALSE(Startswith("abcd", "x"));
+ EXPECT_FALSE(Startswith("abcd", "abcndefg"));
+}
TEST(StrUtilTest,SubstVar)
{
EXPECT_EQ("", SubstVar("", "fails", "passes"));
diff --git a/test/libapt/tagfile_test.cc b/test/libapt/tagfile_test.cc
index 1bac75b55..df618ea16 100644
--- a/test/libapt/tagfile_test.cc
+++ b/test/libapt/tagfile_test.cc
@@ -7,6 +7,7 @@
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
+#include <sstream>
#include <gtest/gtest.h>
@@ -34,3 +35,181 @@ TEST(TagFileTest,SingleField)
// There is only one section in this tag file
EXPECT_FALSE(tfile.Step(section));
}
+
+TEST(TagFileTest,MultipleSections)
+{
+ FileFd fd;
+ createTemporaryFile("multiplesections", fd, NULL, "Package: pkgA\n"
+ "Version: 1\n"
+ "Size: 100\n"
+ "Description: aaa\n"
+ " aaa\n"
+ "\n"
+ "Package: pkgB\n"
+ "Version: 1\n"
+ "Flag: no\n"
+ "Description: bbb\n"
+ "\n"
+ "Package: pkgC\n"
+ "Version: 2\n"
+ "Flag: yes\n"
+ "Description:\n"
+ " ccc\n"
+ );
+
+ pkgTagFile tfile(&fd);
+ pkgTagSection section;
+ EXPECT_FALSE(section.Exists("Version"));
+
+ EXPECT_TRUE(tfile.Step(section));
+ EXPECT_EQ(4, section.Count());
+ EXPECT_TRUE(section.Exists("Version"));
+ EXPECT_TRUE(section.Exists("Package"));
+ EXPECT_TRUE(section.Exists("Size"));
+ EXPECT_FALSE(section.Exists("Flag"));
+ EXPECT_TRUE(section.Exists("Description"));
+ EXPECT_EQ("pkgA", section.FindS("Package"));
+ EXPECT_EQ("1", section.FindS("Version"));
+ EXPECT_EQ(1, section.FindULL("Version"));
+ EXPECT_EQ(100, section.FindULL("Size"));
+ unsigned long Flags = 1;
+ EXPECT_TRUE(section.FindFlag("Flag", Flags, 1));
+ EXPECT_EQ(1, Flags);
+ Flags = 0;
+ EXPECT_TRUE(section.FindFlag("Flag", Flags, 1));
+ EXPECT_EQ(0, Flags);
+ EXPECT_EQ("aaa\n aaa", section.FindS("Description"));
+
+
+ EXPECT_TRUE(tfile.Step(section));
+ EXPECT_EQ(4, section.Count());
+ EXPECT_TRUE(section.Exists("Version"));
+ EXPECT_TRUE(section.Exists("Package"));
+ EXPECT_FALSE(section.Exists("Size"));
+ EXPECT_TRUE(section.Exists("Flag"));
+ EXPECT_TRUE(section.Exists("Description"));
+ EXPECT_EQ("pkgB", section.FindS("Package"));
+ EXPECT_EQ("1", section.FindS("Version"));
+ EXPECT_EQ(1, section.FindULL("Version"));
+ EXPECT_EQ(0, section.FindULL("Size"));
+ Flags = 1;
+ EXPECT_TRUE(section.FindFlag("Flag", Flags, 1));
+ EXPECT_EQ(0, Flags);
+ Flags = 0;
+ EXPECT_TRUE(section.FindFlag("Flag", Flags, 1));
+ EXPECT_EQ(0, Flags);
+ EXPECT_EQ("bbb", section.FindS("Description"));
+
+ EXPECT_TRUE(tfile.Step(section));
+ EXPECT_EQ(4, section.Count());
+ EXPECT_TRUE(section.Exists("Version"));
+ EXPECT_TRUE(section.Exists("Package"));
+ EXPECT_FALSE(section.Exists("Size"));
+ EXPECT_TRUE(section.Exists("Flag"));
+ EXPECT_TRUE(section.Exists("Description"));
+ EXPECT_EQ("pkgC", section.FindS("Package"));
+ EXPECT_EQ("2", section.FindS("Version"));
+ EXPECT_EQ(2, section.FindULL("Version"));
+ Flags = 0;
+ EXPECT_TRUE(section.FindFlag("Flag", Flags, 1));
+ EXPECT_EQ(1, Flags);
+ Flags = 1;
+ EXPECT_TRUE(section.FindFlag("Flag", Flags, 1));
+ EXPECT_EQ(1, Flags);
+ EXPECT_EQ("ccc", section.FindS("Description"));
+
+ // There is no section left in this tag file
+ EXPECT_FALSE(tfile.Step(section));
+}
+
+TEST(TagFileTest,BigSection)
+{
+ size_t const count = 500;
+ std::stringstream content;
+ for (size_t i = 0; i < count; ++i)
+ content << "Field-" << i << ": " << (2000 + i) << std::endl;
+
+ FileFd fd;
+ createTemporaryFile("bigsection", fd, NULL, content.str().c_str());
+
+ pkgTagFile tfile(&fd);
+ pkgTagSection section;
+ EXPECT_TRUE(tfile.Step(section));
+
+ EXPECT_EQ(count, section.Count());
+ for (size_t i = 0; i < count; ++i)
+ {
+ std::stringstream name;
+ name << "Field-" << i;
+ EXPECT_TRUE(section.Exists(name.str().c_str())) << name.str() << " does not exist";
+ EXPECT_EQ((2000 + i), section.FindULL(name.str().c_str()));
+ }
+
+ // There is only one section in this tag file
+ EXPECT_FALSE(tfile.Step(section));
+}
+
+TEST(TagFileTest, PickedUpFromPreviousCall)
+{
+ size_t const count = 500;
+ std::stringstream contentstream;
+ for (size_t i = 0; i < count; ++i)
+ contentstream << "Field-" << i << ": " << (2000 + i) << std::endl;
+ contentstream << std::endl << std::endl;
+ std::string content = contentstream.str();
+
+ pkgTagSection section;
+ EXPECT_FALSE(section.Scan(content.c_str(), content.size()/2));
+ EXPECT_NE(0, section.Count());
+ EXPECT_NE(count, section.Count());
+ EXPECT_TRUE(section.Scan(content.c_str(), content.size(), false));
+ EXPECT_EQ(count, section.Count());
+
+ for (size_t i = 0; i < count; ++i)
+ {
+ std::stringstream name;
+ name << "Field-" << i;
+ EXPECT_TRUE(section.Exists(name.str().c_str())) << name.str() << " does not exist";
+ EXPECT_EQ((2000 + i), section.FindULL(name.str().c_str()));
+ }
+}
+
+TEST(TagFileTest, SpacesEverywhere)
+{
+ std::string content =
+ "Package: pkgA\n"
+ "Package: pkgB\n"
+ "NoSpaces:yes\n"
+ "TagSpaces\t :yes\n"
+ "ValueSpaces: \tyes\n"
+ "BothSpaces \t:\t yes\n"
+ "TrailingSpaces: yes\t \n"
+ "Naming Space: yes\n"
+ "Naming Spaces: yes\n"
+ "Package : pkgC \n"
+ "Multi-Colon::yes:\n"
+ "\n\n";
+
+ pkgTagSection section;
+ EXPECT_TRUE(section.Scan(content.c_str(), content.size()));
+ EXPECT_TRUE(section.Exists("Package"));
+ EXPECT_TRUE(section.Exists("NoSpaces"));
+ EXPECT_TRUE(section.Exists("TagSpaces"));
+ EXPECT_TRUE(section.Exists("ValueSpaces"));
+ EXPECT_TRUE(section.Exists("BothSpaces"));
+ EXPECT_TRUE(section.Exists("TrailingSpaces"));
+ EXPECT_TRUE(section.Exists("Naming Space"));
+ EXPECT_TRUE(section.Exists("Naming Spaces"));
+ EXPECT_TRUE(section.Exists("Multi-Colon"));
+ EXPECT_EQ("pkgC", section.FindS("Package"));
+ EXPECT_EQ("yes", section.FindS("NoSpaces"));
+ EXPECT_EQ("yes", section.FindS("TagSpaces"));
+ EXPECT_EQ("yes", section.FindS("ValueSpaces"));
+ EXPECT_EQ("yes", section.FindS("BothSpaces"));
+ EXPECT_EQ("yes", section.FindS("TrailingSpaces"));
+ EXPECT_EQ("yes", section.FindS("Naming Space"));
+ EXPECT_EQ("yes", section.FindS("Naming Spaces"));
+ EXPECT_EQ(":yes:", section.FindS("Multi-Colon"));
+ // overridden values are still present, but not really accessible
+ EXPECT_EQ(11, section.Count());
+}