-rw-r--r--  AUTHORS | 6
-rw-r--r--  Makefile | 15
-rw-r--r--  apt-inst/contrib/arfile.cc | 154
-rw-r--r--  apt-inst/contrib/arfile.h | 68
-rw-r--r--  apt-inst/contrib/extracttar.cc | 342
-rw-r--r--  apt-inst/contrib/extracttar.h | 54
-rw-r--r--  apt-inst/database.cc | 30
-rw-r--r--  apt-inst/database.h | 56
-rw-r--r--  apt-inst/deb/debfile.cc | 262
-rw-r--r--  apt-inst/deb/debfile.h | 92
-rw-r--r--  apt-inst/deb/dpkgdb.cc | 490
-rw-r--r--  apt-inst/deb/dpkgdb.h | 53
-rw-r--r--  apt-inst/dirstream.cc | 103
-rw-r--r--  apt-inst/dirstream.h | 61
-rw-r--r--  apt-inst/dpkg-diffs.txt | 5
-rw-r--r--  apt-inst/extract.cc | 509
-rw-r--r--  apt-inst/extract.h | 52
-rw-r--r--  apt-inst/filelist.cc | 588
-rw-r--r--  apt-inst/filelist.h | 314
-rw-r--r--  apt-inst/makefile | 30
-rw-r--r--  apt-pkg/acquire-item.cc | 142
-rw-r--r--  apt-pkg/acquire-item.h | 20
-rw-r--r--  apt-pkg/acquire-method.cc | 10
-rw-r--r--  apt-pkg/acquire-method.h | 6
-rw-r--r--  apt-pkg/acquire-worker.cc | 16
-rw-r--r--  apt-pkg/acquire-worker.h | 6
-rw-r--r--  apt-pkg/acquire.cc | 26
-rw-r--r--  apt-pkg/acquire.h | 25
-rw-r--r--  apt-pkg/algorithms.cc | 223
-rw-r--r--  apt-pkg/algorithms.h | 25
-rw-r--r--  apt-pkg/cachefile.cc | 90
-rw-r--r--  apt-pkg/cachefile.h | 31
-rw-r--r--  apt-pkg/cacheiterators.h | 50
-rw-r--r--  apt-pkg/clean.cc | 28
-rw-r--r--  apt-pkg/contrib/cdromutl.cc | 14
-rw-r--r--  apt-pkg/contrib/cmndline.cc | 30
-rw-r--r--  apt-pkg/contrib/configuration.cc | 406
-rw-r--r--  apt-pkg/contrib/configuration.h | 57
-rw-r--r--  apt-pkg/contrib/error.cc | 4
-rw-r--r--  apt-pkg/contrib/error.h | 22
-rw-r--r--  apt-pkg/contrib/fileutl.cc | 107
-rw-r--r--  apt-pkg/contrib/fileutl.h | 7
-rw-r--r--  apt-pkg/contrib/md5.h | 4
-rw-r--r--  apt-pkg/contrib/mmap.cc | 23
-rw-r--r--  apt-pkg/contrib/progress.cc | 14
-rw-r--r--  apt-pkg/contrib/sptr.h | 66
-rw-r--r--  apt-pkg/contrib/strutl.cc | 163
-rw-r--r--  apt-pkg/contrib/strutl.h | 38
-rw-r--r--  apt-pkg/deb/debindexfile.cc | 506
-rw-r--r--  apt-pkg/deb/debindexfile.h | 112
-rw-r--r--  apt-pkg/deb/deblistparser.cc | 221
-rw-r--r--  apt-pkg/deb/deblistparser.h | 36
-rw-r--r--  apt-pkg/deb/debrecords.cc | 23
-rw-r--r--  apt-pkg/deb/debrecords.h | 12
-rw-r--r--  apt-pkg/deb/debsrcrecords.cc | 97
-rw-r--r--  apt-pkg/deb/debsrcrecords.h | 24
-rw-r--r--  apt-pkg/deb/debsystem.cc | 197
-rw-r--r--  apt-pkg/deb/debsystem.h | 41
-rw-r--r--  apt-pkg/deb/debversion.cc | 266
-rw-r--r--  apt-pkg/deb/debversion.h | 72
-rw-r--r--  apt-pkg/deb/dpkginit.cc | 119
-rw-r--r--  apt-pkg/deb/dpkginit.h | 34
-rw-r--r--  apt-pkg/deb/dpkgpm.cc | 162
-rw-r--r--  apt-pkg/deb/dpkgpm.h | 8
-rw-r--r--  apt-pkg/depcache.cc | 255
-rw-r--r--  apt-pkg/depcache.h | 72
-rw-r--r--  apt-pkg/indexfile.cc | 77
-rw-r--r--  apt-pkg/indexfile.h | 80
-rw-r--r--  apt-pkg/init.cc | 105
-rw-r--r--  apt-pkg/init.h | 28
-rw-r--r--  apt-pkg/makefile | 43
-rw-r--r--  apt-pkg/orderlist.cc | 119
-rw-r--r--  apt-pkg/orderlist.h | 21
-rw-r--r--  apt-pkg/packagemanager.cc | 56
-rw-r--r--  apt-pkg/packagemanager.h | 25
-rw-r--r--  apt-pkg/pkgcache.cc | 245
-rw-r--r--  apt-pkg/pkgcache.h | 64
-rw-r--r--  apt-pkg/pkgcachegen.cc | 688
-rw-r--r--  apt-pkg/pkgcachegen.h | 41
-rw-r--r--  apt-pkg/pkgrecords.cc | 45
-rw-r--r--  apt-pkg/pkgrecords.h | 27
-rw-r--r--  apt-pkg/pkgsystem.cc | 45
-rw-r--r--  apt-pkg/pkgsystem.h | 95
-rw-r--r--  apt-pkg/policy.cc | 275
-rw-r--r--  apt-pkg/policy.h | 83
-rw-r--r--  apt-pkg/sourcelist.cc | 424
-rw-r--r--  apt-pkg/sourcelist.h | 61
-rw-r--r--  apt-pkg/srcrecords.cc | 68
-rw-r--r--  apt-pkg/srcrecords.h | 47
-rw-r--r--  apt-pkg/tagfile.cc | 282
-rw-r--r--  apt-pkg/tagfile.h | 49
-rw-r--r--  apt-pkg/version.cc | 261
-rw-r--r--  apt-pkg/version.h | 69
-rw-r--r--  apt-pkg/versionmatch.cc | 210
-rw-r--r--  apt-pkg/versionmatch.h | 69
-rw-r--r--  buildlib/apti18n.h.in | 1
-rw-r--r--  buildlib/archtable | 54
-rwxr-xr-x  buildlib/config.guess | 439
-rw-r--r--  buildlib/config.h.in | 16
-rwxr-xr-x  buildlib/config.sub | 200
-rw-r--r--  buildlib/configure.mak | 2
-rw-r--r--  buildlib/copy.mak | 2
-rw-r--r--  buildlib/debiandoc.mak | 4
-rw-r--r--  buildlib/defaults.mak | 14
-rw-r--r--  buildlib/environment.mak.in | 25
-rw-r--r--  buildlib/fail.mak | 20
-rw-r--r--  buildlib/library.mak | 3
-rw-r--r--  buildlib/makefile.in | 20
-rw-r--r--  buildlib/manpage.mak | 2
-rw-r--r--  buildlib/ostable | 19
-rw-r--r--  buildlib/program.mak | 3
-rw-r--r--  buildlib/python.mak | 68
-rw-r--r--  buildlib/sgml_manpage.mak | 49
-rw-r--r--  buildlib/sizetable | 22
-rw-r--r--  buildlib/staticlibrary.mak | 3
-rw-r--r--  cmdline/acqprogress.cc | 32
-rw-r--r--  cmdline/apt-cache.cc | 601
-rw-r--r--  cmdline/apt-cdrom.cc | 118
-rw-r--r--  cmdline/apt-config.cc | 66
-rw-r--r--  cmdline/apt-get.cc | 908
-rw-r--r--  cmdline/apt-sortpkgs.cc | 201
-rw-r--r--  cmdline/indexcopy.cc | 90
-rw-r--r--  cmdline/indexcopy.h | 11
-rw-r--r--  cmdline/makefile | 7
-rw-r--r--  configure.in | 122
-rw-r--r--  debian/apt-utils.dirs | 2
-rw-r--r--  debian/changelog | 100
-rw-r--r--  debian/control | 14
-rw-r--r--  debian/dhelp | 8
-rw-r--r--  debian/dirs | 2
-rw-r--r--  debian/libapt-pkg-doc.dhelp | 22
-rwxr-xr-x  debian/postrm | 4
-rwxr-xr-x  debian/preinst | 28
-rwxr-xr-x  debian/prerm | 11
-rw-r--r--  debian/python-apt.dirs | 1
-rwxr-xr-x  debian/rules | 111
-rw-r--r--  doc/.cvsignore | 12
-rw-r--r--  doc/Bugs | 2
-rw-r--r--  doc/apt-cache.8.sgml | 365
-rw-r--r--  doc/apt-cache.8.yo | 280
-rw-r--r--  doc/apt-cdrom.8.sgml | 146
-rw-r--r--  doc/apt-cdrom.8.yo | 120
-rw-r--r--  doc/apt-config.8.sgml | 105
-rw-r--r--  doc/apt-config.8.yo | 86
-rw-r--r--  doc/apt-ftparchive.1.sgml | 507
-rw-r--r--  doc/apt-get.8.sgml | 451
-rw-r--r--  doc/apt-get.8.yo | 302
-rw-r--r--  doc/apt-sortpkgs.1.sgml | 73
-rw-r--r--  doc/apt.conf.5.sgml | 407
-rw-r--r--  doc/apt.conf.5.yo | 282
-rw-r--r--  doc/apt.ent | 159
-rw-r--r--  doc/apt_preferences.5.sgml | 227
-rw-r--r--  doc/cache.sgml | 39
-rw-r--r--  doc/examples/configure-index | 20
-rw-r--r--  doc/examples/ftp-archive.conf | 81
-rw-r--r--  doc/examples/sources.list | 2
-rw-r--r--  doc/files.sgml | 12
-rw-r--r--  doc/guide.it.sgml | 585
-rw-r--r--  doc/guide.sgml | 24
-rw-r--r--  doc/libapt-pkg2_to_3.txt | 89
-rw-r--r--  doc/makefile | 14
-rw-r--r--  doc/offline.sgml | 22
-rw-r--r--  doc/sources.list.5.sgml | 199
-rw-r--r--  doc/sources.list.5.yo | 148
-rw-r--r--  doc/style.txt | 75
-rwxr-xr-x  dselect/install | 38
-rwxr-xr-x  dselect/update | 17
-rw-r--r--  ftparchive/apt-ftparchive.cc | 919
-rw-r--r--  ftparchive/apt-ftparchive.h | 28
-rw-r--r--  ftparchive/cachedb.cc | 284
-rw-r--r--  ftparchive/cachedb.h | 119
-rw-r--r--  ftparchive/contents.cc | 401
-rw-r--r--  ftparchive/contents.h | 89
-rw-r--r--  ftparchive/makefile | 20
-rw-r--r--  ftparchive/multicompress.cc | 494
-rw-r--r--  ftparchive/multicompress.h | 80
-rw-r--r--  ftparchive/override.cc | 180
-rw-r--r--  ftparchive/override.h | 50
-rw-r--r--  ftparchive/writer.cc | 756
-rw-r--r--  ftparchive/writer.h | 145
-rw-r--r--  methods/connect.cc | 57
-rw-r--r--  methods/connect.h | 3
-rw-r--r--  methods/ftp.cc | 306
-rw-r--r--  methods/ftp.h | 18
-rw-r--r--  methods/http.cc | 24
-rw-r--r--  methods/http.h | 5
-rw-r--r--  methods/makefile | 16
-rw-r--r--  methods/rfc2553emu.cc | 51
-rw-r--r--  methods/rsh.cc | 486
-rw-r--r--  methods/rsh.h | 69
-rw-r--r--  test/conf.cc | 35
-rw-r--r--  test/extract-control.cc | 40
-rw-r--r--  test/makefile | 26
-rw-r--r--  test/scratch.cc | 1
-rw-r--r--  test/testdeb.cc | 39
-rw-r--r--  test/testextract.cc | 96
-rw-r--r--  test/versiontest.cc | 5
197 files changed, 19417 insertions, 4635 deletions
diff --git a/AUTHORS b/AUTHORS
index 6b8e074c9..05358e3dd 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -6,16 +6,20 @@ CVS:jgg Jason Gunthorpe <jgg@debian.org>
CVS:che Ben Gertzfield <che@debian.org>
- Packaging and Releases
+CVS:bod Brendan O'Dea <bod@debian.org>
+- Perl Bindings
+
Past Contributors:
Brian White <bcwhite@verisim.com> - Project originator
Tom Lees <tom@lpsg.demon.co.uk> - DPKG documentation and ideas
Behan Webster <behanw@verisim.com> - Original GUI design
Scott Ellis <storm@gate.net> - Original packaging and beta releases
-Branden Branden Robinson <branden@purdue.edu> - Man Page Documentation
+Branden Robinson <branden@purdue.edu> - Man Page Documentation
Manoj Srivastava <srivasta@datasync.com> - 1st Generation FTP method and
dselect setup script
Adam Heath <doogie@debian.org> - 2nd Generation FTP method author
+Ben Collins <bcollins@debian.org> - Initial RSH method
Many other bug reports through the Debian Bug system
NOTE: The ChangeLog generator will parse for names and email addresses. The
diff --git a/Makefile b/Makefile
index 88a451c93..2a88c1bb1 100644
--- a/Makefile
+++ b/Makefile
@@ -6,17 +6,17 @@ ifndef NOISY
.SILENT:
endif
+.PHONY: default
+default: startup all
+
.PHONY: headers library clean veryclean all binary program doc
-all headers library clean veryclean binary program doc:
+all headers library clean veryclean binary program doc dirs:
$(MAKE) -C apt-pkg $@
+ $(MAKE) -C apt-inst $@
$(MAKE) -C methods $@
-# $(MAKE) -C methods/ftp $@
$(MAKE) -C cmdline $@
+ $(MAKE) -C ftparchive $@
$(MAKE) -C dselect $@
-ifdef GUI
- $(MAKE) -C deity $@
- $(MAKE) -C gui $@
-endif
$(MAKE) -C doc $@
# Some very common aliases
@@ -25,8 +25,9 @@ maintainer-clean dist-clean distclean pristine sanity: veryclean
# The startup target builds the necessary configure scripts. It should
# be used after a CVS checkout.
-CONVERTED=environment.mak include/config.h makefile
+CONVERTED=environment.mak include/config.h include/apti18n.h makefile
include buildlib/configure.mak
$(BUILDDIR)/include/config.h: buildlib/config.h.in
+$(BUILDDIR)/include/apti18n.h: buildlib/apti18n.h.in
$(BUILDDIR)/environment.mak: buildlib/environment.mak.in
$(BUILDDIR)/makefile: buildlib/makefile.in
diff --git a/apt-inst/contrib/arfile.cc b/apt-inst/contrib/arfile.cc
new file mode 100644
index 000000000..c2964b7a9
--- /dev/null
+++ b/apt-inst/contrib/arfile.cc
@@ -0,0 +1,154 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: arfile.cc,v 1.2 2001/02/20 07:03:16 jgg Exp $
+/* ######################################################################
+
+ AR File - Handle an 'AR' archive
+
+ AR Archives have plain text headers at the start of each file
+ section. The headers are aligned on a 2 byte boundary.
+
+ Information about the structure of AR files can be found in ar(5)
+ on a BSD system, or in the binutils source.
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#ifdef __GNUG__
+#pragma implementation "apt-pkg/arfile.h"
+#endif
+#include <apt-pkg/arfile.h>
+#include <apt-pkg/strutl.h>
+#include <apt-pkg/error.h>
+
+#include <stdlib.h>
+ /*}}}*/
+
+struct ARArchive::MemberHeader
+{
+ char Name[16];
+ char MTime[12];
+ char UID[6];
+ char GID[6];
+ char Mode[8];
+ char Size[10];
+ char Magic[2];
+};
+
+// ARArchive::ARArchive - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+ARArchive::ARArchive(FileFd &File) : List(0), File(File)
+{
+ LoadHeaders();
+}
+ /*}}}*/
+// ARArchive::~ARArchive - Destructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+ARArchive::~ARArchive()
+{
+ while (List != 0)
+ {
+ Member *Tmp = List;
+ List = List->Next;
+ delete Tmp;
+ }
+}
+ /*}}}*/
+// ARArchive::LoadHeaders - Load the headers from each file /*{{{*/
+// ---------------------------------------------------------------------
+/* AR files are structured with an 8 byte magic string followed by a 60
+ byte plain text header then the file data, another header, data, etc */
+bool ARArchive::LoadHeaders()
+{
+ signed long Left = File.Size();
+
+ // Check the magic byte
+ char Magic[8];
+ if (File.Read(Magic,sizeof(Magic)) == false)
+ return false;
+ if (memcmp(Magic,"!<arch>\012",sizeof(Magic)) != 0)
+ return _error->Error("Invalid archive signature");
+ Left -= sizeof(Magic);
+
+ // Read the member list
+ while (Left > 0)
+ {
+ MemberHeader Head;
+ if (File.Read(&Head,sizeof(Head)) == false)
+ return _error->Error("Error reading archive member header");
+ Left -= sizeof(Head);
+
+ // Convert all of the integer members
+ Member *Memb = new Member();
+ if (StrToNum(Head.MTime,Memb->MTime,sizeof(Head.MTime)) == false ||
+ StrToNum(Head.UID,Memb->UID,sizeof(Head.UID)) == false ||
+ StrToNum(Head.GID,Memb->GID,sizeof(Head.GID)) == false ||
+ StrToNum(Head.Mode,Memb->Mode,sizeof(Head.Mode),8) == false ||
+ StrToNum(Head.Size,Memb->Size,sizeof(Head.Size)) == false)
+ {
+ delete Memb;
+ return _error->Error("Invalid archive member header");
+ }
+
+ // Check for an extra long name string
+ if (memcmp(Head.Name,"#1/",3) == 0)
+ {
+ char S[300];
+ unsigned long Len;
+ if (StrToNum(Head.Name+3,Len,sizeof(Head.Name)-3) == false ||
+ Len >= sizeof(S))
+ {
+ delete Memb;
+ return _error->Error("Invalid archive member header");
+ }
+ if (File.Read(S,Len) == false)
+ return false;
+ S[Len] = 0;
+ Memb->Name = S;
+ Memb->Size -= Len;
+ Left -= Len;
+ }
+ else
+ {
+ unsigned int I = sizeof(Head.Name) - 1;
+ for (; Head.Name[I] == ' '; I--);
+ Memb->Name = string(Head.Name,0,I+1);
+ }
+
+ // Account for the AR header alignment
+ unsigned Skip = Memb->Size % 2;
+
+ // Add it to the list
+ Memb->Next = List;
+ List = Memb;
+ Memb->Start = File.Tell();
+ if (File.Skip(Memb->Size + Skip) == false)
+ return false;
+ if (Left < (signed)(Memb->Size + Skip))
+ return _error->Error("Archive is too short");
+ Left -= Memb->Size + Skip;
+ }
+ if (Left != 0)
+ return _error->Error("Failed to read the archive headers");
+
+ return true;
+}
+ /*}}}*/
+// ARArchive::FindMember - Find a name in the member list /*{{{*/
+// ---------------------------------------------------------------------
+/* Find a member with the given name */
+const ARArchive::Member *ARArchive::FindMember(const char *Name) const
+{
+ const Member *Res = List;
+ while (Res != 0)
+ {
+ if (Res->Name == Name)
+ return Res;
+ Res = Res->Next;
+ }
+
+ return 0;
+}
+ /*}}}*/
diff --git a/apt-inst/contrib/arfile.h b/apt-inst/contrib/arfile.h
new file mode 100644
index 000000000..6c54d3e6b
--- /dev/null
+++ b/apt-inst/contrib/arfile.h
@@ -0,0 +1,68 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: arfile.h,v 1.2 2001/02/20 07:03:16 jgg Exp $
+/* ######################################################################
+
+ AR File - Handle an 'AR' archive
+
+ This is a reader for the usual 4.4 BSD AR format. It allows raw
+ stream access to a single member at a time. Basically all this class
+ provides is header parsing and verification. It is up to the client
+ to correctly make use of the stream start/stop points.
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef PKGLIB_ARFILE_H
+#define PKGLIB_ARFILE_H
+
+#ifdef __GNUG__
+#pragma interface "apt-pkg/arfile.h"
+#endif
+
+#include <string>
+#include <apt-pkg/fileutl.h>
+
+class ARArchive
+{
+ struct MemberHeader;
+ public:
+ struct Member;
+
+ protected:
+
+ // Linked list of members
+ Member *List;
+
+ bool LoadHeaders();
+
+ public:
+
+ // The stream file
+ FileFd &File;
+
+ // Locate a member by name
+ const Member *FindMember(const char *Name) const;
+
+ ARArchive(FileFd &File);
+ ~ARArchive();
+};
+
+// A member of the archive
+struct ARArchive::Member
+{
+ // Fields from the header
+ string Name;
+ unsigned long MTime;
+ unsigned long UID;
+ unsigned long GID;
+ unsigned long Mode;
+ unsigned long Size;
+
+ // Location of the data.
+ unsigned long Start;
+ Member *Next;
+
+ Member() : Start(0), Next(0) {};
+};
+
+#endif
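
The two arfile sources above are the whole public surface: construct an ARArchive on an open FileFd (the constructor parses every header) and look members up with FindMember. The following is only an illustrative sketch of that call pattern, assuming the FileFd and ARArchive interfaces shown above; the function name InspectDeb and the member list are examples, not part of this commit.

    #include <apt-pkg/fileutl.h>
    #include <apt-pkg/arfile.h>
    #include <apt-pkg/error.h>
    #include <iostream>

    // Open a .deb (an AR archive) and report where its three members live.
    bool InspectDeb(const char *Path)
    {
       FileFd Fd(Path,FileFd::ReadOnly);
       ARArchive AR(Fd);                     // LoadHeaders runs in the constructor
       if (_error->PendingError() == true)
          return false;

       const char *Members[] = {"debian-binary","control.tar.gz","data.tar.gz"};
       for (unsigned I = 0; I != 3; I++)
       {
          const ARArchive::Member *M = AR.FindMember(Members[I]);
          if (M == 0)
             return _error->Error("Missing member %s",Members[I]);
          std::cout << M->Name << ": " << M->Size << " bytes at offset "
                    << M->Start << std::endl;
       }
       return true;
    }

FindMember only returns header information; reading a member body is left to the caller, who seeks to Start and reads Size bytes, exactly as the stream-oriented design in the header comment describes.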
diff --git a/apt-inst/contrib/extracttar.cc b/apt-inst/contrib/extracttar.cc
new file mode 100644
index 000000000..57e083b5a
--- /dev/null
+++ b/apt-inst/contrib/extracttar.cc
@@ -0,0 +1,342 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: extracttar.cc,v 1.2 2001/02/20 07:03:17 jgg Exp $
+/* ######################################################################
+
+ Extract a Tar - Tar Extractor
+
+ Some performance measurements showed that zlib performed quite poorly
+ in comparision to a forked gzip process. This tar extractor makes use
+ of the fact that dup'd file descriptors have the same seek pointer
+ and that gzip will not read past the end of a compressed stream,
+ even if there is more data. We use the dup property to track extraction
+ progress and the gzip feature to just feed gzip a fd in the middle
+ of an AR file.
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#ifdef __GNUG__
+#pragma implementation "apt-pkg/extracttar.h"
+#endif
+#include <apt-pkg/extracttar.h>
+
+#include <apt-pkg/error.h>
+#include <apt-pkg/strutl.h>
+#include <apt-pkg/configuration.h>
+#include <system.h>
+
+#include <stdlib.h>
+#include <unistd.h>
+#include <signal.h>
+#include <fcntl.h>
+ /*}}}*/
+
+// The on disk header for a tar file.
+struct ExtractTar::TarHeader
+{
+ char Name[100];
+ char Mode[8];
+ char UserID[8];
+ char GroupID[8];
+ char Size[12];
+ char MTime[12];
+ char Checksum[8];
+ char LinkFlag;
+ char LinkName[100];
+ char MagicNumber[8];
+ char UserName[32];
+ char GroupName[32];
+ char Major[8];
+ char Minor[8];
+};
+
+// ExtractTar::ExtractTar - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+ExtractTar::ExtractTar(FileFd &Fd,unsigned long Max) : File(Fd),
+ MaxInSize(Max)
+
+{
+ GZPid = -1;
+ InFd = -1;
+ Eof = false;
+}
+ /*}}}*/
+// ExtractTar::ExtractTar - Destructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+ExtractTar::~ExtractTar()
+{
+ Done(false);
+}
+ /*}}}*/
+// ExtractTar::Done - Reap the gzip sub process /*{{{*/
+// ---------------------------------------------------------------------
+/* If the force flag is given then error messages are suppressed - this
+ means we hit the end of the tar file but there was still gzip data. */
+bool ExtractTar::Done(bool Force)
+{
+ InFd.Close();
+ if (GZPid <= 0)
+ return true;
+
+ /* If there is a pending error then we are cleaning up gzip and are
+ not interested in its failures */
+ if (_error->PendingError() == true)
+ Force = true;
+
+ // Make sure we clean it up!
+ kill(GZPid,SIGINT);
+ if (ExecWait(GZPid,_config->Find("dir::bin::gzip","/bin/gzip").c_str(),
+ Force) == false)
+ {
+ GZPid = -1;
+ return Force;
+ }
+
+ GZPid = -1;
+ return true;
+}
+ /*}}}*/
+// ExtractTar::StartGzip - Startup gzip /*{{{*/
+// ---------------------------------------------------------------------
+/* This creates a gzip sub process that has its input as the file itself.
+ If this tar file is embedded into something like an ar file then
+ gzip will efficiently ignore the extra bits. */
+bool ExtractTar::StartGzip()
+{
+ int Pipes[2];
+ if (pipe(Pipes) != 0)
+ return _error->Errno("pipe","Failed to create pipes");
+
+ // Fork off the process
+ GZPid = ExecFork();
+
+ // Spawn the subprocess
+ if (GZPid == 0)
+ {
+ // Setup the FDs
+ dup2(Pipes[1],STDOUT_FILENO);
+ dup2(File.Fd(),STDIN_FILENO);
+ int Fd = open("/dev/null",O_RDWR);
+ if (Fd == -1)
+ _exit(101);
+ dup2(Fd,STDERR_FILENO);
+ close(Fd);
+ SetCloseExec(STDOUT_FILENO,false);
+ SetCloseExec(STDIN_FILENO,false);
+ SetCloseExec(STDERR_FILENO,false);
+
+ const char *Args[3];
+ Args[0] = _config->Find("dir::bin::gzip","/bin/gzip").c_str();
+ Args[1] = "-d";
+ Args[2] = 0;
+ execv(Args[0],(char **)Args);
+ cerr << "Failed to exec gzip " << Args[0] << endl;
+ _exit(100);
+ }
+
+ // Fix up our FDs
+ InFd.Fd(Pipes[0]);
+ close(Pipes[1]);
+ return true;
+}
+ /*}}}*/
+// ExtractTar::Go - Perform extraction /*{{{*/
+// ---------------------------------------------------------------------
+/* This reads each 512 byte block from the archive and extracts the header
+ information into the Item structure. Then it resolves the UID/GID and
+ invokes the correct processing function. */
+bool ExtractTar::Go(pkgDirStream &Stream)
+{
+ if (StartGzip() == false)
+ return false;
+
+ // Loop over all blocks
+ string LastLongLink;
+ string LastLongName;
+ while (1)
+ {
+ bool BadRecord = false;
+ unsigned char Block[512];
+ if (InFd.Read(Block,sizeof(Block),true) == false)
+ return false;
+
+ if (InFd.Eof() == true)
+ break;
+
+ // Get the checksum
+ TarHeader *Tar = (TarHeader *)Block;
+ unsigned long CheckSum;
+ if (StrToNum(Tar->Checksum,CheckSum,sizeof(Tar->Checksum),8) == false)
+ return _error->Error("Corrupted archive");
+
+ /* Compute the checksum field. The actual checksum is blanked out
+ with spaces so it is not included in the computation */
+ unsigned long NewSum = 0;
+ memset(Tar->Checksum,' ',sizeof(Tar->Checksum));
+ for (int I = 0; I != sizeof(Block); I++)
+ NewSum += Block[I];
+
+ /* Check for a block of nulls - in this case we kill gzip, GNU tar
+ does this.. */
+ if (NewSum == ' '*sizeof(Tar->Checksum))
+ return Done(true);
+
+ if (NewSum != CheckSum)
+ return _error->Error("Tar Checksum failed, archive corrupted");
+
+ // Decode all of the fields
+ pkgDirStream::Item Itm;
+ unsigned long UID;
+ unsigned long GID;
+ if (StrToNum(Tar->Mode,Itm.Mode,sizeof(Tar->Mode),8) == false ||
+ StrToNum(Tar->UserID,UID,sizeof(Tar->UserID),8) == false ||
+ StrToNum(Tar->GroupID,GID,sizeof(Tar->GroupID),8) == false ||
+ StrToNum(Tar->Size,Itm.Size,sizeof(Tar->Size),8) == false ||
+ StrToNum(Tar->MTime,Itm.MTime,sizeof(Tar->MTime),8) == false ||
+ StrToNum(Tar->Major,Itm.Major,sizeof(Tar->Major),8) == false ||
+ StrToNum(Tar->Minor,Itm.Minor,sizeof(Tar->Minor),8) == false)
+ return _error->Error("Corrupted archive");
+
+ // Grab the filename
+ if (LastLongName.empty() == false)
+ Itm.Name = (char *)LastLongName.c_str();
+ else
+ {
+ Tar->Name[sizeof(Tar->Name)] = 0;
+ Itm.Name = Tar->Name;
+ }
+ if (Itm.Name[0] == '.' && Itm.Name[1] == '/' && Itm.Name[2] != 0)
+ Itm.Name += 2;
+
+ // Grab the link target
+ Tar->Name[sizeof(Tar->LinkName)] = 0;
+ Itm.LinkTarget = Tar->LinkName;
+
+ if (LastLongLink.empty() == false)
+ Itm.LinkTarget = (char *)LastLongLink.c_str();
+
+ // Convert the type over
+ switch (Tar->LinkFlag)
+ {
+ case NormalFile0:
+ case NormalFile:
+ Itm.Type = pkgDirStream::Item::File;
+ break;
+
+ case HardLink:
+ Itm.Type = pkgDirStream::Item::HardLink;
+ break;
+
+ case SymbolicLink:
+ Itm.Type = pkgDirStream::Item::SymbolicLink;
+ break;
+
+ case CharacterDevice:
+ Itm.Type = pkgDirStream::Item::CharDevice;
+ break;
+
+ case BlockDevice:
+ Itm.Type = pkgDirStream::Item::BlockDevice;
+ break;
+
+ case Directory:
+ Itm.Type = pkgDirStream::Item::Directory;
+ break;
+
+ case FIFO:
+ Itm.Type = pkgDirStream::Item::FIFO;
+ break;
+
+ case GNU_LongLink:
+ {
+ unsigned long Length = Itm.Size;
+ unsigned char Block[512];
+ while (Length > 0)
+ {
+ if (InFd.Read(Block,sizeof(Block),true) == false)
+ return false;
+ if (Length <= sizeof(Block))
+ {
+ LastLongLink.append(Block,Block+sizeof(Block));
+ break;
+ }
+ LastLongLink.append(Block,Block+sizeof(Block));
+ Length -= sizeof(Block);
+ }
+ continue;
+ }
+
+ case GNU_LongName:
+ {
+ unsigned long Length = Itm.Size;
+ unsigned char Block[512];
+ while (Length > 0)
+ {
+ if (InFd.Read(Block,sizeof(Block),true) == false)
+ return false;
+ if (Length < sizeof(Block))
+ {
+ LastLongName.append(Block,Block+sizeof(Block));
+ break;
+ }
+ LastLongName.append(Block,Block+sizeof(Block));
+ Length -= sizeof(Block);
+ }
+ continue;
+ }
+
+ default:
+ BadRecord = true;
+ _error->Warning("Unknown TAR header type %u, member %s",(unsigned)Tar->LinkFlag,Tar->Name);
+ break;
+ }
+
+ int Fd = -1;
+ if (BadRecord == false)
+ if (Stream.DoItem(Itm,Fd) == false)
+ return false;
+
+ // Copy the file over the FD
+ unsigned long Size = Itm.Size;
+ while (Size != 0)
+ {
+ unsigned char Junk[32*1024];
+ unsigned long Read = MIN(Size,sizeof(Junk));
+ if (InFd.Read(Junk,((Read+511)/512)*512) == false)
+ return false;
+
+ if (BadRecord == false)
+ {
+ if (Fd > 0)
+ {
+ if (write(Fd,Junk,Read) != (signed)Read)
+ return Stream.Fail(Itm,Fd);
+ }
+ else
+ {
+ /* An Fd of -2 means to send to a special processing
+ function */
+ if (Fd == -2)
+ if (Stream.Process(Itm,Junk,Read,Itm.Size - Size) == false)
+ return Stream.Fail(Itm,Fd);
+ }
+ }
+
+ Size -= Read;
+ }
+
+ // And finish up
+ if (Itm.Size != 0 && BadRecord == false)
+ if (Stream.FinishedFile(Itm,Fd) == false)
+ return false;
+
+ LastLongName.erase();
+ LastLongLink.erase();
+ }
+
+ return Done(false);
+}
+ /*}}}*/
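
The checksum handling in Go() above follows the classic tar rule: every byte of the 512 byte header is summed while the stored 8 byte checksum field is treated as spaces, which is also why an all-NUL end-of-archive block sums to 8 * ' '. A small standalone sketch of that arithmetic (illustrative only; the 148..155 offsets are the standard ustar checksum field position, not something this commit defines):

    // Sum a 512 byte tar header block, counting the stored checksum field
    // (bytes 148..155 in the standard header layout) as ASCII spaces.
    unsigned long TarBlockSum(const unsigned char Block[512])
    {
       unsigned long Sum = 0;
       for (unsigned I = 0; I != 512; I++)
       {
          if (I >= 148 && I < 156)
             Sum += ' ';            // blank out the checksum field itself
          else
             Sum += Block[I];
       }
       return Sum;
    }

The extractor compares this sum against the octal value parsed from the checksum field, and treats a sum of exactly 8 spaces over an otherwise zeroed block as the end-of-archive marker.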
diff --git a/apt-inst/contrib/extracttar.h b/apt-inst/contrib/extracttar.h
new file mode 100644
index 000000000..aaca987f2
--- /dev/null
+++ b/apt-inst/contrib/extracttar.h
@@ -0,0 +1,54 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: extracttar.h,v 1.2 2001/02/20 07:03:17 jgg Exp $
+/* ######################################################################
+
+ Extract a Tar - Tar Extractor
+
+ The tar extractor takes an ordinary gzip compressed tar stream from
+ the given file and explodes it, passing the individual items to the
+ given Directory Stream for processing.
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef PKGLIB_EXTRACTTAR_H
+#define PKGLIB_EXTRACTTAR_H
+
+#ifdef __GNUG__
+#pragma interface "apt-pkg/extracttar.h"
+#endif
+
+#include <apt-pkg/fileutl.h>
+#include <apt-pkg/dirstream.h>
+
+class ExtractTar
+{
+ protected:
+
+ struct TarHeader;
+
+ // The various types an item can be
+ enum ItemType {NormalFile0 = '\0',NormalFile = '0',HardLink = '1',
+ SymbolicLink = '2',CharacterDevice = '3',
+ BlockDevice = '4',Directory = '5',FIFO = '6',
+ GNU_LongLink = 'K',GNU_LongName = 'L'};
+
+ FileFd &File;
+ unsigned long MaxInSize;
+ int GZPid;
+ FileFd InFd;
+ bool Eof;
+
+ // Fork and reap gzip
+ bool StartGzip();
+ bool Done(bool Force);
+
+ public:
+
+ bool Go(pkgDirStream &Stream);
+
+ ExtractTar(FileFd &Fd,unsigned long Max);
+ virtual ~ExtractTar();
+};
+
+#endif
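
Putting ExtractTar together with the pkgDirStream base class (declared later in this commit in apt-inst/dirstream.h), the intended call pattern is roughly the sketch below. It is illustrative only; Fd is assumed to already be positioned at the start of a gzip compressed tar member of the given size, as debDebFile arranges for control.tar.gz and data.tar.gz.

    #include <apt-pkg/extracttar.h>
    #include <apt-pkg/dirstream.h>
    #include <apt-pkg/error.h>

    // Unpack one gzip compressed tar member using the default stream,
    // which writes plain files and ignores every other item type.
    bool UnpackMember(FileFd &Fd,unsigned long Size)
    {
       pkgDirStream Stream;
       ExtractTar Tar(Fd,Size);
       if (_error->PendingError() == true)
          return false;
       return Tar.Go(Stream);   // forks gzip, walks the blocks, drives Stream
    }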
diff --git a/apt-inst/database.cc b/apt-inst/database.cc
new file mode 100644
index 000000000..beee692b8
--- /dev/null
+++ b/apt-inst/database.cc
@@ -0,0 +1,30 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: database.cc,v 1.2 2001/02/20 07:03:16 jgg Exp $
+/* ######################################################################
+
+ Data Base Abstraction
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#ifdef __GNUG__
+#pragma implementation "apt-pkg/database.h"
+#endif
+
+#include <apt-pkg/database.h>
+ /*}}}*/
+
+// DataBase::GetMetaTmp - Get the temp dir /*{{{*/
+// ---------------------------------------------------------------------
+/* This re-initializes the meta temporary directory if it hasn't yet
+ been initialized for this cycle. The flag is the emptiness of MetaDir */
+bool pkgDataBase::GetMetaTmp(string &Dir)
+{
+ if (MetaDir.empty() == true)
+ if (InitMetaTmp(MetaDir) == false)
+ return false;
+ Dir = MetaDir;
+ return true;
+}
+ /*}}}*/
diff --git a/apt-inst/database.h b/apt-inst/database.h
new file mode 100644
index 000000000..0972d40cd
--- /dev/null
+++ b/apt-inst/database.h
@@ -0,0 +1,56 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: database.h,v 1.2 2001/02/20 07:03:16 jgg Exp $
+/* ######################################################################
+
+ Data Base Abstraction
+
+ This class provides a simple interface to an abstract notion of a
+ database directory for storing state information about the system.
+
+ The 'Meta' information for a package is the control information and
+ setup scripts stored inside the archive. GetMetaTmp returns the name of
+ a directory that is used to store named files containing the control
+ information.
+
+ The File Listing is the database of installed files. It is loaded
+ into the memory/persistent cache structure by the ReadFileList method.
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef PKGLIB_DATABASE_H
+#define PKGLIB_DATABASE_H
+
+#ifdef __GNUG__
+#pragma interface "apt-pkg/database.h"
+#endif
+
+#include <apt-pkg/filelist.h>
+#include <apt-pkg/pkgcachegen.h>
+
+class pkgDataBase
+{
+ protected:
+
+ pkgCacheGenerator *Cache;
+ pkgFLCache *FList;
+ string MetaDir;
+ virtual bool InitMetaTmp(string &Dir) = 0;
+
+ public:
+
+ // Some manipulators for the cache and generator
+ inline pkgCache &GetCache() {return Cache->GetCache();};
+ inline pkgFLCache &GetFLCache() {return *FList;};
+ inline pkgCacheGenerator &GetGenerator() {return *Cache;};
+
+ bool GetMetaTmp(string &Dir);
+ virtual bool ReadyFileList(OpProgress &Progress) = 0;
+ virtual bool ReadyPkgCache(OpProgress &Progress) = 0;
+ virtual bool LoadChanges() = 0;
+
+ pkgDataBase() : Cache(0), FList(0) {};
+ virtual ~pkgDataBase() {delete Cache; delete FList;};
+};
+
+#endif
diff --git a/apt-inst/deb/debfile.cc b/apt-inst/deb/debfile.cc
new file mode 100644
index 000000000..c93ba88a8
--- /dev/null
+++ b/apt-inst/deb/debfile.cc
@@ -0,0 +1,262 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: debfile.cc,v 1.2 2001/02/20 07:03:17 jgg Exp $
+/* ######################################################################
+
+ Debian Archive File (.deb)
+
+ .DEB archives are AR files containing two tars and an empty marker
+ member called 'debian-binary'. The two tars contain the meta data and
+ the actual archive contents. Thus this class is a very simple wrapper
+ around ar/tar to simply extract the right tar files.
+
+ It also uses the deb package list parser to parse the control file
+ into the cache.
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#ifdef __GNUG__
+#pragma implementation "apt-pkg/debfile.h"
+#endif
+
+#include <apt-pkg/debfile.h>
+#include <apt-pkg/extracttar.h>
+#include <apt-pkg/error.h>
+#include <apt-pkg/deblistparser.h>
+
+#include <sys/stat.h>
+#include <unistd.h>
+ /*}}}*/
+
+// DebFile::debDebFile - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* Open the AR file and check for consistency */
+debDebFile::debDebFile(FileFd &File) : File(File), AR(File)
+{
+ if (_error->PendingError() == true)
+ return;
+
+ // Check the members for validity
+ if (CheckMember("debian-binary") == false ||
+ CheckMember("control.tar.gz") == false ||
+ CheckMember("data.tar.gz") == false)
+ return;
+}
+ /*}}}*/
+// DebFile::CheckMember - Check if a named member is in the archive /*{{{*/
+// ---------------------------------------------------------------------
+/* This is used to check for a correct deb and to give nicer error messages
+ for people playing around. */
+bool debDebFile::CheckMember(const char *Name)
+{
+ if (AR.FindMember(Name) == 0)
+ return _error->Error("This is not a valid DEB archive, missing '%s' member",Name);
+ return true;
+}
+ /*}}}*/
+// DebFile::GotoMember - Jump to a Member /*{{{*/
+// ---------------------------------------------------------------------
+/* Jump in the file to the start of a named member and return the information
+ about that member. The caller can then read from the file up to the
+ returned size. Note, since this relies on the file position this is
+ a destructive operation, it also changes the last returned Member
+ structure - so don't nest them! */
+const ARArchive::Member *debDebFile::GotoMember(const char *Name)
+{
+ // Get the archive member and position the file
+ const ARArchive::Member *Member = AR.FindMember(Name);
+ if (Member == 0)
+ {
+ _error->Error("Internal Error, could not locate member %s",Name);
+ return 0;
+ }
+ if (File.Seek(Member->Start) == false)
+ return 0;
+
+ return Member;
+}
+ /*}}}*/
+// DebFile::ExtractControl - Extract Control information /*{{{*/
+// ---------------------------------------------------------------------
+/* Extract the control information into the Database's temporary
+ directory. */
+bool debDebFile::ExtractControl(pkgDataBase &DB)
+{
+ // Get the archive member and position the file
+ const ARArchive::Member *Member = GotoMember("control.tar.gz");
+ if (Member == 0)
+ return false;
+
+ // Prepare Tar
+ ControlExtract Extract;
+ ExtractTar Tar(File,Member->Size);
+ if (_error->PendingError() == true)
+ return false;
+
+ // Get into the temporary directory
+ string Cwd = SafeGetCWD();
+ string Tmp;
+ if (DB.GetMetaTmp(Tmp) == false)
+ return false;
+ if (chdir(Tmp.c_str()) != 0)
+ return _error->Errno("chdir","Couldn't change to %s",Tmp.c_str());
+
+ // Do extraction
+ if (Tar.Go(Extract) == false)
+ return false;
+
+ // Switch out of the tmp directory.
+ if (chdir(Cwd.c_str()) != 0)
+ chdir("/");
+
+ return true;
+}
+ /*}}}*/
+// DebFile::ExtractArchive - Extract the archive data itself /*{{{*/
+// ---------------------------------------------------------------------
+/* Simple wrapper around tar.. */
+bool debDebFile::ExtractArchive(pkgDirStream &Stream)
+{
+ // Get the archive member and position the file
+ const ARArchive::Member *Member = AR.FindMember("data.tar.gz");
+ if (Member == 0)
+ return _error->Error("Internal Error, could not locate member");
+ if (File.Seek(Member->Start) == false)
+ return false;
+
+ // Prepare Tar
+ ExtractTar Tar(File,Member->Size);
+ if (_error->PendingError() == true)
+ return false;
+ return Tar.Go(Stream);
+}
+ /*}}}*/
+// DebFile::MergeControl - Merge the control information /*{{{*/
+// ---------------------------------------------------------------------
+/* This reads the extracted control file into the cache and returns the
+ version that was parsed. All this really does is select the correct
+ parser and correct file to parse. */
+pkgCache::VerIterator debDebFile::MergeControl(pkgDataBase &DB)
+{
+ // Open the control file
+ string Tmp;
+ if (DB.GetMetaTmp(Tmp) == false)
+ return pkgCache::VerIterator(DB.GetCache());
+ FileFd Fd(Tmp + "control",FileFd::ReadOnly);
+ if (_error->PendingError() == true)
+ return pkgCache::VerIterator(DB.GetCache());
+
+ // Parse it
+ debListParser Parse(&Fd);
+ pkgCache::VerIterator Ver(DB.GetCache());
+ if (DB.GetGenerator().MergeList(Parse,&Ver) == false)
+ return pkgCache::VerIterator(DB.GetCache());
+
+ if (Ver.end() == true)
+ _error->Error("Failed to locate a valid control file");
+ return Ver;
+}
+ /*}}}*/
+
+// DebFile::ControlExtract::DoItem - Control Tar Extraction /*{{{*/
+// ---------------------------------------------------------------------
+/* This directory stream handler for the control tar handles extracting
+ it into the temporary meta directory. It only extracts files, it does
+ not create directories, links or anything else. */
+bool debDebFile::ControlExtract::DoItem(Item &Itm,int &Fd)
+{
+ if (Itm.Type != Item::File)
+ return true;
+
+ /* Cleanse the file name, prevent people from trying to unpack into
+ absolute paths, .., etc */
+ for (char *I = Itm.Name; *I != 0; I++)
+ if (*I == '/')
+ *I = '_';
+
+ /* Force the ownership to be root and ensure correct permissions,
+ go-w, the rest are left untouched */
+ Itm.UID = 0;
+ Itm.GID = 0;
+ Itm.Mode &= ~(S_IWGRP | S_IWOTH);
+
+ return pkgDirStream::DoItem(Itm,Fd);
+}
+ /*}}}*/
+
+// MemControlExtract::DoItem - Check if it is the control file /*{{{*/
+// ---------------------------------------------------------------------
+/* This sets up to extract the control block member file into a memory
+ block of just the right size. All other files go into the bit bucket. */
+bool debDebFile::MemControlExtract::DoItem(Item &Itm,int &Fd)
+{
+ // At the control file, allocate buffer memory.
+ if (Member == Itm.Name)
+ {
+ delete [] Control;
+ Control = new char[Itm.Size+2];
+ IsControl = true;
+ Fd = -2; // Signal to pass to Process
+ Length = Itm.Size;
+ }
+ else
+ IsControl = false;
+
+ return true;
+}
+ /*}}}*/
+// MemControlExtract::Process - Process extracting the control file /*{{{*/
+// ---------------------------------------------------------------------
+/* Just memcopy the block from the tar extractor and put it in the right
+ place in the pre-allocated memory block. */
+bool debDebFile::MemControlExtract::Process(Item &Itm,const unsigned char *Data,
+ unsigned long Size,unsigned long Pos)
+{
+ memcpy(Control + Pos, Data,Size);
+ return true;
+}
+ /*}}}*/
+// MemControlExtract::Read - Read the control information from the deb /*{{{*/
+// ---------------------------------------------------------------------
+/* This uses the internal tar extractor to fetch the control file, and then
+ it parses it into a tag section parser. */
+bool debDebFile::MemControlExtract::Read(debDebFile &Deb)
+{
+ // Get the archive member and position the file
+ const ARArchive::Member *Member = Deb.GotoMember("control.tar.gz");
+ if (Member == 0)
+ return false;
+
+ // Extract it.
+ ExtractTar Tar(Deb.GetFile(),Member->Size);
+ if (Tar.Go(*this) == false)
+ return false;
+
+ if (Control == 0)
+ return true;
+
+ Control[Length] = '\n';
+ Control[Length+1] = '\n';
+ if (Section.Scan(Control,Length+2) == false)
+ return _error->Error("Unparsable control file");
+ return true;
+}
+ /*}}}*/
+// MemControlExtract::TakeControl - Parse a memory block /*{{{*/
+// ---------------------------------------------------------------------
+/* The given memory block is loaded into the parser and parsed as a control
+ record. */
+bool debDebFile::MemControlExtract::TakeControl(const void *Data,unsigned long Size)
+{
+ delete [] Control;
+ Control = new char[Size+2];
+ Length = Size;
+ memcpy(Control,Data,Size);
+
+ Control[Length] = '\n';
+ Control[Length+1] = '\n';
+ return Section.Scan(Control,Length+2);
+}
+ /*}}}*/
+
diff --git a/apt-inst/deb/debfile.h b/apt-inst/deb/debfile.h
new file mode 100644
index 000000000..d89b85268
--- /dev/null
+++ b/apt-inst/deb/debfile.h
@@ -0,0 +1,92 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: debfile.h,v 1.2 2001/02/20 07:03:17 jgg Exp $
+/* ######################################################################
+
+ Debian Archive File (.deb)
+
+ This Class handles all the operations performed directly on .deb
+ files. It makes use of the AR and TAR classes to give the necessary
+ external interface.
+
+ There are only two things that can be done with a raw package,
+ extract its control information and extract the contents itself.
+
+ This should probably subclass an as-yet unwritten super class to
+ produce a generic archive mechanism.
+
+ The memory control file extractor is useful to extract a single file
+ into memory from the control.tar.gz
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef PKGLIB_DEBFILE_H
+#define PKGLIB_DEBFILE_H
+
+#ifdef __GNUG__
+#pragma interface "apt-pkg/debfile.h"
+#endif
+
+#include <apt-pkg/arfile.h>
+#include <apt-pkg/database.h>
+#include <apt-pkg/dirstream.h>
+#include <apt-pkg/tagfile.h>
+
+class debDebFile
+{
+ protected:
+
+ FileFd &File;
+ ARArchive AR;
+
+ bool CheckMember(const char *Name);
+
+ public:
+
+ class ControlExtract;
+ class MemControlExtract;
+
+ bool ExtractControl(pkgDataBase &DB);
+ bool ExtractArchive(pkgDirStream &Stream);
+ pkgCache::VerIterator MergeControl(pkgDataBase &DB);
+ const ARArchive::Member *GotoMember(const char *Name);
+ inline FileFd &GetFile() {return File;};
+
+ debDebFile(FileFd &File);
+};
+
+class debDebFile::ControlExtract : public pkgDirStream
+{
+ public:
+
+ virtual bool DoItem(Item &Itm,int &Fd);
+};
+
+class debDebFile::MemControlExtract : public pkgDirStream
+{
+ bool IsControl;
+
+ public:
+
+ char *Control;
+ pkgTagSection Section;
+ unsigned long Length;
+ string Member;
+
+ // Members from DirStream
+ virtual bool DoItem(Item &Itm,int &Fd);
+ virtual bool Process(Item &Itm,const unsigned char *Data,
+ unsigned long Size,unsigned long Pos);
+
+
+ // Helpers
+ bool Read(debDebFile &Deb);
+ bool TakeControl(const void *Data,unsigned long Size);
+
+ MemControlExtract() : IsControl(false), Control(0), Length(0), Member("control") {};
+ MemControlExtract(string Member) : IsControl(false), Control(0), Length(0), Member(Member) {};
+ ~MemControlExtract() {delete [] Control;};
+};
+ /*}}}*/
+
+#endif
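
MergeControl above targets the database path; for a one-off query the MemControlExtract helper declared here is enough on its own. A sketch of pulling one field of the control record into memory (illustrative only; ShowPackageField is a made-up name and Fd is assumed to be an already opened .deb):

    #include <apt-pkg/debfile.h>
    #include <apt-pkg/error.h>
    #include <iostream>
    #include <string>

    bool ShowPackageField(FileFd &Fd)
    {
       debDebFile Deb(Fd);
       debDebFile::MemControlExtract Extract("control");
       if (_error->PendingError() == true)
          return false;
       if (Extract.Read(Deb) == false)   // pulls ./control out of control.tar.gz
          return false;

       const char *Start;
       const char *Stop;
       if (Extract.Section.Find("Package",Start,Stop) == false)
          return _error->Error("Control record has no Package field");
       std::cout << std::string(Start,Stop - Start) << std::endl;
       return true;
    }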
diff --git a/apt-inst/deb/dpkgdb.cc b/apt-inst/deb/dpkgdb.cc
new file mode 100644
index 000000000..85fec1ccd
--- /dev/null
+++ b/apt-inst/deb/dpkgdb.cc
@@ -0,0 +1,490 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: dpkgdb.cc,v 1.2 2001/02/20 07:03:17 jgg Exp $
+/* ######################################################################
+
+ DPKGv1 Database Implementation
+
+ This class provides parsers and other implementations for the DPKGv1
+ database. It reads the diversion file, the list files and the status
+ file to build both the list of currently installed files and the
+ currently installed package list.
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#ifdef __GNUG__
+#pragma implementation "apt-pkg/dpkgdb.h"
+#endif
+
+#include <apt-pkg/dpkgdb.h>
+#include <apt-pkg/configuration.h>
+#include <apt-pkg/error.h>
+#include <apt-pkg/progress.h>
+#include <apt-pkg/tagfile.h>
+#include <apt-pkg/strutl.h>
+
+#include <stdio.h>
+#include <errno.h>
+#include <sys/stat.h>
+#include <sys/mman.h>
+#include <fcntl.h>
+#include <unistd.h>
+ /*}}}*/
+
+// EraseDir - Erase A Directory /*{{{*/
+// ---------------------------------------------------------------------
+/* This is necessary to create a new empty sub directory. The caller should
+ invoke mkdir after this with the proper permissions and check for
+ error. Maybe stick this in fileutils */
+static bool EraseDir(const char *Dir)
+{
+ // First we try a simple RM
+ if (rmdir(Dir) == 0 ||
+ errno == ENOENT)
+ return true;
+
+ // A file? Easy enough..
+ if (errno == ENOTDIR)
+ {
+ if (unlink(Dir) != 0)
+ return _error->Errno("unlink","Failed to remove %s",Dir);
+ return true;
+ }
+
+ // Should not happen
+ if (errno != ENOTEMPTY)
+ return _error->Errno("rmdir","Failed to remove %s",Dir);
+
+ // Purge it using rm
+ int Pid = ExecFork();
+
+ // Spawn the subprocess
+ if (Pid == 0)
+ {
+ execlp(_config->Find("Dir::Bin::rm","/bin/rm").c_str(),
+ "rm","-rf","--",Dir,0);
+ _exit(100);
+ }
+ return ExecWait(Pid,_config->Find("dir::bin::rm","/bin/rm").c_str());
+}
+ /*}}}*/
+// DpkgDB::debDpkgDB - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+debDpkgDB::debDpkgDB() : CacheMap(0), FileMap(0)
+{
+ AdminDir = flNotFile(_config->Find("Dir::State::status"));
+ DiverInode = 0;
+ DiverTime = 0;
+}
+ /*}}}*/
+// DpkgDB::~debDpkgDB - Destructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+debDpkgDB::~debDpkgDB()
+{
+ delete Cache;
+ Cache = 0;
+ delete CacheMap;
+ CacheMap = 0;
+
+ delete FList;
+ FList = 0;
+ delete FileMap;
+ FileMap = 0;
+}
+ /*}}}*/
+// DpkgDB::InitMetaTmp - Get the temp dir for meta information /*{{{*/
+// ---------------------------------------------------------------------
+/* This creates+empties the meta temporary directory /var/lib/dpkg/tmp.ci
+ Only one package at a time can be using the returned meta directory. */
+bool debDpkgDB::InitMetaTmp(string &Dir)
+{
+ string Tmp = AdminDir + "tmp.ci/";
+ if (EraseDir(Tmp.c_str()) == false)
+ return _error->Error("Unable to create %s",Tmp.c_str());
+ if (mkdir(Tmp.c_str(),0755) != 0)
+ return _error->Errno("mkdir","Unable to create %s",Tmp.c_str());
+
+ // Verify it is on the same filesystem as the main info directory
+ dev_t Dev;
+ struct stat St;
+ if (stat((AdminDir + "info").c_str(),&St) != 0)
+ return _error->Errno("stat","Failed to stat %sinfo",AdminDir.c_str());
+ Dev = St.st_dev;
+ if (stat(Tmp.c_str(),&St) != 0)
+ return _error->Errno("stat","Failed to stat %s",Tmp.c_str());
+ if (Dev != St.st_dev)
+ return _error->Error("The info and temp directories need to be on the same filesystem");
+
+ // Done
+ Dir = Tmp;
+ return true;
+}
+ /*}}}*/
+// DpkgDB::ReadyPkgCache - Prepare the cache with the current status /*{{{*/
+// ---------------------------------------------------------------------
+/* This reads the status file into an empty cache. This really needs
+ to be unified somehow with the high level APT notion of the Database
+ directory, but there is no clear way to do that yet. */
+bool debDpkgDB::ReadyPkgCache(OpProgress &Progress)
+{
+ if (Cache != 0)
+ {
+ Progress.OverallProgress(1,1,1,"Reading Package Lists");
+ return true;
+ }
+
+ if (CacheMap != 0)
+ {
+ delete CacheMap;
+ CacheMap = 0;
+ }
+
+ if (pkgMakeOnlyStatusCache(Progress,&CacheMap) == false)
+ return false;
+ Cache->DropProgress();
+
+ return true;
+}
+ /*}}}*/
+// DpkgDB::ReadFList - Read the File Listings in /*{{{*/
+// ---------------------------------------------------------------------
+/* This reads the file listing in from the state directory. This is a
+ performance critical routine, as it needs to parse about 50k lines of
+ text spread over a hundred or more files. For an initial cold start
+ most of the time is spent in reading file inodes and so on, not
+ actually parsing. */
+bool debDpkgDB::ReadFList(OpProgress &Progress)
+{
+ // Count the number of packages we need to read information for
+ unsigned long Total = 0;
+ pkgCache &Cache = this->Cache->GetCache();
+ for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
+ {
+ // Only not installed packages have no files.
+ if (I->CurrentState == pkgCache::State::NotInstalled)
+ continue;
+ Total++;
+ }
+
+ /* Switch into the admin dir, this prevents useless lookups for the
+ path components */
+ string Cwd = SafeGetCWD();
+ if (chdir((AdminDir + "info/").c_str()) != 0)
+ return _error->Errno("chdir","Failed to change to the admin dir %sinfo",AdminDir.c_str());
+
+ // Allocate a buffer. Anything larger than this buffer will be mmaped
+ unsigned long BufSize = 32*1024;
+ char *Buffer = new char[BufSize];
+
+ // Begin Loading them
+ unsigned long Count = 0;
+ char Name[300];
+ for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
+ {
+ /* Only not installed packages have no files. ConfFile packages have
+ file lists but we don't want to read them in */
+ if (I->CurrentState == pkgCache::State::NotInstalled ||
+ I->CurrentState == pkgCache::State::ConfigFiles)
+ continue;
+
+ // Fetch a package handle to associate with the file
+ pkgFLCache::PkgIterator FlPkg = FList->GetPkg(I.Name(),0,true);
+ if (FlPkg.end() == true)
+ {
+ _error->Error("Internal Error getting a Package Name");
+ break;
+ }
+
+ Progress.OverallProgress(Count,Total,1,"Reading File Listing");
+
+ // Open the list file
+ snprintf(Name,sizeof(Name),"%s.list",I.Name());
+ int Fd = open(Name,O_RDONLY);
+
+ /* Okay this is very strange and bad.. Best thing is to bail and
+ instruct the user to look into it. */
+ struct stat Stat;
+ if (Fd == -1 || fstat(Fd,&Stat) != 0)
+ {
+ _error->Errno("open","Failed to open the list file '%sinfo/%s'. If you "
+ "cannot restore this file then make it empty "
+ "and immediately re-install the same version of the package!",
+ AdminDir.c_str(),Name);
+ break;
+ }
+
+ // Set File to be a memory buffer containing the whole file
+ char *File;
+ if ((unsigned)Stat.st_size < BufSize)
+ {
+ if (read(Fd,Buffer,Stat.st_size) != Stat.st_size)
+ {
+ _error->Errno("read","Failed reading the list file %sinfo/%s",
+ AdminDir.c_str(),Name);
+ close(Fd);
+ break;
+ }
+ File = Buffer;
+ }
+ else
+ {
+ // Use mmap
+ File = (char *)mmap(0,Stat.st_size,PROT_READ,MAP_PRIVATE,Fd,0);
+ if (File == (char *)(-1))
+ {
+ _error->Errno("mmap","Failed reading the list file %sinfo/%s",
+ AdminDir.c_str(),Name);
+ close(Fd);
+ break;
+ }
+ }
+
+ // Parse it
+ const char *Start = File;
+ const char *End = File;
+ const char *Finish = File + Stat.st_size;
+ for (; End < Finish; End++)
+ {
+ // Not an end of line
+ if (*End != '\n' && End + 1 < Finish)
+ continue;
+
+ // Skip blank lines
+ if (End - Start > 1)
+ {
+ pkgFLCache::NodeIterator Node = FList->GetNode(Start,End,
+ FlPkg.Offset(),true,false);
+ if (Node.end() == true)
+ {
+ _error->Error("Internal Error getting a Node");
+ break;
+ }
+ }
+
+ // Skip past the end of line
+ for (; *End == '\n' && End < Finish; End++);
+ Start = End;
+ }
+
+ close(Fd);
+ if ((unsigned)Stat.st_size >= BufSize)
+ munmap((caddr_t)File,Stat.st_size);
+
+ // Failed
+ if (End < Finish)
+ break;
+
+ Count++;
+ }
+
+ delete [] Buffer;
+ if (chdir(Cwd.c_str()) != 0)
+ chdir("/");
+
+ return !_error->PendingError();
+}
+ /*}}}*/
+// DpkgDB::ReadDiversions - Load the diversions file /*{{{*/
+// ---------------------------------------------------------------------
+/* Read the diversion file in from disk. This is usually invoked by
+ LoadChanges before performing an operation that uses the FLCache. */
+bool debDpkgDB::ReadDiversions()
+{
+ struct stat Stat;
+ if (stat((AdminDir + "diversions").c_str(),&Stat) != 0)
+ return true;
+
+ if (_error->PendingError() == true)
+ return false;
+
+ FILE *Fd = fopen((AdminDir + "diversions").c_str(),"r");
+ if (Fd == 0)
+ return _error->Errno("fopen","Failed to open the diversions file %sdiversions",AdminDir.c_str());
+
+ FList->BeginDiverLoad();
+ while (1)
+ {
+ char From[300];
+ char To[300];
+ char Package[100];
+
+ // Read the three lines in
+ if (fgets(From,sizeof(From),Fd) == 0)
+ break;
+ if (fgets(To,sizeof(To),Fd) == 0 ||
+ fgets(Package,sizeof(Package),Fd) == 0)
+ {
+ _error->Error("The diversion file is corrupted");
+ break;
+ }
+
+ // Strip the \ns
+ unsigned long Len = strlen(From);
+ if (Len < 2 || From[Len-1] != '\n')
+ _error->Error("Invalid line in the diversion file: %s",From);
+ else
+ From[Len-1] = 0;
+ Len = strlen(To);
+ if (Len < 2 || To[Len-1] != '\n')
+ _error->Error("Invalid line in the diversion file: %s",To);
+ else
+ To[Len-1] = 0;
+ Len = strlen(Package);
+ if (Len < 2 || Package[Len-1] != '\n')
+ _error->Error("Invalid line in the diversion file: %s",Package);
+ else
+ Package[Len-1] = 0;
+
+ // Make sure the lines were parsed OK
+ if (_error->PendingError() == true)
+ break;
+
+ // Fetch a package
+ if (strcmp(Package,":") == 0)
+ Package[0] = 0;
+ pkgFLCache::PkgIterator FlPkg = FList->GetPkg(Package,0,true);
+ if (FlPkg.end() == true)
+ {
+ _error->Error("Internal Error getting a Package Name");
+ break;
+ }
+
+ // Install the diversion
+ if (FList->AddDiversion(FlPkg,From,To) == false)
+ {
+ _error->Error("Internal Error adding a diversion");
+ break;
+ }
+ }
+ if (_error->PendingError() == false)
+ FList->FinishDiverLoad();
+
+ DiverInode = Stat.st_ino;
+ DiverTime = Stat.st_mtime;
+
+ fclose(Fd);
+ return !_error->PendingError();
+}
+ /*}}}*/
+// DpkgDB::ReadFileList - Read the file listing /*{{{*/
+// ---------------------------------------------------------------------
+/* Read in the file listing. The file listing is created from three
+ sources, *.list, Conffile sections and the Diversion table. */
+bool debDpkgDB::ReadyFileList(OpProgress &Progress)
+{
+ if (Cache == 0)
+ return _error->Error("The pkg cache must be initialized first");
+ if (FList != 0)
+ {
+ Progress.OverallProgress(1,1,1,"Reading File List");
+ return true;
+ }
+
+ // Create the cache and read in the file listing
+ FileMap = new DynamicMMap(MMap::Public);
+ FList = new pkgFLCache(*FileMap);
+ if (_error->PendingError() == true ||
+ ReadFList(Progress) == false ||
+ ReadConfFiles() == false ||
+ ReadDiversions() == false)
+ {
+ delete FList;
+ delete FileMap;
+ FileMap = 0;
+ FList = 0;
+ return false;
+ }
+
+ cout << "Node: " << FList->HeaderP->NodeCount << ',' << FList->HeaderP->UniqNodes << endl;
+ cout << "Dir: " << FList->HeaderP->DirCount << endl;
+ cout << "Package: " << FList->HeaderP->PackageCount << endl;
+ cout << "HashSize: " << FList->HeaderP->HashSize << endl;
+ cout << "Size: " << FileMap->Size() << endl;
+ cout << endl;
+
+ return true;
+}
+ /*}}}*/
+// DpkgDB::ReadConfFiles - Read the conf file sections from the s-file /*{{{*/
+// ---------------------------------------------------------------------
+/* Reading the conf files is done by reparsing the status file. This is
+ actually rather fast so it is no big deal. */
+bool debDpkgDB::ReadConfFiles()
+{
+ FileFd File(_config->FindFile("Dir::State::status"),FileFd::ReadOnly);
+ pkgTagFile Tags(&File);
+ if (_error->PendingError() == true)
+ return false;
+
+ pkgTagSection Section;
+ while (1)
+ {
+ // Skip to the next section
+ unsigned long Offset = Tags.Offset();
+ if (Tags.Step(Section) == false)
+ break;
+
+ // Parse the line
+ const char *Start;
+ const char *Stop;
+ if (Section.Find("Conffiles",Start,Stop) == false)
+ continue;
+
+ const char *PkgStart;
+ const char *PkgEnd;
+ if (Section.Find("Package",PkgStart,PkgEnd) == false)
+ return _error->Error("Failed to find a Package: Header, offset %lu",Offset);
+
+ // Snag a package record for it
+ pkgFLCache::PkgIterator FlPkg = FList->GetPkg(PkgStart,PkgEnd,true);
+ if (FlPkg.end() == true)
+ return _error->Error("Internal Error getting a Package Name");
+
+ // Parse the conf file lines
+ while (1)
+ {
+ for (; isspace(*Start) != 0 && Start < Stop; Start++);
+ if (Start == Stop)
+ break;
+
+ // Split it into words
+ const char *End = Start;
+ for (; isspace(*End) == 0 && End < Stop; End++);
+ const char *StartMd5 = End;
+ for (; isspace(*StartMd5) != 0 && StartMd5 < Stop; StartMd5++);
+ const char *EndMd5 = StartMd5;
+ for (; isspace(*EndMd5) == 0 && EndMd5 < Stop; EndMd5++);
+ if (StartMd5 == EndMd5 || Start == End)
+ return _error->Error("Bad ConfFile section in the status file. Offset %lu",Offset);
+
+ // Insert a new entry
+ unsigned char MD5[16];
+ if (Hex2Num(StartMd5,EndMd5,MD5,16) == false)
+ return _error->Error("Error parsing MD5. Offset %lu",Offset);
+
+ if (FList->AddConfFile(Start,End,FlPkg,MD5) == false)
+ return false;
+ Start = EndMd5;
+ }
+ }
+
+ return true;
+}
+ /*}}}*/
+// DpkgDB::LoadChanges - Read in any changed state files /*{{{*/
+// ---------------------------------------------------------------------
+/* The only file in the dpkg system that can change while packages are
+ unpacking is the diversions file. */
+bool debDpkgDB::LoadChanges()
+{
+ struct stat Stat;
+ if (stat((AdminDir + "diversions").c_str(),&Stat) != 0)
+ return true;
+ if (DiverInode == Stat.st_ino && DiverTime == Stat.st_mtime)
+ return true;
+ return ReadDiversions();
+}
+ /*}}}*/
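
For reference, ReadDiversions above expects the dpkg diversions file to be a flat sequence of line triples: the diverted path, the path it is diverted to, and the owning package, with ":" standing in for a local diversion. A hypothetical two-entry file would therefore look like the following (the paths and the package name are invented for illustration):

    /usr/bin/foo
    /usr/bin/foo.distrib
    :
    /usr/share/example
    /usr/share/example.real
    example-pkg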
diff --git a/apt-inst/deb/dpkgdb.h b/apt-inst/deb/dpkgdb.h
new file mode 100644
index 000000000..ddbb6d6f2
--- /dev/null
+++ b/apt-inst/deb/dpkgdb.h
@@ -0,0 +1,53 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: dpkgdb.h,v 1.2 2001/02/20 07:03:17 jgg Exp $
+/* ######################################################################
+
+ DPKGv1 Data Base Implementation
+
+ The DPKGv1 database is typically stored in /var/lib/dpkg/. For
+ DPKGv1 the 'meta' information is the contents of the .deb control.tar.gz
+ member prepended by the package name. The meta information is unpacked
+ in its temporary directory and then migrated into the main list dir
+ at a checkpoint.
+
+ Journaling is provided by synchronized file writes to the updates sub
+ directory.
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef PKGLIB_DPKGDB_H
+#define PKGLIB_DPKGDB_H
+
+#ifdef __GNUG__
+#pragma interface "apt-pkg/dpkgdb.h"
+#endif
+
+#include <apt-pkg/database.h>
+
+class debDpkgDB : public pkgDataBase
+{
+ protected:
+
+ string AdminDir;
+ DynamicMMap *CacheMap;
+ DynamicMMap *FileMap;
+ unsigned long DiverInode;
+ signed long DiverTime;
+
+ virtual bool InitMetaTmp(string &Dir);
+ bool ReadFList(OpProgress &Progress);
+ bool ReadDiversions();
+ bool ReadConfFiles();
+
+ public:
+
+ virtual bool ReadyFileList(OpProgress &Progress);
+ virtual bool ReadyPkgCache(OpProgress &Progress);
+ virtual bool LoadChanges();
+
+ debDpkgDB();
+ virtual ~debDpkgDB();
+};
+
+#endif
diff --git a/apt-inst/dirstream.cc b/apt-inst/dirstream.cc
new file mode 100644
index 000000000..41dbf4403
--- /dev/null
+++ b/apt-inst/dirstream.cc
@@ -0,0 +1,103 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: dirstream.cc,v 1.2 2001/02/20 07:03:16 jgg Exp $
+/* ######################################################################
+
+ Directory Stream
+
+ This class provides a simple basic extractor that can be used for
+ a number of purposes.
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#ifdef __GNUG__
+#pragma implementation "apt-pkg/dirstream.h"
+#endif
+
+#include <apt-pkg/dirstream.h>
+#include <apt-pkg/error.h>
+
+#include <fcntl.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <errno.h>
+#include <utime.h>
+#include <unistd.h>
+ /*}}}*/
+
+// DirStream::DoItem - Process an item /*{{{*/
+// ---------------------------------------------------------------------
+/* This is a very simple extractor, it does not deal with things like
+ overwriting directories with files and so on. */
+bool pkgDirStream::DoItem(Item &Itm,int &Fd)
+{
+ switch (Itm.Type)
+ {
+ case Item::File:
+ {
+ /* Open the output file, NDELAY is used to prevent this from
+ blowing up on device special files.. */
+ int iFd = open(Itm.Name,O_NDELAY|O_WRONLY|O_CREAT|O_TRUNC|O_APPEND,
+ Itm.Mode);
+ if (iFd < 0)
+ return _error->Errno("open","Failed to write file %s",
+ Itm.Name);
+
+ // fchmod deals with umask and fchown sets the ownership
+ if (fchmod(iFd,Itm.Mode) != 0)
+ return _error->Errno("fchmod","Failed to write file %s",
+ Itm.Name);
+ if (fchown(iFd,Itm.UID,Itm.GID) != 0 && errno != EPERM)
+ return _error->Errno("fchown","Failed to write file %s",
+ Itm.Name);
+ Fd = iFd;
+ return true;
+ }
+
+ case Item::HardLink:
+ case Item::SymbolicLink:
+ case Item::CharDevice:
+ case Item::BlockDevice:
+ case Item::Directory:
+ case Item::FIFO:
+ break;
+ }
+
+ return true;
+}
+ /*}}}*/
+// DirStream::FinishedFile - Finished processing a file /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool pkgDirStream::FinishedFile(Item &Itm,int Fd)
+{
+ if (Fd < 0)
+ return true;
+
+ if (close(Fd) != 0)
+ return _error->Errno("close","Failed to close file %s",Itm.Name);
+
+ /* Set the modification times. The only way it can fail is if someone
+ has futzed with our file, which is intolerable :> */
+ struct utimbuf Time;
+ Time.actime = Itm.MTime;
+ Time.modtime = Itm.MTime;
+ if (utime(Itm.Name,&Time) != 0)
+ _error->Errno("utime","Failed to close file %s",Itm.Name);
+
+ return true;
+}
+ /*}}}*/
+// DirStream::Fail - Failed processing a file /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool pkgDirStream::Fail(Item &Itm,int Fd)
+{
+ if (Fd < 0)
+ return true;
+
+ close(Fd);
+ return false;
+}
+ /*}}}*/
diff --git a/apt-inst/dirstream.h b/apt-inst/dirstream.h
new file mode 100644
index 000000000..dfb480bd4
--- /dev/null
+++ b/apt-inst/dirstream.h
@@ -0,0 +1,61 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: dirstream.h,v 1.2 2001/02/20 07:03:16 jgg Exp $
+/* ######################################################################
+
+ Directory Stream
+
+   When unpacking, the contents of the archive are passed into a directory
+ stream class for analysis and processing. The class controls all aspects
+   of actually writing the directory stream to disk. The low level
+ archive handlers are only responsible for decoding the archive format
+ and sending events (via method calls) to the specified directory
+ stream.
+
+ When unpacking a real file the archive handler is passed back a file
+   handle to write the data to; this is to support strange
+ archives+unpacking methods. If that fd is -1 then the file data is
+ simply ignored.
+
+ The provided defaults do the 'Right Thing' for a normal unpacking
+   process (i.e. 'tar').
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef PKGLIB_DIRSTREAM_H
+#define PKGLIB_DIRSTREAM_H
+
+#ifdef __GNUG__
+#pragma interface "apt-pkg/dirstream.h"
+#endif
+
+class pkgDirStream
+{
+ public:
+
+ // All possible information about a component
+ struct Item
+ {
+ enum Type_t {File, HardLink, SymbolicLink, CharDevice, BlockDevice,
+ Directory, FIFO} Type;
+ char *Name;
+ char *LinkTarget;
+ unsigned long Mode;
+ unsigned long UID;
+ unsigned long GID;
+ unsigned long Size;
+ unsigned long MTime;
+ unsigned long Major;
+ unsigned long Minor;
+ };
+
+ virtual bool DoItem(Item &Itm,int &Fd);
+ virtual bool Fail(Item &Itm,int Fd);
+ virtual bool FinishedFile(Item &Itm,int Fd);
+ virtual bool Process(Item &Itm,const unsigned char *Data,
+ unsigned long Size,unsigned long Pos) {return true;};
+
+ virtual ~pkgDirStream() {};
+};
+
+#endif
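The dirstream.h interface above defines the whole event protocol: the archive
handler only calls DoItem/Process/FinishedFile/Fail, and the stream decides
what to do with each member. A minimal sketch of a custom stream (a
hypothetical pkgListStream, not part of this patch) that merely lists the
members instead of writing them:

   #include <apt-pkg/dirstream.h>
   #include <iostream>

   class pkgListStream : public pkgDirStream
   {
      public:

      virtual bool DoItem(Item &Itm,int &Fd)
      {
         std::cout << Itm.Name << " (" << Itm.Size << " bytes)" << std::endl;
         Fd = -1;    // -1 tells the archive handler to discard the file data
         return true;
      }
   };

Because Fd is left at -1, the default FinishedFile and Fail implementations are
no-ops, so nothing is ever created on disk.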
diff --git a/apt-inst/dpkg-diffs.txt b/apt-inst/dpkg-diffs.txt
new file mode 100644
index 000000000..d161055f7
--- /dev/null
+++ b/apt-inst/dpkg-diffs.txt
@@ -0,0 +1,5 @@
+- Replacing directories with files
+ dpkg permits this with the weak condition that the directory is owned only
+ by the package. APT requires that the directory have no files that are not
+ owned by the package. Replaces are specifically not checked to prevent
+ file list corruption.
diff --git a/apt-inst/extract.cc b/apt-inst/extract.cc
new file mode 100644
index 000000000..a3c06c088
--- /dev/null
+++ b/apt-inst/extract.cc
@@ -0,0 +1,509 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: extract.cc,v 1.2 2001/02/20 07:03:16 jgg Exp $
+/* ######################################################################
+
+ Archive Extraction Directory Stream
+
+ Extraction for each file is a bit of an involved process. Each object
+ undergoes an atomic backup, overwrite, erase sequence. First the
+   object is unpacked to '.dpkg-new', then the original is hardlinked to
+   '.dpkg-tmp' and finally the new object is renamed to overwrite the old
+   one. From an external perspective the file never ceases to exist.
+   After the archive has been successfully unpacked the .dpkg-tmp files
+   are erased. A failure causes all the .dpkg-tmp files to be restored.
+
+ Decisions about unpacking go like this:
+ - Store the original filename in the file listing
+    - Resolve any diversions that would affect this file; all checks
+      below apply to the diverted name, not the real one.
+ - Resolve any symlinked configuration files.
+ - If the existing file does not exist then .dpkg-tmp is checked for.
+ [Note, this is reduced to only check if a file was expected to be
+ there]
+ - If the existing link/file is not a directory then it is replaced
+      regardless
+ - If the existing link/directory is being replaced by a directory then
+ absolutely nothing happens.
+ - If the existing link/directory is being replaced by a link then
+ absolutely nothing happens.
+ - If the existing link/directory is being replaced by a non-directory
+ then this will abort if the package is not the sole owner of the
+      directory. [Note, this is changed to not happen as long as the
+      directory includes only files that are part of this package - this
+      prevents removing user files accidentally.]
+ - If the non-directory exists in the listing database and it
+ does not belong to the current package then an overwrite condition
+ is invoked.
+
+ As we unpack we record the file list differences in the FL cache. If
+   we need to unroll then the FL cache knows which files have been unpacked
+   and can undo them. When we need to erase, it knows which files have not
+ been unpacked.
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#ifdef __GNUG__
+#pragma implementation "apt-pkg/extract.h"
+#endif
+#include <apt-pkg/extract.h>
+#include <apt-pkg/error.h>
+#include <apt-pkg/debversion.h>
+
+#include <sys/stat.h>
+#include <stdio.h>
+#include <unistd.h>
+#include <errno.h>
+#include <dirent.h>
+ /*}}}*/
+
+static const char *TempExt = "dpkg-tmp";
+//static const char *NewExt = "dpkg-new";
+
+// Extract::pkgExtract - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+pkgExtract::pkgExtract(pkgFLCache &FLCache,pkgCache::VerIterator Ver) :
+ FLCache(FLCache), Ver(Ver)
+{
+ FLPkg = FLCache.GetPkg(Ver.ParentPkg().Name(),true);
+ if (FLPkg.end() == true)
+ return;
+ Debug = true;
+}
+ /*}}}*/
+// Extract::DoItem - Handle a single item from the stream /*{{{*/
+// ---------------------------------------------------------------------
+/* This performs the setup for the extraction.. */
+bool pkgExtract::DoItem(Item &Itm,int &Fd)
+{
+ char Temp[sizeof(FileName)];
+
+ /* Strip any leading/trailing /s from the filename, then copy it to the
+      temp buffer and re-apply the leading '/'. We use a class variable
+ to store the new filename for use by the three extraction funcs */
+ char *End = FileName+1;
+ const char *I = Itm.Name;
+ for (; *I != 0 && *I == '/'; I++);
+ *FileName = '/';
+ for (; *I != 0 && End < FileName + sizeof(FileName); I++, End++)
+ *End = *I;
+ if (End + 20 >= FileName + sizeof(FileName))
+ return _error->Error("The path %s is too long",Itm.Name);
+ for (; End > FileName && End[-1] == '/'; End--);
+ *End = 0;
+ Itm.Name = FileName;
+
+ /* Lookup the file. Nde is the file [group] we are going to write to and
+ RealNde is the actual node we are manipulating. Due to diversions
+ they may be entirely different. */
+ pkgFLCache::NodeIterator Nde = FLCache.GetNode(Itm.Name,End,0,false,false);
+ pkgFLCache::NodeIterator RealNde = Nde;
+
+ // See if the file is already in the file listing
+ unsigned long FileGroup = RealNde->File;
+ for (; RealNde.end() == false && FileGroup == RealNde->File; RealNde++)
+ if (RealNde.RealPackage() == FLPkg)
+ break;
+
+ // Nope, create an entry
+ if (RealNde.end() == true)
+ {
+ RealNde = FLCache.GetNode(Itm.Name,End,FLPkg.Offset(),true,false);
+ if (RealNde.end() == true)
+ return false;
+ RealNde->Flags |= pkgFLCache::Node::NewFile;
+ }
+
+ /* Check if this entry already was unpacked. The only time this should
+ ever happen is if someone has hacked tar to support capabilities, in
+ which case this needs to be modified anyhow.. */
+ if ((RealNde->Flags & pkgFLCache::Node::Unpacked) ==
+ pkgFLCache::Node::Unpacked)
+ return _error->Error("Unpacking %s more than once",Itm.Name);
+
+ if (Nde.end() == true)
+ Nde = RealNde;
+
+ /* Consider a diverted file - We are not permitted to divert directories,
+ but everything else is fair game (including conf files!) */
+ if ((Nde->Flags & pkgFLCache::Node::Diversion) != 0)
+ {
+ if (Itm.Type == Item::Directory)
+ return _error->Error("The directory %s is diverted",Itm.Name);
+
+ /* A package overwriting a diversion target is just the same as
+ overwriting a normally owned file and is checked for below in
+ the overwrites mechanism */
+
+ /* If this package is trying to overwrite the target of a diversion,
+ that is never, ever permitted */
+ pkgFLCache::DiverIterator Div = Nde.Diversion();
+ if (Div.DivertTo() == Nde)
+ return _error->Error("The package is trying to write to the "
+ "diversion target %s/%s",Nde.DirN(),Nde.File());
+
+ // See if it is us and we are following it in the right direction
+ if (Div->OwnerPkg != FLPkg.Offset() && Div.DivertFrom() == Nde)
+ {
+ Nde = Div.DivertTo();
+ End = FileName + snprintf(FileName,sizeof(FileName)-20,"%s/%s",
+ Nde.DirN(),Nde.File());
+ if (End <= FileName)
+ return _error->Error("The diversion path is too long");
+ }
+ }
+
+ // Deal with symlinks and conf files
+ if ((RealNde->Flags & pkgFLCache::Node::NewConfFile) ==
+ pkgFLCache::Node::NewConfFile)
+ {
+ string Res = flNoLink(Itm.Name);
+ if (Res.length() > sizeof(FileName))
+ return _error->Error("The path %s is too long",Res.c_str());
+ if (Debug == true)
+ clog << "Followed conf file from " << FileName << " to " << Res << endl;
+ Itm.Name = strcpy(FileName,Res.c_str());
+ }
+
+ /* Get information about the existing file, and attempt to restore
+ a backup if it does not exist */
+ struct stat LExisting;
+ bool EValid = false;
+ if (lstat(Itm.Name,&LExisting) != 0)
+ {
+ // This is bad news.
+ if (errno != ENOENT)
+ return _error->Errno("stat","Failed to stat %s",Itm.Name);
+
+ // See if we can recover the backup file
+ if (Nde.end() == false)
+ {
+ snprintf(Temp,sizeof(Temp),"%s.%s",Itm.Name,TempExt);
+ if (rename(Temp,Itm.Name) != 0 && errno != ENOENT)
+ return _error->Errno("rename","Failed to rename %s to %s",
+ Temp,Itm.Name);
+ if (stat(Itm.Name,&LExisting) != 0)
+ {
+ if (errno != ENOENT)
+ return _error->Errno("stat","Failed to stat %s",Itm.Name);
+ }
+ else
+ EValid = true;
+ }
+ }
+ else
+ EValid = true;
+
+ /* If the file is a link we need to stat its destination, get the
+ existing file modes */
+ struct stat Existing = LExisting;
+ if (EValid == true && S_ISLNK(Existing.st_mode))
+ {
+ if (stat(Itm.Name,&Existing) != 0)
+ {
+ if (errno != ENOENT)
+ return _error->Errno("stat","Failed to stat %s",Itm.Name);
+ Existing = LExisting;
+ }
+ }
+
+ // We pretend a non-existing file looks like it is a normal file
+ if (EValid == false)
+ Existing.st_mode = S_IFREG;
+
+ /* Okay, at this point 'Existing' is the stat information for the
+ real non-link file */
+
+ /* The only way this can be a no-op is if a directory is being
+ replaced by a directory or by a link */
+ if (S_ISDIR(Existing.st_mode) != 0 &&
+ (Itm.Type == Item::Directory || Itm.Type == Item::SymbolicLink))
+ return true;
+
+   /* Non-directory being replaced by non-directory. We check for
+      overwrites here. */
+ if (Nde.end() == false)
+ {
+ if (HandleOverwrites(Nde) == false)
+ return false;
+ }
+
+ /* Directory being replaced by a non-directory - this needs to see if
+ the package is the owner and then see if the directory would be
+ empty after the package is removed [ie no user files will be
+ erased] */
+ if (S_ISDIR(Existing.st_mode) != 0)
+ {
+ if (CheckDirReplace(Itm.Name) == false)
+ return _error->Error("The directory %s is being replaced by a non-directory",Itm.Name);
+ }
+
+ if (Debug == true)
+ clog << "Extract " << string(Itm.Name,End) << endl;
+/* if (Count != 0)
+ return _error->Error("Done");*/
+
+ return true;
+}
+ /*}}}*/
+// Extract::Finished - Sequence finished, erase the temp files /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool pkgExtract::Finished()
+{
+ return true;
+}
+ /*}}}*/
+// Extract::Aborted - Sequence aborted, undo all our unpacking /*{{{*/
+// ---------------------------------------------------------------------
+/* This undoes everything that was done by all calls to the DoItem method
+ and restores the File Listing cache to its original form. It bases its
+ actions on the flags value for each node in the cache. */
+bool pkgExtract::Aborted()
+{
+ if (Debug == true)
+ clog << "Aborted, backing out" << endl;
+
+ pkgFLCache::NodeIterator Files = FLPkg.Files();
+ map_ptrloc *Last = &FLPkg->Files;
+
+   /* Loop over all files, restoring those that have been unpacked from their
+      dpkg-tmp entries */
+ while (Files.end() == false)
+ {
+ // Locate the hash bucket for the node and locate its group head
+ pkgFLCache::NodeIterator Nde(FLCache,FLCache.HashNode(Files));
+ for (; Nde.end() == false && Files->File != Nde->File; Nde++);
+ if (Nde.end() == true)
+ return _error->Error("Failed to locate node in its hash bucket");
+
+ if (snprintf(FileName,sizeof(FileName)-20,"%s/%s",
+ Nde.DirN(),Nde.File()) <= 0)
+ return _error->Error("The path is too long");
+
+ // Deal with diversions
+ if ((Nde->Flags & pkgFLCache::Node::Diversion) != 0)
+ {
+ pkgFLCache::DiverIterator Div = Nde.Diversion();
+
+ // See if it is us and we are following it in the right direction
+ if (Div->OwnerPkg != FLPkg.Offset() && Div.DivertFrom() == Nde)
+ {
+ Nde = Div.DivertTo();
+ if (snprintf(FileName,sizeof(FileName)-20,"%s/%s",
+ Nde.DirN(),Nde.File()) <= 0)
+ return _error->Error("The diversion path is too long");
+ }
+ }
+
+ // Deal with overwrites+replaces
+ for (; Nde.end() == false && Files->File == Nde->File; Nde++)
+ {
+ if ((Nde->Flags & pkgFLCache::Node::Replaced) ==
+ pkgFLCache::Node::Replaced)
+ {
+ if (Debug == true)
+ clog << "De-replaced " << FileName << " from " << Nde.RealPackage()->Name << endl;
+ Nde->Flags &= ~pkgFLCache::Node::Replaced;
+ }
+ }
+
+ // Undo the change in the filesystem
+ if (Debug == true)
+ clog << "Backing out " << FileName;
+
+ // Remove a new node
+ if ((Files->Flags & pkgFLCache::Node::NewFile) ==
+ pkgFLCache::Node::NewFile)
+ {
+ if (Debug == true)
+ clog << " [new node]" << endl;
+ pkgFLCache::Node *Tmp = Files;
+ Files++;
+ *Last = Tmp->NextPkg;
+ Tmp->NextPkg = 0;
+
+ FLCache.DropNode(Tmp - FLCache.NodeP);
+ }
+ else
+ {
+ if (Debug == true)
+ clog << endl;
+
+ Last = &Files->NextPkg;
+ Files++;
+ }
+ }
+
+ return true;
+}
+ /*}}}*/
+// Extract::Fail - Extraction of a file Failed /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool pkgExtract::Fail(Item &Itm,int Fd)
+{
+ return pkgDirStream::Fail(Itm,Fd);
+}
+ /*}}}*/
+// Extract::FinishedFile - Finished a file /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool pkgExtract::FinishedFile(Item &Itm,int Fd)
+{
+ return pkgDirStream::FinishedFile(Itm,Fd);
+}
+ /*}}}*/
+// Extract::HandleOverwrites - See if a replaces covers this overwrite /*{{{*/
+// ---------------------------------------------------------------------
+/* Check if the file is in a package that is being replaced by this
+ package or if the file is being overwritten. Note that if the file
+ is really a directory but it has been erased from the filesystem
+ this will fail with an overwrite message. This is a limitation of the
+ dpkg file information format.
+
+ XX If a new package installs and another package replaces files in this
+ package what should we do? */
+bool pkgExtract::HandleOverwrites(pkgFLCache::NodeIterator Nde,
+ bool DiverCheck)
+{
+ pkgFLCache::NodeIterator TmpNde = Nde;
+ unsigned long DiverOwner = 0;
+ unsigned long FileGroup = Nde->File;
+ const char *FirstOwner = 0;
+ for (; Nde.end() == false && FileGroup == Nde->File; Nde++)
+ {
+ if ((Nde->Flags & pkgFLCache::Node::Diversion) != 0)
+ {
+ /* Store the diversion owner if this is the forward direction
+ of the diversion */
+ if (DiverCheck == true)
+ DiverOwner = Nde.Diversion()->OwnerPkg;
+ continue;
+ }
+
+ pkgFLCache::PkgIterator FPkg(FLCache,Nde.RealPackage());
+ if (FPkg.end() == true || FPkg == FLPkg)
+ continue;
+
+      /* This test trips when we are checking a diversion to see
+ if something has already been diverted by this diversion */
+ if (FPkg.Offset() == DiverOwner)
+ continue;
+ FirstOwner = FPkg.Name();
+
+ // Now see if this package matches one in a replace depends
+ pkgCache::DepIterator Dep = Ver.DependsList();
+ bool Ok = false;
+ for (; Dep.end() == false; Dep++)
+ {
+ if (Dep->Type != pkgCache::Dep::Replaces)
+ continue;
+
+ // Does the replaces apply to this package?
+ if (strcmp(Dep.TargetPkg().Name(),FPkg.Name()) != 0)
+ continue;
+
+ /* Check the version for match. I do not think CurrentVer can be
+ 0 if we are here.. */
+ pkgCache::PkgIterator Pkg = Dep.TargetPkg();
+ if (Pkg->CurrentVer == 0)
+ {
+ _error->Warning("Overwrite package match with no version for %s",Pkg.Name());
+ continue;
+ }
+
+ // Replaces is met
+ if (debVS.CheckDep(Pkg.CurrentVer().VerStr(),Dep->CompareOp,Dep.TargetVer()) == true)
+ {
+ if (Debug == true)
+ clog << "Replaced file " << Nde.DirN() << '/' << Nde.File() << " from " << Pkg.Name() << endl;
+ Nde->Flags |= pkgFLCache::Node::Replaced;
+ Ok = true;
+ break;
+ }
+ }
+
+ // Negative Hit
+ if (Ok == false)
+ return _error->Error("File %s/%s overwrites the one in the package %s",
+ Nde.DirN(),Nde.File(),FPkg.Name());
+ }
+
+ /* If this is a diversion we might have to recurse to process
+ the other side of it */
+ if ((TmpNde->Flags & pkgFLCache::Node::Diversion) != 0)
+ {
+ pkgFLCache::DiverIterator Div = TmpNde.Diversion();
+ if (Div.DivertTo() == TmpNde)
+ return HandleOverwrites(Div.DivertFrom(),true);
+ }
+
+ return true;
+}
+ /*}}}*/
+// Extract::CheckDirReplace - See if this directory can be erased /*{{{*/
+// ---------------------------------------------------------------------
+/* If this directory is owned by a single package and that package is
+ replacing it with something non-directoryish then dpkg allows this.
+   We increase the requirement to be that the directory is empty after
+   the package's own files are removed */
+bool pkgExtract::CheckDirReplace(string Dir,unsigned int Depth)
+{
+ // Looping?
+ if (Depth > 40)
+ return false;
+
+ if (Dir[Dir.size() - 1] != '/')
+ Dir += '/';
+
+ DIR *D = opendir(Dir.c_str());
+ if (D == 0)
+ return _error->Errno("opendir","Unable to read %s",Dir.c_str());
+
+ string File;
+ for (struct dirent *Dent = readdir(D); Dent != 0; Dent = readdir(D))
+ {
+ // Skip some files
+ if (strcmp(Dent->d_name,".") == 0 ||
+ strcmp(Dent->d_name,"..") == 0)
+ continue;
+
+ // Look up the node
+ File = Dir + Dent->d_name;
+ pkgFLCache::NodeIterator Nde = FLCache.GetNode(File.begin(),
+ File.end(),0,false,false);
+
+ // The file is not owned by this package
+ if (Nde.end() != false || Nde.RealPackage() != FLPkg)
+ {
+ closedir(D);
+ return false;
+ }
+
+ // See if it is a directory
+ struct stat St;
+ if (lstat(File.c_str(),&St) != 0)
+ {
+ closedir(D);
+ return _error->Errno("lstat","Unable to stat %s",File.c_str());
+ }
+
+ // Recurse down directories
+ if (S_ISDIR(St.st_mode) != 0)
+ {
+ if (CheckDirReplace(File,Depth + 1) == false)
+ {
+ closedir(D);
+ return false;
+ }
+ }
+ }
+
+ // No conflicts
+ closedir(D);
+ return true;
+}
+ /*}}}*/
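The backup/overwrite step described in the extract.cc header reduces to two
POSIX calls per object. A hedged sketch with hypothetical names, leaving out
the FL cache bookkeeping and the error recovery that the real code performs:

   #include <string>
   #include <stdio.h>
   #include <unistd.h>
   #include <errno.h>

   static bool AtomicReplace(const std::string &Path)   // e.g. "/usr/bin/foo"
   {
      std::string New = Path + ".dpkg-new";   // freshly unpacked object
      std::string Tmp = Path + ".dpkg-tmp";   // backup hardlink of the original

      // Keep the old object reachable under the backup name
      if (link(Path.c_str(),Tmp.c_str()) != 0 && errno != ENOENT)
         return false;

      // Atomically move the new object into place; the name never disappears
      if (rename(New.c_str(),Path.c_str()) != 0)
         return false;

      // On success the caller later erases the .dpkg-tmp files; on failure
      // they are renamed back over the originals.
      return true;
   }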
diff --git a/apt-inst/extract.h b/apt-inst/extract.h
new file mode 100644
index 000000000..a9152a26f
--- /dev/null
+++ b/apt-inst/extract.h
@@ -0,0 +1,52 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: extract.h,v 1.2 2001/02/20 07:03:16 jgg Exp $
+/* ######################################################################
+
+ Archive Extraction Directory Stream
+
+ This Directory Stream implements extraction of an archive into the
+   filesystem. It makes the choices about which files should be unpacked or
+   replaced, as well as guiding the actual unpacking.
+
+ When the unpacking sequence is completed one of the two functions,
+ Finished or Aborted must be called.
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef PKGLIB_EXTRACT_H
+#define PKGLIB_EXTRACT_H
+
+#ifdef __GNUG__
+#pragma interface "apt-pkg/extract.h"
+#endif
+
+#include <apt-pkg/dirstream.h>
+#include <apt-pkg/filelist.h>
+#include <apt-pkg/pkgcache.h>
+
+class pkgExtract : public pkgDirStream
+{
+ pkgFLCache &FLCache;
+ pkgCache::VerIterator Ver;
+ pkgFLCache::PkgIterator FLPkg;
+ char FileName[1024];
+ bool Debug;
+
+ bool HandleOverwrites(pkgFLCache::NodeIterator Nde,
+ bool DiverCheck = false);
+ bool CheckDirReplace(string Dir,unsigned int Depth = 0);
+
+ public:
+
+ virtual bool DoItem(Item &Itm,int &Fd);
+ virtual bool Fail(Item &Itm,int Fd);
+ virtual bool FinishedFile(Item &Itm,int Fd);
+
+ bool Finished();
+ bool Aborted();
+
+ pkgExtract(pkgFLCache &FLCache,pkgCache::VerIterator Ver);
+};
+
+#endif
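A usage sketch for the contract stated in the extract.h header: whatever drives
the stream (in APT that is the tar extractor, which is outside this excerpt)
has to end the sequence with either Finished or Aborted. The reader loop here
is only indicated by comments:

   bool UnpackArchive(pkgExtract &Extract)
   {
      bool Ok = true;
      // ... for each archive member: fill in an Item, call Extract.DoItem(),
      // feed the data through Process() or the returned Fd, then call
      // FinishedFile(); set Ok = false and stop on any error ...

      if (Ok == false)
      {
         Extract.Aborted();            // back out the unpacked files
         return false;
      }
      return Extract.Finished();       // keep the result, drop the backups
   }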
diff --git a/apt-inst/filelist.cc b/apt-inst/filelist.cc
new file mode 100644
index 000000000..211fc935e
--- /dev/null
+++ b/apt-inst/filelist.cc
@@ -0,0 +1,588 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: filelist.cc,v 1.2 2001/02/20 07:03:16 jgg Exp $
+/* ######################################################################
+
+ File Listing - Manages a Cache of File -> Package names.
+
+   Diversions add some significant complexity to the system. To keep
+ storage space down in the very special case of a diverted file no
+ extra bytes are allocated in the Node structure. Instead a diversion
+ is inserted directly into the hash table and its flag bit set. Every
+ lookup for that filename will always return the diversion.
+
+ The hash buckets are stored in sorted form, with diversions having
+   the highest sort order. Identical files are assigned the same file
+ pointer, thus after a search all of the nodes owning that file can be
+ found by iterating down the bucket.
+
+ Re-updates of diversions (another extremely special case) are done by
+ marking all diversions as untouched, then loading the entire diversion
+ list again, touching each diversion and then finally going back and
+ releasing all untouched diversions. It is assumed that the diversion
+ table will always be quite small and be a very irregular case.
+
+ Diversions that are user-installed are represented by a package with
+ an empty name string.
+
+ Conf files are handled like diversions by changing the meaning of the
+   Pointer field to point to a conf file entry - again to reduce
+   overhead for a special case.
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#ifdef __GNUG__
+#pragma implementation "apt-pkg/filelist.h"
+#endif
+
+#include <apt-pkg/filelist.h>
+#include <apt-pkg/mmap.h>
+#include <apt-pkg/error.h>
+#include <apt-pkg/strutl.h>
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <iostream>
+ /*}}}*/
+
+// FlCache::Header::Header - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* Initialize the header variables. These are the defaults used when
+ creating new caches */
+pkgFLCache::Header::Header()
+{
+ Signature = 0xEA3F1295;
+
+   /* Whenever the structures change the major version should be bumped;
+ whenever the generator changes the minor version should be bumped. */
+ MajorVersion = 1;
+ MinorVersion = 0;
+ Dirty = true;
+
+ HeaderSz = sizeof(pkgFLCache::Header);
+ NodeSz = sizeof(pkgFLCache::Node);
+ DirSz = sizeof(pkgFLCache::Directory);
+ PackageSz = sizeof(pkgFLCache::Package);
+ DiversionSz = sizeof(pkgFLCache::Diversion);
+ ConfFileSz = sizeof(pkgFLCache::ConfFile);
+
+ NodeCount = 0;
+ DirCount = 0;
+ PackageCount = 0;
+ DiversionCount = 0;
+ ConfFileCount = 0;
+ HashSize = 1 << 14;
+
+ FileHash = 0;
+ DirTree = 0;
+ Packages = 0;
+ Diversions = 0;
+ UniqNodes = 0;
+ memset(Pools,0,sizeof(Pools));
+}
+ /*}}}*/
+// FLCache::Header::CheckSizes - Check if the two headers have same *sz /*{{{*/
+// ---------------------------------------------------------------------
+/* Compare to make sure we are matching versions */
+bool pkgFLCache::Header::CheckSizes(Header &Against) const
+{
+ if (HeaderSz == Against.HeaderSz &&
+ NodeSz == Against.NodeSz &&
+ DirSz == Against.DirSz &&
+ DiversionSz == Against.DiversionSz &&
+ PackageSz == Against.PackageSz &&
+ ConfFileSz == Against.ConfFileSz)
+ return true;
+ return false;
+}
+ /*}}}*/
+
+// FLCache::pkgFLCache - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* If this is a new cache then a new header and hash table are instantiated,
+   otherwise the existing ones are merely attached */
+pkgFLCache::pkgFLCache(DynamicMMap &Map) : Map(Map)
+{
+ if (_error->PendingError() == true)
+ return;
+
+ LastTreeLookup = 0;
+ LastLookupSize = 0;
+
+ // Apply the typecasts
+ HeaderP = (Header *)Map.Data();
+ NodeP = (Node *)Map.Data();
+ DirP = (Directory *)Map.Data();
+ DiverP = (Diversion *)Map.Data();
+ PkgP = (Package *)Map.Data();
+ ConfP = (ConfFile *)Map.Data();
+ StrP = (char *)Map.Data();
+ AnyP = (unsigned char *)Map.Data();
+
+ // New mapping, create the basic cache structures
+ if (Map.Size() == 0)
+ {
+ Map.RawAllocate(sizeof(pkgFLCache::Header));
+ *HeaderP = pkgFLCache::Header();
+ HeaderP->FileHash = Map.RawAllocate(sizeof(pkgFLCache::Node)*HeaderP->HashSize,
+ sizeof(pkgFLCache::Node))/sizeof(pkgFLCache::Node);
+ }
+
+ FileHash = NodeP + HeaderP->FileHash;
+
+ // Setup the dynamic map manager
+ HeaderP->Dirty = true;
+ Map.Sync(0,sizeof(pkgFLCache::Header));
+ Map.UsePools(*HeaderP->Pools,sizeof(HeaderP->Pools)/sizeof(HeaderP->Pools[0]));
+}
+ /*}}}*/
+// FLCache::TreeLookup - Perform a lookup in a generic tree /*{{{*/
+// ---------------------------------------------------------------------
+/* This is a simple generic tree lookup. The first three entries of
+ the Directory structure are used as a template, but any other similar
+   structure could be used in its place. */
+map_ptrloc pkgFLCache::TreeLookup(map_ptrloc *Base,const char *Text,
+ const char *TextEnd,unsigned long Size,
+ unsigned int *Count,bool Insert)
+{
+ pkgFLCache::Directory *Dir;
+
+ // Check our last entry cache
+ if (LastTreeLookup != 0 && LastLookupSize == Size)
+ {
+ Dir = (pkgFLCache::Directory *)(AnyP + LastTreeLookup*Size);
+ if (stringcmp(Text,TextEnd,StrP + Dir->Name) == 0)
+ return LastTreeLookup;
+ }
+
+ while (1)
+ {
+ // Allocate a new one
+ if (*Base == 0)
+ {
+ if (Insert == false)
+ return 0;
+
+ *Base = Map.Allocate(Size);
+ if (*Base == 0)
+ return 0;
+
+ (*Count)++;
+ Dir = (pkgFLCache::Directory *)(AnyP + *Base*Size);
+ Dir->Name = Map.WriteString(Text,TextEnd - Text);
+ LastTreeLookup = *Base;
+ LastLookupSize = Size;
+ return *Base;
+ }
+
+ // Compare this node
+ Dir = (pkgFLCache::Directory *)(AnyP + *Base*Size);
+ int Res = stringcmp(Text,TextEnd,StrP + Dir->Name);
+ if (Res == 0)
+ {
+ LastTreeLookup = *Base;
+ LastLookupSize = Size;
+ return *Base;
+ }
+
+ if (Res > 0)
+ Base = &Dir->Left;
+ if (Res < 0)
+ Base = &Dir->Right;
+ }
+}
+ /*}}}*/
+// FLCache::PrintTree - Print out a tree /*{{{*/
+// ---------------------------------------------------------------------
+/* This is a simple generic tree dumper, meant for debugging. */
+void pkgFLCache::PrintTree(map_ptrloc Base,unsigned long Size)
+{
+ if (Base == 0)
+ return;
+
+ pkgFLCache::Directory *Dir = (pkgFLCache::Directory *)(AnyP + Base*Size);
+ PrintTree(Dir->Left,Size);
+ cout << (StrP + Dir->Name) << endl;
+ PrintTree(Dir->Right,Size);
+}
+ /*}}}*/
+// FLCache::GetPkg - Get a package pointer /*{{{*/
+// ---------------------------------------------------------------------
+/* Locate a package by name in its tree; this is just a wrapper for
+ TreeLookup */
+pkgFLCache::PkgIterator pkgFLCache::GetPkg(const char *Name,const char *NameEnd,
+ bool Insert)
+{
+ if (NameEnd == 0)
+ NameEnd = Name + strlen(Name);
+
+ map_ptrloc Pos = TreeLookup(&HeaderP->Packages,Name,NameEnd,
+ sizeof(pkgFLCache::Package),
+ &HeaderP->PackageCount,Insert);
+ if (Pos == 0)
+ return pkgFLCache::PkgIterator();
+ return pkgFLCache::PkgIterator(*this,PkgP + Pos);
+}
+ /*}}}*/
+// FLCache::GetNode - Get the node associated with the filename /*{{{*/
+// ---------------------------------------------------------------------
+/* Lookup a node in the hash table. If Insert is true then a new node is
+ always inserted. The hash table can have multiple instances of a
+ single name available. A search returns the first. It is important
+ that additions for the same name insert after the first entry of
+ the name group. */
+pkgFLCache::NodeIterator pkgFLCache::GetNode(const char *Name,
+ const char *NameEnd,
+ map_ptrloc Loc,
+ bool Insert,bool Divert)
+{
+ // Split the name into file and directory, hashing as it is copied
+ const char *File = Name;
+ unsigned long HashPos = 0;
+ for (const char *I = Name; I < NameEnd; I++)
+ {
+ HashPos = 1637*HashPos + *I;
+ if (*I == '/')
+ File = I;
+ }
+
+ // Search for it
+ Node *Hash = NodeP + HeaderP->FileHash + (HashPos % HeaderP->HashSize);
+ int Res = 0;
+ map_ptrloc FilePtr = 0;
+ while (Hash->Pointer != 0)
+ {
+ // Compare
+ Res = stringcmp(File+1,NameEnd,StrP + Hash->File);
+ if (Res == 0)
+ Res = stringcmp(Name,File,StrP + DirP[Hash->Dir].Name);
+
+ // Diversion?
+ if (Res == 0 && Insert == true)
+ {
+ /* Dir and File match exactly, we need to reuse the file name
+ when we link it in */
+ FilePtr = Hash->File;
+ Res = Divert - ((Hash->Flags & Node::Diversion) == Node::Diversion);
+ }
+
+ // Is a match
+ if (Res == 0)
+ {
+ if (Insert == false)
+ return NodeIterator(*this,Hash);
+
+ // Only one diversion per name!
+ if (Divert == true)
+ return NodeIterator(*this,Hash);
+ break;
+ }
+
+ // Out of sort order
+ if (Res > 0)
+ break;
+
+ if (Hash->Next != 0)
+ Hash = NodeP + Hash->Next;
+ else
+ break;
+ }
+
+ // Fail, not found
+ if (Insert == false)
+ return NodeIterator(*this);
+
+ // Find a directory node
+ map_ptrloc Dir = TreeLookup(&HeaderP->DirTree,Name,File,
+ sizeof(pkgFLCache::Directory),
+ &HeaderP->DirCount,true);
+ if (Dir == 0)
+ return NodeIterator(*this);
+
+ // Allocate a new node
+ if (Hash->Pointer != 0)
+ {
+ // Overwrite or append
+ if (Res > 0)
+ {
+ Node *Next = NodeP + Map.Allocate(sizeof(*Hash));
+ if (Next == NodeP)
+ return NodeIterator(*this);
+ *Next = *Hash;
+ Hash->Next = Next - NodeP;
+ }
+ else
+ {
+ unsigned long NewNext = Map.Allocate(sizeof(*Hash));
+ if (NewNext == 0)
+ return NodeIterator(*this);
+ NodeP[NewNext].Next = Hash->Next;
+ Hash->Next = NewNext;
+ Hash = NodeP + Hash->Next;
+ }
+ }
+
+ // Insert into the new item
+ Hash->Dir = Dir;
+ Hash->Pointer = Loc;
+ Hash->Flags = 0;
+ if (Divert == true)
+ Hash->Flags |= Node::Diversion;
+
+ if (FilePtr != 0)
+ Hash->File = FilePtr;
+ else
+ {
+ HeaderP->UniqNodes++;
+ Hash->File = Map.WriteString(File+1,NameEnd - File-1);
+ }
+
+ // Link the node to the package list
+ if (Divert == false && Loc == 0)
+ {
+ Hash->Next = PkgP[Loc].Files;
+ PkgP[Loc].Files = Hash - NodeP;
+ }
+
+ HeaderP->NodeCount++;
+ return NodeIterator(*this,Hash);
+}
+ /*}}}*/
+// FLCache::HashNode - Return the hash bucket for the node /*{{{*/
+// ---------------------------------------------------------------------
+/* This is one of two hashing functions. The other is inlined into the
+ GetNode routine. */
+pkgFLCache::Node *pkgFLCache::HashNode(NodeIterator const &Nde)
+{
+ // Hash the node
+ unsigned long HashPos = 0;
+ for (const char *I = Nde.DirN(); *I != 0; I++)
+ HashPos = 1637*HashPos + *I;
+ HashPos = 1637*HashPos + '/';
+ for (const char *I = Nde.File(); *I != 0; I++)
+ HashPos = 1637*HashPos + *I;
+ return NodeP + HeaderP->FileHash + (HashPos % HeaderP->HashSize);
+}
+ /*}}}*/
+// FLCache::DropNode - Drop a node from the hash table /*{{{*/
+// ---------------------------------------------------------------------
+/* This erases a node from the hash table. Note that this does not unlink
+ the node from the package linked list. */
+void pkgFLCache::DropNode(map_ptrloc N)
+{
+ if (N == 0)
+ return;
+
+ NodeIterator Nde(*this,NodeP + N);
+
+ if (Nde->NextPkg != 0)
+ _error->Warning("DropNode called on still linked node");
+
+ // Locate it in the hash table
+ Node *Last = 0;
+ Node *Hash = HashNode(Nde);
+ while (Hash->Pointer != 0)
+ {
+ // Got it
+ if (Hash == Nde)
+ {
+ // Top of the bucket..
+ if (Last == 0)
+ {
+ Hash->Pointer = 0;
+ if (Hash->Next == 0)
+ return;
+ *Hash = NodeP[Hash->Next];
+ // Release Hash->Next
+ return;
+ }
+ Last->Next = Hash->Next;
+ // Release Hash
+ return;
+ }
+
+ Last = Hash;
+ if (Hash->Next != 0)
+ Hash = NodeP + Hash->Next;
+ else
+ break;
+ }
+
+ _error->Error("Failed to locate the hash element!");
+}
+ /*}}}*/
+// FLCache::BeginDiverLoad - Start reading new diversions /*{{{*/
+// ---------------------------------------------------------------------
+/* Tag all the diversions as untouched */
+void pkgFLCache::BeginDiverLoad()
+{
+ for (DiverIterator I = DiverBegin(); I.end() == false; I++)
+ I->Flags = 0;
+}
+ /*}}}*/
+// FLCache::FinishDiverLoad - Finish up a new diversion load /*{{{*/
+// ---------------------------------------------------------------------
+/* This drops any untouched diversions, in effect removing any diversions
+   that were not loaded (i.e. missing from the diversion file) */
+void pkgFLCache::FinishDiverLoad()
+{
+ map_ptrloc *Cur = &HeaderP->Diversions;
+ while (*Cur != 0)
+ {
+ Diversion *Div = DiverP + *Cur;
+ if ((Div->Flags & Diversion::Touched) == Diversion::Touched)
+ {
+ Cur = &Div->Next;
+ continue;
+ }
+
+ // Purge!
+ DropNode(Div->DivertTo);
+ DropNode(Div->DivertFrom);
+ *Cur = Div->Next;
+ }
+}
+ /*}}}*/
+// FLCache::AddDiversion - Add a new diversion /*{{{*/
+// ---------------------------------------------------------------------
+/* Add a new diversion to the diversion tables and make sure that it is
+ unique and non-chaining. */
+bool pkgFLCache::AddDiversion(PkgIterator const &Owner,
+ const char *From,const char *To)
+{
+ /* Locate the two hash nodes we are going to manipulate. If there
+ are pre-existing diversions then they will be returned */
+ NodeIterator FromN = GetNode(From,From+strlen(From),0,true,true);
+ NodeIterator ToN = GetNode(To,To+strlen(To),0,true,true);
+ if (FromN.end() == true || ToN.end() == true)
+ return _error->Error("Failed to allocate diversion");
+
+ // Should never happen
+ if ((FromN->Flags & Node::Diversion) != Node::Diversion ||
+ (ToN->Flags & Node::Diversion) != Node::Diversion)
+ return _error->Error("Internal Error in AddDiversion");
+
+ // Now, try to reclaim an existing diversion..
+ map_ptrloc Diver = 0;
+ if (FromN->Pointer != 0)
+ Diver = FromN->Pointer;
+
+   /* Make sure from and to point to the same diversion; if they don't
+ then we are trying to intermix diversions - very bad */
+ if (ToN->Pointer != 0 && ToN->Pointer != Diver)
+ {
+ // It could be that the other diversion is no longer in use
+ if ((DiverP[ToN->Pointer].Flags & Diversion::Touched) == Diversion::Touched)
+ return _error->Error("Trying to overwrite a diversion, %s -> %s and %s/%s",
+ From,To,ToN.File(),ToN.Dir().Name());
+
+ // We can erase it.
+ Diversion *Div = DiverP + ToN->Pointer;
+ ToN->Pointer = 0;
+
+ if (Div->DivertTo == ToN.Offset())
+ Div->DivertTo = 0;
+ if (Div->DivertFrom == ToN.Offset())
+ Div->DivertFrom = 0;
+
+ // This diversion will be cleaned up by FinishDiverLoad
+ }
+
+ // Allocate a new diversion
+ if (Diver == 0)
+ {
+ Diver = Map.Allocate(sizeof(Diversion));
+ if (Diver == 0)
+ return false;
+ DiverP[Diver].Next = HeaderP->Diversions;
+ HeaderP->Diversions = Diver;
+ HeaderP->DiversionCount++;
+ }
+
+ // Can only have one diversion of the same files
+ Diversion *Div = DiverP + Diver;
+ if ((Div->Flags & Diversion::Touched) == Diversion::Touched)
+ return _error->Error("Double add of diversion %s -> %s",From,To);
+
+ // Setup the From/To links
+ if (Div->DivertFrom != FromN.Offset() && Div->DivertFrom != ToN.Offset())
+ DropNode(Div->DivertFrom);
+ Div->DivertFrom = FromN.Offset();
+ if (Div->DivertTo != FromN.Offset() && Div->DivertTo != ToN.Offset())
+ DropNode(Div->DivertTo);
+ Div->DivertTo = ToN.Offset();
+
+ // Link it to the two nodes
+ FromN->Pointer = Diver;
+ ToN->Pointer = Diver;
+
+ // And the package
+ Div->OwnerPkg = Owner.Offset();
+ Div->Flags |= Diversion::Touched;
+
+ return true;
+}
+ /*}}}*/
+// FLCache::AddConfFile - Add a new configuration file /*{{{*/
+// ---------------------------------------------------------------------
+/* This simply adds a new conf file node to the hash table. This is only
+ used by the status file reader. It associates a hash with each conf
+ file entry that exists in the status file and the list file for
+ the proper package. Duplicate conf files (across packages) are left
+ up to other routines to deal with. */
+bool pkgFLCache::AddConfFile(const char *Name,const char *NameEnd,
+ PkgIterator const &Owner,
+ const unsigned char *Sum)
+{
+ NodeIterator Nde = GetNode(Name,NameEnd,0,false,false);
+ if (Nde.end() == true)
+ return true;
+
+ unsigned long File = Nde->File;
+ for (; Nde->File == File && Nde.end() == false; Nde++)
+ {
+ if (Nde.RealPackage() != Owner)
+ continue;
+
+ if ((Nde->Flags & Node::ConfFile) == Node::ConfFile)
+ return _error->Error("Duplicate conf file %s/%s",Nde.DirN(),Nde.File());
+
+ // Allocate a new conf file structure
+ map_ptrloc Conf = Map.Allocate(sizeof(ConfFile));
+ if (Conf == 0)
+ return false;
+ ConfP[Conf].OwnerPkg = Owner.Offset();
+ memcpy(ConfP[Conf].MD5,Sum,sizeof(ConfP[Conf].MD5));
+
+ Nde->Pointer = Conf;
+ Nde->Flags |= Node::ConfFile;
+ return true;
+ }
+
+ /* This means the conf file has been replaced, but the entry in the
+ status file was not updated */
+ return true;
+}
+ /*}}}*/
+
+// NodeIterator::RealPackage - Return the package for this node /*{{{*/
+// ---------------------------------------------------------------------
+/* Since the package pointer is indirected in all sorts of interesting ways
+ this is used to get a pointer to the owning package */
+pkgFLCache::Package *pkgFLCache::NodeIterator::RealPackage() const
+{
+ if (Nde->Pointer == 0)
+ return 0;
+
+ if ((Nde->Flags & Node::ConfFile) == Node::ConfFile)
+ return Owner->PkgP + Owner->ConfP[Nde->Pointer].OwnerPkg;
+
+ // Diversions are ignored
+ if ((Nde->Flags & Node::Diversion) == Node::Diversion)
+ return 0;
+
+ return Owner->PkgP + Nde->Pointer;
+}
+ /*}}}*/
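The bucket choice in GetNode and HashNode above is a plain rolling hash over
the full path. A standalone sketch of the same computation, useful for
reasoning about which chain a path lands in (the table size defaults to
1 << 14, as set in the Header constructor):

   #include <string>

   static unsigned long FLBucket(const std::string &Path,unsigned long HashSize)
   {
      unsigned long HashPos = 0;
      for (std::string::size_type I = 0; I != Path.size(); I++)
         HashPos = 1637*HashPos + Path[I];
      return HashPos % HashSize;
   }

For example, FLBucket("/usr/bin/perl",1 << 14) gives the index of the chain
that GetNode walks for that file.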
diff --git a/apt-inst/filelist.h b/apt-inst/filelist.h
new file mode 100644
index 000000000..7536a2f63
--- /dev/null
+++ b/apt-inst/filelist.h
@@ -0,0 +1,314 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: filelist.h,v 1.2 2001/02/20 07:03:16 jgg Exp $
+/* ######################################################################
+
+ File Listing - Manages a Cache of File -> Package names.
+
+ This is identical to the Package cache, except that the generator
+ (which is much simpler) is integrated directly into the main class,
+ and it has been designed to handle live updates.
+
+ The storage content of the class is maintained in a memory map and is
+ written directly to the file system. Performance is traded against
+ space to give something that performs well and remains small.
+   The average per file usage is 32 bytes, which yields about a meg every
+   36k files. Directory paths are collected into a binary tree and stored
+   only once; this offsets the cost of the hash nodes enough to keep
+ memory usage slightly less than the sum of the filenames.
+
+ The file names are stored into a fixed size chained hash table that is
+ linked to the package name and to the directory component.
+
+ Each file node has a set of associated flags that indicate the current
+ state of the file.
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef PKGLIB_FILELIST_H
+#define PKGLIB_FILELIST_H
+
+#ifdef __GNUG__
+#pragma interface "apt-pkg/filelist.h"
+#endif
+
+#include <apt-pkg/mmap.h>
+
+class pkgFLCache
+{
+ public:
+ struct Header;
+ struct Node;
+ struct Directory;
+ struct Package;
+ struct Diversion;
+ struct ConfFile;
+
+ class NodeIterator;
+ class DirIterator;
+ class PkgIterator;
+ class DiverIterator;
+
+ protected:
+ string CacheFile;
+ DynamicMMap &Map;
+ map_ptrloc LastTreeLookup;
+ unsigned long LastLookupSize;
+
+ // Helpers for the addition algorithms
+ map_ptrloc TreeLookup(map_ptrloc *Base,const char *Text,const char *TextEnd,
+ unsigned long Size,unsigned int *Count = 0,
+ bool Insert = false);
+
+ public:
+
+ // Pointers to the arrays of items
+ Header *HeaderP;
+ Node *NodeP;
+ Directory *DirP;
+ Package *PkgP;
+ Diversion *DiverP;
+ ConfFile *ConfP;
+ char *StrP;
+ unsigned char *AnyP;
+
+ // Quick accessors
+ Node *FileHash;
+
+ // Accessors
+ Header &Head() {return *HeaderP;};
+ void PrintTree(map_ptrloc Base,unsigned long Size);
+
+ // Add/Find things
+ PkgIterator GetPkg(const char *Name,const char *End,bool Insert);
+ inline PkgIterator GetPkg(const char *Name,bool Insert);
+ NodeIterator GetNode(const char *Name,
+ const char *NameEnd,
+ map_ptrloc Loc,
+ bool Insert,bool Divert);
+ Node *HashNode(NodeIterator const &N);
+ void DropNode(map_ptrloc Node);
+
+ inline DiverIterator DiverBegin();
+
+ // Diversion control
+ void BeginDiverLoad();
+ void FinishDiverLoad();
+ bool AddDiversion(PkgIterator const &Owner,const char *From,
+ const char *To);
+ bool AddConfFile(const char *Name,const char *NameEnd,
+ PkgIterator const &Owner,const unsigned char *Sum);
+
+ pkgFLCache(DynamicMMap &Map);
+// ~pkgFLCache();
+};
+
+struct pkgFLCache::Header
+{
+ // Signature information
+ unsigned long Signature;
+ short MajorVersion;
+ short MinorVersion;
+ bool Dirty;
+
+ // Size of structure values
+ unsigned HeaderSz;
+ unsigned NodeSz;
+ unsigned DirSz;
+ unsigned PackageSz;
+ unsigned DiversionSz;
+ unsigned ConfFileSz;
+
+   // Structure Counts
+ unsigned int NodeCount;
+ unsigned int DirCount;
+ unsigned int PackageCount;
+ unsigned int DiversionCount;
+ unsigned int ConfFileCount;
+ unsigned int HashSize;
+ unsigned long UniqNodes;
+
+ // Offsets
+ map_ptrloc FileHash;
+ map_ptrloc DirTree;
+ map_ptrloc Packages;
+ map_ptrloc Diversions;
+
+ /* Allocation pools, there should be one of these for each structure
+ excluding the header */
+ DynamicMMap::Pool Pools[5];
+
+ bool CheckSizes(Header &Against) const;
+ Header();
+};
+
+/* The bit field is used to avoid incurring an extra 4 bytes x 40000;
+   Pointer is the most infrequently used member of the structure */
+struct pkgFLCache::Node
+{
+ map_ptrloc Dir; // Dir
+ map_ptrloc File; // String
+ unsigned Pointer:24; // Package/Diversion/ConfFile
+ unsigned Flags:8; // Package
+ map_ptrloc Next; // Node
+ map_ptrloc NextPkg; // Node
+
+ enum Flags {Diversion = (1<<0),ConfFile = (1<<1),
+ NewConfFile = (1<<2),NewFile = (1<<3),
+ Unpacked = (1<<4),Replaced = (1<<5)};
+};
+
+struct pkgFLCache::Directory
+{
+ map_ptrloc Left; // Directory
+ map_ptrloc Right; // Directory
+ map_ptrloc Name; // String
+};
+
+struct pkgFLCache::Package
+{
+ map_ptrloc Left; // Package
+ map_ptrloc Right; // Package
+ map_ptrloc Name; // String
+ map_ptrloc Files; // Node
+};
+
+struct pkgFLCache::Diversion
+{
+ map_ptrloc OwnerPkg; // Package
+ map_ptrloc DivertFrom; // Node
+   map_ptrloc DivertTo;         // Node
+
+ map_ptrloc Next; // Diversion
+ unsigned long Flags;
+
+ enum Flags {Touched = (1<<0)};
+};
+
+struct pkgFLCache::ConfFile
+{
+ map_ptrloc OwnerPkg; // Package
+ unsigned char MD5[16];
+};
+
+class pkgFLCache::PkgIterator
+{
+ Package *Pkg;
+ pkgFLCache *Owner;
+
+ public:
+
+ inline bool end() const {return Owner == 0 || Pkg == Owner->PkgP?true:false;}
+
+ // Accessors
+ inline Package *operator ->() {return Pkg;};
+ inline Package const *operator ->() const {return Pkg;};
+ inline Package const &operator *() const {return *Pkg;};
+ inline operator Package *() {return Pkg == Owner->PkgP?0:Pkg;};
+ inline operator Package const *() const {return Pkg == Owner->PkgP?0:Pkg;};
+
+ inline unsigned long Offset() const {return Pkg - Owner->PkgP;};
+ inline const char *Name() const {return Pkg->Name == 0?0:Owner->StrP + Pkg->Name;};
+ inline pkgFLCache::NodeIterator Files() const;
+
+ PkgIterator() : Pkg(0), Owner(0) {};
+ PkgIterator(pkgFLCache &Owner,Package *Trg) : Pkg(Trg), Owner(&Owner) {};
+};
+
+class pkgFLCache::DirIterator
+{
+ Directory *Dir;
+ pkgFLCache *Owner;
+
+ public:
+
+ // Accessors
+ inline Directory *operator ->() {return Dir;};
+ inline Directory const *operator ->() const {return Dir;};
+ inline Directory const &operator *() const {return *Dir;};
+ inline operator Directory *() {return Dir == Owner->DirP?0:Dir;};
+ inline operator Directory const *() const {return Dir == Owner->DirP?0:Dir;};
+
+ inline const char *Name() const {return Dir->Name == 0?0:Owner->StrP + Dir->Name;};
+
+ DirIterator() : Dir(0), Owner(0) {};
+ DirIterator(pkgFLCache &Owner,Directory *Trg) : Dir(Trg), Owner(&Owner) {};
+};
+
+class pkgFLCache::DiverIterator
+{
+ Diversion *Diver;
+ pkgFLCache *Owner;
+
+ public:
+
+ // Iteration
+ void operator ++(int) {if (Diver != Owner->DiverP) Diver = Owner->DiverP + Diver->Next;};
+ inline void operator ++() {operator ++(0);};
+ inline bool end() const {return Owner == 0 || Diver == Owner->DiverP;};
+
+ // Accessors
+ inline Diversion *operator ->() {return Diver;};
+ inline Diversion const *operator ->() const {return Diver;};
+ inline Diversion const &operator *() const {return *Diver;};
+ inline operator Diversion *() {return Diver == Owner->DiverP?0:Diver;};
+ inline operator Diversion const *() const {return Diver == Owner->DiverP?0:Diver;};
+
+ inline PkgIterator OwnerPkg() const {return PkgIterator(*Owner,Owner->PkgP + Diver->OwnerPkg);};
+ inline NodeIterator DivertFrom() const;
+ inline NodeIterator DivertTo() const;
+
+ DiverIterator() : Diver(0), Owner(0) {};
+ DiverIterator(pkgFLCache &Owner,Diversion *Trg) : Diver(Trg), Owner(&Owner) {};
+};
+
+class pkgFLCache::NodeIterator
+{
+ Node *Nde;
+ enum {NdePkg, NdeHash} Type;
+ pkgFLCache *Owner;
+
+ public:
+
+ // Iteration
+ void operator ++(int) {if (Nde != Owner->NodeP) Nde = Owner->NodeP +
+ (Type == NdePkg?Nde->NextPkg:Nde->Next);};
+ inline void operator ++() {operator ++(0);};
+ inline bool end() const {return Owner == 0 || Nde == Owner->NodeP;};
+
+ // Accessors
+ inline Node *operator ->() {return Nde;};
+ inline Node const *operator ->() const {return Nde;};
+ inline Node const &operator *() const {return *Nde;};
+ inline operator Node *() {return Nde == Owner->NodeP?0:Nde;};
+ inline operator Node const *() const {return Nde == Owner->NodeP?0:Nde;};
+ inline unsigned long Offset() const {return Nde - Owner->NodeP;};
+ inline DirIterator Dir() const {return DirIterator(*Owner,Owner->DirP + Nde->Dir);};
+ inline DiverIterator Diversion() const {return DiverIterator(*Owner,Owner->DiverP + Nde->Pointer);};
+ inline const char *File() const {return Nde->File == 0?0:Owner->StrP + Nde->File;};
+ inline const char *DirN() const {return Owner->StrP + Owner->DirP[Nde->Dir].Name;};
+ Package *RealPackage() const;
+
+ NodeIterator() : Nde(0), Type(NdeHash), Owner(0) {};
+ NodeIterator(pkgFLCache &Owner) : Nde(Owner.NodeP), Type(NdeHash), Owner(&Owner) {};
+ NodeIterator(pkgFLCache &Owner,Node *Trg) : Nde(Trg), Type(NdeHash), Owner(&Owner) {};
+ NodeIterator(pkgFLCache &Owner,Node *Trg,Package *) : Nde(Trg), Type(NdePkg), Owner(&Owner) {};
+};
+
+/* Inlines with forward references that cannot be included directly in their
+   respective classes */
+inline pkgFLCache::NodeIterator pkgFLCache::DiverIterator::DivertFrom() const
+ {return NodeIterator(*Owner,Owner->NodeP + Diver->DivertFrom);};
+inline pkgFLCache::NodeIterator pkgFLCache::DiverIterator::DivertTo() const
+ {return NodeIterator(*Owner,Owner->NodeP + Diver->DivertTo);};
+
+inline pkgFLCache::NodeIterator pkgFLCache::PkgIterator::Files() const
+ {return NodeIterator(*Owner,Owner->NodeP + Pkg->Files,Pkg);};
+
+inline pkgFLCache::DiverIterator pkgFLCache::DiverBegin()
+ {return DiverIterator(*this,DiverP + HeaderP->Diversions);};
+
+inline pkgFLCache::PkgIterator pkgFLCache::GetPkg(const char *Name,bool Insert)
+ {return GetPkg(Name,Name+strlen(Name),Insert);};
+
+#endif
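The diversion re-load protocol described in filelist.cc (mark everything
untouched, re-add, then drop whatever was not touched) is driven through three
of the calls declared above. A hedged usage sketch; the file parsing and the
path names are hypothetical stand-ins for the real status reader:

   #include <apt-pkg/filelist.h>

   bool ReloadDiversions(pkgFLCache &FL)
   {
      // A package with an empty name represents user-installed diversions
      pkgFLCache::PkgIterator Owner = FL.GetPkg("",true);
      if (Owner.end() == true)
         return false;

      FL.BeginDiverLoad();      // mark every existing diversion untouched

      // ... for each "from -> to" pair read from the diversions file ...
      if (FL.AddDiversion(Owner,"/usr/bin/foo","/usr/bin/foo.distrib") == false)
         return false;

      FL.FinishDiverLoad();     // purge diversions that were not re-added
      return true;
   }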
diff --git a/apt-inst/makefile b/apt-inst/makefile
new file mode 100644
index 000000000..4a0981f2b
--- /dev/null
+++ b/apt-inst/makefile
@@ -0,0 +1,30 @@
+# -*- make -*-
+BASE=..
+SUBDIR=apt-inst
+
+# Header location
+SUBDIRS = contrib deb
+HEADER_TARGETDIRS = apt-pkg
+
+# Bring in the default rules
+include ../buildlib/defaults.mak
+
+# The library name
+LIBRARY=apt-inst
+MAJOR=1.0
+MINOR=0
+SLIBS=$(PTHREADLIB)
+
+# Source code for the contributed non-core things
+SOURCE = contrib/extracttar.cc contrib/arfile.cc
+
+# Source code for the main library
+SOURCE+= filelist.cc database.cc dirstream.cc extract.cc \
+ deb/dpkgdb.cc deb/debfile.cc
+
+# Public header files
+HEADERS = extracttar.h arfile.h filelist.h database.h extract.h \
+ dpkgdb.h dirstream.h debfile.h
+
+HEADERS := $(addprefix apt-pkg/,$(HEADERS))
+include $(LIBRARY_H)
diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc
index 0de5eef7c..431382ef1 100644
--- a/apt-pkg/acquire-item.cc
+++ b/apt-pkg/acquire-item.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: acquire-item.cc,v 1.41 2000/01/17 07:11:49 jgg Exp $
+// $Id: acquire-item.cc,v 1.42 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Acquire Item - Item to acquire
@@ -18,10 +18,13 @@
#endif
#include <apt-pkg/acquire-item.h>
#include <apt-pkg/configuration.h>
+#include <apt-pkg/sourcelist.h>
#include <apt-pkg/error.h>
#include <apt-pkg/strutl.h>
#include <apt-pkg/fileutl.h>
+#include <apti18n.h>
+
#include <sys/stat.h>
#include <unistd.h>
#include <errno.h>
@@ -116,7 +119,7 @@ void pkgAcquire::Item::Rename(string From,string To)
if (rename(From.c_str(),To.c_str()) != 0)
{
char S[300];
- sprintf(S,"rename failed, %s (%s -> %s).",strerror(errno),
+ sprintf(S,_("rename failed, %s (%s -> %s)."),strerror(errno),
From.c_str(),To.c_str());
Status = StatError;
ErrorText = S;
@@ -127,31 +130,24 @@ void pkgAcquire::Item::Rename(string From,string To)
// AcqIndex::AcqIndex - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* The package file is added to the queue and a second class is
- instantiated to fetch the revision file */
-pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner,const pkgSourceList::Item *Location) :
- Item(Owner), Location(Location)
+ instantiated to fetch the revision file */
+pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner,
+ string URI,string URIDesc,string ShortDesc) :
+ Item(Owner), RealURI(URI)
{
Decompression = false;
Erase = false;
DestFile = _config->FindDir("Dir::State::lists") + "partial/";
- DestFile += URItoFileName(Location->PackagesURI());
+ DestFile += URItoFileName(URI);
// Create the item
- Desc.URI = Location->PackagesURI() + ".gz";
- Desc.Description = Location->PackagesInfo();
+ Desc.URI = URI + ".gz";
+ Desc.Description = URIDesc;
Desc.Owner = this;
-
- // Set the short description to the archive component
- if (Location->Dist[Location->Dist.size() - 1] == '/')
- Desc.ShortDesc = Location->Dist;
- else
- Desc.ShortDesc = Location->Dist + '/' + Location->Section;
+ Desc.ShortDesc = ShortDesc;
QueueURI(Desc);
-
- // Create the Release fetch class
- new pkgAcqIndexRel(Owner,Location);
}
/*}}}*/
// AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/
@@ -160,7 +156,7 @@ pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner,const pkgSourceList::Item *Location)
string pkgAcqIndex::Custom600Headers()
{
string Final = _config->FindDir("Dir::State::lists");
- Final += URItoFileName(Location->PackagesURI());
+ Final += URItoFileName(RealURI);
struct stat Buf;
if (stat(Final.c_str(),&Buf) != 0)
@@ -185,13 +181,13 @@ void pkgAcqIndex::Done(string Message,unsigned long Size,string MD5,
{
// Done, move it into position
string FinalFile = _config->FindDir("Dir::State::lists");
- FinalFile += URItoFileName(Location->PackagesURI());
+ FinalFile += URItoFileName(RealURI);
Rename(DestFile,FinalFile);
/* We restore the original name to DestFile so that the clean operation
will work OK */
DestFile = _config->FindDir("Dir::State::lists") + "partial/";
- DestFile += URItoFileName(Location->PackagesURI());
+ DestFile += URItoFileName(RealURI);
// Remove the compressed version.
if (Erase == true)
@@ -237,7 +233,7 @@ void pkgAcqIndex::Done(string Message,unsigned long Size,string MD5,
Decompression = true;
DestFile += ".decomp";
- Desc.URI = "gzip:" + FileName,Location->PackagesInfo();
+ Desc.URI = "gzip:" + FileName;
QueueURI(Desc);
Mode = "gzip";
}
@@ -247,23 +243,18 @@ void pkgAcqIndex::Done(string Message,unsigned long Size,string MD5,
// ---------------------------------------------------------------------
/* The Release file is added to the queue */
pkgAcqIndexRel::pkgAcqIndexRel(pkgAcquire *Owner,
- const pkgSourceList::Item *Location) :
- Item(Owner), Location(Location)
+ string URI,string URIDesc,string ShortDesc) :
+ Item(Owner), RealURI(URI)
{
DestFile = _config->FindDir("Dir::State::lists") + "partial/";
- DestFile += URItoFileName(Location->ReleaseURI());
+ DestFile += URItoFileName(URI);
// Create the item
- Desc.URI = Location->ReleaseURI();
- Desc.Description = Location->ReleaseInfo();
+ Desc.URI = URI;
+ Desc.Description = URIDesc;
+ Desc.ShortDesc = ShortDesc;
Desc.Owner = this;
- // Set the short description to the archive component
- if (Location->Dist[Location->Dist.size() - 1] == '/')
- Desc.ShortDesc = Location->Dist;
- else
- Desc.ShortDesc = Location->Dist + '/' + Location->Section;
-
QueueURI(Desc);
}
/*}}}*/
@@ -273,7 +264,7 @@ pkgAcqIndexRel::pkgAcqIndexRel(pkgAcquire *Owner,
string pkgAcqIndexRel::Custom600Headers()
{
string Final = _config->FindDir("Dir::State::lists");
- Final += URItoFileName(Location->ReleaseURI());
+ Final += URItoFileName(RealURI);
struct stat Buf;
if (stat(Final.c_str(),&Buf) != 0)
@@ -317,7 +308,7 @@ void pkgAcqIndexRel::Done(string Message,unsigned long Size,string MD5,
// Done, move it into position
string FinalFile = _config->FindDir("Dir::State::lists");
- FinalFile += URItoFileName(Location->ReleaseURI());
+ FinalFile += URItoFileName(RealURI);
Rename(DestFile,FinalFile);
}
/*}}}*/
@@ -354,21 +345,42 @@ pkgAcqArchive::pkgAcqArchive(pkgAcquire *Owner,pkgSourceList *Sources,
if (Version.Arch() == 0)
{
- _error->Error("I wasn't able to locate file for the %s package. "
- "This might mean you need to manually fix this package. (due to missing arch)",
+ _error->Error(_("I wasn't able to locate file for the %s package. "
+ "This might mean you need to manually fix this package. (due to missing arch)"),
Version.ParentPkg().Name());
return;
}
- // Generate the final file name as: package_version_arch.deb
- StoreFilename = QuoteString(Version.ParentPkg().Name(),"_:") + '_' +
- QuoteString(Version.VerStr(),"_:") + '_' +
- QuoteString(Version.Arch(),"_:.") + ".deb";
-
+ /* We need to find a filename to determine the extension. We make the
+ assumption here that all the available sources for this version share
+ the same extension.. */
+ // Skip not source sources, they do not have file fields.
+ for (; Vf.end() == false; Vf++)
+ {
+ if ((Vf.File()->Flags & pkgCache::Flag::NotSource) != 0)
+ continue;
+ break;
+ }
+
+ // Does not really matter here.. we are going to fail out below
+ if (Vf.end() != true)
+ {
+ // If this fails to get a file name we will bomb out below.
+ pkgRecords::Parser &Parse = Recs->Lookup(Vf);
+ if (_error->PendingError() == true)
+ return;
+
+ // Generate the final file name as: package_version_arch.foo
+ StoreFilename = QuoteString(Version.ParentPkg().Name(),"_:") + '_' +
+ QuoteString(Version.VerStr(),"_:") + '_' +
+ QuoteString(Version.Arch(),"_:.") +
+ "." + flExtension(Parse.FileName());
+ }
+
// Select a source
if (QueueNext() == false && _error->PendingError() == false)
- _error->Error("I wasn't able to locate file for the %s package. "
- "This might mean you need to manually fix this package.",
+ _error->Error(_("I wasn't able to locate file for the %s package. "
+ "This might mean you need to manually fix this package."),
Version.ParentPkg().Name());
}
/*}}}*/
@@ -378,7 +390,7 @@ pkgAcqArchive::pkgAcqArchive(pkgAcquire *Owner,pkgSourceList *Sources,
the archive is already available in the cache and stashs the MD5 for
checking later. */
bool pkgAcqArchive::QueueNext()
-{
+{
for (; Vf.end() == false; Vf++)
{
// Ignore not source sources
@@ -386,26 +398,21 @@ bool pkgAcqArchive::QueueNext()
continue;
// Try to cross match against the source list
- string PkgFile = flNotDir(Vf.File().FileName());
- pkgSourceList::const_iterator Location;
- for (Location = Sources->begin(); Location != Sources->end(); Location++)
- if (PkgFile == URItoFileName(Location->PackagesURI()))
- break;
-
- if (Location == Sources->end())
- continue;
+ pkgIndexFile *Index;
+ if (Sources->FindIndex(Vf.File(),Index) == false)
+ continue;
// Grab the text package record
pkgRecords::Parser &Parse = Recs->Lookup(Vf);
if (_error->PendingError() == true)
return false;
- PkgFile = Parse.FileName();
+ string PkgFile = Parse.FileName();
MD5 = Parse.MD5Hash();
if (PkgFile.empty() == true)
- return _error->Error("The package index files are corrupted. No Filename: "
- "field for package %s."
- ,Version.ParentPkg().Name());
+ return _error->Error(_("The package index files are corrupted. No Filename: "
+ "field for package %s."),
+ Version.ParentPkg().Name());
// See if we already have the file. (Legacy filenames)
FileSize = Version->Size;
@@ -460,8 +467,9 @@ bool pkgAcqArchive::QueueNext()
}
// Create the item
- Desc.URI = Location->ArchiveURI(PkgFile);
- Desc.Description = Location->ArchiveInfo(Version);
+ Local = false;
+ Desc.URI = Index->ArchiveURI(PkgFile);
+ Desc.Description = Index->ArchiveInfo(Version);
Desc.Owner = this;
Desc.ShortDesc = Version.ParentPkg().Name();
QueueURI(Desc);
@@ -484,7 +492,7 @@ void pkgAcqArchive::Done(string Message,unsigned long Size,string Md5Hash,
if (Size != Version->Size)
{
Status = StatError;
- ErrorText = "Size mismatch";
+ ErrorText = _("Size mismatch");
return;
}
@@ -494,7 +502,7 @@ void pkgAcqArchive::Done(string Message,unsigned long Size,string Md5Hash,
if (Md5Hash != MD5)
{
Status = StatError;
- ErrorText = "MD5Sum mismatch";
+ ErrorText = _("MD5Sum mismatch");
Rename(DestFile,DestFile + ".FAILED");
return;
}
@@ -534,6 +542,20 @@ void pkgAcqArchive::Done(string Message,unsigned long Size,string Md5Hash,
void pkgAcqArchive::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
{
ErrorText = LookupTag(Message,"Message");
+
+ /* We don't really want to retry on failed media swaps, this prevents
+ that. An interesting observation is that permanent failures are not
+ recorded. */
+ if (Cnf->Removable == true &&
+ StringToBool(LookupTag(Message,"Transient-Failure"),false) == true)
+ {
+ // Vf = Version.FileList();
+ while (Vf.end() == false) Vf++;
+ StoreFilename = string();
+ Item::Failed(Message,Cnf);
+ return;
+ }
+
if (QueueNext() == false)
{
// This is the retry counter
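
The new block above is what keeps APT from cycling through every other copy of a package after a failed media swap: forcing Vf to end() guarantees the following QueueNext() finds nothing, so the item fails for good. A condensed restatement of the test using the real strutl helpers (the boolean parameter stands in for Cnf->Removable; the wrapper function is not part of this diff):

   #include <apt-pkg/strutl.h>

   // True when the failure came from removable media and the method flagged
   // it as transient (a refused or failed swap) -- retrying other sources
   // would only prompt for the same disc again.
   static bool AbandonAfterSwapFailure(string Message,bool Removable)
   {
      return Removable == true &&
             StringToBool(LookupTag(Message,"Transient-Failure"),false) == true;
   }
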
diff --git a/apt-pkg/acquire-item.h b/apt-pkg/acquire-item.h
index bf1a50e75..3d411978e 100644
--- a/apt-pkg/acquire-item.h
+++ b/apt-pkg/acquire-item.h
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: acquire-item.h,v 1.24 2000/01/27 04:15:09 jgg Exp $
+// $Id: acquire-item.h,v 1.25 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Acquire Item - Item to acquire
@@ -21,7 +21,7 @@
#define PKGLIB_ACQUIRE_ITEM_H
#include <apt-pkg/acquire.h>
-#include <apt-pkg/sourcelist.h>
+#include <apt-pkg/indexfile.h>
#include <apt-pkg/pkgrecords.h>
#ifdef __GNUG__
@@ -49,7 +49,7 @@ class pkgAcquire::Item
string ErrorText;
unsigned long FileSize;
unsigned long PartialSize;
- char *Mode;
+ const char *Mode;
unsigned long ID;
bool Complete;
bool Local;
@@ -82,10 +82,10 @@ class pkgAcqIndex : public pkgAcquire::Item
{
protected:
- const pkgSourceList::Item *Location;
bool Decompression;
bool Erase;
pkgAcquire::ItemDesc Desc;
+ string RealURI;
public:
@@ -93,9 +93,10 @@ class pkgAcqIndex : public pkgAcquire::Item
virtual void Done(string Message,unsigned long Size,string Md5Hash,
pkgAcquire::MethodConfig *Cnf);
virtual string Custom600Headers();
- virtual string DescURI() {return Location->PackagesURI();};
+ virtual string DescURI() {return RealURI;};
- pkgAcqIndex(pkgAcquire *Owner,const pkgSourceList::Item *Location);
+ pkgAcqIndex(pkgAcquire *Owner,string URI,string URIDesc,
+ string ShortDesc);
};
// Item class for index files
@@ -103,8 +104,8 @@ class pkgAcqIndexRel : public pkgAcquire::Item
{
protected:
- const pkgSourceList::Item *Location;
pkgAcquire::ItemDesc Desc;
+ string RealURI;
public:
@@ -113,9 +114,10 @@ class pkgAcqIndexRel : public pkgAcquire::Item
virtual void Done(string Message,unsigned long Size,string Md5Hash,
pkgAcquire::MethodConfig *Cnf);
virtual string Custom600Headers();
- virtual string DescURI() {return Location->ReleaseURI();};
+ virtual string DescURI() {return RealURI;};
- pkgAcqIndexRel(pkgAcquire *Owner,const pkgSourceList::Item *Location);
+ pkgAcqIndexRel(pkgAcquire *Owner,string URI,string URIDesc,
+ string ShortDesc);
};
// Item class for archive files
diff --git a/apt-pkg/acquire-method.cc b/apt-pkg/acquire-method.cc
index 770c68a90..3b905f4e9 100644
--- a/apt-pkg/acquire-method.cc
+++ b/apt-pkg/acquire-method.cc
@@ -1,12 +1,12 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: acquire-method.cc,v 1.24 2000/01/17 07:11:49 jgg Exp $
+// $Id: acquire-method.cc,v 1.25 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Acquire Method
This is a skeleton class that implements most of the functionality
- of a method and some usefull functions to make method implementation
+ of a method and some useful functions to make method implementation
simpler. The methods all derive this and specialize it. The most
complex implementation is the http method which needs to provide
pipelining, it runs the message engine at the same time it is
@@ -97,7 +97,8 @@ void pkgAcqMethod::Fail(string Err,bool Transient)
if (Queue != 0)
{
snprintf(S,sizeof(S)-50,"400 URI Failure\nURI: %s\n"
- "Message: %s\n",Queue->Uri.c_str(),Err.c_str());
+ "Message: %s %s\n",Queue->Uri.c_str(),Err.c_str(),
+ FailExtra.c_str());
// Dequeue
FetchItem *Tmp = Queue;
@@ -108,7 +109,8 @@ void pkgAcqMethod::Fail(string Err,bool Transient)
}
else
snprintf(S,sizeof(S)-50,"400 URI Failure\nURI: <UNKNOWN>\n"
- "Message: %s\n",Err.c_str());
+ "Message: %s %s\n",Err.c_str(),
+ FailExtra.c_str());
// Set the transient flag
if (Transient == true)
diff --git a/apt-pkg/acquire-method.h b/apt-pkg/acquire-method.h
index 64cff7331..b32d80c43 100644
--- a/apt-pkg/acquire-method.h
+++ b/apt-pkg/acquire-method.h
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: acquire-method.h,v 1.13 2000/01/17 07:11:49 jgg Exp $
+// $Id: acquire-method.h,v 1.14 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Acquire Method - Method helper class + functions
@@ -49,7 +49,8 @@ class pkgAcqMethod
vector<string> Messages;
FetchItem *Queue;
FetchItem *QueueBack;
-
+ string FailExtra;
+
// Handlers for messages
virtual bool Configuration(string Message);
virtual bool Fetch(FetchItem * /*Item*/) {return true;};
@@ -74,6 +75,7 @@ class pkgAcqMethod
void Status(const char *Format,...);
int Run(bool Single = false);
+ inline void SetFailExtraMsg(string Msg) {FailExtra = Msg;};
pkgAcqMethod(const char *Ver,unsigned long Flags = 0);
virtual ~pkgAcqMethod() {};
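
SetFailExtraMsg() lets a method attach a fixed piece of context that Fail() now appends to every 400 URI Failure message (see the two snprintf changes in acquire-method.cc above). A hedged sketch of a method using it; the subclass and the message text are invented, only the pkgAcqMethod calls are from this diff:

   // Illustrative method subclass: record which media label we were after so
   // that every subsequent failure message carries it.
   class ExampleDiscMethod : public pkgAcqMethod
   {
      public:

      void MissingDisc(string Label)
      {
         SetFailExtraMsg("Media: " + Label);
         Fail("Unable to read the requested disc",true);   // true => transient
      }

      ExampleDiscMethod() : pkgAcqMethod("1.0") {};
   };
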
diff --git a/apt-pkg/acquire-worker.cc b/apt-pkg/acquire-worker.cc
index a2f970ab6..4805b5ebc 100644
--- a/apt-pkg/acquire-worker.cc
+++ b/apt-pkg/acquire-worker.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: acquire-worker.cc,v 1.31 2000/05/10 05:56:46 jgg Exp $
+// $Id: acquire-worker.cc,v 1.32 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Acquire Worker
@@ -22,6 +22,8 @@
#include <apt-pkg/fileutl.h>
#include <apt-pkg/strutl.h>
+#include <apti18n.h>
+
#include <sys/stat.h>
#include <unistd.h>
#include <fcntl.h>
@@ -102,7 +104,7 @@ bool pkgAcquire::Worker::Start()
// Get the method path
string Method = _config->FindDir("Dir::Bin::Methods") + Access;
if (FileExists(Method) == false)
- return _error->Error("The method driver %s could not be found.",Method.c_str());
+ return _error->Error(_("The method driver %s could not be found."),Method.c_str());
if (Debug == true)
clog << "Starting method '" << Method << '\'' << endl;
@@ -154,7 +156,7 @@ bool pkgAcquire::Worker::Start()
// Read the configuration data
if (WaitFd(InFd) == false ||
ReadMessages() == false)
- return _error->Error("Method %s did not start correctly",Method.c_str());
+ return _error->Error(_("Method %s did not start correctly"),Method.c_str());
RunMessages();
if (OwnerQ != 0)
@@ -260,9 +262,9 @@ bool pkgAcquire::Worker::RunMessages()
Log->Pulse(Owner->GetOwner());
OwnerQ->ItemDone(Itm);
- if (TotalSize != 0 &&
+ if (TotalSize != 0 &&
(unsigned)atoi(LookupTag(Message,"Size","0").c_str()) != TotalSize)
- _error->Warning("Bizzar Error - File size is not what the server reported %s %u",
+ _error->Warning("Bizarre Error - File size is not what the server reported %s %lu",
LookupTag(Message,"Size","0").c_str(),TotalSize);
Owner->Done(Message,atoi(LookupTag(Message,"Size","0").c_str()),
@@ -313,7 +315,7 @@ bool pkgAcquire::Worker::RunMessages()
// 401 General Failure
case 401:
- _error->Error("Method %s General failure: %s",LookupTag(Message,"Message").c_str());
+ _error->Error("Method %s General failure: %s",Access.c_str(),LookupTag(Message,"Message").c_str());
break;
// 403 Media Change
@@ -405,7 +407,7 @@ bool pkgAcquire::Worker::SendConfiguration()
{
if (Top->Value.empty() == false)
{
- string Line = "Config-Item: " + Top->FullTag() + "=";
+ string Line = "Config-Item: " + QuoteString(Top->FullTag(),"=\"\n") + "=";
Line += QuoteString(Top->Value,"\n") + '\n';
Message += Line;
}
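
SendConfiguration() now escapes the tag as well as the value, so a configuration key containing '=', '"' or a newline can no longer corrupt the Config-Item line handed to the method. The line format, restated with the same QuoteString calls (the helper function exists only for illustration):

   #include <apt-pkg/strutl.h>

   // e.g. ConfigItemLine("Acquire::http::Proxy","http://proxy.example/")
   //   -> "Config-Item: Acquire::http::Proxy=http://proxy.example/\n"
   static string ConfigItemLine(string FullTag,string Value)
   {
      return "Config-Item: " + QuoteString(FullTag,"=\"\n") + "=" +
             QuoteString(Value,"\n") + '\n';
   }
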
diff --git a/apt-pkg/acquire-worker.h b/apt-pkg/acquire-worker.h
index 797ea3f7c..6e1952202 100644
--- a/apt-pkg/acquire-worker.h
+++ b/apt-pkg/acquire-worker.h
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: acquire-worker.h,v 1.11 1999/10/18 00:37:35 jgg Exp $
+// $Id: acquire-worker.h,v 1.12 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Acquire Worker - Worker process manager
@@ -21,10 +21,10 @@
// Interfacing to the method process
class pkgAcquire::Worker
{
- friend pkgAcquire;
+ friend class pkgAcquire;
protected:
- friend Queue;
+ friend class Queue;
/* Linked list starting at a Queue and a linked list starting
at Acquire */
diff --git a/apt-pkg/acquire.cc b/apt-pkg/acquire.cc
index 428bfd50f..1be8551f4 100644
--- a/apt-pkg/acquire.cc
+++ b/apt-pkg/acquire.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: acquire.cc,v 1.46 2000/01/27 04:15:09 jgg Exp $
+// $Id: acquire.cc,v 1.47 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
 Acquire - File Acquisition
@@ -23,6 +23,8 @@
#include <apt-pkg/error.h>
#include <apt-pkg/strutl.h>
+#include <apti18n.h>
+
#include <dirent.h>
#include <sys/time.h>
#include <errno.h>
@@ -52,11 +54,11 @@ pkgAcquire::pkgAcquire(pkgAcquireStatus *Log) : Log(Log)
struct stat St;
if (stat((_config->FindDir("Dir::State::lists") + "partial/").c_str(),&St) != 0 ||
S_ISDIR(St.st_mode) == 0)
- _error->Error("Lists directory %spartial is missing.",
+ _error->Error(_("Lists directory %spartial is missing."),
_config->FindDir("Dir::State::lists").c_str());
if (stat((_config->FindDir("Dir::Cache::Archives") + "partial/").c_str(),&St) != 0 ||
S_ISDIR(St.st_mode) == 0)
- _error->Error("Archive directory %spartial is missing.",
+ _error->Error(_("Archive directory %spartial is missing."),
_config->FindDir("Dir::Cache::Archives").c_str());
}
/*}}}*/
@@ -398,13 +400,13 @@ bool pkgAcquire::Clean(string Dir)
{
DIR *D = opendir(Dir.c_str());
if (D == 0)
- return _error->Errno("opendir","Unable to read %s",Dir.c_str());
+ return _error->Errno("opendir",_("Unable to read %s"),Dir.c_str());
string StartDir = SafeGetCWD();
if (chdir(Dir.c_str()) != 0)
{
closedir(D);
- return _error->Errno("chdir","Unable to change to ",Dir.c_str());
+ return _error->Errno("chdir",_("Unable to change to %s"),Dir.c_str());
}
for (struct dirent *Dir = readdir(D); Dir != 0; Dir = readdir(D))
@@ -435,9 +437,9 @@ bool pkgAcquire::Clean(string Dir)
// Acquire::TotalNeeded - Number of bytes to fetch /*{{{*/
// ---------------------------------------------------------------------
/* This is the total number of bytes needed */
-unsigned long pkgAcquire::TotalNeeded()
+double pkgAcquire::TotalNeeded()
{
- unsigned long Total = 0;
+ double Total = 0;
for (pkgAcquire::Item **I = ItemsBegin(); I != ItemsEnd(); I++)
Total += (*I)->FileSize;
return Total;
@@ -446,9 +448,9 @@ unsigned long pkgAcquire::TotalNeeded()
// Acquire::FetchNeeded - Number of bytes needed to get /*{{{*/
// ---------------------------------------------------------------------
/* This is the number of bytes that is not local */
-unsigned long pkgAcquire::FetchNeeded()
+double pkgAcquire::FetchNeeded()
{
- unsigned long Total = 0;
+ double Total = 0;
for (pkgAcquire::Item **I = ItemsBegin(); I != ItemsEnd(); I++)
if ((*I)->Local == false)
Total += (*I)->FileSize;
@@ -458,9 +460,9 @@ unsigned long pkgAcquire::FetchNeeded()
// Acquire::PartialPresent - Number of partial bytes we already have /*{{{*/
// ---------------------------------------------------------------------
/* This is the number of bytes that is not local */
-unsigned long pkgAcquire::PartialPresent()
+double pkgAcquire::PartialPresent()
{
- unsigned long Total = 0;
+ double Total = 0;
for (pkgAcquire::Item **I = ItemsBegin(); I != ItemsEnd(); I++)
if ((*I)->Local == false)
Total += (*I)->PartialSize;
@@ -736,7 +738,7 @@ bool pkgAcquireStatus::Pulse(pkgAcquire *Owner)
// Totally ignore local items
if ((*I)->Local == true)
continue;
-
+
TotalBytes += (*I)->FileSize;
if ((*I)->Complete == true)
CurrentBytes += (*I)->FileSize;
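
The byte counters in pkgAcquire (and, further down, pkgAcquireStatus) move from unsigned long to double because a download set can exceed 4 GB, which overflows a 32-bit unsigned long; a double holds such totals with more than enough precision for progress reporting. A small illustration of the failure mode (values are arbitrary):

   // Illustration only: on a platform where unsigned long is 32 bits the
   // first total wraps to exactly 1 GiB, while the double keeps the value.
   void ByteCounterExample()
   {
      unsigned long Wrapped = 5UL * 1024 * 1024 * 1024;   // 5 GiB -> 1 GiB on 32-bit
      double Safe = 5.0 * 1024 * 1024 * 1024;             // 5368709120 bytes
      (void)Wrapped; (void)Safe;
   }
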
diff --git a/apt-pkg/acquire.h b/apt-pkg/acquire.h
index de1474f56..d5b759cb3 100644
--- a/apt-pkg/acquire.h
+++ b/apt-pkg/acquire.h
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: acquire.h,v 1.27 2000/01/27 04:15:09 jgg Exp $
+// $Id: acquire.h,v 1.28 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
 Acquire - File Acquisition
@@ -52,8 +52,8 @@ class pkgAcquire
class Worker;
struct MethodConfig;
struct ItemDesc;
- friend Item;
- friend Queue;
+ friend class Item;
+ friend class Queue;
protected:
@@ -112,9 +112,9 @@ class pkgAcquire
bool Clean(string Dir);
// Returns the size of the total download set
- unsigned long TotalNeeded();
- unsigned long FetchNeeded();
- unsigned long PartialPresent();
+ double TotalNeeded();
+ double FetchNeeded();
+ double PartialPresent();
pkgAcquire(pkgAcquireStatus *Log = 0);
virtual ~pkgAcquire();
@@ -132,8 +132,9 @@ struct pkgAcquire::ItemDesc
// List of possible items queued for download.
class pkgAcquire::Queue
{
- friend pkgAcquire;
- friend pkgAcquire::UriIterator;
+ friend class pkgAcquire;
+ friend class pkgAcquire::UriIterator;
+ friend class pkgAcquire::Worker;
Queue *Next;
protected:
@@ -241,11 +242,11 @@ class pkgAcquireStatus
struct timeval Time;
struct timeval StartTime;
- unsigned long LastBytes;
+ double LastBytes;
double CurrentCPS;
- unsigned long CurrentBytes;
- unsigned long TotalBytes;
- unsigned long FetchedBytes;
+ double CurrentBytes;
+ double TotalBytes;
+ double FetchedBytes;
unsigned long ElapsedTime;
unsigned long TotalItems;
unsigned long CurrentItems;
diff --git a/apt-pkg/algorithms.cc b/apt-pkg/algorithms.cc
index 7f7cb204f..fb85d12f9 100644
--- a/apt-pkg/algorithms.cc
+++ b/apt-pkg/algorithms.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: algorithms.cc,v 1.31 2000/10/03 23:59:05 jgg Exp $
+// $Id: algorithms.cc,v 1.32 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Algorithms - A set of misc algorithms
@@ -20,6 +20,10 @@
#include <apt-pkg/algorithms.h>
#include <apt-pkg/error.h>
#include <apt-pkg/configuration.h>
+#include <apt-pkg/sptr.h>
+
+#include <apti18n.h>
+
#include <iostream.h>
/*}}}*/
@@ -27,19 +31,41 @@ pkgProblemResolver *pkgProblemResolver::This = 0;
// Simulate::Simulate - Constructor /*{{{*/
// ---------------------------------------------------------------------
-/* */
-pkgSimulate::pkgSimulate(pkgDepCache &Cache) : pkgPackageManager(Cache),
- Sim(Cache.GetMap())
+/* The legacy translation of input Pkg iterators performed here is obsolete;
+ it is no longer necessary since the pkgCaches are fully shared now. */
+pkgSimulate::pkgSimulate(pkgDepCache *Cache) : pkgPackageManager(Cache),
+ iPolicy(Cache),
+ Sim(&Cache->GetCache(),&iPolicy)
{
- Flags = new unsigned char[Cache.HeaderP->PackageCount];
- memset(Flags,0,sizeof(*Flags)*Cache.HeaderP->PackageCount);
+ Sim.Init(0);
+ Flags = new unsigned char[Cache->Head().PackageCount];
+ memset(Flags,0,sizeof(*Flags)*Cache->Head().PackageCount);
// Fake a filename so as not to activate the media swapping
string Jnk = "SIMULATE";
- for (unsigned int I = 0; I != Cache.Head().PackageCount; I++)
+ for (unsigned int I = 0; I != Cache->Head().PackageCount; I++)
FileNames[I] = Jnk;
}
/*}}}*/
+// Simulate::Describe - Describe a package /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+void pkgSimulate::Describe(PkgIterator Pkg,ostream &out,bool Now)
+{
+ VerIterator Ver(Sim);
+ if (Now == true)
+ Ver = Pkg.CurrentVer();
+ else
+ Ver = Sim[Pkg].CandidateVerIter(Sim);
+
+ out << Pkg.Name();
+
+ if (Ver.end() == true)
+ return;
+
+ out << " (" << Ver.VerStr() << ' ' << Ver.RelStr() << ')';
+}
+ /*}}}*/
// Simulate::Install - Simulate unpacking of a package /*{{{*/
// ---------------------------------------------------------------------
/* */
@@ -49,7 +75,8 @@ bool pkgSimulate::Install(PkgIterator iPkg,string /*File*/)
PkgIterator Pkg = Sim.FindPkg(iPkg.Name());
Flags[Pkg->ID] = 1;
- cout << "Inst " << Pkg.Name();
+ cout << "Inst ";
+ Describe(Pkg,cout,false);
Sim.MarkInstall(Pkg,false);
// Look for broken conflicts+predepends.
@@ -58,16 +85,23 @@ bool pkgSimulate::Install(PkgIterator iPkg,string /*File*/)
if (Sim[I].InstallVer == 0)
continue;
- for (DepIterator D = Sim[I].InstVerIter(Sim).DependsList(); D.end() == false; D++)
- if (D->Type == pkgCache::Dep::Conflicts || D->Type == pkgCache::Dep::PreDepends)
+ for (DepIterator D = Sim[I].InstVerIter(Sim).DependsList(); D.end() == false;)
+ {
+ DepIterator Start;
+ DepIterator End;
+ D.GlobOr(Start,End);
+ if (Start->Type == pkgCache::Dep::Conflicts ||
+ Start->Type == pkgCache::Dep::Obsoletes ||
+ End->Type == pkgCache::Dep::PreDepends)
{
- if ((Sim[D] & pkgDepCache::DepInstall) == 0)
+ if ((Sim[End] & pkgDepCache::DepGInstall) == 0)
{
- cout << " [" << I.Name() << " on " << D.TargetPkg().Name() << ']';
- if (D->Type == pkgCache::Dep::Conflicts)
+ cout << " [" << I.Name() << " on " << Start.TargetPkg().Name() << ']';
+ if (Start->Type == pkgCache::Dep::Conflicts)
_error->Error("Fatal, conflicts violated %s",I.Name());
}
- }
+ }
+ }
}
if (Sim.BrokenCount() != 0)
@@ -102,7 +136,9 @@ bool pkgSimulate::Configure(PkgIterator iPkg)
(Sim[D] & pkgDepCache::DepInstall) != 0)
continue;
- if (D->Type == pkgCache::Dep::Conflicts)
+ if (D->Type == pkgCache::Dep::Obsoletes)
+ cout << " Obsoletes:" << D.TargetPkg().Name();
+ else if (D->Type == pkgCache::Dep::Conflicts)
cout << " Conflicts:" << D.TargetPkg().Name();
else
cout << " Depends:" << D.TargetPkg().Name();
@@ -112,7 +148,10 @@ bool pkgSimulate::Configure(PkgIterator iPkg)
_error->Error("Conf Broken %s",Pkg.Name());
}
else
- cout << "Conf " << Pkg.Name();
+ {
+ cout << "Conf ";
+ Describe(Pkg,cout,false);
+ }
if (Sim.BrokenCount() != 0)
ShortBreaks();
@@ -133,9 +172,10 @@ bool pkgSimulate::Remove(PkgIterator iPkg,bool Purge)
Flags[Pkg->ID] = 3;
Sim.MarkDelete(Pkg);
if (Purge == true)
- cout << "Purg " << Pkg.Name();
+ cout << "Purg ";
else
- cout << "Remv " << Pkg.Name();
+ cout << "Remv ";
+ Describe(Pkg,cout,false);
if (Sim.BrokenCount() != 0)
ShortBreaks();
@@ -185,8 +225,8 @@ bool pkgApplyStatus(pkgDepCache &Cache)
if (Cache[I].CandidateVerIter(Cache).Downloadable() == true)
Cache.MarkInstall(I);
else
- return _error->Error("The package %s needs to be reinstalled, "
- "but I can't find an archive for it.",I.Name());
+ return _error->Error(_("The package %s needs to be reinstalled, "
+ "but I can't find an archive for it."),I.Name());
}
continue;
@@ -249,7 +289,7 @@ bool pkgFixBroken(pkgDepCache &Cache)
Cache.MarkInstall(I,true);
}
- pkgProblemResolver Fix(Cache);
+ pkgProblemResolver Fix(&Cache);
return Fix.Resolve(true);
}
/*}}}*/
@@ -281,7 +321,7 @@ bool pkgDistUpgrade(pkgDepCache &Cache)
if (I->CurrentVer != 0)
Cache.MarkInstall(I,false);
- pkgProblemResolver Fix(Cache);
+ pkgProblemResolver Fix(&Cache);
// Hold back held packages.
if (_config->FindB("APT::Ignore-Hold",false) == false)
@@ -306,7 +346,7 @@ bool pkgDistUpgrade(pkgDepCache &Cache)
to install packages not marked for install */
bool pkgAllUpgrade(pkgDepCache &Cache)
{
- pkgProblemResolver Fix(Cache);
+ pkgProblemResolver Fix(&Cache);
if (Cache.BrokenCount() != 0)
return false;
@@ -317,7 +357,7 @@ bool pkgAllUpgrade(pkgDepCache &Cache)
if (Cache[I].Install() == true)
Fix.Protect(I);
- if (_config->FindB("APT::Ingore-Hold",false) == false)
+ if (_config->FindB("APT::Ignore-Hold",false) == false)
if (I->SelectedState == pkgCache::State::Hold)
continue;
@@ -375,10 +415,10 @@ bool pkgMinimizeUpgrade(pkgDepCache &Cache)
// ProblemResolver::pkgProblemResolver - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
-pkgProblemResolver::pkgProblemResolver(pkgDepCache &Cache) : Cache(Cache)
+pkgProblemResolver::pkgProblemResolver(pkgDepCache *pCache) : Cache(*pCache)
{
// Allocate memory
- unsigned long Size = Cache.HeaderP->PackageCount;
+ unsigned long Size = Cache.Head().PackageCount;
Scores = new signed short[Size];
Flags = new unsigned char[Size];
memset(Flags,0,sizeof(*Flags)*Size);
@@ -387,6 +427,15 @@ pkgProblemResolver::pkgProblemResolver(pkgDepCache &Cache) : Cache(Cache)
Debug = _config->FindB("Debug::pkgProblemResolver",false);
}
/*}}}*/
+// ProblemResolver::~pkgProblemResolver - Destructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+pkgProblemResolver::~pkgProblemResolver()
+{
+ delete [] Scores;
+ delete [] Flags;
+}
+ /*}}}*/
// ProblemResolver::ScoreSort - Sort the list by score /*{{{*/
// ---------------------------------------------------------------------
/* */
@@ -406,7 +455,7 @@ int pkgProblemResolver::ScoreSort(const void *a,const void *b)
/* */
void pkgProblemResolver::MakeScores()
{
- unsigned long Size = Cache.HeaderP->PackageCount;
+ unsigned long Size = Cache.Head().PackageCount;
memset(Scores,0,sizeof(*Scores)*Size);
// Generate the base scores for a package based on its properties
@@ -450,7 +499,7 @@ void pkgProblemResolver::MakeScores()
}
 // Copy the scores to avoid additive looping
- signed short *OldScores = new signed short[Size];
+ SPtrArray<signed short> OldScores = new signed short[Size];
memcpy(OldScores,Scores,sizeof(*Scores)*Size);
/* Now we cause 1 level of dependency inheritance, that is we add the
@@ -493,9 +542,7 @@ void pkgProblemResolver::MakeScores()
Scores[I->ID] += 10000;
if ((I->Flags & pkgCache::Flag::Essential) == pkgCache::Flag::Essential)
Scores[I->ID] += 5000;
- }
-
- delete [] OldScores;
+ }
}
/*}}}*/
// ProblemResolver::DoUpgrade - Attempt to upgrade this package /*{{{*/
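
OldScores, PList and the AllTargets() results are now held in SPtrArray (note the new #include <apt-pkg/sptr.h> at the top of this file), so the arrays are released automatically and the explicit delete [] calls, which the added early continue/break paths could otherwise skip, can go away. Roughly what SPtrArray provides, reduced to its essentials (the real template lives in apt-pkg/contrib/sptr.h; this copy only shows the ownership idea):

   // Minimal scoped-array sketch: takes ownership of a new[]'d block and
   // delete[]s it when the holder leaves scope.
   template <class T> class ScopedArraySketch
   {
      T *Ptr;

      public:

      operator T *() {return Ptr;};
      T &operator [](signed long I) {return Ptr[I];};

      ScopedArraySketch(T *P) : Ptr(P) {};
      ~ScopedArraySketch() {delete [] Ptr;};
   };
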
@@ -573,8 +620,9 @@ bool pkgProblemResolver::DoUpgrade(pkgCache::PkgIterator Pkg)
{
/* We let the algorithm deal with conflicts on its next iteration,
it is much smarter than us */
- if (Start->Type == pkgCache::Dep::Conflicts)
- break;
+ if (Start->Type == pkgCache::Dep::Conflicts ||
+ Start->Type == pkgCache::Dep::Obsoletes)
+ break;
if (Debug == true)
clog << " Reinst Failed early because of " << Start.TargetPkg().Name() << endl;
@@ -621,7 +669,7 @@ bool pkgProblemResolver::DoUpgrade(pkgCache::PkgIterator Pkg)
 upgrade packages to avoid problems. */
bool pkgProblemResolver::Resolve(bool BrokenFix)
{
- unsigned long Size = Cache.HeaderP->PackageCount;
+ unsigned long Size = Cache.Head().PackageCount;
// Record which packages are marked for install
bool Again = false;
@@ -657,7 +705,7 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
operates from highest score to lowest. This prevents problems when
high score packages cause the removal of lower score packages that
would cause the removal of even lower score packages. */
- pkgCache::Package **PList = new pkgCache::Package *[Size];
+ SPtrArray<pkgCache::Package *> PList = new pkgCache::Package *[Size];
pkgCache::Package **PEnd = PList;
for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
*PEnd++ = I;
@@ -728,19 +776,12 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
bool InOr = false;
pkgCache::DepIterator Start;
pkgCache::DepIterator End;
- PackageKill *OldEnd;
+ PackageKill *OldEnd = LEnd;
enum {OrRemove,OrKeep} OrOp = OrRemove;
for (pkgCache::DepIterator D = Cache[I].InstVerIter(Cache).DependsList();
D.end() == false || InOr == true;)
{
- // We only worry about critical deps.
- if (D.IsCritical() != true)
- {
- D++;
- continue;
- }
-
// Compute a single dependency element (glob or)
if (Start == End)
{
@@ -761,13 +802,22 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
if (Debug == true)
clog << " Or group keep for " << I.Name() << endl;
Cache.MarkKeep(I);
- }
+ }
}
+ /* We do an extra loop (as above) to finalize the or group
+ processing */
+ InOr = false;
OrOp = OrRemove;
D.GlobOr(Start,End);
+ if (Start.end() == true)
+ break;
+
+ // We only worry about critical deps.
+ if (End.IsCritical() != true)
+ continue;
+
InOr = Start != End;
- cout << Start.TargetPkg().Name() << ',' << End.TargetPkg().Name() << ',' << InOr << endl;
OldEnd = LEnd;
}
else
@@ -783,9 +833,10 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
/* Look across the version list. If there are no possible
targets then we keep the package and bail. This is necessary
if a package has a dep on another package that cant be found */
- pkgCache::Version **VList = Start.AllTargets();
+ SPtrArray<pkgCache::Version *> VList = Start.AllTargets();
if (*VList == 0 && (Flags[I->ID] & Protected) != Protected &&
Start->Type != pkgCache::Dep::Conflicts &&
+ Start->Type != pkgCache::Dep::Obsoletes &&
Cache[I].NowBroken() == false)
{
if (InOr == true)
@@ -811,14 +862,16 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
" as a solution to " << I.Name() << ' ' << (int)Scores[I->ID] << endl;
if (Scores[I->ID] <= Scores[Pkg->ID] ||
((Cache[Start] & pkgDepCache::DepNow) == 0 &&
- End->Type != pkgCache::Dep::Conflicts))
+ End->Type != pkgCache::Dep::Conflicts &&
+ End->Type != pkgCache::Dep::Obsoletes))
{
// Try a little harder to fix protected packages..
if ((Flags[I->ID] & Protected) == Protected)
{
if (DoUpgrade(Pkg) == true)
{
- Scores[Pkg->ID] = Scores[I->ID];
+ if (Scores[Pkg->ID] > Scores[I->ID])
+ Scores[Pkg->ID] = Scores[I->ID];
break;
}
@@ -853,7 +906,10 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
clog << " Removing " << I.Name() << " rather than change " << Start.TargetPkg().Name() << endl;
Cache.MarkDelete(I);
if (Counter > 1)
- Scores[I->ID] = Scores[Pkg->ID];
+ {
+ if (Scores[Pkg->ID] > Scores[I->ID])
+ Scores[I->ID] = Scores[Pkg->ID];
+ }
}
}
}
@@ -874,13 +930,16 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
LEnd->Dep = End;
LEnd++;
- if (Start->Type != pkgCache::Dep::Conflicts)
+ if (Start->Type != pkgCache::Dep::Conflicts &&
+ Start->Type != pkgCache::Dep::Obsoletes)
break;
}
}
 // Hm, nothing can possibly satisfy this dep. Nuke it.
- if (VList[0] == 0 && Start->Type != pkgCache::Dep::Conflicts &&
+ if (VList[0] == 0 &&
+ Start->Type != pkgCache::Dep::Conflicts &&
+ Start->Type != pkgCache::Dep::Obsoletes &&
(Flags[I->ID] & Protected) != Protected)
{
bool Installed = Cache[I].Install();
@@ -910,8 +969,6 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
Done = true;
}
- delete [] VList;
-
// Try some more
if (InOr == true)
continue;
@@ -928,7 +985,8 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
Change = true;
if ((Cache[J->Dep] & pkgDepCache::DepGNow) == 0)
{
- if (J->Dep->Type == pkgCache::Dep::Conflicts)
+ if (J->Dep->Type == pkgCache::Dep::Conflicts ||
+ J->Dep->Type == pkgCache::Dep::Obsoletes)
{
if (Debug == true)
clog << " Fixing " << I.Name() << " via remove of " << J->Pkg.Name() << endl;
@@ -941,9 +999,12 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
clog << " Fixing " << I.Name() << " via keep of " << J->Pkg.Name() << endl;
Cache.MarkKeep(J->Pkg);
}
-
+
if (Counter > 1)
- Scores[J->Pkg->ID] = Scores[I->ID];
+ {
+ if (Scores[I->ID] > Scores[J->Pkg->ID])
+ Scores[J->Pkg->ID] = Scores[I->ID];
+ }
}
}
}
@@ -951,10 +1012,7 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
if (Debug == true)
clog << "Done" << endl;
-
- delete [] Scores;
- delete [] PList;
-
+
if (Cache.BrokenCount() != 0)
{
// See if this is the result of a hold
@@ -964,9 +1022,9 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
if (Cache[I].InstBroken() == false)
continue;
if ((Flags[I->ID] & Protected) != Protected)
- return _error->Error("Error, pkgProblemResolver::Resolve generated breaks, this may be caused by held packages.");
+ return _error->Error(_("Error, pkgProblemResolver::Resolve generated breaks, this may be caused by held packages."));
}
- return _error->Error("Unable to correct problems, you have held broken packages.");
+ return _error->Error(_("Unable to correct problems, you have held broken packages."));
}
return true;
@@ -979,7 +1037,7 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
system was non-broken previously. */
bool pkgProblemResolver::ResolveByKeep()
{
- unsigned long Size = Cache.HeaderP->PackageCount;
+ unsigned long Size = Cache.Head().PackageCount;
if (Debug == true)
clog << "Entering ResolveByKeep" << endl;
@@ -1007,7 +1065,7 @@ bool pkgProblemResolver::ResolveByKeep()
continue;
/* Keep the package. If this works then great, otherwise we have
- to be significantly more agressive and manipulate its dependencies */
+ to be significantly more aggressive and manipulate its dependencies */
if ((Flags[I->ID] & Protected) == 0)
{
if (Debug == true)
@@ -1015,7 +1073,7 @@ bool pkgProblemResolver::ResolveByKeep()
Cache.MarkKeep(I);
if (Cache[I].InstBroken() == false)
{
- K = PList;
+ K = PList - 1;
continue;
}
}
@@ -1056,7 +1114,7 @@ bool pkgProblemResolver::ResolveByKeep()
clog << "Package " << I.Name() << " has broken dep on " << End.TargetPkg().Name() << endl;
// Look at all the possible provides on this package
- pkgCache::Version **VList = End.AllTargets();
+ SPtrArray<pkgCache::Version *> VList = End.AllTargets();
for (pkgCache::Version **V = VList; *V != 0; V++)
{
pkgCache::VerIterator Ver(Cache,*V);
@@ -1089,7 +1147,7 @@ bool pkgProblemResolver::ResolveByKeep()
if (K == LastStop)
return _error->Error("Internal Error, pkgProblemResolver::ResolveByKeep is looping on package %s.",I.Name());
LastStop = K;
- K = PList;
+ K = PList - 1;
}
return true;
@@ -1112,3 +1170,34 @@ void pkgProblemResolver::InstallProtect()
}
}
/*}}}*/
+
+// PrioSortList - Sort a list of versions by priority /*{{{*/
+// ---------------------------------------------------------------------
+/* This is meant to be used in conjunction with AllTargets to get a list
+ of versions ordered by preference. */
+static pkgCache *PrioCache;
+static int PrioComp(const void *A,const void *B)
+{
+ pkgCache::VerIterator L(*PrioCache,*(pkgCache::Version **)A);
+ pkgCache::VerIterator R(*PrioCache,*(pkgCache::Version **)B);
+
+ if ((L.ParentPkg()->Flags & pkgCache::Flag::Essential) == pkgCache::Flag::Essential &&
+ (R.ParentPkg()->Flags & pkgCache::Flag::Essential) != pkgCache::Flag::Essential)
+ return 1;
+ if ((L.ParentPkg()->Flags & pkgCache::Flag::Essential) != pkgCache::Flag::Essential &&
+ (R.ParentPkg()->Flags & pkgCache::Flag::Essential) == pkgCache::Flag::Essential)
+ return -1;
+
+ if (L->Priority != R->Priority)
+ return L->Priority - R->Priority;
+ return strcmp(L.ParentPkg().Name(),R.ParentPkg().Name());
+}
+void pkgPrioSortList(pkgCache &Cache,pkgCache::Version **List)
+{
+ unsigned long Count = 0;
+ PrioCache = &Cache;
+ for (pkgCache::Version **I = List; *I != 0; I++)
+ Count++;
+ qsort(List,Count,sizeof(*List),PrioComp);
+}
+ /*}}}*/
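
As the comment says, pkgPrioSortList() is meant to be fed the array that DepIterator::AllTargets() returns. A hedged usage sketch; AllTargets(), SPtrArray and the VerIterator constructor are real, the wrapper function is not part of this diff:

   #include <apt-pkg/algorithms.h>
   #include <apt-pkg/sptr.h>

   // Pick the most preferred version that could satisfy a dependency.
   static bool BestTarget(pkgCache &Cache,pkgCache::DepIterator D,
                          pkgCache::VerIterator &Res)
   {
      SPtrArray<pkgCache::Version *> List = D.AllTargets();
      if (*List == 0)
         return false;                       // nothing satisfies this dep
      pkgPrioSortList(Cache,List);
      Res = pkgCache::VerIterator(Cache,*List);
      return true;
   }
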
diff --git a/apt-pkg/algorithms.h b/apt-pkg/algorithms.h
index d68fbfbea..00b7882e2 100644
--- a/apt-pkg/algorithms.h
+++ b/apt-pkg/algorithms.h
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: algorithms.h,v 1.8 1999/10/27 04:38:27 jgg Exp $
+// $Id: algorithms.h,v 1.9 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Algorithms - A set of misc algorithms
@@ -27,7 +27,6 @@
##################################################################### */
/*}}}*/
-// Header section: pkglib
#ifndef PKGLIB_ALGORITHMS_H
#define PKGLIB_ALGORITHMS_H
@@ -42,8 +41,22 @@ class pkgSimulate : public pkgPackageManager
{
protected:
+ class Policy : public pkgDepCache::Policy
+ {
+ pkgDepCache *Cache;
+ public:
+
+ virtual VerIterator GetCandidateVer(PkgIterator Pkg)
+ {
+ return (*Cache)[Pkg].CandidateVerIter(*Cache);
+ }
+
+ Policy(pkgDepCache *Cache) : Cache(Cache) {};
+ };
+
unsigned char *Flags;
+ Policy iPolicy;
pkgDepCache Sim;
 // The actual installation implementation
@@ -51,10 +64,11 @@ class pkgSimulate : public pkgPackageManager
virtual bool Configure(PkgIterator Pkg);
virtual bool Remove(PkgIterator Pkg,bool Purge);
void ShortBreaks();
+ void Describe(PkgIterator iPkg,ostream &out,bool Now);
public:
- pkgSimulate(pkgDepCache &Cache);
+ pkgSimulate(pkgDepCache *Cache);
};
class pkgProblemResolver
@@ -101,7 +115,8 @@ class pkgProblemResolver
void InstallProtect();
- pkgProblemResolver(pkgDepCache &Cache);
+ pkgProblemResolver(pkgDepCache *Cache);
+ ~pkgProblemResolver();
};
bool pkgDistUpgrade(pkgDepCache &Cache);
@@ -110,4 +125,6 @@ bool pkgFixBroken(pkgDepCache &Cache);
bool pkgAllUpgrade(pkgDepCache &Cache);
bool pkgMinimizeUpgrade(pkgDepCache &Cache);
+void pkgPrioSortList(pkgCache &Cache,pkgCache::Version **List);
+
#endif
diff --git a/apt-pkg/cachefile.cc b/apt-pkg/cachefile.cc
index d7f3c0937..74d136afb 100644
--- a/apt-pkg/cachefile.cc
+++ b/apt-pkg/cachefile.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: cachefile.cc,v 1.4 1999/06/24 04:06:30 jgg Exp $
+// $Id: cachefile.cc,v 1.5 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
CacheFile - Simple wrapper class for opening, generating and whatnot
@@ -21,23 +21,29 @@
#include <apt-pkg/sourcelist.h>
#include <apt-pkg/pkgcachegen.h>
#include <apt-pkg/configuration.h>
+#include <apt-pkg/policy.h>
+#include <apt-pkg/pkgsystem.h>
+
+#include <apti18n.h>
/*}}}*/
// CacheFile::CacheFile - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
-pkgCacheFile::pkgCacheFile() : Map(0), Cache(0), Lock(0)
+pkgCacheFile::pkgCacheFile() : Map(0), Cache(0), DCache(0), Policy(0)
{
}
/*}}}*/
-// CacheFile::~CacheFile - Destructor /*{{{*/
+// CacheFile::~CacheFile - Destructor /*{{{*/
// ---------------------------------------------------------------------
/* */
pkgCacheFile::~pkgCacheFile()
{
+ delete DCache;
+ delete Policy;
delete Cache;
delete Map;
- delete Lock;
+ _system->UnLock(true);
}
/*}}}*/
// CacheFile::Open - Open the cache files, creating if necessary /*{{{*/
@@ -46,7 +52,8 @@ pkgCacheFile::~pkgCacheFile()
bool pkgCacheFile::Open(OpProgress &Progress,bool WithLock)
{
if (WithLock == true)
- Lock = new pkgDpkgLock;
+ if (_system->Lock() == false)
+ return false;
if (_error->PendingError() == true)
return false;
@@ -54,38 +61,35 @@ bool pkgCacheFile::Open(OpProgress &Progress,bool WithLock)
// Read the source list
pkgSourceList List;
if (List.ReadMainList() == false)
- return _error->Error("The list of sources could not be read.");
+ return _error->Error(_("The list of sources could not be read."));
+
+ // Read the caches
+ bool Res = pkgMakeStatusCache(List,Progress,&Map,!WithLock);
+ Progress.Done();
+ if (Res == false)
+ return _error->Error(_("The package lists or status file could not be parsed or opened."));
+
+ /* This sux, remove it someday */
+ if (_error->empty() == false)
+ _error->Warning(_("You may want to run apt-get update to correct these missing files"));
+
+ Cache = new pkgCache(Map);
+ if (_error->PendingError() == true)
+ return false;
- /* Build all of the caches, using the cache files if we are locking
- (ie as root) */
- if (WithLock == true)
- {
- pkgMakeStatusCache(List,Progress);
- Progress.Done();
- if (_error->PendingError() == true)
- return _error->Error("The package lists or status file could not be parsed or opened.");
- if (_error->empty() == false)
- _error->Warning("You may want to run apt-get update to correct these missing files");
-
- // Open the cache file
- FileFd File(_config->FindFile("Dir::Cache::pkgcache"),FileFd::ReadOnly);
- if (_error->PendingError() == true)
- return false;
-
- Map = new MMap(File,MMap::Public | MMap::ReadOnly);
- if (_error->PendingError() == true)
- return false;
- }
- else
- {
- Map = pkgMakeStatusCacheMem(List,Progress);
- Progress.Done();
- if (Map == 0)
- return false;
- }
+ // The policy engine
+ Policy = new pkgPolicy(Cache);
+ if (_error->PendingError() == true)
+ return false;
+ if (ReadPinFile(*Policy) == false)
+ return false;
// Create the dependency cache
- Cache = new pkgDepCache(*Map,Progress);
+ DCache = new pkgDepCache(Cache,Policy);
+ if (_error->PendingError() == true)
+ return false;
+
+ DCache->Init(&Progress);
Progress.Done();
if (_error->PendingError() == true)
return false;
@@ -93,3 +97,21 @@ bool pkgCacheFile::Open(OpProgress &Progress,bool WithLock)
return true;
}
/*}}}*/
+
+// CacheFile::Close - close the cache files /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+void pkgCacheFile::Close()
+{
+ delete DCache;
+ delete Policy;
+ delete Cache;
+ delete Map;
+ _system->UnLock(true);
+
+ Map = 0;
+ DCache = 0;
+ Policy = 0;
+ Cache = 0;
+}
+ /*}}}*/
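
pkgCacheFile now builds the whole stack itself: the package/status caches via pkgMakeStatusCache, a pkgPolicy fed from ReadPinFile, and a pkgDepCache on top, taking the lock through the new _system hook instead of pkgDpkgLock. A hedged sketch of a client (OpTextProgress is assumed to be the usual terminal progress class; everything else follows the interface shown in cachefile.h below):

   #include <apt-pkg/cachefile.h>
   #include <apt-pkg/progress.h>
   #include <iostream.h>

   bool ListBrokenPackages()
   {
      OpTextProgress Prog;
      pkgCacheFile Cache;
      if (Cache.Open(Prog,true) == false)       // true: take the system lock
         return false;

      for (pkgCache::PkgIterator I = Cache->PkgBegin(); I.end() == false; I++)
         if (Cache[I].InstBroken() == true)
            cout << I.Name() << " has broken dependencies" << endl;

      Cache.Close();
      return true;
   }
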
diff --git a/apt-pkg/cachefile.h b/apt-pkg/cachefile.h
index e2414446e..e2540ed58 100644
--- a/apt-pkg/cachefile.h
+++ b/apt-pkg/cachefile.h
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: cachefile.h,v 1.3 1999/06/27 03:18:28 jgg Exp $
+// $Id: cachefile.h,v 1.4 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
CacheFile - Simple wrapper class for opening, generating and whatnot
@@ -9,6 +9,9 @@
of caches. It can operate as root, as not root, show progress and so on,
it transparently handles everything necessary.
+ This means it can rebuild caches from the source list and instantiates
+ and prepares the standard policy mechanism.
+
##################################################################### */
/*}}}*/
#ifndef PKGLIB_CACHEFILE_H
@@ -19,30 +22,32 @@
#endif
#include <apt-pkg/depcache.h>
-#include <apt-pkg/dpkginit.h>
+class pkgPolicy;
class pkgCacheFile
{
protected:
MMap *Map;
- pkgDepCache *Cache;
- pkgDpkgLock *Lock;
+ pkgCache *Cache;
+ pkgDepCache *DCache;
public:
+
+ pkgPolicy *Policy;
// We look pretty much exactly like a pointer to a dep cache
- inline operator pkgDepCache &() {return *Cache;};
- inline operator pkgDepCache *() {return Cache;};
- inline pkgDepCache *operator ->() {return Cache;};
- inline pkgDepCache &operator *() {return *Cache;};
- inline pkgDepCache::StateCache &operator [](pkgCache::PkgIterator const &I) {return (*Cache)[I];};
- inline unsigned char &operator [](pkgCache::DepIterator const &I) {return (*Cache)[I];};
+ inline operator pkgCache &() {return *Cache;};
+ inline operator pkgCache *() {return Cache;};
+ inline operator pkgDepCache &() {return *DCache;};
+ inline operator pkgDepCache *() {return DCache;};
+ inline pkgDepCache *operator ->() {return DCache;};
+ inline pkgDepCache &operator *() {return *DCache;};
+ inline pkgDepCache::StateCache &operator [](pkgCache::PkgIterator const &I) {return (*DCache)[I];};
+ inline unsigned char &operator [](pkgCache::DepIterator const &I) {return (*DCache)[I];};
- // Release the dpkg status lock
- inline void ReleaseLock() {Lock->Close();};
-
bool Open(OpProgress &Progress,bool WithLock = true);
+ void Close();
pkgCacheFile();
~pkgCacheFile();
diff --git a/apt-pkg/cacheiterators.h b/apt-pkg/cacheiterators.h
index a3f134e63..e346f49b7 100644
--- a/apt-pkg/cacheiterators.h
+++ b/apt-pkg/cacheiterators.h
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: cacheiterators.h,v 1.15 1999/07/30 04:08:42 jgg Exp $
+// $Id: cacheiterators.h,v 1.16 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Cache Iterators - Iterators for navigating the cache structure
@@ -28,7 +28,6 @@
##################################################################### */
/*}}}*/
-// Header section: pkglib
#ifndef PKGLIB_CACHEITERATORS_H
#define PKGLIB_CACHEITERATORS_H
@@ -39,10 +38,20 @@
// Package Iterator
class pkgCache::PkgIterator
{
+ friend class pkgCache;
Package *Pkg;
pkgCache *Owner;
long HashIndex;
+ protected:
+
+ // This constructor is the 'begin' constructor, never use it.
+ inline PkgIterator(pkgCache &Owner) : Owner(&Owner), HashIndex(-1)
+ {
+ Pkg = Owner.PkgP;
+ operator ++(0);
+ };
+
public:
enum OkState {NeedsNothing,NeedsUnpack,NeedsConfigure};
@@ -62,10 +71,10 @@ class pkgCache::PkgIterator
inline Package const &operator *() const {return *Pkg;};
inline operator Package *() {return Pkg == Owner->PkgP?0:Pkg;};
inline operator Package const *() const {return Pkg == Owner->PkgP?0:Pkg;};
+ inline pkgCache *Cache() {return Owner;};
inline const char *Name() const {return Pkg->Name == 0?0:Owner->StrP + Pkg->Name;};
inline const char *Section() const {return Pkg->Section == 0?0:Owner->StrP + Pkg->Section;};
- inline const char *TargetDist() const {return Pkg->TargetDist == 0?0:Owner->StrP + Pkg->TargetDist;};
inline bool Purge() const {return Pkg->CurrentState == pkgCache::State::Purge ||
(Pkg->CurrentVer == 0 && Pkg->CurrentState == pkgCache::State::NotInstalled);};
inline VerIterator VersionList() const;
@@ -77,11 +86,6 @@ class pkgCache::PkgIterator
OkState State() const;
// Constructors
- inline PkgIterator(pkgCache &Owner) : Owner(&Owner), HashIndex(-1)
- {
- Pkg = Owner.PkgP;
- operator ++(0);
- };
inline PkgIterator(pkgCache &Owner,Package *Trg) : Pkg(Trg), Owner(&Owner),
HashIndex(0)
{
@@ -119,7 +123,8 @@ class pkgCache::VerIterator
inline Version const &operator *() const {return *Ver;};
inline operator Version *() {return Ver == Owner->VerP?0:Ver;};
inline operator Version const *() const {return Ver == Owner->VerP?0:Ver;};
-
+ inline pkgCache *Cache() {return Owner;};
+
inline const char *VerStr() const {return Ver->VerStr == 0?0:Owner->StrP + Ver->VerStr;};
inline const char *Section() const {return Ver->Section == 0?0:Owner->StrP + Ver->Section;};
inline const char *Arch() const {return Ver->Arch == 0?0:Owner->StrP + Ver->Arch;};
@@ -129,8 +134,9 @@ class pkgCache::VerIterator
inline VerFileIterator FileList() const;
inline unsigned long Index() const {return Ver - Owner->VerP;};
bool Downloadable() const;
- const char *PriorityType();
-
+ inline const char *PriorityType() {return Owner->Priority(Ver->Priority);};
+ string RelStr();
+
bool Automatic() const;
VerFileIterator NewestFile() const;
@@ -171,10 +177,11 @@ class pkgCache::DepIterator
inline Dependency const &operator *() const {return *Dep;};
inline operator Dependency *() {return Dep == Owner->DepP?0:Dep;};
inline operator Dependency const *() const {return Dep == Owner->DepP?0:Dep;};
+ inline pkgCache *Cache() {return Owner;};
inline const char *TargetVer() const {return Dep->Version == 0?0:Owner->StrP + Dep->Version;};
inline PkgIterator TargetPkg() {return PkgIterator(*Owner,Owner->PkgP + Dep->Package);};
- inline PkgIterator SmartTargetPkg() {PkgIterator R(*Owner);SmartTargetPkg(R);return R;};
+ inline PkgIterator SmartTargetPkg() {PkgIterator R(*Owner,0);SmartTargetPkg(R);return R;};
inline VerIterator ParentVer() {return VerIterator(*Owner,Owner->VerP + Dep->ParentVer);};
inline PkgIterator ParentPkg() {return PkgIterator(*Owner,Owner->PkgP + Owner->VerP[Dep->ParentVer].ParentPkg);};
inline bool Reverse() {return Type == DepRev;};
@@ -183,8 +190,8 @@ class pkgCache::DepIterator
void GlobOr(DepIterator &Start,DepIterator &End);
Version **AllTargets();
bool SmartTargetPkg(PkgIterator &Result);
- const char *CompType();
- const char *DepType();
+ inline const char *CompType() {return Owner->CompType(Dep->CompareOp);};
+ inline const char *DepType() {return Owner->DepType(Dep->Type);};
inline DepIterator(pkgCache &Owner,Dependency *Trg,Version * = 0) :
Dep(Trg), Type(DepVer), Owner(&Owner)
@@ -229,6 +236,7 @@ class pkgCache::PrvIterator
inline Provides const &operator *() const {return *Prv;};
inline operator Provides *() {return Prv == Owner->ProvideP?0:Prv;};
inline operator Provides const *() const {return Prv == Owner->ProvideP?0:Prv;};
+ inline pkgCache *Cache() {return Owner;};
inline const char *Name() const {return Owner->StrP + Owner->PkgP[Prv->ParentPkg].Name;};
inline const char *ProvideVersion() const {return Prv->ProvideVersion == 0?0:Owner->StrP + Prv->ProvideVersion;};
@@ -274,6 +282,7 @@ class pkgCache::PkgFileIterator
inline PackageFile const &operator *() const {return *File;};
inline operator PackageFile *() {return File == Owner->PkgFileP?0:File;};
inline operator PackageFile const *() const {return File == Owner->PkgFileP?0:File;};
+ inline pkgCache *Cache() {return Owner;};
inline const char *FileName() const {return File->FileName == 0?0:Owner->StrP + File->FileName;};
inline const char *Archive() const {return File->Archive == 0?0:Owner->StrP + File->Archive;};
@@ -281,14 +290,17 @@ class pkgCache::PkgFileIterator
inline const char *Version() const {return File->Version == 0?0:Owner->StrP + File->Version;};
inline const char *Origin() const {return File->Origin == 0?0:Owner->StrP + File->Origin;};
inline const char *Label() const {return File->Origin == 0?0:Owner->StrP + File->Label;};
- inline const char *Architecture() const {return File->Origin == 0?0:Owner->StrP + File->Architecture;};
+ inline const char *Site() const {return File->Site == 0?0:Owner->StrP + File->Site;};
+ inline const char *Architecture() const {return File->Architecture == 0?0:Owner->StrP + File->Architecture;};
+ inline const char *IndexType() const {return File->IndexType == 0?0:Owner->StrP + File->IndexType;};
inline unsigned long Index() const {return File - Owner->PkgFileP;};
bool IsOk();
-
+
// Constructors
- inline PkgFileIterator(pkgCache &Owner) : Owner(&Owner), File(Owner.PkgFileP + Owner.Head().FileList) {};
+ inline PkgFileIterator() : Owner(0), File(0) {};
+ inline PkgFileIterator(pkgCache &Owner) : Owner(&Owner), File(Owner.PkgFileP) {};
inline PkgFileIterator(pkgCache &Owner,PackageFile *Trg) : Owner(&Owner), File(Trg) {};
};
@@ -315,10 +327,12 @@ class pkgCache::VerFileIterator
inline VerFile const &operator *() const {return *FileP;};
inline operator VerFile *() {return FileP == Owner->VerFileP?0:FileP;};
inline operator VerFile const *() const {return FileP == Owner->VerFileP?0:FileP;};
+ inline pkgCache *Cache() {return Owner;};
inline PkgFileIterator File() const {return PkgFileIterator(*Owner,FileP->File + Owner->PkgFileP);};
inline unsigned long Index() const {return FileP - Owner->VerFileP;};
+ inline VerFileIterator() : Owner(0), FileP(0) {};
inline VerFileIterator(pkgCache &Owner,VerFile *Trg) : Owner(&Owner), FileP(Trg) {};
};
@@ -327,8 +341,6 @@ inline pkgCache::VerIterator pkgCache::PkgIterator::VersionList() const
{return VerIterator(*Owner,Owner->VerP + Pkg->VersionList);};
inline pkgCache::VerIterator pkgCache::PkgIterator::CurrentVer() const
{return VerIterator(*Owner,Owner->VerP + Pkg->CurrentVer);};
-inline pkgCache::VerIterator pkgCache::PkgIterator::TargetVer() const
- {return VerIterator(*Owner,Owner->VerP + Pkg->TargetVer);};
inline pkgCache::DepIterator pkgCache::PkgIterator::RevDependsList() const
{return DepIterator(*Owner,Owner->DepP + Pkg->RevDepends,Pkg);};
inline pkgCache::PrvIterator pkgCache::PkgIterator::ProvidesList() const
diff --git a/apt-pkg/clean.cc b/apt-pkg/clean.cc
index bad824362..0d623d862 100644
--- a/apt-pkg/clean.cc
+++ b/apt-pkg/clean.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: clean.cc,v 1.3 1999/10/03 21:09:27 jgg Exp $
+// $Id: clean.cc,v 1.4 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Clean - Clean out downloaded directories
@@ -17,6 +17,8 @@
#include <apt-pkg/error.h>
#include <apt-pkg/configuration.h>
+#include <apti18n.h>
+
#include <dirent.h>
#include <sys/stat.h>
#include <unistd.h>
@@ -29,16 +31,17 @@
bool pkgArchiveCleaner::Go(string Dir,pkgCache &Cache)
{
bool CleanInstalled = _config->FindB("APT::Clean-Installed",true);
-
- DIR *D = opendir(Dir.c_str());
+ string MyArch = _config->Find("APT::Architecture");
+
+ DIR *D = opendir(Dir.c_str());
if (D == 0)
- return _error->Errno("opendir","Unable to read %s",Dir.c_str());
-
+ return _error->Errno("opendir",_("Unable to read %s"),Dir.c_str());
+
string StartDir = SafeGetCWD();
if (chdir(Dir.c_str()) != 0)
{
closedir(D);
- return _error->Errno("chdir","Unable to change to ",Dir.c_str());
+ return _error->Errno("chdir",_("Unable to change to %s"),Dir.c_str());
}
for (struct dirent *Dir = readdir(D); Dir != 0; Dir = readdir(D))
@@ -52,8 +55,12 @@ bool pkgArchiveCleaner::Go(string Dir,pkgCache &Cache)
struct stat St;
if (stat(Dir->d_name,&St) != 0)
- return _error->Errno("stat","Unable to stat %s.",Dir->d_name);
-
+ {
+ chdir(StartDir.c_str());
+ closedir(D);
+ return _error->Errno("stat",_("Unable to stat %s."),Dir->d_name);
+ }
+
// Grab the package name
const char *I = Dir->d_name;
for (; *I != 0 && *I != '_';I++);
@@ -74,7 +81,10 @@ bool pkgArchiveCleaner::Go(string Dir,pkgCache &Cache)
if (*I != '.')
continue;
string Arch = DeQuoteString(string(Start,I-Start));
-
+
+ if (Arch != "all" && Arch != MyArch)
+ continue;
+
// Lookup the package
pkgCache::PkgIterator P = Cache.FindPkg(Pkg);
if (P.end() != true)
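
With the new architecture check the cleaner only ever considers files built for this machine (or Architecture: all); the rest of the loop is the hand-rolled split of the cached file name. That split, restated as a standalone helper (DeQuoteString is the real strutl helper; the wrapper itself is illustrative):

   #include <apt-pkg/strutl.h>

   // "apt_0.5.0_i386.deb" -> ("apt","0.5.0","i386"); returns false if the
   // name does not have the package_version_arch.<ext> shape.
   static bool SplitCacheName(const char *Name,string &Pkg,string &Ver,string &Arch)
   {
      const char *I = Name;
      for (; *I != 0 && *I != '_'; I++);
      if (*I == 0) return false;
      Pkg = DeQuoteString(string(Name,I-Name));

      const char *Start = ++I;
      for (; *I != 0 && *I != '_'; I++);
      if (*I == 0) return false;
      Ver = DeQuoteString(string(Start,I-Start));

      Start = ++I;
      for (; *I != 0 && *I != '.'; I++);
      if (*I == 0) return false;
      Arch = DeQuoteString(string(Start,I-Start));
      return true;
   }
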
diff --git a/apt-pkg/contrib/cdromutl.cc b/apt-pkg/contrib/cdromutl.cc
index dae6f0528..ab170ec5a 100644
--- a/apt-pkg/contrib/cdromutl.cc
+++ b/apt-pkg/contrib/cdromutl.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: cdromutl.cc,v 1.11 1999/12/10 23:40:29 jgg Exp $
+// $Id: cdromutl.cc,v 1.12 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
CDROM Utilities - Some functions to manipulate CDROM mounts.
@@ -19,6 +19,8 @@
#include <apt-pkg/fileutl.h>
#include <apt-pkg/configuration.h>
+#include <apti18n.h>
+
#include <sys/wait.h>
#include <sys/errno.h>
#include <sys/statvfs.h>
@@ -50,7 +52,7 @@ bool IsMounted(string &Path)
struct stat Buf,Buf2;
if (stat(Path.c_str(),&Buf) != 0 ||
stat((Path + "../").c_str(),&Buf2) != 0)
- return _error->Errno("stat","Unable to stat the mount point %s",Path.c_str());
+ return _error->Errno("stat",_("Unable to stat the mount point %s"),Path.c_str());
if (Buf.st_dev == Buf2.st_dev)
return false;
@@ -93,7 +95,7 @@ bool UnmountCdrom(string Path)
}
// Wait for mount
- return ExecWait(Child,"mount",true);
+ return ExecWait(Child,"umount",true);
}
/*}}}*/
// MountCdrom - Mount a cdrom /*{{{*/
@@ -144,11 +146,11 @@ bool IdentCdrom(string CD,string &Res,unsigned int Version)
string StartDir = SafeGetCWD();
if (chdir(CD.c_str()) != 0)
- return _error->Errno("chdir","Unable to change to %s",CD.c_str());
+ return _error->Errno("chdir",_("Unable to change to %s"),CD.c_str());
DIR *D = opendir(".");
if (D == 0)
- return _error->Errno("opendir","Unable to read %s",CD.c_str());
+ return _error->Errno("opendir",_("Unable to read %s"),CD.c_str());
/* Run over the directory, we assume that the reader order will never
change as the media is read-only. In theory if the kernel did
@@ -185,7 +187,7 @@ bool IdentCdrom(string CD,string &Res,unsigned int Version)
{
struct statvfs Buf;
if (statvfs(CD.c_str(),&Buf) != 0)
- return _error->Errno("statfs","Failed to stat the cdrom");
+ return _error->Errno("statfs",_("Failed to stat the cdrom"));
 // We use a kilobyte block size to avoid overflow
sprintf(S,"%lu %lu",(long)(Buf.f_blocks*(Buf.f_bsize/1024)),
diff --git a/apt-pkg/contrib/cmndline.cc b/apt-pkg/contrib/cmndline.cc
index 36039c3b8..ea15ae05d 100644
--- a/apt-pkg/contrib/cmndline.cc
+++ b/apt-pkg/contrib/cmndline.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: cmndline.cc,v 1.10 1999/05/14 02:57:48 jgg Exp $
+// $Id: cmndline.cc,v 1.11 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Command Line Class - Sophisticated command line parser
@@ -14,6 +14,8 @@
#include <apt-pkg/cmndline.h>
#include <apt-pkg/error.h>
#include <apt-pkg/strutl.h>
+
+#include <apti18n.h>
/*}}}*/
// CommandLine::CommandLine - Constructor /*{{{*/
@@ -68,7 +70,7 @@ bool CommandLine::Parse(int argc,const char **argv)
Args *A;
for (A = ArgList; A->end() == false && A->ShortOpt != *Opt; A++);
if (A->end() == true)
- return _error->Error("Command line option '%c' [from %s] is not known.",*Opt,argv[I]);
+ return _error->Error(_("Command line option '%c' [from %s] is not known."),*Opt,argv[I]);
if (HandleOpt(I,argc,argv,Opt,A) == false)
return false;
@@ -94,7 +96,7 @@ bool CommandLine::Parse(int argc,const char **argv)
for (; Opt != OptEnd && *Opt != '-'; Opt++);
if (Opt == OptEnd)
- return _error->Error("Command line option %s is not understood",argv[I]);
+ return _error->Error(_("Command line option %s is not understood"),argv[I]);
Opt++;
for (A = ArgList; A->end() == false &&
@@ -102,7 +104,7 @@ bool CommandLine::Parse(int argc,const char **argv)
// Failed again..
if (A->end() == true && OptEnd - Opt != 1)
- return _error->Error("Command line option %s is not understood",argv[I]);
+ return _error->Error(_("Command line option %s is not understood"),argv[I]);
// The option could be a single letter option prefixed by a no-..
if (A->end() == true)
@@ -110,12 +112,12 @@ bool CommandLine::Parse(int argc,const char **argv)
for (A = ArgList; A->end() == false && A->ShortOpt != *Opt; A++);
if (A->end() == true)
- return _error->Error("Command line option %s is not understood",argv[I]);
+ return _error->Error(_("Command line option %s is not understood"),argv[I]);
}
// The option is not boolean
if (A->IsBoolean() == false)
- return _error->Error("Command line option %s is not boolean",argv[I]);
+ return _error->Error(_("Command line option %s is not boolean"),argv[I]);
PreceedMatch = true;
}
@@ -154,7 +156,7 @@ bool CommandLine::HandleOpt(int &I,int argc,const char *argv[],
// Equals was specified but we fell off the end!
if (Opt[1] == '=' && Argument == 0)
- return _error->Error("Option %s requires an argument.",argv[I]);
+ return _error->Error(_("Option %s requires an argument."),argv[I]);
if (Opt[1] == '=')
CertainArg = true;
@@ -175,7 +177,7 @@ bool CommandLine::HandleOpt(int &I,int argc,const char *argv[],
if ((A->Flags & HasArg) == HasArg)
{
if (Argument == 0)
- return _error->Error("Option %s requires an argument.",argv[I]);
+ return _error->Error(_("Option %s requires an argument."),argv[I]);
Opt += strlen(Opt);
I += IncI;
@@ -189,13 +191,13 @@ bool CommandLine::HandleOpt(int &I,int argc,const char *argv[],
const char *J;
for (J = Argument; *J != 0 && *J != '='; J++);
if (*J == 0)
- return _error->Error("Option %s: Configuration item sepecification must have an =<val>.",argv[I]);
+ return _error->Error(_("Option %s: Configuration item sepecification must have an =<val>."),argv[I]);
// = is trailing
if (J[1] == 0)
{
if (I+1 >= argc)
- return _error->Error("Option %s: Configuration item sepecification must have an =<val>.",argv[I]);
+ return _error->Error(_("Option %s: Configuration item sepecification must have an =<val>."),argv[I]);
Conf->Set(string(Argument,J-Argument),string(argv[I++ +1]));
}
else
@@ -225,7 +227,7 @@ bool CommandLine::HandleOpt(int &I,int argc,const char *argv[],
// Conversion failed and the argument was specified with an =s
if (EndPtr == Argument && CertainArg == true)
- return _error->Error("Option %s requires an integer argument, not '%s'",argv[I],Argument);
+ return _error->Error(_("Option %s requires an integer argument, not '%s'"),argv[I],Argument);
// Conversion was ok, set the value and return
if (EndPtr != 0 && EndPtr != Argument && *EndPtr == 0)
@@ -256,7 +258,7 @@ bool CommandLine::HandleOpt(int &I,int argc,const char *argv[],
break;
if (strlen(argv[I]) >= sizeof(Buffer))
- return _error->Error("Option '%s' is too long",argv[I]);
+ return _error->Error(_("Option '%s' is too long"),argv[I]);
// Skip the leading dash
const char *J = argv[I];
@@ -289,7 +291,7 @@ bool CommandLine::HandleOpt(int &I,int argc,const char *argv[],
}
if (CertainArg == true)
- return _error->Error("Sense %s is not understood, try true or false.",Argument);
+ return _error->Error(_("Sense %s is not understood, try true or false."),Argument);
Argument = 0;
}
@@ -339,7 +341,7 @@ bool CommandLine::DispatchArg(Dispatch *Map,bool NoMatch)
if (Map[I].Match == 0)
{
if (NoMatch == true)
- _error->Error("Invalid operation %s",FileList[0]);
+ _error->Error(_("Invalid operation %s"),FileList[0]);
}
return false;
diff --git a/apt-pkg/contrib/configuration.cc b/apt-pkg/contrib/configuration.cc
index 302feee6e..b3b425cda 100644
--- a/apt-pkg/contrib/configuration.cc
+++ b/apt-pkg/contrib/configuration.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: configuration.cc,v 1.14 2000/01/16 05:36:17 jgg Exp $
+// $Id: configuration.cc,v 1.15 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Configuration Class
@@ -18,9 +18,17 @@
#include <apt-pkg/configuration.h>
#include <apt-pkg/error.h>
#include <apt-pkg/strutl.h>
+#include <apt-pkg/fileutl.h>
+#include <apti18n.h>
+#include <vector>
+#include <algorithm>
+#include <fstream>
+
#include <stdio.h>
-#include <fstream.h>
+#include <dirent.h>
+#include <sys/stat.h>
+#include <unistd.h>
/*}}}*/
Configuration *_config = new Configuration;
@@ -28,10 +36,46 @@ Configuration *_config = new Configuration;
// Configuration::Configuration - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
-Configuration::Configuration()
+Configuration::Configuration() : ToFree(true)
{
Root = new Item;
}
+Configuration::Configuration(const Item *Root) : Root((Item *)Root), ToFree(false)
+{
+};
+
+ /*}}}*/
+// Configuration::~Configuration - Destructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+Configuration::~Configuration()
+{
+ if (ToFree == false)
+ return;
+
+ Item *Top = Root;
+ for (; Top != 0;)
+ {
+ if (Top->Child != 0)
+ {
+ Top = Top->Child;
+ continue;
+ }
+
+ while (Top != 0 && Top->Next == 0)
+ {
+ Item *Parent = Top->Parent;
+ delete Top;
+ Top = Parent;
+ }
+ if (Top != 0)
+ {
+ Item *Next = Top->Next;
+ delete Top;
+ Top = Next;
+ }
+ }
+}
/*}}}*/
// Configuration::Lookup - Lookup a single item /*{{{*/
// ---------------------------------------------------------------------
@@ -105,9 +149,9 @@ Configuration::Item *Configuration::Lookup(const char *Name,bool Create)
// Configuration::Find - Find a value /*{{{*/
// ---------------------------------------------------------------------
/* */
-string Configuration::Find(const char *Name,const char *Default)
+string Configuration::Find(const char *Name,const char *Default) const
{
- Item *Itm = Lookup(Name,false);
+ const Item *Itm = Lookup(Name);
if (Itm == 0 || Itm->Value.empty() == true)
{
if (Default == 0)
@@ -124,9 +168,9 @@ string Configuration::Find(const char *Name,const char *Default)
/* Directories are stored as the base dir in the Parent node and the
sub directory in sub nodes with the final node being the end filename
*/
-string Configuration::FindFile(const char *Name,const char *Default)
+string Configuration::FindFile(const char *Name,const char *Default) const
{
- Item *Itm = Lookup(Name,false);
+ const Item *Itm = Lookup(Name);
if (Itm == 0 || Itm->Value.empty() == true)
{
if (Default == 0)
@@ -135,26 +179,35 @@ string Configuration::FindFile(const char *Name,const char *Default)
return Default;
}
- // Absolute path
- if (Itm->Value[0] == '/' || Itm->Parent == 0)
- return Itm->Value;
-
- // ./ is also considered absolute as is anything with ~ in it
- if (Itm->Value[0] != 0 &&
- ((Itm->Value[0] == '.' && Itm->Value[1] == '/') ||
- (Itm->Value[0] == '~' && Itm->Value[1] == '/')))
- return Itm->Value;
-
- if (Itm->Parent->Value.end()[-1] == '/')
- return Itm->Parent->Value + Itm->Value;
- else
- return Itm->Parent->Value + '/' + Itm->Value;
+ string val = Itm->Value;
+ while (Itm->Parent != 0 && Itm->Parent->Value.empty() == false)
+ {
+ // Absolute
+ if (val.length() >= 1 && val[0] == '/')
+ break;
+
+ // ~/foo or ./foo
+ if (val.length() >= 2 && (val[0] == '~' || val[0] == '.') && val[1] == '/')
+ break;
+
+ // ../foo
+ if (val.length() >= 3 && val[0] == '.' && val[1] == '.' && val[2] == '/')
+ break;
+
+ if (Itm->Parent->Value.end()[-1] != '/')
+ val.insert(0, "/");
+
+ val.insert(0, Itm->Parent->Value);
+ Itm = Itm->Parent;
+ }
+
+ return val;
}
/*}}}*/
// Configuration::FindDir - Find a directory name /*{{{*/
// ---------------------------------------------------------------------
/* This is like findfile execept the result is terminated in a / */
-string Configuration::FindDir(const char *Name,const char *Default)
+string Configuration::FindDir(const char *Name,const char *Default) const
{
string Res = FindFile(Name,Default);
if (Res.end()[-1] != '/')
@@ -165,9 +218,9 @@ string Configuration::FindDir(const char *Name,const char *Default)
// Configuration::FindI - Find an integer value /*{{{*/
// ---------------------------------------------------------------------
/* */
-int Configuration::FindI(const char *Name,int Default)
+int Configuration::FindI(const char *Name,int Default) const
{
- Item *Itm = Lookup(Name,false);
+ const Item *Itm = Lookup(Name);
if (Itm == 0 || Itm->Value.empty() == true)
return Default;
@@ -182,15 +235,68 @@ int Configuration::FindI(const char *Name,int Default)
// Configuration::FindB - Find a boolean type /*{{{*/
// ---------------------------------------------------------------------
/* */
-bool Configuration::FindB(const char *Name,bool Default)
+bool Configuration::FindB(const char *Name,bool Default) const
{
- Item *Itm = Lookup(Name,false);
+ const Item *Itm = Lookup(Name);
if (Itm == 0 || Itm->Value.empty() == true)
return Default;
return StringToBool(Itm->Value,Default);
}
/*}}}*/
+// Configuration::FindAny - Find an arbitrary type /*{{{*/
+// ---------------------------------------------------------------------
+/* a key suffix of /f, /d, /b or /i calls Find{File,Dir,B,I} */
+string Configuration::FindAny(const char *Name,const char *Default) const
+{
+ string key = Name;
+ char type = 0;
+
+ if (key.size() > 2 && key.end()[-2] == '/')
+ {
+ type = key.end()[-1];
+ key.resize(key.size() - 2);
+ }
+
+ switch (type)
+ {
+ // file
+ case 'f':
+ return FindFile(key.c_str(), Default);
+
+ // directory
+ case 'd':
+ return FindDir(key.c_str(), Default);
+
+ // bool
+ case 'b':
+ return FindB(key, Default) ? "true" : "false";
+
+ // int
+ case 'i':
+ {
+ char buf[16];
+ snprintf(buf, sizeof(buf)-1, "%d", FindI(key, Default));
+ return buf;
+ }
+ }
+
+ // fallback
+ return Find(Name, Default);
+}
+ /*}}}*/
+// Configuration::CndSet - Conditionally set a value /*{{{*/
+// ---------------------------------------------------------------------
+/* This will not overwrite */
+void Configuration::CndSet(const char *Name,string Value)
+{
+ Item *Itm = Lookup(Name,true);
+ if (Itm == 0)
+ return;
+ if (Itm->Value.empty() == true)
+ Itm->Value = Value;
+}
+ /*}}}*/
// Configuration::Set - Set a value /*{{{*/
// ---------------------------------------------------------------------
/* */
@@ -215,17 +321,73 @@ void Configuration::Set(const char *Name,int Value)
Itm->Value = S;
}
/*}}}*/
+// Configuration::Clear - Clear an entire tree /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+void Configuration::Clear(string Name)
+{
+ Item *Top = Lookup(Name.c_str(),false);
+ if (Top == 0)
+ return;
+
+ Top->Value = string();
+ Item *Stop = Top;
+ Top = Top->Child;
+ Stop->Child = 0;
+ for (; Top != 0;)
+ {
+ if (Top->Child != 0)
+ {
+ Top = Top->Child;
+ continue;
+ }
+
+ while (Top != 0 && Top->Next == 0)
+ {
+ Item *Tmp = Top;
+ Top = Top->Parent;
+ delete Tmp;
+
+ if (Top == Stop)
+ return;
+ }
+
+ Item *Tmp = Top;
+ if (Top != 0)
+ Top = Top->Next;
+ delete Tmp;
+ }
+}
+ /*}}}*/
// Configuration::Exists - Returns true if the Name exists /*{{{*/
// ---------------------------------------------------------------------
/* */
-bool Configuration::Exists(const char *Name)
+bool Configuration::Exists(const char *Name) const
{
- Item *Itm = Lookup(Name,false);
+ const Item *Itm = Lookup(Name);
if (Itm == 0)
return false;
return true;
}
/*}}}*/
+// Configuration::ExistsAny - Returns true if the Name, possibly /*{{{*/
+// ---------------------------------------------------------------------
+/* qualified by /[fdbi] exists */
+bool Configuration::ExistsAny(const char *Name) const
+{
+ string key = Name;
+
+ if (key.size() > 2 && key.end()[-2] == '/' &&
+ key.find_first_of("fdbi",key.size()-1) < key.size())
+ {
+ key.resize(key.size() - 2);
+ if (Exists(key.c_str()))
+ return true;
+ }
+
+ return Exists(Name);
+}
+ /*}}}*/
// Configuration::Dump - Dump the config /*{{{*/
// ---------------------------------------------------------------------
/* Dump the entire configuration space */
@@ -233,7 +395,7 @@ void Configuration::Dump()
{
/* Write out all of the configuration directives by walking the
configuration tree */
- const Configuration::Item *Top = _config->Tree(0);
+ const Configuration::Item *Top = Tree(0);
for (; Top != 0;)
{
clog << Top->FullTag() << " \"" << Top->Value << "\";" << endl;
@@ -248,31 +410,37 @@ void Configuration::Dump()
Top = Top->Parent;
if (Top != 0)
Top = Top->Next;
- }
+ }
}
/*}}}*/
// Configuration::Item::FullTag - Return the fully scoped tag /*{{{*/
// ---------------------------------------------------------------------
-/* */
-string Configuration::Item::FullTag() const
+/* Stop sets an optional max recursion depth if this item is being viewed as
+ part of a sub tree. */
+string Configuration::Item::FullTag(const Item *Stop) const
{
- if (Parent == 0 || Parent->Parent == 0)
+ if (Parent == 0 || Parent->Parent == 0 || Parent == Stop)
return Tag;
- return Parent->FullTag() + "::" + Tag;
+ return Parent->FullTag(Stop) + "::" + Tag;
}
/*}}}*/
// ReadConfigFile - Read a configuration file /*{{{*/
// ---------------------------------------------------------------------
/* The configuration format is very much like the named.conf format
- used in bind8, in fact this routine can parse most named.conf files. */
-bool ReadConfigFile(Configuration &Conf,string FName)
-{
+ used in bind8, in fact this routine can parse most named.conf files.
+ Sectional config files are like bind's named.conf where there are
+ sections like 'zone "foo.org" { .. };' This causes each section to be
+ added in with a tag like "zone::foo.org" instead of being split
+ tag/value. */
+bool ReadConfigFile(Configuration &Conf,string FName,bool AsSectional,
+ unsigned Depth)
+{
// Open the stream for reading
ifstream F(FName.c_str(),ios::in | ios::nocreate);
if (!F != 0)
- return _error->Errno("ifstream::ifstream","Opening configuration file %s",FName.c_str());
+ return _error->Errno("ifstream::ifstream",_("Opening configuration file %s"),FName.c_str());
char Buffer[300];
string LineBuffer;
@@ -366,7 +534,7 @@ bool ReadConfigFile(Configuration &Conf,string FName)
if (InQuote == false && (*I == '{' || *I == ';' || *I == '}'))
{
- // Put the last fragement into the buffer
+ // Put the last fragment into the buffer
char *Start = Buffer;
char *Stop = I;
for (; Start != I && isspace(*Start) != 0; Start++);
@@ -379,51 +547,65 @@ bool ReadConfigFile(Configuration &Conf,string FName)
char TermChar = *I;
memmove(Buffer,I + 1,strlen(I + 1) + 1);
I = Buffer;
-
- // Move up a tag
- if (TermChar == '}')
- {
- if (StackPos == 0)
- ParentTag = string();
- else
- ParentTag = Stack[--StackPos];
- }
// Syntax Error
if (TermChar == '{' && LineBuffer.empty() == true)
- return _error->Error("Syntax error %s:%u: Block starts with no name.",FName.c_str(),CurLine);
+ return _error->Error(_("Syntax error %s:%u: Block starts with no name."),FName.c_str(),CurLine);
+ // No string on this line
if (LineBuffer.empty() == true)
+ {
+ if (TermChar == '}')
+ {
+ if (StackPos == 0)
+ ParentTag = string();
+ else
+ ParentTag = Stack[--StackPos];
+ }
continue;
-
+ }
+
// Parse off the tag
string Tag;
const char *Pos = LineBuffer.c_str();
if (ParseQuoteWord(Pos,Tag) == false)
- return _error->Error("Syntax error %s:%u: Malformed Tag",FName.c_str(),CurLine);
-
+ return _error->Error(_("Syntax error %s:%u: Malformed Tag"),FName.c_str(),CurLine);
+
+ // Parse off the word
+ string Word;
+ if (ParseCWord(Pos,Word) == false &&
+ ParseQuoteWord(Pos,Word) == false)
+ {
+ if (TermChar != '{')
+ {
+ Word = Tag;
+ Tag = "";
+ }
+ }
+ if (strlen(Pos) != 0)
+ return _error->Error(_("Syntax error %s:%u: Extra junk after value"),FName.c_str(),CurLine);
+
// Go down a level
if (TermChar == '{')
{
if (StackPos <= 100)
Stack[StackPos++] = ParentTag;
+
+ /* Make sectional tags incorporate the section into the
+ tag string */
+ if (AsSectional == true && Word.empty() == false)
+ {
+ Tag += "::" ;
+ Tag += Word;
+ Word = "";
+ }
+
if (ParentTag.empty() == true)
ParentTag = Tag;
else
ParentTag += string("::") + Tag;
Tag = string();
}
-
- // Parse off the word
- string Word;
- if (ParseCWord(Pos,Word) == false)
- {
- if (TermChar != '{')
- {
- Word = Tag;
- Tag = "";
- }
- }
// Generate the item name
string Item;
@@ -437,11 +619,50 @@ bool ReadConfigFile(Configuration &Conf,string FName)
Item = ParentTag;
}
- // Set the item in the configuration class
- Conf.Set(Item,Word);
-
+ // Specials
+ if (Tag.length() >= 1 && Tag[0] == '#')
+ {
+ if (ParentTag.empty() == false)
+ return _error->Error(_("Syntax error %s:%u: Directives can only be done at the top level"),FName.c_str(),CurLine);
+ Tag.erase(Tag.begin());
+ if (Tag == "clear")
+ Conf.Clear(Word);
+ else if (Tag == "include")
+ {
+ if (Depth > 10)
+ return _error->Error(_("Syntax error %s:%u: Too many nested includes"),FName.c_str(),CurLine);
+ if (Word.length() > 2 && Word.end()[-1] == '/')
+ {
+ if (ReadConfigDir(Conf,Word,AsSectional,Depth+1) == false)
+ return _error->Error(_("Syntax error %s:%u: Included from here"),FName.c_str(),CurLine);
+ }
+ else
+ {
+ if (ReadConfigFile(Conf,Word,AsSectional,Depth+1) == false)
+ return _error->Error(_("Syntax error %s:%u: Included from here"),FName.c_str(),CurLine);
+ }
+ }
+ else
+ return _error->Error(_("Syntax error %s:%u: Unsupported directive '%s'"),FName.c_str(),CurLine,Tag.c_str());
+ }
+ else
+ {
+ // Set the item in the configuration class
+ Conf.Set(Item,Word);
+ }
+
// Empty the buffer
LineBuffer = string();
+
+ // Move up a tag, but only if there is no bit to parse
+ if (TermChar == '}')
+ {
+ if (StackPos == 0)
+ ParentTag = string();
+ else
+ ParentTag = Stack[--StackPos];
+ }
+
}
else
I++;
@@ -453,7 +674,60 @@ bool ReadConfigFile(Configuration &Conf,string FName)
LineBuffer += " ";
LineBuffer += Stripd;
}
+
+ if (LineBuffer.empty() == false)
+ return _error->Error(_("Syntax error %s:%u: Extra junk at end of file"),FName.c_str(),CurLine);
+ return true;
+}
+ /*}}}*/
+// ReadConfigDir - Read a directory of config files /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool ReadConfigDir(Configuration &Conf,string Dir,bool AsSectional,
+ unsigned Depth)
+{
+ static const char *BadExts[] = {".disabled",".dpkg-old",".dpkg-dist",
+ ".rpmsave",".rpmorig","~",",v",0};
+
+ DIR *D = opendir(Dir.c_str());
+ if (D == 0)
+ return _error->Errno("opendir",_("Unable to read %s"),Dir.c_str());
+
+ vector<string> List;
+
+ for (struct dirent *Ent = readdir(D); Ent != 0; Ent = readdir(D))
+ {
+ if (strcmp(Ent->d_name,".") == 0 ||
+ strcmp(Ent->d_name,"..") == 0)
+ continue;
+
+ // Skip bad extensions
+ const char **I;
+ for (I = BadExts; *I != 0; I++)
+ {
+ if (strcmp(Ent->d_name + strlen(Ent->d_name) - strlen(*I),*I) == 0)
+ break;
+ }
+
+ if (*I != 0)
+ continue;
+
+ // Make sure it is a file and not something else
+ string File = flCombine(Dir,Ent->d_name);
+ struct stat St;
+ if (stat(File.c_str(),&St) != 0 || S_ISREG(St.st_mode) == 0)
+ continue;
+
+ List.push_back(File);
+ }
+ closedir(D);
+ sort(List.begin(),List.end());
+
+ // Read the files
+ for (vector<string>::const_iterator I = List.begin(); I != List.end(); I++)
+ if (ReadConfigFile(Conf,*I,AsSectional,Depth) == false)
+ return false;
return true;
}
/*}}}*/
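
The hunks above add FindAny(), which routes a trailing /f, /d, /b or /i key suffix to FindFile, FindDir, FindB or FindI, and CndSet(), which only writes a value when the key is still unset. A minimal usage sketch, assuming the apt-pkg headers and the global _config object; the key names below are illustrative only:

    #include <apt-pkg/configuration.h>
    #include <iostream>

    int main()
    {
       // CndSet never overwrites an existing value
       _config->Set("APT::Architecture","i386");
       _config->CndSet("APT::Architecture","m68k");   // ignored, already set

       // FindFile (and the "/f" form of FindAny) now glues parent
       // directory values onto relative entries
       _config->Set("Dir","/");
       _config->Set("Dir::Etc","etc/apt/");
       _config->Set("Dir::Etc::main","apt.conf");

       std::cout << _config->FindAny("Dir::Etc::main/f") << std::endl; // /etc/apt/apt.conf
       std::cout << _config->Find("APT::Architecture") << std::endl;   // i386
       return 0;
    }
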
diff --git a/apt-pkg/contrib/configuration.h b/apt-pkg/contrib/configuration.h
index aeb181c7a..643e0f628 100644
--- a/apt-pkg/contrib/configuration.h
+++ b/apt-pkg/contrib/configuration.h
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: configuration.h,v 1.11 1999/04/03 00:34:33 jgg Exp $
+// $Id: configuration.h,v 1.12 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Configuration Class
@@ -36,6 +36,8 @@
class Configuration
{
+ public:
+
struct Item
{
string Value;
@@ -44,42 +46,61 @@ class Configuration
Item *Child;
Item *Next;
- string FullTag() const;
+ string FullTag(const Item *Stop = 0) const;
Item() : Parent(0), Child(0), Next(0) {};
};
+
+ private:
+
Item *Root;
+ bool ToFree;
Item *Lookup(Item *Head,const char *S,unsigned long Len,bool Create);
- Item *Lookup(const char *Name,bool Create);
-
+ Item *Lookup(const char *Name,bool Create);
+ inline const Item *Lookup(const char *Name) const
+ {
+ return ((Configuration *)this)->Lookup(Name,false);
+ }
+
public:
- string Find(const char *Name,const char *Default = 0);
- string Find(string Name,const char *Default = 0) {return Find(Name.c_str(),Default);};
- string FindFile(const char *Name,const char *Default = 0);
- string FindDir(const char *Name,const char *Default = 0);
- int FindI(const char *Name,int Default = 0);
- int FindI(string Name,bool Default = 0) {return FindI(Name.c_str(),Default);};
- bool FindB(const char *Name,bool Default = false);
- bool FindB(string Name,bool Default = false) {return FindB(Name.c_str(),Default);};
+ string Find(const char *Name,const char *Default = 0) const;
+ string Find(string Name,const char *Default = 0) const {return Find(Name.c_str(),Default);};
+ string FindFile(const char *Name,const char *Default = 0) const;
+ string FindDir(const char *Name,const char *Default = 0) const;
+ int FindI(const char *Name,int Default = 0) const;
+ int FindI(string Name,int Default = 0) const {return FindI(Name.c_str(),Default);};
+ bool FindB(const char *Name,bool Default = false) const;
+ bool FindB(string Name,bool Default = false) const {return FindB(Name.c_str(),Default);};
+ string FindAny(const char *Name,const char *Default = 0) const;
inline void Set(string Name,string Value) {Set(Name.c_str(),Value);};
+ void CndSet(const char *Name,string Value);
void Set(const char *Name,string Value);
void Set(const char *Name,int Value);
- inline bool Exists(string Name) {return Exists(Name.c_str());};
- bool Exists(const char *Name);
-
- inline const Item *Tree(const char *Name) {return Lookup(Name,false);};
+ inline bool Exists(string Name) const {return Exists(Name.c_str());};
+ bool Exists(const char *Name) const;
+ bool ExistsAny(const char *Name) const;
- void Dump();
+ void Clear(string Name);
+ inline const Item *Tree(const char *Name) const {return Lookup(Name);};
+
+ void Dump();
+
+ Configuration(const Item *Root);
Configuration();
+ ~Configuration();
};
extern Configuration *_config;
-bool ReadConfigFile(Configuration &Conf,string File);
+bool ReadConfigFile(Configuration &Conf,string FName,bool AsSectional = false,
+ unsigned Depth = 0);
+
+bool ReadConfigDir(Configuration &Conf,string Dir,bool AsSectional = false,
+ unsigned Depth = 0);
#endif
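
The new ReadConfigDir() prototype above pairs with the '#include' and '#clear' directives that ReadConfigFile() now understands. A rough usage sketch; the conf.d style paths are examples only and the error handling is the usual _error pattern:

    #include <apt-pkg/configuration.h>
    #include <apt-pkg/error.h>

    int main()
    {
       Configuration Conf;

       // Parse every regular file in a fragment directory (backup suffixes
       // such as .dpkg-old or ~ are skipped and the list is sorted), then
       // layer the main file on top.
       if (ReadConfigDir(Conf,"/etc/apt/apt.conf.d/") == false ||
           ReadConfigFile(Conf,"/etc/apt/apt.conf") == false)
          _error->DumpErrors();

       return 0;
    }
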
diff --git a/apt-pkg/contrib/error.cc b/apt-pkg/contrib/error.cc
index e8b71fa7d..b60bd09a7 100644
--- a/apt-pkg/contrib/error.cc
+++ b/apt-pkg/contrib/error.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: error.cc,v 1.8 1999/08/08 07:24:54 jgg Exp $
+// $Id: error.cc,v 1.9 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Global Erorr Class - Global error mechanism
@@ -34,7 +34,7 @@
is compiled to be thread safe otherwise a non-safe version is used. A
Per-Thread error object is maintained in much the same manner as libc
manages errno */
-#if _POSIX_THREADS == 1 && defined(HAVE_PTHREAD)
+#if defined(_POSIX_THREADS) && defined(HAVE_PTHREAD)
#include <pthread.h>
static pthread_key_t ErrorKey;
diff --git a/apt-pkg/contrib/error.h b/apt-pkg/contrib/error.h
index bb42e73ba..0dc57927a 100644
--- a/apt-pkg/contrib/error.h
+++ b/apt-pkg/contrib/error.h
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: error.h,v 1.6 1999/01/18 06:20:08 jgg Exp $
+// $Id: error.h,v 1.7 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Global Erorr Class - Global error mechanism
@@ -44,6 +44,15 @@
#pragma interface "apt-pkg/error.h"
#endif
+#ifdef __GNUG__
+// Methods have a hidden this parameter that is visible to this attribute
+#define APT_MFORMAT1 __attribute__ ((format (printf, 2, 3)))
+#define APT_MFORMAT2 __attribute__ ((format (printf, 3, 4)))
+#else
+#define APT_MFORMAT1
+#define APT_MFORMAT2
+#endif
+
#include <string>
class GlobalError
@@ -62,13 +71,13 @@ class GlobalError
public:
// Call to generate an error from a library call.
- bool Errno(const char *Function,const char *Description,...);
- bool WarningE(const char *Function,const char *Description,...);
+ bool Errno(const char *Function,const char *Description,...) APT_MFORMAT2;
+ bool WarningE(const char *Function,const char *Description,...) APT_MFORMAT2;
/* A warning should be considered less severe than an error, and may be
ignored by the client. */
- bool Error(const char *Description,...);
- bool Warning(const char *Description,...);
+ bool Error(const char *Description,...) APT_MFORMAT1;
+ bool Warning(const char *Description,...) APT_MFORMAT1;
// Simple accessors
inline bool PendingError() {return PendingFlag;};
@@ -86,4 +95,7 @@ class GlobalError
GlobalError *_GetErrorObj();
#define _error _GetErrorObj()
+#undef APT_MFORMAT1
+#undef APT_MFORMAT2
+
#endif
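
The APT_MFORMAT attributes let g++ check the varargs against the format string on these methods; a contrived sketch of what that buys:

    #include <apt-pkg/error.h>

    bool ReadCheck(unsigned long Left)
    {
       // Correct: %lu matches the unsigned long argument.
       return _error->Error("read, still have %lu to read but none left",Left);

       // With the format attribute applied, using %u above instead would
       // now draw a -Wformat warning from g++ rather than failing silently.
    }
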
diff --git a/apt-pkg/contrib/fileutl.cc b/apt-pkg/contrib/fileutl.cc
index 65c19ea92..0907f4dcd 100644
--- a/apt-pkg/contrib/fileutl.cc
+++ b/apt-pkg/contrib/fileutl.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: fileutl.cc,v 1.34 2000/01/17 07:11:49 jgg Exp $
+// $Id: fileutl.cc,v 1.35 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
File Utilities
@@ -19,6 +19,9 @@
#endif
#include <apt-pkg/fileutl.h>
#include <apt-pkg/error.h>
+#include <apt-pkg/sptr.h>
+
+#include <apti18n.h>
#include <unistd.h>
#include <fcntl.h>
@@ -39,7 +42,7 @@ bool CopyFile(FileFd &From,FileFd &To)
return false;
// Buffered copy between fds
- unsigned char *Buf = new unsigned char[64000];
+ SPtrArray<unsigned char> Buf = new unsigned char[64000];
unsigned long Size = From.Size();
while (Size != 0)
{
@@ -49,15 +52,11 @@ bool CopyFile(FileFd &From,FileFd &To)
if (From.Read(Buf,ToRead) == false ||
To.Write(Buf,ToRead) == false)
- {
- delete [] Buf;
return false;
- }
Size -= ToRead;
}
- delete [] Buf;
return true;
}
/*}}}*/
@@ -72,11 +71,22 @@ int GetLock(string File,bool Errors)
int FD = open(File.c_str(),O_RDWR | O_CREAT | O_TRUNC,0640);
if (FD < 0)
{
+ // Read only .. can't have locking problems there.
+ if (errno == EROFS)
+ {
+ _error->Warning(_("Not using locking for read only lock file %s"),File.c_str());
+ return dup(0); // Need something for the caller to close
+ }
+
if (Errors == true)
- _error->Errno("open","Could not open lock file %s",File.c_str());
+ _error->Errno("open",_("Could not open lock file %s"),File.c_str());
+
+ // Feh.. We do this to distinguish the lock vs open case..
+ errno = EPERM;
return -1;
}
-
+ SetCloseExec(FD,true);
+
// Aquire a write lock
struct flock fl;
fl.l_type = F_WRLCK;
@@ -87,12 +97,15 @@ int GetLock(string File,bool Errors)
{
if (errno == ENOLCK)
{
- _error->Warning("Not using locking for nfs mounted lock file %s",File.c_str());
- return true;
+ _error->Warning(_("Not using locking for nfs mounted lock file %s"),File.c_str());
+ return dup(0); // Need something for the caller to close
}
if (Errors == true)
- _error->Errno("open","Could not get lock %s",File.c_str());
+ _error->Errno("open",_("Could not get lock %s"),File.c_str());
+
+ int Tmp = errno;
close(FD);
+ errno = Tmp;
return -1;
}
@@ -150,6 +163,18 @@ string flNotFile(string File)
return string(File,0,Res);
}
/*}}}*/
+// flExtension - Return the extension for the file /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+string flExtension(string File)
+{
+ string::size_type Res = File.rfind('.');
+ if (Res == string::npos)
+ return File;
+ Res++;
+ return string(File,Res,Res - File.length());
+}
+ /*}}}*/
// flNoLink - If file is a symlink then deref it /*{{{*/
// ---------------------------------------------------------------------
/* If the name is not a link then the returned path is the input. */
@@ -189,6 +214,24 @@ string flNoLink(string File)
}
}
/*}}}*/
+// flCombine - Combine a file and a directory /*{{{*/
+// ---------------------------------------------------------------------
+/* If the file is an absolute path then it is just returned, otherwise
+ the directory is pre-pended to it. */
+string flCombine(string Dir,string File)
+{
+ if (File.empty() == true)
+ return string();
+
+ if (File[0] == '/' || Dir.empty() == true)
+ return File;
+ if (File.length() >= 2 && File[0] == '.' && File[1] == '/')
+ return File;
+ if (Dir[Dir.length()-1] == '/')
+ return Dir + File;
+ return Dir + '/' + File;
+}
+ /*}}}*/
// SetCloseExec - Set the close on exec flag /*{{{*/
// ---------------------------------------------------------------------
/* */
@@ -216,7 +259,7 @@ void SetNonBlock(int Fd,bool Block)
/*}}}*/
// WaitFd - Wait for a FD to become readable /*{{{*/
// ---------------------------------------------------------------------
-/* This waits for a FD to become readable using select. It is usefull for
+/* This waits for a FD to become readable using select. It is useful for
applications making use of non-blocking sockets. The timeout is
in seconds. */
bool WaitFd(int Fd,bool write,unsigned long timeout)
@@ -309,7 +352,7 @@ bool ExecWait(int Pid,const char *Name,bool Reap)
if (Reap == true)
return false;
- return _error->Error("Waited, for %s but it wasn't there",Name);
+ return _error->Error(_("Waited, for %s but it wasn't there"),Name);
}
@@ -319,12 +362,12 @@ bool ExecWait(int Pid,const char *Name,bool Reap)
if (Reap == true)
return false;
if (WIFSIGNALED(Status) != 0 && WTERMSIG(Status) == SIGSEGV)
- return _error->Error("Sub-process %s recieved a segmentation fault.",Name);
+ return _error->Error(_("Sub-process %s received a segmentation fault."),Name);
if (WIFEXITED(Status) != 0)
- return _error->Error("Sub-process %s returned an error code (%u)",Name,WEXITSTATUS(Status));
+ return _error->Error(_("Sub-process %s returned an error code (%u)"),Name,WEXITSTATUS(Status));
- return _error->Error("Sub-process %s exited unexpectedly",Name);
+ return _error->Error(_("Sub-process %s exited unexpectedly"),Name);
}
return true;
@@ -363,7 +406,7 @@ bool FileFd::Open(string FileName,OpenMode Mode, unsigned long Perms)
}
if (iFd < 0)
- return _error->Errno("open","Could not open file %s",FileName.c_str());
+ return _error->Errno("open",_("Could not open file %s"),FileName.c_str());
this->FileName = FileName;
SetCloseExec(iFd,true);
@@ -395,7 +438,7 @@ bool FileFd::Read(void *To,unsigned long Size,bool AllowEof)
if (Res < 0)
{
Flags |= Fail;
- return _error->Errno("read","Read error");
+ return _error->Errno("read",_("Read error"));
}
To = (char *)To + Res;
@@ -414,7 +457,7 @@ bool FileFd::Read(void *To,unsigned long Size,bool AllowEof)
}
Flags |= Fail;
- return _error->Error("read, still have %u to read but none left",Size);
+ return _error->Error(_("read, still have %lu to read but none left"),Size);
}
/*}}}*/
// FileFd::Write - Write to the file /*{{{*/
@@ -432,7 +475,7 @@ bool FileFd::Write(const void *From,unsigned long Size)
if (Res < 0)
{
Flags |= Fail;
- return _error->Errno("write","Write error");
+ return _error->Errno("write",_("Write error"));
}
From = (char *)From + Res;
@@ -444,7 +487,7 @@ bool FileFd::Write(const void *From,unsigned long Size)
return true;
Flags |= Fail;
- return _error->Error("write, still have %u to write but couldn't",Size);
+ return _error->Error(_("write, still have %lu to write but couldn't"),Size);
}
/*}}}*/
// FileFd::Seek - Seek in the file /*{{{*/
@@ -455,7 +498,7 @@ bool FileFd::Seek(unsigned long To)
if (lseek(iFd,To,SEEK_SET) != (signed)To)
{
Flags |= Fail;
- return _error->Error("Unable to seek to %u",To);
+ return _error->Error("Unable to seek to %lu",To);
}
return true;
@@ -469,7 +512,7 @@ bool FileFd::Skip(unsigned long Over)
if (lseek(iFd,Over,SEEK_CUR) < 0)
{
Flags |= Fail;
- return _error->Error("Unable to seek ahead %u",Over);
+ return _error->Error("Unable to seek ahead %lu",Over);
}
return true;
@@ -483,7 +526,7 @@ bool FileFd::Truncate(unsigned long To)
if (ftruncate(iFd,To) != 0)
{
Flags |= Fail;
- return _error->Error("Unable to truncate to %u",To);
+ return _error->Error("Unable to truncate to %lu",To);
}
return true;
@@ -519,13 +562,25 @@ bool FileFd::Close()
bool Res = true;
if ((Flags & AutoClose) == AutoClose)
if (iFd >= 0 && close(iFd) != 0)
- Res &= _error->Errno("close","Problem closing the file");
+ Res &= _error->Errno("close",_("Problem closing the file"));
iFd = -1;
if ((Flags & Fail) == Fail && (Flags & DelOnFail) == DelOnFail &&
FileName.empty() == false)
if (unlink(FileName.c_str()) != 0)
- Res &= _error->Warning("unlnk","Problem unlinking the file");
+ Res &= _error->WarningE("unlnk",_("Problem unlinking the file"));
return Res;
}
/*}}}*/
+// FileFd::Sync - Sync the file /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool FileFd::Sync()
+{
+#ifdef _POSIX_SYNCHRONIZED_IO
+ if (fsync(iFd) != 0)
+ return _error->Errno("sync",_("Problem syncing the file"));
+#endif
+ return true;
+}
+ /*}}}*/
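
The new flCombine() and flExtension() helpers added above behave as in this small sketch; the file names are made up:

    #include <apt-pkg/fileutl.h>
    #include <iostream>

    int main()
    {
       // Absolute and ./ paths pass through untouched, anything else is
       // prefixed with the directory.
       std::cout << flCombine("/etc/apt","sources.list") << std::endl;  // /etc/apt/sources.list
       std::cout << flCombine("/etc/apt","/tmp/override") << std::endl; // /tmp/override

       // flExtension returns the text after the last dot.
       std::cout << flExtension("Packages.gz") << std::endl;            // gz
       return 0;
    }
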
diff --git a/apt-pkg/contrib/fileutl.h b/apt-pkg/contrib/fileutl.h
index 7ad630ce3..9cf351d0b 100644
--- a/apt-pkg/contrib/fileutl.h
+++ b/apt-pkg/contrib/fileutl.h
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: fileutl.h,v 1.22 1999/09/30 06:30:34 jgg Exp $
+// $Id: fileutl.h,v 1.23 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
File Utilities
@@ -49,7 +49,8 @@ class FileFd
unsigned long Size();
bool Open(string FileName,OpenMode Mode,unsigned long Perms = 0666);
bool Close();
-
+ bool Sync();
+
// Simple manipulators
inline int Fd() {return iFd;};
inline void Fd(int fd) {iFd = fd;};
@@ -84,5 +85,7 @@ bool ExecWait(int Pid,const char *Name,bool Reap = false);
string flNotDir(string File);
string flNotFile(string File);
string flNoLink(string File);
+string flExtension(string File);
+string flCombine(string Dir,string File);
#endif
diff --git a/apt-pkg/contrib/md5.h b/apt-pkg/contrib/md5.h
index 19b5bac6c..8b809729b 100644
--- a/apt-pkg/contrib/md5.h
+++ b/apt-pkg/contrib/md5.h
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: md5.h,v 1.4 1999/08/02 03:07:47 jgg Exp $
+// $Id: md5.h,v 1.5 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
MD5SumValue - Storage for a MD5Sum
@@ -33,7 +33,7 @@ class MD5Summation;
class MD5SumValue
{
- friend MD5Summation;
+ friend class MD5Summation;
unsigned char Sum[4*4];
public:
diff --git a/apt-pkg/contrib/mmap.cc b/apt-pkg/contrib/mmap.cc
index 230e133a5..cfe476763 100644
--- a/apt-pkg/contrib/mmap.cc
+++ b/apt-pkg/contrib/mmap.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: mmap.cc,v 1.20 1999/10/02 04:14:54 jgg Exp $
+// $Id: mmap.cc,v 1.21 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
MMap Class - Provides 'real' mmap or a faked mmap using read().
@@ -29,6 +29,8 @@
#include <apt-pkg/mmap.h>
#include <apt-pkg/error.h>
+#include <apti18n.h>
+
#include <sys/mman.h>
#include <sys/stat.h>
#include <unistd.h>
@@ -77,12 +79,12 @@ bool MMap::Map(FileFd &Fd)
Map = MAP_PRIVATE;
if (iSize == 0)
- return _error->Error("Can't mmap an empty file");
+ return _error->Error(_("Can't mmap an empty file"));
// Map it.
Base = mmap(0,iSize,Prot,Map,Fd.Fd(),0);
if (Base == (void *)-1)
- return _error->Errno("mmap","Couldn't make mmap of %u bytes",iSize);
+ return _error->Errno("mmap",_("Couldn't make mmap of %lu bytes"),iSize);
return true;
}
@@ -102,6 +104,7 @@ bool MMap::Close(bool DoSync)
_error->Warning("Unable to munmap");
iSize = 0;
+ Base = 0;
return true;
}
/*}}}*/
@@ -150,9 +153,15 @@ DynamicMMap::DynamicMMap(FileFd &F,unsigned long Flags,unsigned long WorkSpace)
return;
unsigned long EndOfFile = Fd->Size();
- Fd->Seek(WorkSpace);
- char C = 0;
- Fd->Write(&C,sizeof(C));
+ if (EndOfFile > WorkSpace)
+ WorkSpace = EndOfFile;
+ else
+ {
+ Fd->Seek(WorkSpace);
+ char C = 0;
+ Fd->Write(&C,sizeof(C));
+ }
+
Map(F);
iSize = EndOfFile;
}
@@ -182,11 +191,9 @@ DynamicMMap::~DynamicMMap()
}
unsigned long EndOfFile = iSize;
- Sync();
iSize = WorkSpace;
Close(false);
ftruncate(Fd->Fd(),EndOfFile);
- Fd->Close();
}
/*}}}*/
// DynamicMMap::RawAllocate - Allocate a raw chunk of unaligned space /*{{{*/
diff --git a/apt-pkg/contrib/progress.cc b/apt-pkg/contrib/progress.cc
index dfa978485..70e488d5f 100644
--- a/apt-pkg/contrib/progress.cc
+++ b/apt-pkg/contrib/progress.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: progress.cc,v 1.9 2000/06/05 04:22:25 jgg Exp $
+// $Id: progress.cc,v 1.10 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
OpProgress - Operation Progress
@@ -14,6 +14,9 @@
#include <apt-pkg/progress.h>
#include <apt-pkg/error.h>
#include <apt-pkg/configuration.h>
+
+#include <apti18n.h>
+
#include <stdio.h>
/*}}}*/
@@ -110,6 +113,9 @@ bool OpProgress::CheckChange(float Interval)
if ((int)LastPercent == (int)Percent)
return false;
+ if (Interval == 0)
+ return false;
+
// Check time delta
struct timeval Now;
gettimeofday(&Now,0);
@@ -142,9 +148,9 @@ void OpTextProgress::Done()
{
char S[300];
if (_error->PendingError() == true)
- snprintf(S,sizeof(S),"\r%s... Error!",OldOp.c_str());
+ snprintf(S,sizeof(S),_("\r%s... Error!"),OldOp.c_str());
else
- snprintf(S,sizeof(S),"\r%s... Done",OldOp.c_str());
+ snprintf(S,sizeof(S),_("\r%s... Done"),OldOp.c_str());
Write(S);
cout << endl;
OldOp = string();
@@ -162,7 +168,7 @@ void OpTextProgress::Done()
/* */
void OpTextProgress::Update()
{
- if (CheckChange() == false)
+ if (CheckChange((NoUpdate == true?0:0.7)) == false)
return;
// No percent spinner
diff --git a/apt-pkg/contrib/sptr.h b/apt-pkg/contrib/sptr.h
new file mode 100644
index 000000000..a9347edf9
--- /dev/null
+++ b/apt-pkg/contrib/sptr.h
@@ -0,0 +1,66 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: sptr.h,v 1.2 2001/02/20 07:03:17 jgg Exp $
+/* ######################################################################
+
+ Trivial non-ref counted 'smart pointer'
+
+ This is really only good to eliminate
+ {
+ delete Foo;
+ return;
+ }
+
+ Blocks from functions.
+
+ I think G++ has become good enough that doing this won't have much
+ of a code size impact.
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef SMART_POINTER_H
+#define SMART_POINTER_H
+
+template <class T>
+class SPtr
+{
+ public:
+ T *Ptr;
+
+ inline T *operator ->() {return Ptr;};
+ inline T &operator *() {return *Ptr;};
+ inline operator T *() {return Ptr;};
+ inline operator void *() {return Ptr;};
+ inline T *UnGuard() {T *Tmp = Ptr; Ptr = 0; return Tmp;};
+ inline void operator =(T *N) {Ptr = N;};
+ inline bool operator ==(T *lhs) const {return Ptr == lhs;};
+ inline bool operator !=(T *lhs) const {return Ptr != lhs;};
+ inline T*Get() {return Ptr;};
+
+ inline SPtr(T *Ptr) : Ptr(Ptr) {};
+ inline SPtr() : Ptr(0) {};
+ inline ~SPtr() {delete Ptr;};
+};
+
+template <class T>
+class SPtrArray
+{
+ public:
+ T *Ptr;
+
+ inline T &operator *() {return *Ptr;};
+ inline operator T *() {return Ptr;};
+ inline operator void *() {return Ptr;};
+ inline T *UnGuard() {T *Tmp = Ptr; Ptr = 0; return Tmp;};
+ inline T &operator [](signed long I) {return Ptr[I];};
+ inline void operator =(T *N) {Ptr = N;};
+ inline bool operator ==(T *lhs) const {return Ptr == lhs;};
+ inline bool operator !=(T *lhs) const {return Ptr != lhs;};
+ inline T*Get() {return Ptr;};
+
+ inline SPtrArray(T *Ptr) : Ptr(Ptr) {};
+ inline SPtrArray() : Ptr(0) {};
+ inline ~SPtrArray() {delete []Ptr;};
+};
+
+#endif
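
This guard is what lets the CopyFile() hunk earlier drop its manual delete [] on the error path; a minimal sketch of the pattern:

    #include <apt-pkg/sptr.h>
    #include <string.h>

    bool Example(bool Fail)
    {
       // ~SPtrArray runs delete [] on every return path, so the early
       // return needs no explicit clean-up.
       SPtrArray<unsigned char> Buf = new unsigned char[64000];
       memset(Buf.Get(),0,64000);
       if (Fail == true)
          return false;
       return true;
    }
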
diff --git a/apt-pkg/contrib/strutl.cc b/apt-pkg/contrib/strutl.cc
index f8a3f8e2b..c1a1cb4db 100644
--- a/apt-pkg/contrib/strutl.cc
+++ b/apt-pkg/contrib/strutl.cc
@@ -1,12 +1,12 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: strutl.cc,v 1.34 2000/01/16 05:36:17 jgg Exp $
+// $Id: strutl.cc,v 1.35 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
- String Util - Some usefull string functions.
+ String Util - Some useful string functions.
- These have been collected from here and there to do all sorts of usefull
- things to strings. They are usefull in file parsers, URI handlers and
+ These have been collected from here and there to do all sorts of useful
+ things to strings. They are useful in file parsers, URI handlers and
especially in APT methods.
This source is placed in the Public Domain, do with it what you will
@@ -21,12 +21,17 @@
#include <apt-pkg/strutl.h>
#include <apt-pkg/fileutl.h>
+#include <apt-pkg/error.h>
+#include <apti18n.h>
+
#include <ctype.h>
#include <string.h>
#include <stdio.h>
#include <unistd.h>
+#include <regex.h>
#include <errno.h>
+#include <stdarg.h>
/*}}}*/
// strstrip - Remove white space from the front and back of a string /*{{{*/
@@ -147,9 +152,9 @@ bool ParseQuoteWord(const char *&String,string &Res)
/*}}}*/
// ParseCWord - Parses a string like a C "" expression /*{{{*/
// ---------------------------------------------------------------------
-/* This expects a series of space seperated strings enclosed in ""'s.
+/* This expects a series of space separated strings enclosed in ""'s.
It concatenates the ""'s into a single string. */
-bool ParseCWord(const char *String,string &Res)
+bool ParseCWord(const char *&String,string &Res)
{
// Skip leading whitespace
const char *C = String;
@@ -180,9 +185,10 @@ bool ParseCWord(const char *String,string &Res)
if (isspace(*C) == 0)
return false;
*Buf++ = ' ';
- }
+ }
*Buf = 0;
Res = Buffer;
+ String = C;
return true;
}
/*}}}*/
@@ -325,6 +331,13 @@ string SubstVar(string Str,string Subst,string Contents)
return Temp + string(Str,OldPos);
}
+
+string SubstVar(string Str,const struct SubstVar *Vars)
+{
+ for (; Vars->Subst != 0; Vars++)
+ Str = SubstVar(Str,Vars->Subst,*Vars->Contents);
+ return Str;
+}
/*}}}*/
// URItoFileName - Convert the uri into a unique file name /*{{{*/
// ---------------------------------------------------------------------
@@ -548,9 +561,11 @@ bool ReadMessages(int Fd, vector<string> &List)
return false;
// No data
- if (Res <= 0)
+ if (Res < 0 && errno == EAGAIN)
return true;
-
+ if (Res < 0)
+ return false;
+
End += Res;
// Look for the end of the message
@@ -749,6 +764,121 @@ bool Hex2Num(const char *Start,const char *End,unsigned char *Num,
return true;
}
/*}}}*/
+// TokSplitString - Split a string up by a given token /*{{{*/
+// ---------------------------------------------------------------------
+/* This is intended to be a faster splitter; it does not use dynamic
+ memory. Input is changed to insert nulls at each token location. */
+bool TokSplitString(char Tok,char *Input,char **List,
+ unsigned long ListMax)
+{
+ // Strip any leading spaces
+ char *Start = Input;
+ char *Stop = Start + strlen(Start);
+ for (; *Start != 0 && isspace(*Start) != 0; Start++);
+
+ unsigned long Count = 0;
+ char *Pos = Start;
+ while (Pos != Stop)
+ {
+ // Skip to the next Token
+ for (; Pos != Stop && *Pos != Tok; Pos++);
+
+ // Back remove spaces
+ char *End = Pos;
+ for (; End > Start && (End[-1] == Tok || isspace(End[-1]) != 0); End--);
+ *End = 0;
+
+ List[Count++] = Start;
+ if (Count >= ListMax)
+ {
+ List[Count-1] = 0;
+ return false;
+ }
+
+ // Advance pos
+ for (; Pos != Stop && (*Pos == Tok || isspace(*Pos) != 0 || *Pos == 0); Pos++);
+ Start = Pos;
+ }
+
+ List[Count] = 0;
+ return true;
+}
+ /*}}}*/
+// RegexChoice - Simple regex list/list matcher /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+unsigned long RegexChoice(RxChoiceList *Rxs,const char **ListBegin,
+ const char **ListEnd)
+{
+ for (RxChoiceList *R = Rxs; R->Str != 0; R++)
+ R->Hit = false;
+
+ unsigned long Hits = 0;
+ for (; ListBegin != ListEnd; ListBegin++)
+ {
+ // Check if the name is a regex
+ const char *I;
+ bool Regex = true;
+ for (I = *ListBegin; *I != 0; I++)
+ if (*I == '.' || *I == '?' || *I == '*' || *I == '|')
+ break;
+ if (*I == 0)
+ Regex = false;
+
+ // Compile the regex pattern
+ regex_t Pattern;
+ if (Regex == true)
+ if (regcomp(&Pattern,*ListBegin,REG_EXTENDED | REG_ICASE |
+ REG_NOSUB) != 0)
+ Regex = false;
+
+ // Search the list
+ bool Done = false;
+ for (RxChoiceList *R = Rxs; R->Str != 0; R++)
+ {
+ if (R->Str[0] == 0)
+ continue;
+
+ if (strcasecmp(R->Str,*ListBegin) != 0)
+ {
+ if (Regex == false)
+ continue;
+ if (regexec(&Pattern,R->Str,0,0,0) != 0)
+ continue;
+ }
+ Done = true;
+
+ if (R->Hit == false)
+ Hits++;
+
+ R->Hit = true;
+ }
+
+ if (Regex == true)
+ regfree(&Pattern);
+
+ if (Done == false)
+ _error->Warning(_("Selection %s not found"),*ListBegin);
+ }
+
+ return Hits;
+}
+ /*}}}*/
+// ioprintf - C format string outputter to C++ iostreams /*{{{*/
+// ---------------------------------------------------------------------
+/* This is used to make the internationalization strings easier to translate
+ and to allow reordering of parameters */
+void ioprintf(ostream &out,const char *format,...)
+{
+ va_list args;
+ va_start(args,format);
+
+ // sprintf the description
+ char S[400];
+ vsnprintf(S,sizeof(S),format,args);
+ out << S;
+}
+ /*}}}*/
// URI::CopyFrom - Copy from an object /*{{{*/
// ---------------------------------------------------------------------
@@ -757,7 +887,7 @@ void URI::CopyFrom(string U)
{
string::const_iterator I = U.begin();
- // Locate the first colon, this seperates the scheme
+ // Locate the first colon, this separates the scheme
for (; I < U.end() && *I != ':' ; I++);
string::const_iterator FirstColon = I;
@@ -912,3 +1042,16 @@ URI::operator string()
return Res;
}
/*}}}*/
+// URI::SiteOnly - Return the schema and site for the URI /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+string URI::SiteOnly(string URI)
+{
+ ::URI U(URI);
+ U.User = string();
+ U.Password = string();
+ U.Path = string();
+ U.Port = 0;
+ return U;
+}
+ /*}}}*/
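
TokSplitString() added above splits in place by writing nulls into the caller's buffer instead of allocating; a quick sketch, with an invented sample line:

    #include <apt-pkg/strutl.h>
    #include <stdio.h>

    int main()
    {
       char Line[] = " foo , bar ,baz ";
       char *Fields[10];
       if (TokSplitString(',',Line,Fields,10) == true)
          for (unsigned int I = 0; Fields[I] != 0; I++)
             printf("'%s'\n",Fields[I]);   // prints 'foo', 'bar', 'baz'
       return 0;
    }
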
diff --git a/apt-pkg/contrib/strutl.h b/apt-pkg/contrib/strutl.h
index 14293ae05..5549673a1 100644
--- a/apt-pkg/contrib/strutl.h
+++ b/apt-pkg/contrib/strutl.h
@@ -1,9 +1,9 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: strutl.h,v 1.15 1999/08/02 03:07:48 jgg Exp $
+// $Id: strutl.h,v 1.16 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
- String Util - These are some usefull string functions
+ String Util - These are some useful string functions
_strstrip is a function to remove whitespace from the front and end
of a string.
@@ -25,15 +25,21 @@
#include <vector>
#include <time.h>
+#ifdef __GNUG__
+// Methods have a hidden this parameter that is visible to this attribute
+#define APT_FORMAT2 __attribute__ ((format (printf, 2, 3)))
+#else
+#define APT_FORMAT2
+#endif
+
char *_strstrip(char *String);
char *_strtabexpand(char *String,size_t Len);
bool ParseQuoteWord(const char *&String,string &Res);
-bool ParseCWord(const char *String,string &Res);
+bool ParseCWord(const char *&String,string &Res);
string QuoteString(string Str,const char *Bad);
string DeQuoteString(string Str);
string SizeToStr(double Bytes);
string TimeToStr(unsigned long Sec);
-string SubstVar(string Str,string Subst,string Contents);
string Base64Encode(string Str);
string URItoFileName(string URI);
string TimeRFC1123(time_t Date);
@@ -44,6 +50,9 @@ bool ReadMessages(int Fd, vector<string> &List);
bool StrToNum(const char *Str,unsigned long &Res,unsigned Len,unsigned Base = 0);
bool Hex2Num(const char *Start,const char *End,unsigned char *Num,
unsigned int Length);
+bool TokSplitString(char Tok,char *Input,char **List,
+ unsigned long ListMax);
+void ioprintf(ostream &out,const char *format,...) APT_FORMAT2;
int stringcmp(const char *A,const char *AEnd,const char *B,const char *BEnd);
inline int stringcmp(const char *A,const char *AEnd,const char *B) {return stringcmp(A,AEnd,B,B+strlen(B));};
@@ -51,6 +60,7 @@ inline int stringcmp(string A,const char *B) {return stringcmp(A.begin(),A.end()
int stringcasecmp(const char *A,const char *AEnd,const char *B,const char *BEnd);
inline int stringcasecmp(const char *A,const char *AEnd,const char *B) {return stringcasecmp(A,AEnd,B,B+strlen(B));};
inline int stringcasecmp(string A,const char *B) {return stringcasecmp(A.begin(),A.end(),B,B+strlen(B));};
+inline int stringcasecmp(string A,string B) {return stringcasecmp(A.begin(),A.end(),B.begin(),B.end());};
class URI
{
@@ -68,9 +78,29 @@ class URI
operator string();
inline void operator =(string From) {CopyFrom(From);};
inline bool empty() {return Access.empty();};
+ static string SiteOnly(string URI);
URI(string Path) {CopyFrom(Path);};
URI() : Port(0) {};
};
+struct SubstVar
+{
+ const char *Subst;
+ const string *Contents;
+};
+string SubstVar(string Str,const struct SubstVar *Vars);
+string SubstVar(string Str,string Subst,string Contents);
+
+struct RxChoiceList
+{
+ void *UserData;
+ const char *Str;
+ bool Hit;
+};
+unsigned long RegexChoice(RxChoiceList *Rxs,const char **ListBegin,
+ const char **ListEnd);
+
+#undef APT_FORMAT2
+
#endif
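
URI::SiteOnly() declared above strips the user, password, path and port so only the scheme and host survive; roughly, with an example address:

    #include <apt-pkg/strutl.h>
    #include <iostream>

    int main()
    {
       std::string Site = URI::SiteOnly("http://user:pass@ftp.debian.org:80/debian/");
       std::cout << Site << std::endl;   // scheme and host only, e.g. http://ftp.debian.org
       return 0;
    }
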
diff --git a/apt-pkg/deb/debindexfile.cc b/apt-pkg/deb/debindexfile.cc
new file mode 100644
index 000000000..67afc89e6
--- /dev/null
+++ b/apt-pkg/deb/debindexfile.cc
@@ -0,0 +1,506 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: debindexfile.cc,v 1.2 2001/02/20 07:03:17 jgg Exp $
+/* ######################################################################
+
+ Debian Specific sources.list types and the three sorts of Debian
+ index files.
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#ifdef __GNUG__
+#pragma implementation "apt-pkg/debindexfile.h"
+#endif
+
+#include <apt-pkg/debindexfile.h>
+#include <apt-pkg/debsrcrecords.h>
+#include <apt-pkg/deblistparser.h>
+#include <apt-pkg/debrecords.h>
+#include <apt-pkg/sourcelist.h>
+#include <apt-pkg/configuration.h>
+#include <apt-pkg/progress.h>
+#include <apt-pkg/error.h>
+#include <apt-pkg/strutl.h>
+#include <apt-pkg/acquire-item.h>
+
+#include <sys/stat.h>
+ /*}}}*/
+
+// SourcesIndex::debSourcesIndex - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+debSourcesIndex::debSourcesIndex(string URI,string Dist,string Section) :
+ URI(URI), Dist(Dist), Section(Section)
+{
+}
+ /*}}}*/
+// SourcesIndex::SourceInfo - Short 1 liner describing a source /*{{{*/
+// ---------------------------------------------------------------------
+/* The result looks like:
+ http://foo/ stable/main src 1.1.1 (dsc) */
+string debSourcesIndex::SourceInfo(pkgSrcRecords::Parser const &Record,
+ pkgSrcRecords::File const &File) const
+{
+ string Res;
+ Res = ::URI::SiteOnly(URI) + ' ';
+ if (Dist[Dist.size() - 1] == '/')
+ {
+ if (Dist != "/")
+ Res += Dist;
+ }
+ else
+ Res += Dist + '/' + Section;
+
+ Res += " ";
+ Res += Record.Package();
+ Res += " ";
+ Res += Record.Version();
+ if (File.Type.empty() == false)
+ Res += " (" + File.Type + ")";
+ return Res;
+}
+ /*}}}*/
+// SourcesIndex::CreateSrcParser - Get a parser for the source files /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+pkgSrcRecords::Parser *debSourcesIndex::CreateSrcParser() const
+{
+ string SourcesURI;
+ if (Dist[Dist.size() - 1] == '/')
+ SourcesURI = URI + Dist;
+ else
+ SourcesURI = URI + "dists/" + Dist + '/' + Section +
+ "/source/";
+
+ SourcesURI += "Sources";
+ SourcesURI = URItoFileName(SourcesURI);
+ return new debSrcRecordParser(_config->FindDir("Dir::State::lists") +
+ SourcesURI,this);
+}
+ /*}}}*/
+// SourcesIndex::Describe - Give a descriptive path to the index /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+string debSourcesIndex::Describe() const
+{
+ char S[300];
+ snprintf(S,sizeof(S),"%s (%s)",Info("Packages").c_str(),
+ IndexFile("Sources").c_str());
+ return S;
+}
+ /*}}}*/
+// SourcesIndex::Info - One liner describing the index URI /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+string debSourcesIndex::Info(const char *Type) const
+{
+ string Info = ::URI::SiteOnly(URI) + ' ';
+ if (Dist[Dist.size() - 1] == '/')
+ {
+ if (Dist != "/")
+ Info += Dist;
+ }
+ else
+ Info += Dist + '/' + Section;
+ Info += " ";
+ Info += Type;
+ return Info;
+}
+ /*}}}*/
+// SourcesIndex::Index* - Return the URI to the index files /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+inline string debSourcesIndex::IndexFile(const char *Type) const
+{
+ return URItoFileName(IndexURI(Type));
+}
+string debSourcesIndex::IndexURI(const char *Type) const
+{
+ string Res;
+ if (Dist[Dist.size() - 1] == '/')
+ {
+ if (Dist != "/")
+ Res = URI + Dist;
+ else
+ Res = URI;
+ }
+ else
+ Res = URI + "dists/" + Dist + '/' + Section +
+ "/source/";
+
+ Res += Type;
+ return Res;
+}
+ /*}}}*/
+// SourcesIndex::GetIndexes - Fetch the index files /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool debSourcesIndex::GetIndexes(pkgAcquire *Owner) const
+{
+ new pkgAcqIndex(Owner,IndexURI("Sources"),Info("Sources"),"Sources");
+ new pkgAcqIndexRel(Owner,IndexURI("Release"),Info("Release"),"Release");
+ return true;
+}
+ /*}}}*/
+// SourcesIndex::Exists - Check if the index is available /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool debSourcesIndex::Exists() const
+{
+ return FileExists(IndexFile("Sources"));
+}
+ /*}}}*/
+// SourcesIndex::Size - Return the size of the index /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+unsigned long debSourcesIndex::Size() const
+{
+ struct stat S;
+ if (stat(IndexFile("Sources").c_str(),&S) != 0)
+ return 0;
+ return S.st_size;
+}
+ /*}}}*/
+
+// PackagesIndex::debPackagesIndex - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+debPackagesIndex::debPackagesIndex(string URI,string Dist,string Section) :
+ URI(URI), Dist(Dist), Section(Section)
+{
+}
+ /*}}}*/
+// PackagesIndex::ArchiveInfo - Short version of the archive url /*{{{*/
+// ---------------------------------------------------------------------
+/* This is a shorter version that is designed to be < 60 chars or so */
+string debPackagesIndex::ArchiveInfo(pkgCache::VerIterator Ver) const
+{
+ string Res = ::URI::SiteOnly(URI) + ' ';
+ if (Dist[Dist.size() - 1] == '/')
+ {
+ if (Dist != "/")
+ Res += Dist;
+ }
+ else
+ Res += Dist + '/' + Section;
+
+ Res += " ";
+ Res += Ver.ParentPkg().Name();
+ Res += " ";
+ Res += Ver.VerStr();
+ return Res;
+}
+ /*}}}*/
+// PackagesIndex::Describe - Give a descriptive path to the index /*{{{*/
+// ---------------------------------------------------------------------
+/* This should help the user find the index in the sources.list and
+ in the filesystem for problem solving */
+string debPackagesIndex::Describe() const
+{
+ char S[300];
+ snprintf(S,sizeof(S),"%s (%s)",Info("Packages").c_str(),
+ IndexFile("Packages").c_str());
+ return S;
+}
+ /*}}}*/
+// PackagesIndex::Info - One liner describing the index URI /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+string debPackagesIndex::Info(const char *Type) const
+{
+ string Info = ::URI::SiteOnly(URI) + ' ';
+ if (Dist[Dist.size() - 1] == '/')
+ {
+ if (Dist != "/")
+ Info += Dist;
+ }
+ else
+ Info += Dist + '/' + Section;
+ Info += " ";
+ Info += Type;
+ return Info;
+}
+ /*}}}*/
+// PackagesIndex::Index* - Return the URI to the index files /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+inline string debPackagesIndex::IndexFile(const char *Type) const
+{
+ return _config->FindDir("Dir::State::lists") + URItoFileName(IndexURI(Type));
+}
+string debPackagesIndex::IndexURI(const char *Type) const
+{
+ string Res;
+ if (Dist[Dist.size() - 1] == '/')
+ {
+ if (Dist != "/")
+ Res = URI + Dist;
+ else
+ Res = URI;
+ }
+ else
+ Res = URI + "dists/" + Dist + '/' + Section +
+ "/binary-" + _config->Find("APT::Architecture") + '/';
+
+ Res += Type;
+ return Res;
+}
+ /*}}}*/
+// PackagesIndex::GetIndexes - Fetch the index files /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool debPackagesIndex::GetIndexes(pkgAcquire *Owner) const
+{
+ new pkgAcqIndex(Owner,IndexURI("Packages"),Info("Packages"),"Packages");
+ new pkgAcqIndexRel(Owner,IndexURI("Release"),Info("Release"),"Release");
+ return true;
+}
+ /*}}}*/
+// PackagesIndex::Exists - Check if the index is available /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool debPackagesIndex::Exists() const
+{
+ return FileExists(IndexFile("Packages"));
+}
+ /*}}}*/
+// PackagesIndex::Size - Return the size of the index /*{{{*/
+// ---------------------------------------------------------------------
+/* This is really only used for progress reporting. */
+unsigned long debPackagesIndex::Size() const
+{
+ struct stat S;
+ if (stat(IndexFile("Packages").c_str(),&S) != 0)
+ return 0;
+ return S.st_size;
+}
+ /*}}}*/
+// PackagesIndex::Merge - Load the index file into a cache /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool debPackagesIndex::Merge(pkgCacheGenerator &Gen,OpProgress &Prog) const
+{
+ string PackageFile = IndexFile("Packages");
+ FileFd Pkg(PackageFile,FileFd::ReadOnly);
+ debListParser Parser(&Pkg);
+ if (_error->PendingError() == true)
+ return _error->Error("Problem opening %s",PackageFile.c_str());
+
+ Prog.SubProgress(0,Info("Packages"));
+ ::URI Tmp(URI);
+ if (Gen.SelectFile(PackageFile,Tmp.Host,*this) == false)
+ return _error->Error("Problem with SelectFile %s",PackageFile.c_str());
+
+ // Store the IMS information
+ pkgCache::PkgFileIterator File = Gen.GetCurFile();
+ struct stat St;
+ if (fstat(Pkg.Fd(),&St) != 0)
+ return _error->Errno("fstat","Failed to stat");
+ File->Size = St.st_size;
+ File->mtime = St.st_mtime;
+
+ if (Gen.MergeList(Parser) == false)
+ return _error->Error("Problem with MergeList %s",PackageFile.c_str());
+
+ // Check the release file
+ string ReleaseFile = IndexFile("Release");
+ if (FileExists(ReleaseFile) == true)
+ {
+ FileFd Rel(ReleaseFile,FileFd::ReadOnly);
+ if (_error->PendingError() == true)
+ return false;
+ Parser.LoadReleaseInfo(File,Rel);
+ }
+
+ return true;
+}
+ /*}}}*/
+// PackagesIndex::FindInCache - Find this index /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+pkgCache::PkgFileIterator debPackagesIndex::FindInCache(pkgCache &Cache) const
+{
+ string FileName = IndexFile("Packages");
+ pkgCache::PkgFileIterator File = Cache.FileBegin();
+ for (; File.end() == false; File++)
+ {
+ if (FileName != File.FileName())
+ continue;
+
+ struct stat St;
+ if (stat(File.FileName(),&St) != 0)
+ return pkgCache::PkgFileIterator(Cache);
+ if ((unsigned)St.st_size != File->Size || St.st_mtime != File->mtime)
+ return pkgCache::PkgFileIterator(Cache);
+ return File;
+ }
+
+ return File;
+}
+ /*}}}*/
+
+// StatusIndex::debStatusIndex - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+debStatusIndex::debStatusIndex(string File) : File(File)
+{
+}
+ /*}}}*/
+// StatusIndex::Size - Return the size of the index /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+unsigned long debStatusIndex::Size() const
+{
+ struct stat S;
+ if (stat(File.c_str(),&S) != 0)
+ return 0;
+ return S.st_size;
+}
+ /*}}}*/
+// StatusIndex::Merge - Load the index file into a cache /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool debStatusIndex::Merge(pkgCacheGenerator &Gen,OpProgress &Prog) const
+{
+ FileFd Pkg(File,FileFd::ReadOnly);
+ if (_error->PendingError() == true)
+ return false;
+ debListParser Parser(&Pkg);
+ if (_error->PendingError() == true)
+ return false;
+
+ Prog.SubProgress(0,File);
+ if (Gen.SelectFile(File,string(),*this,pkgCache::Flag::NotSource) == false)
+ return _error->Error("Problem with SelectFile %s",File.c_str());
+
+ // Store the IMS information
+ pkgCache::PkgFileIterator CFile = Gen.GetCurFile();
+ struct stat St;
+ if (fstat(Pkg.Fd(),&St) != 0)
+ return _error->Errno("fstat","Failed to stat");
+ CFile->Size = St.st_size;
+ CFile->mtime = St.st_mtime;
+ CFile->Archive = Gen.WriteUniqString("now");
+
+ if (Gen.MergeList(Parser) == false)
+ return _error->Error("Problem with MergeList %s",File.c_str());
+ return true;
+}
+ /*}}}*/
+// StatusIndex::FindInCache - Find this index /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+pkgCache::PkgFileIterator debStatusIndex::FindInCache(pkgCache &Cache) const
+{
+ pkgCache::PkgFileIterator File = Cache.FileBegin();
+ for (; File.end() == false; File++)
+ {
+ if (this->File != File.FileName())
+ continue;
+
+ struct stat St;
+ if (stat(File.FileName(),&St) != 0)
+ return pkgCache::PkgFileIterator(Cache);
+ if ((unsigned)St.st_size != File->Size || St.st_mtime != File->mtime)
+ return pkgCache::PkgFileIterator(Cache);
+ return File;
+ }
+ return File;
+}
+ /*}}}*/
+// StatusIndex::Exists - Check if the index is available /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool debStatusIndex::Exists() const
+{
+   // The status file is always assumed to be present.
+ return true;
+}
+ /*}}}*/
+
+// Source List types for Debian /*{{{*/
+class debSLTypeDeb : public pkgSourceList::Type
+{
+ public:
+
+ bool CreateItem(vector<pkgIndexFile *> &List,string URI,
+ string Dist,string Section) const
+ {
+ List.push_back(new debPackagesIndex(URI,Dist,Section));
+ return true;
+ };
+
+ debSLTypeDeb()
+ {
+ Name = "deb";
+ Label = "Standard Debian binary tree";
+ }
+};
+
+class debSLTypeDebSrc : public pkgSourceList::Type
+{
+ public:
+
+ bool CreateItem(vector<pkgIndexFile *> &List,string URI,
+ string Dist,string Section) const
+ {
+ List.push_back(new debSourcesIndex(URI,Dist,Section));
+ return true;
+ };
+
+ debSLTypeDebSrc()
+ {
+ Name = "deb-src";
+ Label = "Standard Debian source tree";
+ }
+};
+
+debSLTypeDeb _apt_DebType;
+debSLTypeDebSrc _apt_DebSrcType;
+ /*}}}*/
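+// A rough sketch of how these types are used (the mirror URL below is
+// hypothetical, not taken from this code): a sources.list line such as
+//
+//    deb http://mirror.example.org/debian stable main
+//
+// reaches debSLTypeDeb::CreateItem as URI, Dist and Section and appends a
+// debPackagesIndex to the index list; a matching deb-src line goes through
+// debSLTypeDebSrc and produces a debSourcesIndex instead.
+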
+// Index File types for Debian /*{{{*/
+class debIFTypeSrc : public pkgIndexFile::Type
+{
+ public:
+
+ debIFTypeSrc() {Label = "Debian Source Index";};
+};
+class debIFTypePkg : public pkgIndexFile::Type
+{
+ public:
+
+ virtual pkgRecords::Parser *CreatePkgParser(pkgCache::PkgFileIterator File) const
+ {
+ return new debRecordParser(File.FileName(),*File.Cache());
+ };
+ debIFTypePkg() {Label = "Debian Package Index";};
+};
+class debIFTypeStatus : public pkgIndexFile::Type
+{
+ public:
+
+ virtual pkgRecords::Parser *CreatePkgParser(pkgCache::PkgFileIterator File) const
+ {
+ return new debRecordParser(File.FileName(),*File.Cache());
+ };
+ debIFTypeStatus() {Label = "Debian dpkg status file";};
+};
+static debIFTypeSrc _apt_Src;
+static debIFTypePkg _apt_Pkg;
+static debIFTypeStatus _apt_Status;
+
+const pkgIndexFile::Type *debSourcesIndex::GetType() const
+{
+ return &_apt_Src;
+}
+const pkgIndexFile::Type *debPackagesIndex::GetType() const
+{
+ return &_apt_Pkg;
+}
+const pkgIndexFile::Type *debStatusIndex::GetType() const
+{
+ return &_apt_Status;
+}
+
+ /*}}}*/
diff --git a/apt-pkg/deb/debindexfile.h b/apt-pkg/deb/debindexfile.h
new file mode 100644
index 000000000..b4dee3c22
--- /dev/null
+++ b/apt-pkg/deb/debindexfile.h
@@ -0,0 +1,112 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: debindexfile.h,v 1.2 2001/02/20 07:03:17 jgg Exp $
+/* ######################################################################
+
+ Debian Index Files
+
+ There are three sorts currently
+
+ Package files that have File: tags
+ Package files that don't (/var/lib/dpkg/status)
+ Source files
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef PKGLIB_DEBINDEXFILE_H
+#define PKGLIB_DEBINDEXFILE_H
+
+#ifdef __GNUG__
+#pragma interface "apt-pkg/debindexfile.h"
+#endif
+
+#include <apt-pkg/indexfile.h>
+
+class debStatusIndex : public pkgIndexFile
+{
+ string File;
+
+ public:
+
+ virtual const Type *GetType() const;
+
+ // Interface for acquire
+ virtual string Describe() const {return File;};
+
+ // Interface for the Cache Generator
+ virtual bool Exists() const;
+ virtual bool HasPackages() const {return true;};
+ virtual unsigned long Size() const;
+ virtual bool Merge(pkgCacheGenerator &Gen,OpProgress &Prog) const;
+ virtual pkgCache::PkgFileIterator FindInCache(pkgCache &Cache) const;
+
+ debStatusIndex(string File);
+};
+
+class debPackagesIndex : public pkgIndexFile
+{
+ string URI;
+ string Dist;
+ string Section;
+
+ string Info(const char *Type) const;
+ string IndexFile(const char *Type) const;
+ string IndexURI(const char *Type) const;
+
+ public:
+
+ virtual const Type *GetType() const;
+
+ // Stuff for accessing files on remote items
+ virtual string ArchiveInfo(pkgCache::VerIterator Ver) const;
+ virtual string ArchiveURI(string File) const {return URI + File;};
+
+ // Interface for acquire
+ virtual string Describe() const;
+ virtual bool GetIndexes(pkgAcquire *Owner) const;
+
+ // Interface for the Cache Generator
+ virtual bool Exists() const;
+ virtual bool HasPackages() const {return true;};
+ virtual unsigned long Size() const;
+ virtual bool Merge(pkgCacheGenerator &Gen,OpProgress &Prog) const;
+ virtual pkgCache::PkgFileIterator FindInCache(pkgCache &Cache) const;
+
+ debPackagesIndex(string URI,string Dist,string Section);
+};
+
+class debSourcesIndex : public pkgIndexFile
+{
+ string URI;
+ string Dist;
+ string Section;
+
+ string Info(const char *Type) const;
+ string IndexFile(const char *Type) const;
+ string IndexURI(const char *Type) const;
+
+ public:
+
+ virtual const Type *GetType() const;
+
+ // Stuff for accessing files on remote items
+ virtual string SourceInfo(pkgSrcRecords::Parser const &Record,
+ pkgSrcRecords::File const &File) const;
+ virtual string ArchiveURI(string File) const {return URI + File;};
+
+ // Interface for acquire
+ virtual string Describe() const;
+ virtual bool GetIndexes(pkgAcquire *Owner) const;
+
+ // Interface for the record parsers
+ virtual pkgSrcRecords::Parser *CreateSrcParser() const;
+
+ // Interface for the Cache Generator
+ virtual bool Exists() const;
+ virtual bool HasPackages() const {return false;};
+ virtual unsigned long Size() const;
+
+ debSourcesIndex(string URI,string Dist,string Section);
+};
+
+#endif
diff --git a/apt-pkg/deb/deblistparser.cc b/apt-pkg/deb/deblistparser.cc
index 9da03a7f6..2b3dfaf6e 100644
--- a/apt-pkg/deb/deblistparser.cc
+++ b/apt-pkg/deb/deblistparser.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: deblistparser.cc,v 1.23 1999/09/30 06:30:34 jgg Exp $
+// $Id: deblistparser.cc,v 1.24 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Package Cache Generator - Generator for the cache structure.
@@ -19,10 +19,17 @@
#include <system.h>
/*}}}*/
+static debListParser::WordList PrioList[] = {{"important",pkgCache::State::Important},
+ {"required",pkgCache::State::Required},
+ {"standard",pkgCache::State::Standard},
+ {"optional",pkgCache::State::Optional},
+ {"extra",pkgCache::State::Extra},
+ {}};
+
// ListParser::debListParser - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
-debListParser::debListParser(FileFd &File) : Tags(File)
+debListParser::debListParser(FileFd *File) : Tags(File)
{
Arch = _config->Find("APT::architecture");
}
@@ -80,14 +87,8 @@ bool debListParser::NewVersion(pkgCache::VerIterator Ver)
const char *Start;
const char *Stop;
if (Section.Find("Priority",Start,Stop) == true)
- {
- WordList PrioList[] = {{"important",pkgCache::State::Important},
- {"required",pkgCache::State::Required},
- {"standard",pkgCache::State::Standard},
- {"optional",pkgCache::State::Optional},
- {"extra",pkgCache::State::Extra}};
- if (GrabWord(string(Start,Stop-Start),PrioList,
- _count(PrioList),Ver->Priority) == false)
+ {
+ if (GrabWord(string(Start,Stop-Start),PrioList,Ver->Priority) == false)
Ver->Priority = pkgCache::State::Extra;
}
@@ -104,6 +105,10 @@ bool debListParser::NewVersion(pkgCache::VerIterator Ver)
if (ParseDepends(Ver,"Replaces",pkgCache::Dep::Replaces) == false)
return false;
+   // The obsolete Optional field is treated as Suggests
+ if (ParseDepends(Ver,"Optional",pkgCache::Dep::Suggests) == false)
+ return false;
+
if (ParseProvides(Ver) == false)
return false;
@@ -205,9 +210,9 @@ bool debListParser::ParseStatus(pkgCache::PkgIterator Pkg,
{"install",pkgCache::State::Install},
{"hold",pkgCache::State::Hold},
{"deinstall",pkgCache::State::DeInstall},
- {"purge",pkgCache::State::Purge}};
- if (GrabWord(string(Start,I-Start),WantList,
- _count(WantList),Pkg->SelectedState) == false)
+ {"purge",pkgCache::State::Purge},
+ {}};
+ if (GrabWord(string(Start,I-Start),WantList,Pkg->SelectedState) == false)
return _error->Error("Malformed 1st word in the Status line");
   // Isolate the next word
@@ -221,9 +226,9 @@ bool debListParser::ParseStatus(pkgCache::PkgIterator Pkg,
WordList FlagList[] = {{"ok",pkgCache::State::Ok},
{"reinstreq",pkgCache::State::ReInstReq},
{"hold",pkgCache::State::HoldInst},
- {"hold-reinstreq",pkgCache::State::HoldReInstReq}};
- if (GrabWord(string(Start,I-Start),FlagList,
- _count(FlagList),Pkg->InstState) == false)
+ {"hold-reinstreq",pkgCache::State::HoldReInstReq},
+ {}};
+ if (GrabWord(string(Start,I-Start),FlagList,Pkg->InstState) == false)
return _error->Error("Malformed 2nd word in the Status line");
   // Isolate the last word
@@ -241,9 +246,9 @@ bool debListParser::ParseStatus(pkgCache::PkgIterator Pkg,
{"half-installed",pkgCache::State::HalfInstalled},
{"config-files",pkgCache::State::ConfigFiles},
{"post-inst-failed",pkgCache::State::HalfConfigured},
- {"removal-failed",pkgCache::State::HalfInstalled}};
- if (GrabWord(string(Start,I-Start),StatusList,
- _count(StatusList),Pkg->CurrentState) == false)
+ {"removal-failed",pkgCache::State::HalfInstalled},
+ {}};
+ if (GrabWord(string(Start,I-Start),StatusList,Pkg->CurrentState) == false)
return _error->Error("Malformed 3rd word in the Status line");
/* A Status line marks the package as indicating the current
@@ -266,9 +271,67 @@ bool debListParser::ParseStatus(pkgCache::PkgIterator Pkg,
// ---------------------------------------------------------------------
/* This parses the dependency elements out of a standard string in place,
bit by bit. */
+const char *debListParser::ConvertRelation(const char *I,unsigned int &Op)
+{
+ // Determine the operator
+ switch (*I)
+ {
+ case '<':
+ I++;
+ if (*I == '=')
+ {
+ I++;
+ Op = pkgCache::Dep::LessEq;
+ break;
+ }
+
+ if (*I == '<')
+ {
+ I++;
+ Op = pkgCache::Dep::Less;
+ break;
+ }
+
+      // A bare < is treated as <=, and << is the strict less-than (C's <)
+ Op = pkgCache::Dep::LessEq;
+ break;
+
+ case '>':
+ I++;
+ if (*I == '=')
+ {
+ I++;
+ Op = pkgCache::Dep::GreaterEq;
+ break;
+ }
+
+ if (*I == '>')
+ {
+ I++;
+ Op = pkgCache::Dep::Greater;
+ break;
+ }
+
+      // A bare > is treated as >=, and >> is the strict greater-than (C's >)
+ Op = pkgCache::Dep::GreaterEq;
+ break;
+
+ case '=':
+ Op = pkgCache::Dep::Equals;
+ I++;
+ break;
+
+ // HACK around bad package definitions
+ default:
+ Op = pkgCache::Dep::Equals;
+ break;
+ }
+ return I;
+}
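+
+// Illustrative sketch of the mapping above (not part of the build):
+//    unsigned int Op;
+//    debListParser::ConvertRelation("<=",Op);   // Op == pkgCache::Dep::LessEq
+//    debListParser::ConvertRelation("<<",Op);   // Op == pkgCache::Dep::Less
+//    debListParser::ConvertRelation("<",Op);    // a bare < also yields LessEq
+//    debListParser::ConvertRelation("=",Op);    // Op == pkgCache::Dep::Equals
+// The returned pointer points just past any operator characters consumed.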
+
const char *debListParser::ParseDepends(const char *Start,const char *Stop,
string &Package,string &Ver,
- unsigned int &Op)
+ unsigned int &Op, bool ParseArchFlags)
{
// Strip off leading space
for (;Start != Stop && isspace(*Start) != 0; Start++);
@@ -298,60 +361,7 @@ const char *debListParser::ParseDepends(const char *Start,const char *Stop,
for (I++; I != Stop && isspace(*I) != 0 ; I++);
if (I + 3 >= Stop)
return 0;
-
- // Determine the operator
- switch (*I)
- {
- case '<':
- I++;
- if (*I == '=')
- {
- I++;
- Op = pkgCache::Dep::LessEq;
- break;
- }
-
- if (*I == '<')
- {
- I++;
- Op = pkgCache::Dep::Less;
- break;
- }
-
- // < is the same as <= and << is really Cs < for some reason
- Op = pkgCache::Dep::LessEq;
- break;
-
- case '>':
- I++;
- if (*I == '=')
- {
- I++;
- Op = pkgCache::Dep::GreaterEq;
- break;
- }
-
- if (*I == '>')
- {
- I++;
- Op = pkgCache::Dep::Greater;
- break;
- }
-
- // > is the same as >= and >> is really Cs > for some reason
- Op = pkgCache::Dep::GreaterEq;
- break;
-
- case '=':
- Op = pkgCache::Dep::Equals;
- I++;
- break;
-
- // HACK around bad package definitions
- default:
- Op = pkgCache::Dep::Equals;
- break;
- }
+ I = ConvertRelation(I,Op);
// Skip whitespace
for (;I != Stop && isspace(*I) != 0; I++);
@@ -375,6 +385,50 @@ const char *debListParser::ParseDepends(const char *Start,const char *Stop,
// Skip whitespace
for (;I != Stop && isspace(*I) != 0; I++);
+
+ if (ParseArchFlags == true)
+ {
+ string arch = _config->Find("APT::Architecture");
+
+ // Parse an architecture
+ if (I != Stop && *I == '[')
+ {
+ // malformed
+ I++;
+ if (I == Stop)
+ return 0;
+
+ const char *End = I;
+ bool Found = false;
+ while (I != Stop)
+ {
+ // look for whitespace or ending ']'
+ while (End != Stop && !isspace(*End) && *End != ']')
+ End++;
+
+ if (End == Stop)
+ return 0;
+
+ if (stringcmp(I,End,arch.begin(),arch.end()) == 0)
+ Found = true;
+
+ if (*End++ == ']') {
+ I = End;
+ break;
+ }
+
+ I = End;
+ for (;I != Stop && isspace(*I) != 0; I++);
+ }
+
+ if (Found == false)
+ Package = ""; /* not for this arch */
+ }
+
+ // Skip whitespace
+ for (;I != Stop && isspace(*I) != 0; I++);
+ }
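+   // A sketch of the effect of the block above (architecture and package
+   // names are hypothetical): with APT::Architecture set to "i386",
+   //    foo [i386 amd64]  -> kept, Package stays "foo"
+   //    foo [alpha]       -> Package is cleared to "", so the caller drops it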
+
if (I != Stop && *I == '|')
Op |= pkgCache::Dep::Or;
@@ -453,10 +507,9 @@ bool debListParser::ParseProvides(pkgCache::VerIterator Ver)
// ListParser::GrabWord - Matches a word and returns /*{{{*/
// ---------------------------------------------------------------------
/* Looks for a word in a list of words - for ParseStatus */
-bool debListParser::GrabWord(string Word,WordList *List,int Count,
- unsigned char &Out)
+bool debListParser::GrabWord(string Word,WordList *List,unsigned char &Out)
{
- for (int C = 0; C != Count; C++)
+ for (unsigned int C = 0; List[C].Str != 0; C++)
{
if (strcasecmp(Word.c_str(),List[C].Str) == 0)
{
@@ -500,7 +553,7 @@ bool debListParser::Step()
bool debListParser::LoadReleaseInfo(pkgCache::PkgFileIterator FileI,
FileFd &File)
{
- pkgTagFile Tags(File);
+ pkgTagFile Tags(&File);
pkgTagSection Section;
if (Tags.Step(Section) == false)
return false;
@@ -527,3 +580,15 @@ bool debListParser::LoadReleaseInfo(pkgCache::PkgFileIterator FileI,
return !_error->PendingError();
}
/*}}}*/
+// ListParser::GetPrio - Convert the priority from a string /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+unsigned char debListParser::GetPrio(string Str)
+{
+ unsigned char Out;
+ if (GrabWord(Str,PrioList,Out) == false)
+ Out = pkgCache::State::Extra;
+
+ return Out;
+}
+ /*}}}*/
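+// Minimal usage sketch (illustrative only):
+//    debListParser::GetPrio("optional") == pkgCache::State::Optional
+//    debListParser::GetPrio("unknown")  == pkgCache::State::Extra   (fallback)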
diff --git a/apt-pkg/deb/deblistparser.h b/apt-pkg/deb/deblistparser.h
index 6e2c5ef94..9f305211a 100644
--- a/apt-pkg/deb/deblistparser.h
+++ b/apt-pkg/deb/deblistparser.h
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: deblistparser.h,v 1.8 1999/07/26 17:46:08 jgg Exp $
+// $Id: deblistparser.h,v 1.9 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Debian Package List Parser - This implements the abstract parser
@@ -8,7 +8,6 @@
##################################################################### */
/*}}}*/
-// Header section: pkglib
#ifndef PKGLIB_DEBLISTPARSER_H
#define PKGLIB_DEBLISTPARSER_H
@@ -17,29 +16,33 @@
class debListParser : public pkgCacheGenerator::ListParser
{
- pkgTagFile Tags;
- pkgTagSection Section;
- unsigned long iOffset;
- string Arch;
-
+ public:
+
// Parser Helper
struct WordList
{
- char *Str;
+ const char *Str;
unsigned char Val;
};
+ private:
+
+ pkgTagFile Tags;
+ pkgTagSection Section;
+ unsigned long iOffset;
+ string Arch;
+
unsigned long UniqFindTagWrite(const char *Tag);
bool ParseStatus(pkgCache::PkgIterator Pkg,pkgCache::VerIterator Ver);
- const char *ParseDepends(const char *Start,const char *Stop,
- string &Package,string &Ver,unsigned int &Op);
bool ParseDepends(pkgCache::VerIterator Ver,const char *Tag,
unsigned int Type);
bool ParseProvides(pkgCache::VerIterator Ver);
- bool GrabWord(string Word,WordList *List,int Count,unsigned char &Out);
+ static bool GrabWord(string Word,WordList *List,unsigned char &Out);
public:
-
+
+ static unsigned char GetPrio(string Str);
+
// These all operate against the current section
virtual string Package();
virtual string Version();
@@ -51,10 +54,15 @@ class debListParser : public pkgCacheGenerator::ListParser
virtual unsigned long Size() {return Section.size();};
virtual bool Step();
-
+
bool LoadReleaseInfo(pkgCache::PkgFileIterator FileI,FileFd &File);
- debListParser(FileFd &File);
+ static const char *ParseDepends(const char *Start,const char *Stop,
+ string &Package,string &Ver,unsigned int &Op,
+ bool ParseArchFlags = false);
+ static const char *ConvertRelation(const char *I,unsigned int &Op);
+
+ debListParser(FileFd *File);
};
#endif
diff --git a/apt-pkg/deb/debrecords.cc b/apt-pkg/deb/debrecords.cc
index 49e3d02c8..0196992c6 100644
--- a/apt-pkg/deb/debrecords.cc
+++ b/apt-pkg/deb/debrecords.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: debrecords.cc,v 1.8 1999/05/18 05:28:03 jgg Exp $
+// $Id: debrecords.cc,v 1.9 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Debian Package Records - Parser for debian package records
@@ -18,8 +18,9 @@
// RecordParser::debRecordParser - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
-debRecordParser::debRecordParser(FileFd &File,pkgCache &Cache) :
- Tags(File,Cache.Head().MaxVerFileSize + 20)
+debRecordParser::debRecordParser(string FileName,pkgCache &Cache) :
+ File(FileName,FileFd::ReadOnly),
+ Tags(&File,Cache.Head().MaxVerFileSize + 200)
{
}
/*}}}*/
@@ -39,6 +40,14 @@ string debRecordParser::FileName()
return Section.FindS("Filename");
}
/*}}}*/
+// RecordParser::Name - Return the package name /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+string debRecordParser::Name()
+{
+ return Section.FindS("Package");
+}
+ /*}}}*/
// RecordParser::MD5Hash - Return the archive hash /*{{{*/
// ---------------------------------------------------------------------
/* */
@@ -87,3 +96,11 @@ string debRecordParser::SourcePkg()
return string(Res,0,Pos);
}
/*}}}*/
+// RecordParser::GetRec - Return the whole record /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+void debRecordParser::GetRec(const char *&Start,const char *&Stop)
+{
+ Section.GetSection(Start,Stop);
+}
+ /*}}}*/
diff --git a/apt-pkg/deb/debrecords.h b/apt-pkg/deb/debrecords.h
index 9191ebcda..fd1c380dc 100644
--- a/apt-pkg/deb/debrecords.h
+++ b/apt-pkg/deb/debrecords.h
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: debrecords.h,v 1.6 1999/04/07 05:30:18 jgg Exp $
+// $Id: debrecords.h,v 1.7 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Debian Package Records - Parser for debian package records
@@ -11,7 +11,6 @@
##################################################################### */
/*}}}*/
-// Header section: pkglib
#ifndef PKGLIB_DEBRECORDS_H
#define PKGLIB_DEBRECORDS_H
@@ -24,9 +23,10 @@
class debRecordParser : public pkgRecords::Parser
{
+ FileFd File;
pkgTagFile Tags;
pkgTagSection Section;
-
+
protected:
virtual bool Jump(pkgCache::VerFileIterator const &Ver);
@@ -42,9 +42,11 @@ class debRecordParser : public pkgRecords::Parser
virtual string Maintainer();
virtual string ShortDesc();
virtual string LongDesc();
+ virtual string Name();
+
+ virtual void GetRec(const char *&Start,const char *&Stop);
- debRecordParser(FileFd &File,pkgCache &Cache);
+ debRecordParser(string FileName,pkgCache &Cache);
};
-
#endif
diff --git a/apt-pkg/deb/debsrcrecords.cc b/apt-pkg/deb/debsrcrecords.cc
index 7a06e30b9..d452095cd 100644
--- a/apt-pkg/deb/debsrcrecords.cc
+++ b/apt-pkg/deb/debsrcrecords.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: debsrcrecords.cc,v 1.3 1999/04/07 05:30:18 jgg Exp $
+// $Id: debsrcrecords.cc,v 1.4 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Debian Source Package Records - Parser implementation for Debian style
@@ -13,9 +13,11 @@
#pragma implementation "apt-pkg/debsrcrecords.h"
#endif
+#include <apt-pkg/deblistparser.h>
#include <apt-pkg/debsrcrecords.h>
#include <apt-pkg/error.h>
#include <apt-pkg/strutl.h>
+#include <apt-pkg/configuration.h>
/*}}}*/
// SrcRecordParser::Binaries - Return the binaries field /*{{{*/
@@ -23,43 +25,63 @@
/* This member parses the binaries field into a pair of class arrays and
returns a list of strings representing all of the components of the
binaries field. The returned array need not be freed and will be
- reused by the next Binaries function call. */
+ reused by the next Binaries function call. This function is commonly
+   used during scanning to find the right package. */
const char **debSrcRecordParser::Binaries()
{
+   // This should use Start/Stop too; it is supposed to be efficient, after all.
string Bins = Sect.FindS("Binary");
- char *Buf = Buffer;
- unsigned int Bin = 0;
- if (Bins.empty() == true)
+ if (Bins.empty() == true || Bins.length() >= sizeof(Buffer))
return 0;
- // Strip any leading spaces
- string::const_iterator Start = Bins.begin();
- for (; Start != Bins.end() && isspace(*Start) != 0; Start++);
+ strcpy(Buffer,Bins.c_str());
+ if (TokSplitString(',',Buffer,StaticBinList,
+ sizeof(StaticBinList)/sizeof(StaticBinList[0])) == false)
+ return 0;
+ return (const char **)StaticBinList;
+}
+ /*}}}*/
+// SrcRecordParser::BuildDepends - Return the Build-Depends information /*{{{*/
+// ---------------------------------------------------------------------
+/* This member parses the build-depends information and fills the supplied
+   vector with package/version records representing the build dependencies.
+   The vector is cleared and refilled on each call. */
+bool debSrcRecordParser::BuildDepends(vector<pkgSrcRecords::Parser::BuildDepRec> &BuildDeps)
+{
+ unsigned int I;
+ const char *Start, *Stop;
+ BuildDepRec rec;
+ const char *fields[] = {"Build-Depends",
+ "Build-Depends-Indep",
+ "Build-Conflicts",
+ "Build-Conflicts-Indep"};
+
+ BuildDeps.clear();
- string::const_iterator Pos = Start;
- while (Pos != Bins.end())
+ for (I = 0; I < 4; I++)
{
- // Skip to the next ','
- for (; Pos != Bins.end() && *Pos != ','; Pos++);
+ if (Sect.Find(fields[I], Start, Stop) == false)
+ continue;
- // Back remove spaces
- string::const_iterator End = Pos;
- for (; End > Start && (End[-1] == ',' || isspace(End[-1]) != 0); End--);
-
- // Stash the string
- memcpy(Buf,Start,End-Start);
- StaticBinList[Bin] = Buf;
- Bin++;
- Buf += End-Start;
- *Buf++ = 0;
-
- // Advance pos
- for (; Pos != Bins.end() && (*Pos == ',' || isspace(*Pos) != 0); Pos++);
- Start = Pos;
+ while (1)
+ {
+ Start = debListParser::ParseDepends(Start, Stop,
+ rec.Package,rec.Version,rec.Op,true);
+
+ if (Start == 0)
+ return _error->Error("Problem parsing dependency: %s", fields[I]);
+ rec.Type = I;
+
+ if (rec.Package != "")
+ BuildDeps.push_back(rec);
+
+ if (Start == Stop)
+ break;
+ }
}
- StaticBinList[Bin] = 0;
- return StaticBinList;
+ return true;
}
/*}}}*/
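+// Sketch of the result (field contents are hypothetical): a source record with
+//    Build-Depends: debhelper (>= 2.0.40), gettext
+// yields two BuildDepRec entries, both with Type 0. Type is simply the index
+// into the fields[] array above (0 = Build-Depends, 1 = Build-Depends-Indep,
+// 2 = Build-Conflicts, 3 = Build-Conflicts-Indep).
+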
// SrcRecordParser::Files - Return a list of files for this source /*{{{*/
@@ -95,6 +117,25 @@ bool debSrcRecordParser::Files(vector<pkgSrcRecords::File> &List)
// Parse the size and append the directory
F.Size = atoi(Size.c_str());
F.Path = Base + F.Path;
+
+      // Try to guess what sort of file we are getting.
+ string::size_type Pos = F.Path.length()-1;
+ while (1)
+ {
+ string::size_type Tmp = F.Path.rfind('.',Pos);
+ if (Tmp == string::npos)
+ break;
+ F.Type = string(F.Path,Tmp+1,Pos-Tmp);
+
+ if (F.Type == "gz" || F.Type == "bz2")
+ {
+ Pos = Tmp-1;
+ continue;
+ }
+
+ break;
+ }
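+      // Rough examples of the guess (file names are hypothetical):
+      //    foo_1.0-1.dsc       -> Type "dsc"
+      //    foo_1.0.orig.tar.gz -> Type "tar"   (the gz/bz2 suffix is skipped)
+      //    foo_1.0-1.diff.gz   -> Type "diff"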
+
List.push_back(F);
}
diff --git a/apt-pkg/deb/debsrcrecords.h b/apt-pkg/deb/debsrcrecords.h
index 50488d4b6..477fe4fe3 100644
--- a/apt-pkg/deb/debsrcrecords.h
+++ b/apt-pkg/deb/debsrcrecords.h
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: debsrcrecords.h,v 1.5 1999/10/18 04:15:25 jgg Exp $
+// $Id: debsrcrecords.h,v 1.6 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Debian Source Package Records - Parser implementation for Debian style
@@ -17,13 +17,15 @@
#include <apt-pkg/srcrecords.h>
#include <apt-pkg/tagfile.h>
+#include <apt-pkg/fileutl.h>
class debSrcRecordParser : public pkgSrcRecords::Parser
{
+ FileFd Fd;
pkgTagFile Tags;
pkgTagSection Sect;
char Buffer[10000];
- const char *StaticBinList[400];
+ char *StaticBinList[400];
unsigned long iOffset;
public:
@@ -32,11 +34,12 @@ class debSrcRecordParser : public pkgSrcRecords::Parser
virtual bool Step() {iOffset = Tags.Offset(); return Tags.Step(Sect);};
virtual bool Jump(unsigned long Off) {iOffset = Off; return Tags.Jump(Sect,Off);};
- virtual string Package() {return Sect.FindS("Package");};
- virtual string Version() {return Sect.FindS("Version");};
- virtual string Maintainer() {return Sect.FindS("Maintainer");};
- virtual string Section() {return Sect.FindS("Section");};
+ virtual string Package() const {return Sect.FindS("Package");};
+ virtual string Version() const {return Sect.FindS("Version");};
+ virtual string Maintainer() const {return Sect.FindS("Maintainer");};
+ virtual string Section() const {return Sect.FindS("Section");};
virtual const char **Binaries();
+ virtual bool BuildDepends(vector<BuildDepRec> &BuildDeps);
virtual unsigned long Offset() {return iOffset;};
virtual string AsStr()
{
@@ -45,10 +48,11 @@ class debSrcRecordParser : public pkgSrcRecords::Parser
return string(Start,Stop);
};
virtual bool Files(vector<pkgSrcRecords::File> &F);
-
- debSrcRecordParser(FileFd *File,pkgSourceList::const_iterator SrcItem) :
- Parser(File,SrcItem),
- Tags(*File,sizeof(Buffer)) {};
+
+ debSrcRecordParser(string File,pkgIndexFile const *Index) :
+ Parser(Index),
+ Fd(File,FileFd::ReadOnly),
+ Tags(&Fd,sizeof(Buffer)) {};
};
#endif
diff --git a/apt-pkg/deb/debsystem.cc b/apt-pkg/deb/debsystem.cc
new file mode 100644
index 000000000..0abd4c8aa
--- /dev/null
+++ b/apt-pkg/deb/debsystem.cc
@@ -0,0 +1,197 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: debsystem.cc,v 1.2 2001/02/20 07:03:17 jgg Exp $
+/* ######################################################################
+
+ System - Abstraction for running on different systems.
+
+ Basic general structure..
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#ifdef __GNUG__
+#pragma implementation "apt-pkg/debsystem.h"
+#endif
+
+#include <apt-pkg/debsystem.h>
+#include <apt-pkg/debversion.h>
+#include <apt-pkg/debindexfile.h>
+#include <apt-pkg/dpkgpm.h>
+#include <apt-pkg/configuration.h>
+#include <apt-pkg/error.h>
+#include <apt-pkg/fileutl.h>
+
+#include <sys/types.h>
+#include <unistd.h>
+#include <dirent.h>
+#include <errno.h>
+ /*}}}*/
+
+debSystem debSys;
+
+// System::debSystem - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+debSystem::debSystem()
+{
+ LockFD = -1;
+ LockCount = 0;
+
+ Label = "Debian dpkg interface";
+ VS = &debVS;
+}
+ /*}}}*/
+// System::Lock - Get the lock /*{{{*/
+// ---------------------------------------------------------------------
+/* This mirrors the operations dpkg does when it starts up. Note the
+ checking of the updates directory. */
+bool debSystem::Lock()
+{
+ // Disable file locking
+ if (_config->FindB("Debug::NoLocking",false) == true || LockCount > 1)
+ {
+ LockCount++;
+ return true;
+ }
+
+ // Create the lockfile
+ string AdminDir = flNotFile(_config->Find("Dir::State::status"));
+ LockFD = GetLock(AdminDir + "lock");
+ if (LockFD == -1)
+ {
+ if (errno == EACCES || errno == EAGAIN)
+ return _error->Error("Unable to lock the administration directory (%s), "
+ "is another process using it?",AdminDir.c_str());
+ else
+ return _error->Error("Unable to lock the administration directory (%s), "
+ "are you root?",AdminDir.c_str());
+ }
+
+ // See if we need to abort with a dirty journal
+ if (CheckUpdates() == true)
+ {
+ close(LockFD);
+ LockFD = -1;
+ return _error->Error("dpkg was interrupted, you must manually "
+ "run 'dpkg --configure -a' to correct the problem. ");
+ }
+
+ LockCount++;
+
+ return true;
+}
+ /*}}}*/
+// System::UnLock - Drop a lock /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool debSystem::UnLock(bool NoErrors)
+{
+ if (LockCount == 0 && NoErrors == true)
+ return false;
+
+ if (LockCount < 1)
+ return _error->Error("Not locked");
+ if (--LockCount == 0)
+ {
+ close(LockFD);
+ LockCount = 0;
+ }
+
+ return true;
+}
+ /*}}}*/
+// System::CheckUpdates - Check if the updates dir is dirty /*{{{*/
+// ---------------------------------------------------------------------
+/* This does a check of the updates directory (dpkg journal) to see if it has
+ any entries in it. */
+bool debSystem::CheckUpdates()
+{
+ // Check for updates.. (dirty)
+ string File = flNotFile(_config->Find("Dir::State::status")) + "updates/";
+ DIR *DirP = opendir(File.c_str());
+ if (DirP == 0)
+ return false;
+
+   /* We ignore any files whose names are not all digits; this skips '.', '..'
+      and the temporary files dpkg leaves behind. */
+ bool Damaged = false;
+ for (struct dirent *Ent = readdir(DirP); Ent != 0; Ent = readdir(DirP))
+ {
+ Damaged = true;
+ for (unsigned int I = 0; Ent->d_name[I] != 0; I++)
+ {
+         // Check if it's not a digit
+ if (isdigit(Ent->d_name[I]) == 0)
+ {
+ Damaged = false;
+ break;
+ }
+ }
+ if (Damaged == true)
+ break;
+ }
+ closedir(DirP);
+
+ return Damaged;
+}
+ /*}}}*/
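+// For example: a leftover journal entry such as /var/lib/dpkg/updates/0000
+// (an all-digit name) makes this return true, while '.', '..' and any
+// non-numeric temporary files dpkg leaves behind are ignored.
+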
+// System::CreatePM - Create the underlying package manager /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+pkgPackageManager *debSystem::CreatePM(pkgDepCache *Cache) const
+{
+ return new pkgDPkgPM(Cache);
+}
+ /*}}}*/
+// System::Initialize - Setup the configuration space.. /*{{{*/
+// ---------------------------------------------------------------------
+/* These are the Debian specific configuration variables.. */
+bool debSystem::Initialize(Configuration &Cnf)
+{
+ /* These really should be jammed into a generic 'Local Database' engine
+ which is yet to be determined. The functions in pkgcachegen should
+ be the only users of these */
+ Cnf.CndSet("Dir::State::userstatus","status.user"); // Defunct
+ Cnf.CndSet("Dir::State::status","/var/lib/dpkg/status");
+ Cnf.CndSet("Dir::Bin::dpkg","/usr/bin/dpkg");
+
+ return true;
+}
+ /*}}}*/
+// System::ArchiveSupported - Is a file format supported /*{{{*/
+// ---------------------------------------------------------------------
+/* The standard name for a deb is 'deb'. There are no separate versions
+ of .deb to worry about.. */
+bool debSystem::ArchiveSupported(const char *Type)
+{
+ if (strcmp(Type,"deb") == 0)
+ return true;
+ return false;
+}
+ /*}}}*/
+// System::Score - Determine how 'Debiany' this sys is.. /*{{{*/
+// ---------------------------------------------------------------------
+/* We check for some files that are tell-tale signs that this is a Debian
+   system. */
+signed debSystem::Score(Configuration const &Cnf)
+{
+ signed Score = 0;
+ if (FileExists(Cnf.FindFile("Dir::State::status","/var/lib/dpkg/status")) == true)
+ Score += 10;
+ if (FileExists(Cnf.FindFile("Dir::Bin::dpkg","/usr/bin/dpkg")) == true)
+ Score += 10;
+ if (FileExists("/etc/debian_version") == true)
+ Score += 10;
+ return Score;
+}
+ /*}}}*/
+// System::AddStatusFiles - Register the status files /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool debSystem::AddStatusFiles(vector<pkgIndexFile *> &List)
+{
+ List.push_back(new debStatusIndex(_config->FindFile("Dir::State::status")));
+ return true;
+}
+ /*}}}*/
diff --git a/apt-pkg/deb/debsystem.h b/apt-pkg/deb/debsystem.h
new file mode 100644
index 000000000..4fd267f77
--- /dev/null
+++ b/apt-pkg/deb/debsystem.h
@@ -0,0 +1,41 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: debsystem.h,v 1.2 2001/02/20 07:03:17 jgg Exp $
+/* ######################################################################
+
+ System - Debian version of the System Class
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef PKGLIB_DEBSYSTEM_H
+#define PKGLIB_DEBSYSTEM_H
+
+#ifdef __GNUG__
+#pragma interface "apt-pkg/debsystem.h"
+#endif
+
+#include <apt-pkg/pkgsystem.h>
+
+class debSystem : public pkgSystem
+{
+ // For locking support
+ int LockFD;
+ unsigned LockCount;
+ bool CheckUpdates();
+
+ public:
+
+ virtual bool Lock();
+ virtual bool UnLock(bool NoErrors = false);
+ virtual pkgPackageManager *CreatePM(pkgDepCache *Cache) const;
+ virtual bool Initialize(Configuration &Cnf);
+ virtual bool ArchiveSupported(const char *Type);
+ virtual signed Score(Configuration const &Cnf);
+ virtual bool AddStatusFiles(vector<pkgIndexFile *> &List);
+
+ debSystem();
+};
+
+extern debSystem debSys;
+
+#endif
diff --git a/apt-pkg/deb/debversion.cc b/apt-pkg/deb/debversion.cc
new file mode 100644
index 000000000..e7c42fd11
--- /dev/null
+++ b/apt-pkg/deb/debversion.cc
@@ -0,0 +1,266 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: debversion.cc,v 1.2 2001/02/20 07:03:17 jgg Exp $
+/* ######################################################################
+
+ Debian Version - Versioning system for Debian
+
+ This implements the standard Debian versioning system.
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#define APT_COMPATIBILITY 986
+#ifdef __GNUG__
+#pragma implementation "apt-pkg/debversion.h"
+#endif
+
+#include <apt-pkg/debversion.h>
+#include <apt-pkg/pkgcache.h>
+
+#include <stdlib.h>
+ /*}}}*/
+
+debVersioningSystem debVS;
+
+// debVS::debVersioningSystem - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+debVersioningSystem::debVersioningSystem()
+{
+ Label = "Standard .deb";
+}
+ /*}}}*/
+// StrToLong - Convert the string between two iterators to a long /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+static unsigned long StrToLong(const char *begin,const char *end)
+{
+ char S[40];
+ char *I = S;
+   // Stop one short of the buffer so the terminating NUL stays in bounds
+   for (; begin != end && I < S + sizeof(S) - 1;)
+ *I++ = *begin++;
+ *I = 0;
+ return strtoul(S,0,10);
+}
+ /*}}}*/
+// debVS::CmpFragment - Compare versions /*{{{*/
+// ---------------------------------------------------------------------
+/* This compares a fragment of the version. Dpkg has a really short
+ version of this, but it is uh.. interesting to grok. */
+int debVersioningSystem::CmpFragment(const char *A,const char *AEnd,
+ const char *B,const char *BEnd)
+{
+ if (A >= AEnd && B >= BEnd)
+ return 0;
+ if (A >= AEnd)
+ return -1;
+ if (B >= BEnd)
+ return 1;
+
+ /* Iterate over the whole string
+      What this does is to split the whole string into groups of
+ numeric and non numeric portions. For instance:
+ a67bhgs89
+ Has 4 portions 'a', '67', 'bhgs', '89'. A more normal:
+ 2.7.2-linux-1
+ Has '2', '.', '7', '.' ,'-linux-','1' */
+ const char *lhs = A;
+ const char *rhs = B;
+ while (lhs != AEnd && rhs != BEnd)
+ {
+ // Starting points
+ const char *Slhs = lhs;
+ const char *Srhs = rhs;
+
+      // Compute ending points where we have passed over the portion
+ bool Digit = (isdigit(*lhs) > 0?true:false);
+ for (;lhs != AEnd && (isdigit(*lhs) > 0?true:false) == Digit; lhs++);
+ for (;rhs != BEnd && (isdigit(*rhs) > 0?true:false) == Digit; rhs++);
+
+ if (Digit == true)
+ {
+ // If the lhs has a digit and the rhs does not then <
+ if (rhs - Srhs == 0)
+ return -1;
+
+ // Generate integers from the strings.
+ unsigned long Ilhs = StrToLong(Slhs,lhs);
+ unsigned long Irhs = StrToLong(Srhs,rhs);
+ if (Ilhs != Irhs)
+ {
+ if (Ilhs > Irhs)
+ return 1;
+ return -1;
+ }
+ }
+ else
+ {
+ // They are equal length so do a straight text compare
+ for (;Slhs != lhs && Srhs != rhs; Slhs++, Srhs++)
+ {
+ if (*Slhs != *Srhs)
+ {
+ /* We need to compare non alpha chars as higher than alpha
+ chars (a < !) */
+ int lc = *Slhs;
+ int rc = *Srhs;
+ if (isalpha(lc) == 0) lc += 256;
+ if (isalpha(rc) == 0) rc += 256;
+ if (lc > rc)
+ return 1;
+ return -1;
+ }
+ }
+
+ // If the lhs is shorter than the right it is 'less'
+ if (lhs - Slhs < rhs - Srhs)
+ return -1;
+
+ // If the lhs is longer than the right it is 'more'
+ if (lhs - Slhs > rhs - Srhs)
+ return 1;
+ }
+ }
+
+ // The strings must be equal
+ if (lhs == AEnd && rhs == BEnd)
+ return 0;
+
+ // lhs is shorter
+ if (lhs == AEnd)
+ return -1;
+
+ // rhs is shorter
+ if (rhs == BEnd)
+ return 1;
+
+   // Shouldn't happen
+ return 1;
+}
+ /*}}}*/
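+// Worked example (illustrative): comparing "2.7.2" with "2.12.1" splits the
+// strings into '2' '.' '7' '.' '2' and '2' '.' '12' '.' '1'; digit groups are
+// compared numerically, so 7 < 12 and the result is negative. A digit group
+// facing a non-digit group sorts lower, and in the text comparison
+// non-alphabetic characters sort above alphabetic ones.
+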
+// debVS::CmpVersion - Comparison for versions /*{{{*/
+// ---------------------------------------------------------------------
+/* This fragments the version into E:V-R triples and compares each
+ portion separately. */
+int debVersioningSystem::DoCmpVersion(const char *A,const char *AEnd,
+ const char *B,const char *BEnd)
+{
+ // Strip off the epoch and compare it
+ const char *lhs = A;
+ const char *rhs = B;
+ for (;lhs != AEnd && *lhs != ':'; lhs++);
+ for (;rhs != BEnd && *rhs != ':'; rhs++);
+ if (lhs == AEnd)
+ lhs = A;
+ if (rhs == BEnd)
+ rhs = B;
+
+ // Compare the epoch
+ int Res = CmpFragment(A,lhs,B,rhs);
+ if (Res != 0)
+ return Res;
+
+ // Skip the :
+ if (lhs != A)
+ lhs++;
+ if (rhs != B)
+ rhs++;
+
+ // Find the last -
+ const char *dlhs = AEnd-1;
+ const char *drhs = BEnd-1;
+ for (;dlhs > lhs && *dlhs != '-'; dlhs--);
+ for (;drhs > rhs && *drhs != '-'; drhs--);
+
+ if (dlhs == lhs)
+ dlhs = AEnd;
+ if (drhs == rhs)
+ drhs = BEnd;
+
+ // Compare the main version
+ Res = CmpFragment(lhs,dlhs,rhs,drhs);
+ if (Res != 0)
+ return Res;
+
+ // Skip the -
+ if (dlhs != lhs)
+ dlhs++;
+ if (drhs != rhs)
+ drhs++;
+
+ return CmpFragment(dlhs,AEnd,drhs,BEnd);
+}
+ /*}}}*/
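+// Worked example (version strings are hypothetical): comparing "1:2.2-1" with
+// "2.3-5" compares epochs first, and "1" against the empty epoch wins, so the
+// first version is greater regardless of the rest. Within "2.7.2-linux-1" the
+// revision is everything after the last '-', giving upstream "2.7.2-linux"
+// and revision "1".
+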
+// debVS::CheckDep - Check a single dependency /*{{{*/
+// ---------------------------------------------------------------------
+/* This simply performs the version comparison and switches based on the
+ operator. If DepVer is 0 then we are comparing against a provides
+ with no version. */
+bool debVersioningSystem::CheckDep(const char *PkgVer,
+ int Op,const char *DepVer)
+{
+ if (DepVer == 0 || DepVer[0] == 0)
+ return true;
+ if (PkgVer == 0 || PkgVer[0] == 0)
+ return false;
+
+   // Perform the actual comparison.
+ int Res = CmpVersion(PkgVer,DepVer);
+ switch (Op & 0x0F)
+ {
+ case pkgCache::Dep::LessEq:
+ if (Res <= 0)
+ return true;
+ break;
+
+ case pkgCache::Dep::GreaterEq:
+ if (Res >= 0)
+ return true;
+ break;
+
+ case pkgCache::Dep::Less:
+ if (Res < 0)
+ return true;
+ break;
+
+ case pkgCache::Dep::Greater:
+ if (Res > 0)
+ return true;
+ break;
+
+ case pkgCache::Dep::Equals:
+ if (Res == 0)
+ return true;
+ break;
+
+ case pkgCache::Dep::NotEquals:
+ if (Res != 0)
+ return true;
+ break;
+ }
+
+ return false;
+}
+ /*}}}*/
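+// Minimal usage sketch (values hypothetical):
+//    debVS.CheckDep("2.2-1",pkgCache::Dep::GreaterEq,"2.0")   // true, 2.2-1 >= 2.0
+//    debVS.CheckDep("1.0",pkgCache::Dep::Less,"1.0")          // false
+// An empty DepVer always satisfies, matching an unversioned dependency.
+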
+// debVS::UpstreamVersion - Return the upstream version string /*{{{*/
+// ---------------------------------------------------------------------
+/* This strips all the debian specific information from the version number */
+string debVersioningSystem::UpstreamVersion(const char *Ver)
+{
+ // Strip off the bit before the first colon
+ const char *I = Ver;
+ for (; *I != 0 && *I != ':'; I++);
+ if (*I == ':')
+ Ver = I + 1;
+
+   // Chop off the Debian revision (everything after the last '-')
+ I = Ver;
+ unsigned Last = strlen(Ver);
+ for (; *I != 0; I++)
+ if (*I == '-')
+ Last = I - Ver;
+
+ return string(Ver,Last);
+}
+ /*}}}*/
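+// For example: debVS.UpstreamVersion("1:2.7.2-2") yields "2.7.2"; the epoch up
+// to the first ':' and everything after the last '-' are removed.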
diff --git a/apt-pkg/deb/debversion.h b/apt-pkg/deb/debversion.h
new file mode 100644
index 000000000..d313f78a6
--- /dev/null
+++ b/apt-pkg/deb/debversion.h
@@ -0,0 +1,72 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: debversion.h,v 1.2 2001/02/20 07:03:17 jgg Exp $
+/* ######################################################################
+
+ Debian Version - Versioning system for Debian
+
+ This implements the standard Debian versioning system.
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef PKGLIB_DEBVERSION_H
+#define PKGLIB_DEBVERSION_H
+
+#ifdef __GNUG__
+#pragma interface "apt-pkg/debversion.h"
+#endif
+
+#include <apt-pkg/version.h>
+
+class debVersioningSystem : public pkgVersioningSystem
+{
+ static int CmpFragment(const char *A, const char *AEnd, const char *B,
+ const char *BEnd);
+
+ public:
+
+ // Compare versions..
+ virtual int DoCmpVersion(const char *A,const char *Aend,
+ const char *B,const char *Bend);
+ virtual bool CheckDep(const char *PkgVer,int Op,const char *DepVer);
+ virtual int DoCmpReleaseVer(const char *A,const char *Aend,
+ const char *B,const char *Bend)
+ {
+ return DoCmpVersion(A,Aend,B,Bend);
+ }
+ virtual string UpstreamVersion(const char *A);
+
+ debVersioningSystem();
+};
+
+extern debVersioningSystem debVS;
+
+#ifdef APT_COMPATIBILITY
+#if APT_COMPATIBILITY != 986
+#warning "Using APT_COMPATIBILITY"
+#endif
+
+inline int pkgVersionCompare(const char *A, const char *B)
+{
+ return debVS.CmpVersion(A,B);
+}
+inline int pkgVersionCompare(const char *A, const char *AEnd,
+ const char *B, const char *BEnd)
+{
+ return debVS.DoCmpVersion(A,AEnd,B,BEnd);
+}
+inline int pkgVersionCompare(string A,string B)
+{
+ return debVS.CmpVersion(A,B);
+}
+inline bool pkgCheckDep(const char *DepVer,const char *PkgVer,int Op)
+{
+ return debVS.CheckDep(PkgVer,Op,DepVer);
+}
+inline string pkgBaseVersion(const char *Ver)
+{
+ return debVS.UpstreamVersion(Ver);
+}
+#endif
+
+#endif
diff --git a/apt-pkg/deb/dpkginit.cc b/apt-pkg/deb/dpkginit.cc
deleted file mode 100644
index 576e1967a..000000000
--- a/apt-pkg/deb/dpkginit.cc
+++ /dev/null
@@ -1,119 +0,0 @@
-// -*- mode: cpp; mode: fold -*-
-// Description /*{{{*/
-// $Id: dpkginit.cc,v 1.5 1999/08/03 05:21:19 jgg Exp $
-/* ######################################################################
-
- DPKG init - Initialize the dpkg stuff
-
- This class provides the locking mechanism used by dpkg for its
- database area. It does the proper consistency checks and acquires the
- correct kind of lock.
-
- ##################################################################### */
- /*}}}*/
-// Includes /*{{{*/
-#ifdef __GNUG__
-#pragma implementation "apt-pkg/dpkginit.h"
-#endif
-#include <apt-pkg/dpkginit.h>
-#include <apt-pkg/error.h>
-#include <apt-pkg/configuration.h>
-#include <apt-pkg/fileutl.h>
-
-#include <sys/types.h>
-#include <unistd.h>
-#include <dirent.h>
- /*}}}*/
-
-// DpkgLock::pkgDpkgLock - Constructor /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-pkgDpkgLock::pkgDpkgLock(bool WithUpdates)
-{
- LockFD = -1;
- GetLock(WithUpdates);
-}
- /*}}}*/
-// DpkgLock::~pkgDpkgLock - Destructor /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-pkgDpkgLock::~pkgDpkgLock()
-{
- Close();
-}
- /*}}}*/
-// DpkgLock::GetLock - Get the lock /*{{{*/
-// ---------------------------------------------------------------------
-/* This mirrors the operations dpkg does when it starts up. Note the
- checking of the updates directory. */
-bool pkgDpkgLock::GetLock(bool WithUpdates)
-{
- // Disable file locking
- if (_config->FindB("Debug::NoLocking",false) == true)
- return true;
-
- Close();
-
- // Create the lockfile
- string AdminDir = flNotFile(_config->Find("Dir::State::status"));
- LockFD = ::GetLock(AdminDir + "lock");
- if (LockFD == -1)
- return _error->Error("Unable to lock the administration directory, "
- "are you root?");
-
- // See if we need to abort with a dirty journal
- if (WithUpdates == true && CheckUpdates() == true)
- {
- Close();
- return _error->Error("dpkg was interrupted, you must manually "
- "run 'dpkg --configure -a' to correct the problem. ");
- }
-
- return true;
-}
- /*}}}*/
-// DpkgLock::Close - Close the lock /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-void pkgDpkgLock::Close()
-{
- close(LockFD);
- LockFD = -1;
-}
- /*}}}*/
-// DpkgLock::CheckUpdates - Check if the updates dir is dirty /*{{{*/
-// ---------------------------------------------------------------------
-/* This does a check of the updates directory to see if it has any entries
- in it. */
-bool pkgDpkgLock::CheckUpdates()
-{
- // Check for updates.. (dirty)
- string File = flNotFile(_config->Find("Dir::State::status")) + "updates/";
- DIR *DirP = opendir(File.c_str());
- if (DirP == 0)
- return false;
-
- /* We ignore any files that are not all digits, this skips .,.. and
- some tmp files dpkg will leave behind.. */
- bool Damaged = false;
- for (struct dirent *Ent = readdir(DirP); Ent != 0; Ent = readdir(DirP))
- {
- Damaged = true;
- for (unsigned int I = 0; Ent->d_name[I] != 0; I++)
- {
- // Check if its not a digit..
- if (isdigit(Ent->d_name[I]) == 0)
- {
- Damaged = false;
- break;
- }
- }
- if (Damaged == true)
- break;
- }
- closedir(DirP);
-
- return Damaged;
-}
- /*}}}*/
-
diff --git a/apt-pkg/deb/dpkginit.h b/apt-pkg/deb/dpkginit.h
deleted file mode 100644
index 532ff6236..000000000
--- a/apt-pkg/deb/dpkginit.h
+++ /dev/null
@@ -1,34 +0,0 @@
-// -*- mode: cpp; mode: fold -*-
-// Description /*{{{*/
-// $Id: dpkginit.h,v 1.2 1999/07/26 17:46:08 jgg Exp $
-/* ######################################################################
-
- DPKG init - Initialize the dpkg stuff
-
- This basically gets a lock in /var/lib/dpkg and checks the updates
- directory
-
- ##################################################################### */
- /*}}}*/
-#ifndef PKGLIB_DPKGINIT_H
-#define PKGLIB_DPKGINIT_H
-
-#ifdef __GNUG__
-#pragma interface "apt-pkg/dpkginit.h"
-#endif
-
-class pkgDpkgLock
-{
- int LockFD;
-
- public:
-
- bool CheckUpdates();
- bool GetLock(bool WithUpdates);
- void Close();
-
- pkgDpkgLock(bool WithUpdates = true);
- ~pkgDpkgLock();
-};
-
-#endif
diff --git a/apt-pkg/deb/dpkgpm.cc b/apt-pkg/deb/dpkgpm.cc
index 09cf20440..34c19ef4b 100644
--- a/apt-pkg/deb/dpkgpm.cc
+++ b/apt-pkg/deb/dpkgpm.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: dpkgpm.cc,v 1.17 2000/05/13 01:52:38 jgg Exp $
+// $Id: dpkgpm.cc,v 1.18 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
DPKG Package Manager - Provide an interface to dpkg
@@ -14,7 +14,9 @@
#include <apt-pkg/dpkgpm.h>
#include <apt-pkg/error.h>
#include <apt-pkg/configuration.h>
-
+#include <apt-pkg/depcache.h>
+#include <apt-pkg/strutl.h>
+
#include <unistd.h>
#include <stdlib.h>
#include <fcntl.h>
@@ -28,7 +30,7 @@
// DPkgPM::pkgDPkgPM - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
-pkgDPkgPM::pkgDPkgPM(pkgDepCache &Cache) : pkgPackageManager(Cache)
+pkgDPkgPM::pkgDPkgPM(pkgDepCache *Cache) : pkgPackageManager(Cache)
{
}
/*}}}*/
@@ -141,6 +143,90 @@ bool pkgDPkgPM::RunScripts(const char *Cnf)
}
/*}}}*/
+// DPkgPM::SendV2Pkgs - Send version 2 package info /*{{{*/
+// ---------------------------------------------------------------------
+/* This is part of the helper script communication interface, it sends
+ very complete information down to the other end of the pipe.*/
+bool pkgDPkgPM::SendV2Pkgs(FILE *F)
+{
+ fprintf(F,"VERSION 2\n");
+
+ /* Write out all of the configuration directives by walking the
+ configuration tree */
+ const Configuration::Item *Top = _config->Tree(0);
+ for (; Top != 0;)
+ {
+ if (Top->Value.empty() == false)
+ {
+ fprintf(F,"%s=%s\n",
+ QuoteString(Top->FullTag(),"=\"\n").c_str(),
+ QuoteString(Top->Value,"\n").c_str());
+ }
+
+ if (Top->Child != 0)
+ {
+ Top = Top->Child;
+ continue;
+ }
+
+ while (Top != 0 && Top->Next == 0)
+ Top = Top->Parent;
+ if (Top != 0)
+ Top = Top->Next;
+ }
+ fprintf(F,"\n");
+
+ // Write out the package actions in order.
+ for (vector<Item>::iterator I = List.begin(); I != List.end(); I++)
+ {
+ pkgDepCache::StateCache &S = Cache[I->Pkg];
+
+ fprintf(F,"%s ",I->Pkg.Name());
+ // Current version
+ if (I->Pkg->CurrentVer == 0)
+ fprintf(F,"- ");
+ else
+ fprintf(F,"%s ",I->Pkg.CurrentVer().VerStr());
+
+ // Show the compare operator
+ // Target version
+ if (S.InstallVer != 0)
+ {
+ int Comp = 2;
+ if (I->Pkg->CurrentVer != 0)
+ Comp = S.InstVerIter(Cache).CompareVer(I->Pkg.CurrentVer());
+ if (Comp < 0)
+ fprintf(F,"> ");
+ if (Comp == 0)
+ fprintf(F,"= ");
+ if (Comp > 0)
+ fprintf(F,"< ");
+ fprintf(F,"%s ",S.InstVerIter(Cache).VerStr());
+ }
+ else
+ fprintf(F,"> - ");
+
+ // Show the filename/operation
+ if (I->Op == Item::Install)
+ {
+ // No errors here..
+ if (I->File[0] != '/')
+ fprintf(F,"**ERROR**\n");
+ else
+ fprintf(F,"%s\n",I->File.c_str());
+ }
+ if (I->Op == Item::Configure)
+ fprintf(F,"**CONFIGURE**\n");
+ if (I->Op == Item::Remove ||
+ I->Op == Item::Purge)
+ fprintf(F,"**REMOVE**\n");
+
+ if (ferror(F) != 0)
+ return false;
+ }
+ return true;
+}
+ /*}}}*/
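+// A sketch of what the version 2 stream looks like (package names, versions
+// and paths below are hypothetical):
+//
+//    VERSION 2
+//    APT::Architecture=i386
+//    ...                         (one line per non-empty configuration value)
+//
+//    libfoo 1.0-1 < 1.1-2 /var/cache/apt/archives/libfoo_1.1-2_i386.deb
+//    bar 1.2-1 < 1.3-1 **CONFIGURE**
+//    baz 0.9-1 > - **REMOVE**
+//
+// i.e. package name, current version, a comparison marker, target version and
+// then the .deb path, **CONFIGURE** or **REMOVE**.
+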
// DPkgPM::RunScriptsWithPkgs - Run scripts with package names on stdin /*{{{*/
// ---------------------------------------------------------------------
/* This looks for a list of scripts to run from the configuration file
@@ -158,7 +244,18 @@ bool pkgDPkgPM::RunScriptsWithPkgs(const char *Cnf)
{
if (Opts->Value.empty() == true)
continue;
-
+
+ // Determine the protocol version
+ string OptSec = Opts->Value;
+ string::size_type Pos;
+ if ((Pos = OptSec.find(' ')) == string::npos || Pos == 0)
+ Pos = OptSec.length();
+ else
+ Pos--;
+ OptSec = "DPkg::Tools::Options::" + string(Opts->Value.c_str(),Pos);
+
+ unsigned int Version = _config->FindI(OptSec+"::Version",1);
+
// Create the pipes
int Pipes[2];
if (pipe(Pipes) != 0)
@@ -185,31 +282,44 @@ bool pkgDPkgPM::RunScriptsWithPkgs(const char *Cnf)
_exit(100);
}
close(Pipes[0]);
- FileFd Fd(Pipes[1]);
-
+ FILE *F = fdopen(Pipes[1],"w");
+ if (F == 0)
+      return _error->Errno("fdopen","Failed to open new FD");
+
// Feed it the filenames.
- for (vector<Item>::iterator I = List.begin(); I != List.end(); I++)
+ bool Die = false;
+ if (Version <= 1)
{
- // Only deal with packages to be installed from .deb
- if (I->Op != Item::Install)
- continue;
-
- // No errors here..
- if (I->File[0] != '/')
- continue;
-
- /* Feed the filename of each package that is pending install
- into the pipe. */
- if (Fd.Write(I->File.begin(),I->File.length()) == false ||
- Fd.Write("\n",1) == false)
+ for (vector<Item>::iterator I = List.begin(); I != List.end(); I++)
{
- kill(Process,SIGINT);
- Fd.Close();
- ExecWait(Process,Opts->Value.c_str(),true);
- return _error->Error("Failure running script %s",Opts->Value.c_str());
+ // Only deal with packages to be installed from .deb
+ if (I->Op != Item::Install)
+ continue;
+
+ // No errors here..
+ if (I->File[0] != '/')
+ continue;
+
+ /* Feed the filename of each package that is pending install
+ into the pipe. */
+ fprintf(F,"%s\n",I->File.c_str());
+ if (ferror(F) != 0)
+ {
+ Die = true;
+ break;
+ }
}
}
- Fd.Close();
+ else
+ Die = !SendV2Pkgs(F);
+
+ fclose(F);
+ if (Die == true)
+ {
+ kill(Process,SIGINT);
+ ExecWait(Process,Opts->Value.c_str(),true);
+ return _error->Error("Failure running script %s",Opts->Value.c_str());
+ }
// Clean up the sub process
if (ExecWait(Process,Opts->Value.c_str()) == false)
@@ -384,8 +494,8 @@ bool pkgDPkgPM::Go()
{
RunScripts("DPkg::Post-Invoke");
if (WIFSIGNALED(Status) != 0 && WTERMSIG(Status) == SIGSEGV)
- return _error->Error("Sub-process %s recieved a segmentation fault.",Args[0]);
-
+ return _error->Error("Sub-process %s received a segmentation fault.",Args[0]);
+
if (WIFEXITED(Status) != 0)
return _error->Error("Sub-process %s returned an error code (%u)",Args[0],WEXITSTATUS(Status));
diff --git a/apt-pkg/deb/dpkgpm.h b/apt-pkg/deb/dpkgpm.h
index 0cc32f731..761aac76b 100644
--- a/apt-pkg/deb/dpkgpm.h
+++ b/apt-pkg/deb/dpkgpm.h
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: dpkgpm.h,v 1.6 1999/07/30 06:15:14 jgg Exp $
+// $Id: dpkgpm.h,v 1.7 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
DPKG Package Manager - Provide an interface to dpkg
@@ -16,6 +16,7 @@
#include <apt-pkg/packagemanager.h>
#include <vector>
+#include <stdio.h>
class pkgDPkgPM : public pkgPackageManager
{
@@ -26,7 +27,7 @@ class pkgDPkgPM : public pkgPackageManager
enum Ops {Install, Configure, Remove, Purge} Op;
string File;
PkgIterator Pkg;
- Item(Ops Op,PkgIterator Pkg,string File = "") : Op(Op),
+ Item(Ops Op,PkgIterator Pkg,string File = "") : Op(Op),
File(File), Pkg(Pkg) {};
Item() {};
@@ -36,6 +37,7 @@ class pkgDPkgPM : public pkgPackageManager
// Helpers
bool RunScripts(const char *Cnf);
bool RunScriptsWithPkgs(const char *Cnf);
+ bool SendV2Pkgs(FILE *F);
   // The actual installation implementation
virtual bool Install(PkgIterator Pkg,string File);
@@ -46,7 +48,7 @@ class pkgDPkgPM : public pkgPackageManager
public:
- pkgDPkgPM(pkgDepCache &Cache);
+ pkgDPkgPM(pkgDepCache *Cache);
virtual ~pkgDPkgPM();
};
diff --git a/apt-pkg/depcache.cc b/apt-pkg/depcache.cc
index 1469126d2..d410413d4 100644
--- a/apt-pkg/depcache.cc
+++ b/apt-pkg/depcache.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: depcache.cc,v 1.22 2000/05/31 02:49:37 jgg Exp $
+// $Id: depcache.cc,v 1.23 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Dependency Cache - Caches Dependency information.
@@ -12,25 +12,24 @@
#pragma implementation "apt-pkg/depcache.h"
#endif
#include <apt-pkg/depcache.h>
-
#include <apt-pkg/version.h>
#include <apt-pkg/error.h>
+#include <apt-pkg/sptr.h>
+#include <apt-pkg/algorithms.h>
+
+#include <apti18n.h>
/*}}}*/
// DepCache::pkgDepCache - Constructors /*{{{*/
// ---------------------------------------------------------------------
/* */
-pkgDepCache::pkgDepCache(MMap &Map,OpProgress &Prog) :
- pkgCache(Map), PkgState(0), DepState(0)
+pkgDepCache::pkgDepCache(pkgCache *pCache,Policy *Plcy) :
+ Cache(pCache), PkgState(0), DepState(0)
{
- if (_error->PendingError() == false)
- Init(&Prog);
-}
-pkgDepCache::pkgDepCache(MMap &Map) :
- pkgCache(Map), PkgState(0), DepState(0)
-{
- if (_error->PendingError() == false)
- Init(0);
+ delLocalPolicy = 0;
+ LocalPolicy = Plcy;
+ if (LocalPolicy == 0)
+ delLocalPolicy = LocalPolicy = new Policy;
}
/*}}}*/
// DepCache::~pkgDepCache - Destructor /*{{{*/
@@ -40,6 +39,7 @@ pkgDepCache::~pkgDepCache()
{
delete [] PkgState;
delete [] DepState;
+ delete delLocalPolicy;
}
/*}}}*/
// DepCache::Init - Generate the initial extra structures. /*{{{*/
@@ -53,12 +53,12 @@ bool pkgDepCache::Init(OpProgress *Prog)
DepState = new unsigned char[Head().DependsCount];
memset(PkgState,0,sizeof(*PkgState)*Head().PackageCount);
memset(DepState,0,sizeof(*DepState)*Head().DependsCount);
-
+
if (Prog != 0)
{
Prog->OverallProgress(0,2*Head().PackageCount,Head().PackageCount,
- "Building Dependency Tree");
- Prog->SubProgress(Head().PackageCount,"Candidate Versions");
+ _("Building Dependency Tree"));
+ Prog->SubProgress(Head().PackageCount,_("Candidate Versions"));
}
/* Set the current state of everything. In this state all of the
@@ -86,8 +86,8 @@ bool pkgDepCache::Init(OpProgress *Prog)
Prog->OverallProgress(Head().PackageCount,2*Head().PackageCount,
Head().PackageCount,
- "Building Dependency Tree");
- Prog->SubProgress(Head().PackageCount,"Dependency Generation");
+ _("Building Dependency Tree"));
+ Prog->SubProgress(Head().PackageCount,_("Dependency Generation"));
}
Update(Prog);
@@ -95,29 +95,6 @@ bool pkgDepCache::Init(OpProgress *Prog)
return true;
}
/*}}}*/
-// DepCache::GetCandidateVer - Returns the Candidate install version /*{{{*/
-// ---------------------------------------------------------------------
-/* The default just returns the target version if it exists or the
- highest version. */
-pkgDepCache::VerIterator pkgDepCache::GetCandidateVer(PkgIterator Pkg,
- bool AllowCurrent)
-{
- // Try to use an explicit target
- if (Pkg->TargetVer == 0 ||
- (AllowCurrent == false && Pkg.TargetVer() == Pkg.CurrentVer()))
- return pkgCache::GetCandidateVer(Pkg,AllowCurrent);
- else
- return Pkg.TargetVer();
-}
- /*}}}*/
-// DepCache::IsImportantDep - True if the dependency is important /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-bool pkgDepCache::IsImportantDep(DepIterator Dep)
-{
- return Dep.IsCritical();
-}
- /*}}}*/
// DepCache::CheckDep - Checks a single dependency /*{{{*/
// ---------------------------------------------------------------------
@@ -132,28 +109,30 @@ bool pkgDepCache::CheckDep(DepIterator Dep,int Type,PkgIterator &Res)
/* Check simple depends. A depends -should- never self match but
we allow it anyhow because dpkg does. Technically it is a packaging
bug. Conflicts may never self match */
- if (Dep.TargetPkg() != Dep.ParentPkg() || Dep->Type != Dep::Conflicts)
+ if (Dep.TargetPkg() != Dep.ParentPkg() ||
+ (Dep->Type != Dep::Conflicts && Dep->Type != Dep::Obsoletes))
{
PkgIterator Pkg = Dep.TargetPkg();
// Check the base package
if (Type == NowVersion && Pkg->CurrentVer != 0)
- if (pkgCheckDep(Dep.TargetVer(),
- Pkg.CurrentVer().VerStr(),Dep->CompareOp) == true)
+ if (VS().CheckDep(Pkg.CurrentVer().VerStr(),Dep->CompareOp,
+ Dep.TargetVer()) == true)
return true;
if (Type == InstallVersion && PkgState[Pkg->ID].InstallVer != 0)
- if (pkgCheckDep(Dep.TargetVer(),
- PkgState[Pkg->ID].InstVerIter(*this).VerStr(),
- Dep->CompareOp) == true)
+ if (VS().CheckDep(PkgState[Pkg->ID].InstVerIter(*this).VerStr(),
+ Dep->CompareOp,Dep.TargetVer()) == true)
return true;
if (Type == CandidateVersion && PkgState[Pkg->ID].CandidateVer != 0)
- if (pkgCheckDep(Dep.TargetVer(),
- PkgState[Pkg->ID].CandidateVerIter(*this).VerStr(),
- Dep->CompareOp) == true)
+ if (VS().CheckDep(PkgState[Pkg->ID].CandidateVerIter(*this).VerStr(),
+ Dep->CompareOp,Dep.TargetVer()) == true)
return true;
}
+ if (Dep->Type == Dep::Obsoletes)
+ return false;
+
// Check the providing packages
PrvIterator P = Dep.TargetPkg().ProvidesList();
PkgIterator Pkg = Dep.ParentPkg();
@@ -186,7 +165,7 @@ bool pkgDepCache::CheckDep(DepIterator Dep,int Type,PkgIterator &Res)
}
// Compare the versions.
- if (pkgCheckDep(Dep.TargetVer(),P.ProvideVersion(),Dep->CompareOp) == true)
+ if (VS().CheckDep(P.ProvideVersion(),Dep->CompareOp,Dep.TargetVer()) == true)
{
Res = P.OwnerPkg();
return true;
@@ -199,7 +178,7 @@ bool pkgDepCache::CheckDep(DepIterator Dep,int Type,PkgIterator &Res)
// DepCache::AddSizes - Add the packages sizes to the counters /*{{{*/
// ---------------------------------------------------------------------
/* Call with Mult = -1 to perform the inverse operation */
-void pkgDepCache::AddSizes(const PkgIterator &Pkg,long Mult)
+void pkgDepCache::AddSizes(const PkgIterator &Pkg,signed long Mult)
{
StateCache &P = PkgState[Pkg->ID];
@@ -210,8 +189,8 @@ void pkgDepCache::AddSizes(const PkgIterator &Pkg,long Mult)
// Compute the size data
if (P.NewInstall() == true)
{
- iUsrSize += Mult*P.InstVerIter(*this)->InstalledSize;
- iDownloadSize += Mult*P.InstVerIter(*this)->Size;
+ iUsrSize += (signed)(Mult*P.InstVerIter(*this)->InstalledSize);
+ iDownloadSize += (signed)(Mult*P.InstVerIter(*this)->Size);
return;
}
@@ -220,9 +199,9 @@ void pkgDepCache::AddSizes(const PkgIterator &Pkg,long Mult)
(P.InstallVer != (Version *)Pkg.CurrentVer() ||
(P.iFlags & ReInstall) == ReInstall) && P.InstallVer != 0)
{
- iUsrSize += Mult*((signed)P.InstVerIter(*this)->InstalledSize -
- (signed)Pkg.CurrentVer()->InstalledSize);
- iDownloadSize += Mult*P.InstVerIter(*this)->Size;
+ iUsrSize += (signed)(Mult*((signed)P.InstVerIter(*this)->InstalledSize -
+ (signed)Pkg.CurrentVer()->InstalledSize));
+ iDownloadSize += (signed)(Mult*P.InstVerIter(*this)->Size);
return;
}
@@ -230,14 +209,14 @@ void pkgDepCache::AddSizes(const PkgIterator &Pkg,long Mult)
if (Pkg.State() == pkgCache::PkgIterator::NeedsUnpack &&
P.Delete() == false)
{
- iDownloadSize += Mult*P.InstVerIter(*this)->Size;
+ iDownloadSize += (signed)(Mult*P.InstVerIter(*this)->Size);
return;
}
// Removing
if (Pkg->CurrentVer != 0 && P.InstallVer == 0)
{
- iUsrSize -= Mult*Pkg.CurrentVer()->InstalledSize;
+ iUsrSize -= (signed)(Mult*Pkg.CurrentVer()->InstalledSize);
return;
}
}
@@ -310,7 +289,7 @@ void pkgDepCache::BuildGroupOrs(VerIterator const &V)
/* Invert for Conflicts. We have to do this twice to get the
right sense for a conflicts group */
- if (D->Type == Dep::Conflicts)
+ if (D->Type == Dep::Conflicts || D->Type == Dep::Obsoletes)
State = ~State;
// Add to the group if we are within an or..
@@ -321,7 +300,7 @@ void pkgDepCache::BuildGroupOrs(VerIterator const &V)
Group = 0;
// Invert for Conflicts
- if (D->Type == Dep::Conflicts)
+ if (D->Type == Dep::Conflicts || D->Type == Dep::Obsoletes)
State = ~State;
}
}
@@ -445,7 +424,7 @@ void pkgDepCache::Update(OpProgress *Prog)
{
// Build the dependency state.
unsigned char &State = DepState[D->ID];
- State = DependencyState(D);;
+ State = DependencyState(D);
// Add to the group if we are within an or..
Group |= State;
@@ -454,7 +433,7 @@ void pkgDepCache::Update(OpProgress *Prog)
Group = 0;
// Invert for Conflicts
- if (D->Type == Dep::Conflicts)
+ if (D->Type == Dep::Conflicts || D->Type == Dep::Obsoletes)
State = ~State;
}
}
@@ -482,9 +461,9 @@ void pkgDepCache::Update(DepIterator D)
State = DependencyState(D);
// Invert for Conflicts
- if (D->Type == Dep::Conflicts)
+ if (D->Type == Dep::Conflicts || D->Type == Dep::Obsoletes)
State = ~State;
-
+
RemoveStates(D.ParentPkg());
BuildGroupOrs(D.ParentVer());
UpdateVerState(D.ParentPkg());
@@ -497,7 +476,7 @@ void pkgDepCache::Update(DepIterator D)
/* This is called whenever the state of a package changes. It updates
all cached dependencies related to this package. */
void pkgDepCache::Update(PkgIterator const &Pkg)
-{
+{
// Recompute the dep of the package
RemoveStates(Pkg);
UpdateVerState(Pkg);
@@ -610,8 +589,12 @@ void pkgDepCache::MarkDelete(PkgIterator const &Pkg, bool rPurge)
// DepCache::MarkInstall - Put the package in the install state /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst)
-{
+void pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst,
+ unsigned long Depth)
+{
+ if (Depth > 100)
+ return;
+
// Simplifies other routines.
if (Pkg.end() == true)
return;
@@ -627,6 +610,10 @@ void pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst)
MarkKeep(Pkg);
return;
}
+
+ // See if there is even any possible installation candidate
+ if (P.CandidateVer == 0)
+ return;
// We dont even try to install virtual packages..
if (Pkg->VersionList == 0)
@@ -657,7 +644,8 @@ void pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst)
// Grok or groups
DepIterator Start = Dep;
bool Result = true;
- for (bool LastOR = true; Dep.end() == false && LastOR == true; Dep++)
+ unsigned Ors = 0;
+ for (bool LastOR = true; Dep.end() == false && LastOR == true; Dep++,Ors++)
{
LastOR = (Dep->CompareOp & Dep::Or) == Dep::Or;
@@ -676,24 +664,62 @@ void pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst)
continue;
if (Pkg->CurrentVer != 0 && Start.IsCritical() == false)
continue;
-
- // Now we have to take action...
- PkgIterator P = Start.SmartTargetPkg();
+
+ /* If we are in an or group locate the first or that can
+ succeed. We have already cached this.. */
+ for (; Ors > 1 && (DepState[Start->ID] & DepCVer) != DepCVer; Ors--)
+ Start++;
+
+ /* This bit is for processing the possibility of an install/upgrade
+ fixing the problem */
+ SPtrArray<Version *> List = Start.AllTargets();
if ((DepState[Start->ID] & DepCVer) == DepCVer)
{
- MarkInstall(P,true);
+ // Right, find the best version to install..
+ Version **Cur = List;
+ PkgIterator P = Start.TargetPkg();
+ PkgIterator InstPkg(*Cache,0);
- // Set the autoflag, after MarkInstall because MarkInstall unsets it
- if (P->CurrentVer == 0)
- PkgState[P->ID].Flags |= Flag::Auto;
+ // See if there are direct matches (at the start of the list)
+ for (; *Cur != 0 && (*Cur)->ParentPkg == P.Index(); Cur++)
+ {
+ PkgIterator Pkg(*Cache,Cache->PkgP + (*Cur)->ParentPkg);
+ if (PkgState[Pkg->ID].CandidateVer != *Cur)
+ continue;
+ InstPkg = Pkg;
+ break;
+ }
+
+ // Select the highest priority providing package
+ if (InstPkg.end() == true)
+ {
+ pkgPrioSortList(*Cache,Cur);
+ for (; *Cur != 0; Cur++)
+ {
+ PkgIterator Pkg(*Cache,Cache->PkgP + (*Cur)->ParentPkg);
+ if (PkgState[Pkg->ID].CandidateVer != *Cur)
+ continue;
+ InstPkg = Pkg;
+ break;
+ }
+ }
+
+ if (InstPkg.end() == false)
+ {
+ MarkInstall(InstPkg,true,Depth + 1);
+ // Set the autoflag, after MarkInstall because MarkInstall unsets it
+ if (P->CurrentVer == 0)
+ PkgState[InstPkg->ID].Flags |= Flag::Auto;
+ }
+
continue;
}
- // For conflicts we just de-install the package and mark as auto
- if (Start->Type == Dep::Conflicts)
+ /* For conflicts we just de-install the package and mark as auto,
+ Conflicts may not have or groups */
+ if (Start->Type == Dep::Conflicts || Start->Type == Dep::Obsoletes)
{
- Version **List = Start.AllTargets();
for (Version **I = List; *I != 0; I++)
{
VerIterator Ver(*this,*I);
@@ -702,7 +728,6 @@ void pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst)
MarkDelete(Pkg);
PkgState[Pkg->ID].Flags |= Flag::Auto;
}
- delete [] List;
continue;
}
}
@@ -726,6 +751,27 @@ void pkgDepCache::SetReInstall(PkgIterator const &Pkg,bool To)
AddSizes(Pkg);
}
/*}}}*/
+// DepCache::SetCandidateVersion - Change the candidate version /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+void pkgDepCache::SetCandidateVersion(VerIterator TargetVer)
+{
+ pkgCache::PkgIterator Pkg = TargetVer.ParentPkg();
+ StateCache &P = PkgState[Pkg->ID];
+
+ RemoveSizes(Pkg);
+ RemoveStates(Pkg);
+
+ if (P.CandidateVer == P.InstallVer)
+ P.InstallVer = (Version *)TargetVer;
+ P.CandidateVer = (Version *)TargetVer;
+ P.Update(Pkg,*this);
+
+ AddStates(Pkg);
+ Update(Pkg);
+ AddSizes(Pkg);
+}
+ /*}}}*/
// StateCache::Update - Compute the various static display things /*{{{*/
// ---------------------------------------------------------------------
/* This is called whenever the Candidate version changes. */
@@ -770,21 +816,48 @@ const char *pkgDepCache::StateCache::StripEpoch(const char *Ver)
return Ver;
}
/*}}}*/
-// StateCache::SetCandidateVersion - Change the candidate version /*{{{*/
+
+// Policy::GetCandidateVer - Returns the Candidate install version /*{{{*/
// ---------------------------------------------------------------------
-/* */
-void pkgDepCache::SetCandidateVersion(VerIterator TargetVer)
+/* The default just returns the highest available version that is not
+ a source and automatic. */
+pkgCache::VerIterator pkgDepCache::Policy::GetCandidateVer(PkgIterator Pkg)
{
- pkgCache::PkgIterator I = TargetVer.ParentPkg();
-
- RemoveSizes(I);
- RemoveStates(I);
+ /* Not source/not automatic versions cannot be a candidate version
+ unless they are already installed */
+ VerIterator Last(*(pkgCache *)this,0);
- PkgState[I->ID].CandidateVer = (Version *) TargetVer;
- PkgState[I->ID].Update(I, *this);
+ for (VerIterator I = Pkg.VersionList(); I.end() == false; I++)
+ {
+ if (Pkg.CurrentVer() == I)
+ return I;
+
+ for (VerFileIterator J = I.FileList(); J.end() == false; J++)
+ {
+ if ((J.File()->Flags & Flag::NotSource) != 0)
+ continue;
+
+ /* Stash the highest version of a not-automatic source, we use it
+ if there is nothing better */
+ if ((J.File()->Flags & Flag::NotAutomatic) != 0)
+ {
+ if (Last.end() == true)
+ Last = I;
+ continue;
+ }
+
+ return I;
+ }
+ }
- AddStates(I);
- Update(I);
- AddSizes(I);
+ return Last;
+}
+ /*}}}*/
+// Policy::IsImportantDep - True if the dependency is important /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool pkgDepCache::Policy::IsImportantDep(DepIterator Dep)
+{
+ return Dep.IsCritical();
}
/*}}}*/
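The depcache changes above pull candidate selection and dependency importance out of the cache proper and behind the virtual pkgDepCache::Policy hooks, while version comparisons now go through the pluggable VS().CheckDep() interface. The fragment below is a minimal sketch of how a caller can slot in its own policy; it is not part of this commit, and the class name PinToCurrentPolicy and its preference rule are purely illustrative.

#include <apt-pkg/depcache.h>

class PinToCurrentPolicy : public pkgDepCache::Policy
{
   public:

   // Prefer a version that is already installed; otherwise defer to the
   // stock selection implemented in Policy::GetCandidateVer above.
   virtual pkgCache::VerIterator GetCandidateVer(pkgCache::PkgIterator Pkg)
   {
      if (Pkg->CurrentVer != 0)
         return Pkg.CurrentVer();
      return pkgDepCache::Policy::GetCandidateVer(Pkg);
   };
};

Handing such an object to the new constructor, as in pkgDepCache Dep(&Cache,&MyPolicy), routes every GetCandidateVer()/IsImportantDep() call in the resolver through it; with no policy argument the cache allocates a default Policy and frees it itself via delLocalPolicy.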
diff --git a/apt-pkg/depcache.h b/apt-pkg/depcache.h
index 3bb677dc1..6d51920e9 100644
--- a/apt-pkg/depcache.h
+++ b/apt-pkg/depcache.h
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: depcache.h,v 1.13 2000/05/31 02:49:37 jgg Exp $
+// $Id: depcache.h,v 1.14 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
DepCache - Dependency Extension data for the cache
@@ -35,7 +35,6 @@
##################################################################### */
/*}}}*/
-// Header section: pkglib
#ifndef PKGLIB_DEPCACHE_H
#define PKGLIB_DEPCACHE_H
@@ -46,7 +45,7 @@
#include <apt-pkg/pkgcache.h>
#include <apt-pkg/progress.h>
-class pkgDepCache : public pkgCache
+class pkgDepCache : protected pkgCache::Namespace
{
public:
@@ -75,16 +74,16 @@ class pkgDepCache : public pkgCache
// Pointer to the install version.
Version *InstallVer;
+
+ // Copy of Package::Flags
+ unsigned short Flags;
+ unsigned short iFlags; // Internal flags
// Various tree indicators
signed char Status; // -1,0,1,2
unsigned char Mode; // ModeList
unsigned char DepState; // DepState Flags
- // Copy of Package::Flags
- unsigned short Flags;
- unsigned short iFlags; // Internal flags
-
// Update of candidate version
const char *StripEpoch(const char *Ver);
void Update(PkgIterator Pkg,pkgCache &Cache);
@@ -110,29 +109,42 @@ class pkgDepCache : public pkgCache
void BuildGroupOrs(VerIterator const &V);
void UpdateVerState(PkgIterator Pkg);
- bool Init(OpProgress *Prog);
-
+ // User Policy control
+ class Policy
+ {
+ public:
+
+ virtual VerIterator GetCandidateVer(PkgIterator Pkg);
+ virtual bool IsImportantDep(DepIterator Dep);
+
+ virtual ~Policy() {};
+ };
+
protected:
// State information
+ pkgCache *Cache;
StateCache *PkgState;
unsigned char *DepState;
- signed long iUsrSize;
- unsigned long iDownloadSize;
+ double iUsrSize;
+ double iDownloadSize;
unsigned long iInstCount;
unsigned long iDelCount;
unsigned long iKeepCount;
unsigned long iBrokenCount;
unsigned long iBadCount;
-
+
+ Policy *delLocalPolicy; // For memory clean up..
+ Policy *LocalPolicy;
+
// Check for a matching provides
bool CheckDep(DepIterator Dep,int Type,PkgIterator &Res);
inline bool CheckDep(DepIterator Dep,int Type)
{
- PkgIterator Res(*this);
+ PkgIterator Res(*this,0);
return CheckDep(Dep,Type,Res);
- }
+ }
// Computes state information for deps and versions (w/o storing)
unsigned char DependencyState(DepIterator &D);
@@ -145,17 +157,27 @@ class pkgDepCache : public pkgCache
void Update(PkgIterator const &P);
// Count manipulators
- void AddSizes(const PkgIterator &Pkg,long Mult = 1);
+ void AddSizes(const PkgIterator &Pkg,signed long Mult = 1);
inline void RemoveSizes(const PkgIterator &Pkg) {AddSizes(Pkg,-1);};
void AddStates(const PkgIterator &Pkg,int Add = 1);
inline void RemoveStates(const PkgIterator &Pkg) {AddStates(Pkg,-1);};
-
+
public:
+ // Legacy.. We look like a pkgCache
+ inline operator pkgCache &() {return *Cache;};
+ inline Header &Head() {return *Cache->HeaderP;};
+ inline PkgIterator PkgBegin() {return Cache->PkgBegin();};
+ inline PkgIterator FindPkg(string const &Name) {return Cache->FindPkg(Name);};
+
+ inline pkgCache &GetCache() {return *Cache;};
+ inline pkgVersioningSystem &VS() {return *Cache->VS;};
+
// Policy implementation
- virtual VerIterator GetCandidateVer(PkgIterator Pkg,bool AllowCurrent = true);
- virtual bool IsImportantDep(DepIterator Dep);
-
+ inline VerIterator GetCandidateVer(PkgIterator Pkg) {return LocalPolicy->GetCandidateVer(Pkg);};
+ inline bool IsImportantDep(DepIterator Dep) {return LocalPolicy->IsImportantDep(Dep);};
+ inline Policy &GetPolicy() {return *LocalPolicy;};
+
// Accessors
inline StateCache &operator [](PkgIterator const &I) {return PkgState[I->ID];};
inline unsigned char &operator [](DepIterator const &I) {return DepState[I->ID];};
@@ -163,7 +185,8 @@ class pkgDepCache : public pkgCache
// Manipulators
void MarkKeep(PkgIterator const &Pkg,bool Soft = false);
void MarkDelete(PkgIterator const &Pkg,bool Purge = false);
- void MarkInstall(PkgIterator const &Pkg,bool AutoInst = true);
+ void MarkInstall(PkgIterator const &Pkg,bool AutoInst = true,
+ unsigned long Depth = 0);
void SetReInstall(PkgIterator const &Pkg,bool To);
void SetCandidateVersion(VerIterator TargetVer);
@@ -171,16 +194,17 @@ class pkgDepCache : public pkgCache
void Update(OpProgress *Prog = 0);
// Size queries
- inline signed long UsrSize() {return iUsrSize;};
- inline unsigned long DebSize() {return iDownloadSize;};
+ inline double UsrSize() {return iUsrSize;};
+ inline double DebSize() {return iDownloadSize;};
inline unsigned long DelCount() {return iDelCount;};
inline unsigned long KeepCount() {return iKeepCount;};
inline unsigned long InstCount() {return iInstCount;};
inline unsigned long BrokenCount() {return iBrokenCount;};
inline unsigned long BadCount() {return iBadCount;};
+
+ bool Init(OpProgress *Prog);
- pkgDepCache(MMap &Map,OpProgress &Prog);
- pkgDepCache(MMap &Map);
+ pkgDepCache(pkgCache *Cache,Policy *Plcy = 0);
virtual ~pkgDepCache();
};
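Because the header above drops the MMap-based constructors and makes Init() public, building a dependency cache becomes a two-step affair: map the pkgCache first, then wrap it. A minimal sketch of the new sequence (helper name and output are illustrative only):

#include <apt-pkg/mmap.h>
#include <apt-pkg/depcache.h>
#include <apt-pkg/progress.h>
#include <apt-pkg/error.h>
#include <iostream>

bool ShowBroken(MMap &Map,OpProgress &Prog)
{
   pkgCache Cache(&Map,true);      // pkgCache now takes a pointer (see pkgcache.cc below)
   pkgDepCache Dep(&Cache);        // default Policy is created internally
   if (_error->PendingError() == true || Dep.Init(&Prog) == false)
      return false;                // Init() is public in the new header
   std::cout << Dep.BrokenCount() << " broken package(s)" << std::endl;
   return true;
}

The legacy accessors (operator pkgCache &, Head(), PkgBegin(), FindPkg()) exist so that code written against the old is-a-pkgCache design keeps compiling against this has-a layout.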
diff --git a/apt-pkg/indexfile.cc b/apt-pkg/indexfile.cc
new file mode 100644
index 000000000..f59387c1a
--- /dev/null
+++ b/apt-pkg/indexfile.cc
@@ -0,0 +1,77 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: indexfile.cc,v 1.2 2001/02/20 07:03:17 jgg Exp $
+/* ######################################################################
+
+ Index File - Abstraction for an index of archive/source file.
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#ifdef __GNUG__
+#pragma implementation "apt-pkg/indexfile.h"
+#endif
+
+#include <apt-pkg/indexfile.h>
+#include <apt-pkg/error.h>
+ /*}}}*/
+
+// Global list of Items supported
+static pkgIndexFile::Type *ItmList[10];
+pkgIndexFile::Type **pkgIndexFile::Type::GlobalList = ItmList;
+unsigned long pkgIndexFile::Type::GlobalListLen = 0;
+
+// Type::Type - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+pkgIndexFile::Type::Type()
+{
+ ItmList[GlobalListLen] = this;
+ GlobalListLen++;
+}
+ /*}}}*/
+// Type::GetType - Locate the type by name /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+pkgIndexFile::Type *pkgIndexFile::Type::GetType(const char *Type)
+{
+ for (unsigned I = 0; I != GlobalListLen; I++)
+ if (strcmp(GlobalList[I]->Label,Type) == 0)
+ return GlobalList[I];
+ return 0;
+}
+ /*}}}*/
+
+// IndexFile::GetIndexes - Stub /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool pkgIndexFile::GetIndexes(pkgAcquire *Owner) const
+{
+ return _error->Error("Internal Error, this index file is not downloadable");
+}
+ /*}}}*/
+// IndexFile::ArchiveInfo - Stub /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+string pkgIndexFile::ArchiveInfo(pkgCache::VerIterator Ver) const
+{
+ return string();
+}
+ /*}}}*/
+// IndexFile::FindInCache - Stub /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+pkgCache::PkgFileIterator pkgIndexFile::FindInCache(pkgCache &Cache) const
+{
+ return pkgCache::PkgFileIterator(Cache);
+}
+ /*}}}*/
+// IndexFile::SourceInfo - Stub /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+string pkgIndexFile::SourceInfo(pkgSrcRecords::Parser const &Record,
+ pkgSrcRecords::File const &File) const
+{
+ return string();
+}
+ /*}}}*/
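The Type machinery above is a simple self-registration scheme: constructing any Type-derived object appends it to the fixed GlobalList, and GetType() later finds it again by its Label. A sketch of how a concrete index type would plug in (the class name and label string are invented for illustration; the real ones live in the deb-specific index code this commit adds):

#include <apt-pkg/indexfile.h>

class ExampleIndexType : public pkgIndexFile::Type
{
   public:
   ExampleIndexType() {Label = "Example Package Index";};
};

// Registration is a side effect of static construction: the base Type()
// constructor appends the object to GlobalList before Label is assigned.
static ExampleIndexType _apt_ExampleType;

// A generic caller can then recover it purely by name:
//    pkgIndexFile::Type *T = pkgIndexFile::Type::GetType("Example Package Index");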
diff --git a/apt-pkg/indexfile.h b/apt-pkg/indexfile.h
new file mode 100644
index 000000000..d264a3aba
--- /dev/null
+++ b/apt-pkg/indexfile.h
@@ -0,0 +1,80 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: indexfile.h,v 1.2 2001/02/20 07:03:17 jgg Exp $
+/* ######################################################################
+
+ Index File - Abstraction for an index of archive/source file.
+
+ There are 3 primary sorts of index files, all represented by this
+ class:
+
+ Binary index files
+ Binary index files describing the local system
+ Source index files
+
+ They are all bundled together here, and the interfaces for
+ sources.list, acquire, cache gen and record parsing all use this class
+ to access the underlying representation.
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef PKGLIB_INDEXFILE_H
+#define PKGLIB_INDEXFILE_H
+
+#ifdef __GNUG__
+#pragma interface "apt-pkg/indexfile.h"
+#endif
+
+#include <string>
+#include <apt-pkg/pkgcache.h>
+#include <apt-pkg/srcrecords.h>
+#include <apt-pkg/pkgrecords.h>
+
+class pkgAcquire;
+class pkgCacheGenerator;
+class OpProgress;
+class pkgIndexFile
+{
+ public:
+
+ class Type
+ {
+ public:
+
+ // Global list of Items supported
+ static Type **GlobalList;
+ static unsigned long GlobalListLen;
+ static Type *GetType(const char *Type);
+
+ const char *Label;
+
+ virtual pkgRecords::Parser *CreatePkgParser(pkgCache::PkgFileIterator File) const {return 0;};
+ Type();
+ };
+
+ virtual const Type *GetType() const = 0;
+
+ // Return descriptive strings of various sorts
+ virtual string ArchiveInfo(pkgCache::VerIterator Ver) const;
+ virtual string SourceInfo(pkgSrcRecords::Parser const &Record,
+ pkgSrcRecords::File const &File) const;
+ virtual string Describe() const = 0;
+
+ // Interface for acquire
+ virtual string ArchiveURI(string File) const {return string();};
+ virtual bool GetIndexes(pkgAcquire *Owner) const;
+
+ // Interface for the record parsers
+ virtual pkgSrcRecords::Parser *CreateSrcParser() const {return 0;};
+
+ // Interface for the Cache Generator
+ virtual bool Exists() const = 0;
+ virtual bool HasPackages() const = 0;
+ virtual unsigned long Size() const = 0;
+ virtual bool Merge(pkgCacheGenerator &Gen,OpProgress &Prog) const {return false;};
+ virtual pkgCache::PkgFileIterator FindInCache(pkgCache &Cache) const;
+
+ virtual ~pkgIndexFile() {};
+};
+
+#endif
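Of the interface above only GetType(), Describe(), Exists(), HasPackages() and Size() are pure virtual; everything else has a stub or inline default. The sketch below shows the least a concrete index has to supply (class, member and label are invented; the deb* index files added elsewhere in this commit are the real implementations):

#include <apt-pkg/indexfile.h>
#include <apt-pkg/fileutl.h>

class ExampleIndexFile : public pkgIndexFile
{
   string File;            // hypothetical backing file for this index
   public:
   virtual const Type *GetType() const {return Type::GetType("Example Package Index");};
   virtual string Describe() const {return File;};
   virtual bool Exists() const {return FileExists(File);};
   virtual bool HasPackages() const {return true;};
   virtual unsigned long Size() const {return 0;};   // a real index would stat File
   ExampleIndexFile(string F) : File(F) {};
};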
diff --git a/apt-pkg/init.cc b/apt-pkg/init.cc
index 29926b97a..01b9d8665 100644
--- a/apt-pkg/init.cc
+++ b/apt-pkg/init.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: init.cc,v 1.14 1998/11/25 23:54:06 jgg Exp $
+// $Id: init.cc,v 1.15 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Init - Initialize the package library
@@ -10,61 +10,120 @@
// Include files /*{{{*/
#include <apt-pkg/init.h>
#include <apt-pkg/fileutl.h>
+#include <apt-pkg/error.h>
+
+#include <apti18n.h>
#include <config.h>
+#include <sys/stat.h>
/*}}}*/
-// pkgInitialize - Initialize the configuration class /*{{{*/
+#define Stringfy_(x) # x
+#define Stringfy(x) Stringfy_(x)
+const char *pkgVersion = VERSION;
+const char *pkgLibVersion = Stringfy(APT_PKG_MAJOR) "."
+ Stringfy(APT_PKG_MINOR) "."
+ Stringfy(APT_PKG_RELEASE);
+const char *pkgCPU = COMMON_CPU;
+const char *pkgOS = COMMON_OS;
+
+// pkgInitConfig - Initialize the configuration class /*{{{*/
// ---------------------------------------------------------------------
/* Directories are specified in such a way that the FindDir function will
understand them. That is, if they don't start with a / then their parent
is prepended, this allows a fair degree of flexibility. */
-bool pkgInitialize(Configuration &Cnf)
+bool pkgInitConfig(Configuration &Cnf)
{
// General APT things
- Cnf.Set("APT::Architecture",ARCHITECTURE);
-
- // State
- Cnf.Set("Dir::State","/var/state/apt/");
- Cnf.Set("Dir::State::lists","lists/");
+ if (strcmp(COMMON_OS,"linux") == 0 ||
+ strcmp(COMMON_OS,"unknown") == 0)
+ Cnf.Set("APT::Architecture",COMMON_CPU);
+ else
+ Cnf.Set("APT::Architecture",COMMON_OS "-" COMMON_CPU);
+ Cnf.Set("Dir","/");
- /* These really should be jammed into a generic 'Local Database' engine
- which is yet to be determined. The functions in pkgcachegen should
- be the only users of these */
- Cnf.Set("Dir::State::xstatus","xstatus");
- Cnf.Set("Dir::State::userstatus","status.user");
- Cnf.Set("Dir::State::status","/var/lib/dpkg/status");
+ // State
+ Cnf.Set("Dir::State","var/lib/apt/");
+
+ /* Just in case something goes horribly wrong, we can fall back to the
+ old /var/state paths.. */
+ struct stat St;
+ if (stat("/var/lib/apt/.",&St) != 0 &&
+ stat("/var/state/apt/.",&St) == 0)
+ Cnf.Set("Dir::State","var/state/apt/");
+
+ Cnf.Set("Dir::State::lists","lists/");
Cnf.Set("Dir::State::cdroms","cdroms.list");
// Cache
- Cnf.Set("Dir::Cache","/var/cache/apt/");
+ Cnf.Set("Dir::Cache","var/cache/apt/");
Cnf.Set("Dir::Cache::archives","archives/");
Cnf.Set("Dir::Cache::srcpkgcache","srcpkgcache.bin");
Cnf.Set("Dir::Cache::pkgcache","pkgcache.bin");
// Configuration
- Cnf.Set("Dir::Etc","/etc/apt/");
+ Cnf.Set("Dir::Etc","etc/apt/");
Cnf.Set("Dir::Etc::sourcelist","sources.list");
Cnf.Set("Dir::Etc::main","apt.conf");
+ Cnf.Set("Dir::Etc::parts","apt.conf.d");
+ Cnf.Set("Dir::Etc::preferences","preferences");
Cnf.Set("Dir::Bin::methods","/usr/lib/apt/methods");
- Cnf.Set("Dir::Bin::dpkg","/usr/bin/dpkg");
-
- // Read the main config file
- string FName = Cnf.FindFile("Dir::Etc::main");
+
bool Res = true;
- if (FileExists(FName) == true)
- Res &= ReadConfigFile(Cnf,FName);
// Read an alternate config file
const char *Cfg = getenv("APT_CONFIG");
if (Cfg != 0 && FileExists(Cfg) == true)
Res &= ReadConfigFile(Cnf,Cfg);
+ // Read the configuration parts dir
+ string Parts = Cnf.FindDir("Dir::Etc::parts");
+ if (FileExists(Parts) == true)
+ Res &= ReadConfigDir(Cnf,Parts);
+
+ // Read the main config file
+ string FName = Cnf.FindFile("Dir::Etc::main");
+ if (FileExists(FName) == true)
+ Res &= ReadConfigFile(Cnf,FName);
+
if (Res == false)
return false;
- if (Cnf.FindB("Debug::pkgInitialize",false) == true)
+ if (Cnf.FindB("Debug::pkgInitConfig",false) == true)
Cnf.Dump();
return true;
}
/*}}}*/
+// pkgInitSystem - Initialize the _system class /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool pkgInitSystem(Configuration &Cnf,pkgSystem *&Sys)
+{
+ Sys = 0;
+ string Label = Cnf.Find("Apt::System","");
+ if (Label.empty() == false)
+ {
+ Sys = pkgSystem::GetSystem(Label.c_str());
+ if (Sys == 0)
+ return _error->Error(_("Packaging system '%s' is not supported"),Label.c_str());
+ }
+ else
+ {
+ signed MaxScore = 0;
+ for (unsigned I = 0; I != pkgSystem::GlobalListLen; I++)
+ {
+ signed Score = pkgSystem::GlobalList[I]->Score(Cnf);
+ if (Score > MaxScore)
+ {
+ MaxScore = Score;
+ Sys = pkgSystem::GlobalList[I];
+ }
+ }
+
+ if (Sys == 0)
+ return _error->Error(_("Unable to determine a suitable system type"));
+ }
+
+ return Sys->Initialize(Cnf);
+}
+ /*}}}*/
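The directory settings above rely on the relative-path rule described at the top of pkgInitConfig: entries without a leading '/' are resolved against their parent item, all the way up to Dir. A small sketch of what that composition is expected to yield (the result in the comment is inferred from that rule, not captured from a run):

#include <apt-pkg/configuration.h>

string ResolvedListDir()
{
   Configuration Cnf;
   Cnf.Set("Dir","/");
   Cnf.Set("Dir::State","var/lib/apt/");
   Cnf.Set("Dir::State::lists","lists/");

   // "lists/" has no leading '/', so FindDir prepends its parent Dir::State,
   // which in turn hangs off Dir, giving "/var/lib/apt/lists/".
   return Cnf.FindDir("Dir::State::lists");
}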
diff --git a/apt-pkg/init.h b/apt-pkg/init.h
index 27bfd8868..ac256cba2 100644
--- a/apt-pkg/init.h
+++ b/apt-pkg/init.h
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: init.h,v 1.3 1998/07/16 06:08:37 jgg Exp $
+// $Id: init.h,v 1.4 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Init - Initialize the package library
@@ -10,12 +10,34 @@
##################################################################### */
/*}}}*/
-// Header section: pkglib
#ifndef PKGLIB_INIT_H
#define PKGLIB_INIT_H
#include <apt-pkg/configuration.h>
+#include <apt-pkg/pkgsystem.h>
-bool pkgInitialize(Configuration &Cnf);
+// See the makefile
+#define APT_PKG_MAJOR 3
+#define APT_PKG_MINOR 1
+#define APT_PKG_RELEASE 0
+
+extern const char *pkgVersion;
+extern const char *pkgLibVersion;
+extern const char *pkgOS;
+extern const char *pkgCPU;
+
+bool pkgInitConfig(Configuration &Cnf);
+bool pkgInitSystem(Configuration &Cnf,pkgSystem *&Sys);
+
+#ifdef APT_COMPATIBILITY
+#if APT_COMPATIBILITY != 986
+#warning "Using APT_COMPATIBILITY"
+#endif
+
+inline bool pkgInitialize(Configuration &Cnf)
+{
+ return pkgInitConfig(Cnf) && pkgInitSystem(Cnf,_system);
+};
+#endif
#endif
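With the split into pkgInitConfig() and pkgInitSystem(), start-up for a front end becomes two explicit calls against the global _config and _system objects; the APT_COMPATIBILITY shim above keeps the old single pkgInitialize() spelling alive for existing code. A sketch of the new sequence (function name and error handling are illustrative):

#include <apt-pkg/init.h>
#include <apt-pkg/error.h>

bool Startup()
{
   if (pkgInitConfig(*_config) == false ||
       pkgInitSystem(*_config,_system) == false)
   {
      _error->DumpErrors();
      return false;
   }
   return true;
}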
diff --git a/apt-pkg/makefile b/apt-pkg/makefile
index d5e20374d..cc3e47597 100644
--- a/apt-pkg/makefile
+++ b/apt-pkg/makefile
@@ -9,36 +9,41 @@ HEADER_TARGETDIRS = apt-pkg
# Bring in the default rules
include ../buildlib/defaults.mak
-# The library name
+# The library name, don't forget to update init.h
LIBRARY=apt-pkg
-MAJOR=2.7
-MINOR=2
+MAJOR=3.1
+MINOR=0
SLIBS=$(PTHREADLIB)
# Source code for the contributed non-core things
SOURCE = contrib/mmap.cc contrib/error.cc contrib/strutl.cc \
contrib/configuration.cc contrib/progress.cc contrib/cmndline.cc \
- contrib/md5.cc contrib/cdromutl.cc contrib/crc-16.cc
+ contrib/md5.cc contrib/cdromutl.cc contrib/crc-16.cc \
+ contrib/fileutl.cc
+HEADERS = mmap.h error.h configuration.h fileutl.h cmndline.h \
+ md5.h crc-16.h cdromutl.h strutl.h sptr.h
-# Source code for the main library
-SOURCE+= pkgcache.cc version.cc fileutl.cc pkgcachegen.cc depcache.cc \
+# Source code for the core main library
+SOURCE+= pkgcache.cc version.cc depcache.cc \
orderlist.cc tagfile.cc sourcelist.cc packagemanager.cc \
- pkgrecords.cc algorithms.cc acquire.cc acquire-item.cc \
+ pkgrecords.cc algorithms.cc acquire.cc\
acquire-worker.cc acquire-method.cc init.cc clean.cc \
- srcrecords.cc cachefile.cc
+ srcrecords.cc cachefile.cc versionmatch.cc policy.cc \
+ pkgsystem.cc indexfile.cc pkgcachegen.cc acquire-item.cc
+HEADERS+= algorithms.h depcache.h pkgcachegen.h cacheiterators.h \
+ orderlist.h sourcelist.h packagemanager.h tagfile.h \
+ init.h pkgcache.h version.h progress.h pkgrecords.h \
+ acquire.h acquire-worker.h acquire-item.h acquire-method.h \
+ clean.h srcrecords.h cachefile.h versionmatch.h policy.h \
+ pkgsystem.h indexfile.h
# Source code for the debian specific components
-SOURCE+= deb/deblistparser.cc deb/debrecords.cc deb/dpkgpm.cc deb/dpkginit.cc \
- deb/debsrcrecords.cc
-
-# Public apt-pkg header files
-HEADERS = algorithms.h depcache.h mmap.h pkgcachegen.h cacheiterators.h \
- error.h orderlist.h sourcelist.h configuration.h fileutl.h \
- packagemanager.h tagfile.h deblistparser.h init.h pkgcache.h \
- version.h progress.h pkgrecords.h debrecords.h cmndline.h \
- acquire.h acquire-worker.h acquire-item.h acquire-method.h md5.h \
- dpkgpm.h dpkginit.h cdromutl.h strutl.h clean.h srcrecords.h \
- debsrcrecords.h cachefile.h crc-16.h
+# In theory the deb headers do not need to be exported..
+SOURCE+= deb/deblistparser.cc deb/debrecords.cc deb/dpkgpm.cc \
+ deb/debsrcrecords.cc deb/debversion.cc deb/debsystem.cc \
+ deb/debindexfile.cc
+HEADERS+= debversion.h debsrcrecords.h dpkgpm.h debrecords.h \
+ deblistparser.h debsystem.h debindexfile.h
HEADERS := $(addprefix apt-pkg/,$(HEADERS))
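Since the makefile's MAJOR/MINOR pair must track APT_PKG_MAJOR/APT_PKG_MINOR in init.h (hence the reminder in the comment above), a client can guard against a header/library mismatch at compile time; a sketch, using the values from this commit:

#include <apt-pkg/init.h>

#if APT_PKG_MAJOR != 3 || APT_PKG_MINOR != 1
#error "These sources expect the apt-pkg 3.1 interface introduced here"
#endif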
diff --git a/apt-pkg/orderlist.cc b/apt-pkg/orderlist.cc
index b46056128..4bd603726 100644
--- a/apt-pkg/orderlist.cc
+++ b/apt-pkg/orderlist.cc
@@ -1,13 +1,13 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: orderlist.cc,v 1.11 2000/01/16 08:45:47 jgg Exp $
+// $Id: orderlist.cc,v 1.12 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Order List - Represents and Manipulates an ordered list of packages.
A list of packages can be ordered by a number of conflicting criteria
each given a specific priority. Each package also has a set of flags
- indicating some usefull things about it that are derived in the
+ indicating some useful things about it that are derived in the
course of sorting. The pkgPackageManager class uses this class for
all of it's installation ordering needs.
@@ -54,6 +54,12 @@
after flag set. This forces them and all their dependents to be ordered
toward the end.
+ There are complications in this algorithm when presented with cycles.
+ For all known practical cases it works; the cases where it doesn't work
+ are fixable by tweaking the package descriptions. However, it should be
+ possible to improve this further to make some better choices when
+ presented with cycles.
+
##################################################################### */
/*}}}*/
// Include Files /*{{{*/
@@ -64,6 +70,8 @@
#include <apt-pkg/depcache.h>
#include <apt-pkg/error.h>
#include <apt-pkg/version.h>
+#include <apt-pkg/sptr.h>
+#include <apt-pkg/configuration.h>
/*}}}*/
pkgOrderList *pkgOrderList::Me = 0;
@@ -71,7 +79,7 @@ pkgOrderList *pkgOrderList::Me = 0;
// OrderList::pkgOrderList - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
-pkgOrderList::pkgOrderList(pkgDepCache &Cache) : Cache(Cache)
+pkgOrderList::pkgOrderList(pkgDepCache *pCache) : Cache(*pCache)
{
FileList = 0;
Primary = 0;
@@ -79,10 +87,11 @@ pkgOrderList::pkgOrderList(pkgDepCache &Cache) : Cache(Cache)
RevDepends = 0;
Remove = 0;
LoopCount = -1;
-
+ Debug = _config->FindB("Debug::pkgOrderList",false);
+
/* Construct the arrays, egcs 1.0.1 bug requires the package count
hack */
- unsigned long Size = Cache.HeaderP->PackageCount;
+ unsigned long Size = Cache.Head().PackageCount;
Flags = new unsigned short[Size];
End = List = new Package *[Size];
memset(Flags,0,sizeof(*Flags)*Size);
@@ -123,9 +132,9 @@ bool pkgOrderList::IsMissing(PkgIterator Pkg)
bool pkgOrderList::DoRun()
{
// Temp list
- unsigned long Size = Cache.HeaderP->PackageCount;
- Package **NList = new Package *[Size];
- AfterList = new Package *[Size];
+ unsigned long Size = Cache.Head().PackageCount;
+ SPtrArray<Package *> NList = new Package *[Size];
+ SPtrArray<Package *> AfterList = new Package *[Size];
AfterEnd = AfterList;
Depth = 0;
@@ -141,8 +150,6 @@ bool pkgOrderList::DoRun()
if (VisitNode(PkgIterator(Cache,*I)) == false)
{
End = OldEnd;
- delete [] NList;
- delete [] AfterList;
return false;
}
@@ -152,8 +159,7 @@ bool pkgOrderList::DoRun()
// Swap the main list to the new list
delete [] List;
- delete [] AfterList;
- List = NList;
+ List = NList.UnGuard();
return true;
}
/*}}}*/
@@ -216,32 +222,43 @@ bool pkgOrderList::OrderUnpack(string *FileList)
Me = this;
qsort(List,End - List,sizeof(*List),&OrderCompareA);
+ if (Debug == true)
+ clog << "** Pass A" << endl;
if (DoRun() == false)
return false;
+ if (Debug == true)
+ clog << "** Pass B" << endl;
Secondary = 0;
if (DoRun() == false)
return false;
+ if (Debug == true)
+ clog << "** Pass C" << endl;
LoopCount = 0;
RevDepends = 0;
Remove = 0; // Otherwise the libreadline remove problem occurs
if (DoRun() == false)
return false;
-
+
+ if (Debug == true)
+ clog << "** Pass D" << endl;
LoopCount = 0;
Primary = &pkgOrderList::DepUnPackPre;
if (DoRun() == false)
return false;
-/* cout << "----------END" << endl;
-
- for (iterator I = List; I != End; I++)
+ if (Debug == true)
{
- PkgIterator P(Cache,*I);
- if (IsNow(P) == true)
- cout << P.Name() << ' ' << IsMissing(P) << ',' << IsFlag(P,After) << endl;
- }*/
+ clog << "** Unpack ordering done" << endl;
+
+ for (iterator I = List; I != End; I++)
+ {
+ PkgIterator P(Cache,*I);
+ if (IsNow(P) == true)
+ clog << P.Name() << ' ' << IsMissing(P) << ',' << IsFlag(P,After) << endl;
+ }
+ }
return true;
}
@@ -279,6 +296,9 @@ int pkgOrderList::Score(PkgIterator Pkg)
if ((Pkg->Flags & pkgCache::Flag::Essential) == pkgCache::Flag::Essential)
Score += 100;
+ if (IsFlag(Pkg,Immediate) == true)
+ Score += 10;
+
for (DepIterator D = Cache[Pkg].InstVerIter(Cache).DependsList();
D.end() == false; D++)
if (D->Type == pkgCache::Dep::PreDepends)
@@ -375,7 +395,7 @@ int pkgOrderList::OrderCompareA(const void *a, const void *b)
/*}}}*/
// OrderList::OrderCompareB - Order the installation by source /*{{{*/
// ---------------------------------------------------------------------
-/* This orders by installation source. This is usefull to handle
+/* This orders by installation source. This is useful to handle
inter-source breaks */
int pkgOrderList::OrderCompareB(const void *a, const void *b)
{
@@ -454,7 +474,7 @@ bool pkgOrderList::VisitRProvides(DepFunc F,VerIterator Ver)
/* This routine calls visit on all providing packages. */
bool pkgOrderList::VisitProvides(DepIterator D,bool Critical)
{
- Version **List = D.AllTargets();
+ SPtrArray<Version *> List = D.AllTargets();
for (Version **I = List; *I != 0; I++)
{
VerIterator Ver(Cache,*I);
@@ -463,10 +483,14 @@ bool pkgOrderList::VisitProvides(DepIterator D,bool Critical)
if (Cache[Pkg].Keep() == true && Pkg.State() == PkgIterator::NeedsNothing)
continue;
- if (D->Type != pkgCache::Dep::Conflicts && Cache[Pkg].InstallVer != *I)
+ if (D->Type != pkgCache::Dep::Conflicts &&
+ D->Type != pkgCache::Dep::Obsoletes &&
+ Cache[Pkg].InstallVer != *I)
continue;
- if (D->Type == pkgCache::Dep::Conflicts && (Version *)Pkg.CurrentVer() != *I)
+ if ((D->Type == pkgCache::Dep::Conflicts ||
+ D->Type == pkgCache::Dep::Obsoletes) &&
+ (Version *)Pkg.CurrentVer() != *I)
continue;
// Skip over missing files
@@ -474,12 +498,8 @@ bool pkgOrderList::VisitProvides(DepIterator D,bool Critical)
continue;
if (VisitNode(Pkg) == false)
- {
- delete [] List;
return false;
- }
}
- delete [] List;
return true;
}
/*}}}*/
@@ -496,8 +516,12 @@ bool pkgOrderList::VisitNode(PkgIterator Pkg)
IsFlag(Pkg,AddPending) == true || IsFlag(Pkg,InList) == false)
return true;
-/* for (int j = 0; j != Depth; j++) cout << ' ';
- cout << "Visit " << Pkg.Name() << endl;*/
+ if (Debug == true)
+ {
+ for (int j = 0; j != Depth; j++) clog << ' ';
+ clog << "Visit " << Pkg.Name() << endl;
+ }
+
Depth++;
// Color grey
@@ -550,10 +574,13 @@ bool pkgOrderList::VisitNode(PkgIterator Pkg)
Primary = Old;
Depth--;
-
-/* for (int j = 0; j != Depth; j++) cout << ' ';
- cout << "Leave " << Pkg.Name() << ' ' << IsFlag(Pkg,Added) << ',' << IsFlag(Pkg,AddPending) << endl;*/
+ if (Debug == true)
+ {
+ for (int j = 0; j != Depth; j++) clog << ' ';
+ clog << "Leave " << Pkg.Name() << ' ' << IsFlag(Pkg,Added) << ',' << IsFlag(Pkg,AddPending) << endl;
+ }
+
return true;
}
/*}}}*/
@@ -573,7 +600,8 @@ bool pkgOrderList::DepUnPackCrit(DepIterator D)
{
/* Reverse dependencies are only interested in conflicts,
predepend breakage is ignored here */
- if (D->Type != pkgCache::Dep::Conflicts)
+ if (D->Type != pkgCache::Dep::Conflicts &&
+ D->Type != pkgCache::Dep::Obsoletes)
continue;
// Duplication elimination, consider only the current version
@@ -594,7 +622,9 @@ bool pkgOrderList::DepUnPackCrit(DepIterator D)
{
/* Forward critical dependencies MUST be correct before the
package can be unpacked. */
- if (D->Type != pkgCache::Dep::Conflicts && D->Type != pkgCache::Dep::PreDepends)
+ if (D->Type != pkgCache::Dep::Conflicts &&
+ D->Type != pkgCache::Dep::Obsoletes &&
+ D->Type != pkgCache::Dep::PreDepends)
continue;
/* We wish to check if the dep is okay in the now state of the
@@ -702,7 +732,7 @@ bool pkgOrderList::DepUnPackPre(DepIterator D)
else
continue;
}
-
+
/* We wish to check if the dep is okay in the now state of the
target package against the install state of this package. */
if (CheckDep(D) == true)
@@ -712,7 +742,7 @@ bool pkgOrderList::DepUnPackPre(DepIterator D)
if (IsFlag(D.TargetPkg(),AddPending) == false)
continue;
}
-
+
// This is the loop detection
if (IsFlag(D.TargetPkg(),Added) == true ||
IsFlag(D.TargetPkg(),AddPending) == true)
@@ -875,7 +905,7 @@ bool pkgOrderList::AddLoop(DepIterator D)
/* */
void pkgOrderList::WipeFlags(unsigned long F)
{
- unsigned long Size = Cache.HeaderP->PackageCount;
+ unsigned long Size = Cache.Head().PackageCount;
for (unsigned long I = 0; I != Size; I++)
Flags[I] &= ~F;
}
@@ -889,7 +919,7 @@ void pkgOrderList::WipeFlags(unsigned long F)
this fails to produce a suitable result. */
bool pkgOrderList::CheckDep(DepIterator D)
{
- Version **List = D.AllTargets();
+ SPtrArray<Version *> List = D.AllTargets();
bool Hit = false;
for (Version **I = List; *I != 0; I++)
{
@@ -912,10 +942,11 @@ bool pkgOrderList::CheckDep(DepIterator D)
if ((Version *)Pkg.CurrentVer() != *I ||
Pkg.State() != PkgIterator::NeedsNothing)
continue;
-
+
/* Conflicts requires that all versions are not present, depends
just needs one */
- if (D->Type != pkgCache::Dep::Conflicts)
+ if (D->Type != pkgCache::Dep::Conflicts &&
+ D->Type != pkgCache::Dep::Obsoletes)
{
/* Try to find something that does not have the after flag set
if at all possible */
@@ -925,7 +956,6 @@ bool pkgOrderList::CheckDep(DepIterator D)
continue;
}
- delete [] List;
return true;
}
else
@@ -933,11 +963,9 @@ bool pkgOrderList::CheckDep(DepIterator D)
if (IsFlag(Pkg,After) == true)
Flag(D.ParentPkg(),After);
- delete [] List;
return false;
}
}
- delete [] List;
// We found a hit, but it had the after flag set
if (Hit == true && D->Type == pkgCache::Dep::PreDepends)
@@ -948,7 +976,8 @@ bool pkgOrderList::CheckDep(DepIterator D)
/* Conflicts requires that all versions are not present, depends
just needs one */
- if (D->Type == pkgCache::Dep::Conflicts)
+ if (D->Type == pkgCache::Dep::Conflicts ||
+ D->Type == pkgCache::Dep::Obsoletes)
return true;
return false;
}
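Most of the mechanical churn above is the switch from hand-written delete [] cleanup to the SPtrArray guard from sptr.h. The sketch below restates the pattern in isolation; it assumes only the interface the hunks above already exercise (construction from a new[]'d pointer, implicit conversion back to the raw pointer, delete [] on scope exit, and UnGuard() to release ownership, as in DoRun):

#include <apt-pkg/pkgcache.h>
#include <apt-pkg/sptr.h>

// Before: every exit path had to remember the delete [].
void OldStyle(pkgCache::DepIterator D)
{
   pkgCache::Version **List = D.AllTargets();
   for (pkgCache::Version **I = List; *I != 0; I++)
      ;                         // inspect targets
   delete [] List;
}

// After: the guard owns the array, so early returns cannot leak it.
void NewStyle(pkgCache::DepIterator D)
{
   SPtrArray<pkgCache::Version *> List = D.AllTargets();
   for (pkgCache::Version **I = List; *I != 0; I++)
      ;                         // inspect targets
}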
diff --git a/apt-pkg/orderlist.h b/apt-pkg/orderlist.h
index 59949f106..d13301bcf 100644
--- a/apt-pkg/orderlist.h
+++ b/apt-pkg/orderlist.h
@@ -1,19 +1,18 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: orderlist.h,v 1.8 2000/01/16 08:45:47 jgg Exp $
+// $Id: orderlist.h,v 1.9 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Order List - Represents and Manipulates an ordered list of packages.
A list of packages can be ordered by a number of conflicting criteria
each given a specific priority. Each package also has a set of flags
- indicating some usefull things about it that are derived in the
+ indicating some useful things about it that are derived in the
course of sorting. The pkgPackageManager class uses this class for
all of it's installation ordering needs.
##################################################################### */
/*}}}*/
-// Header section: pkglib
#ifndef PKGLIB_ORDERLIST_H
#define PKGLIB_ORDERLIST_H
@@ -24,19 +23,11 @@
#include <apt-pkg/pkgcache.h>
class pkgDepCache;
-class pkgOrderList
+class pkgOrderList : protected pkgCache::Namespace
{
protected:
- pkgDepCache &Cache;
-
- // Bring some usefull types into the local scope
- typedef pkgCache::PkgIterator PkgIterator;
- typedef pkgCache::VerIterator VerIterator;
- typedef pkgCache::DepIterator DepIterator;
- typedef pkgCache::PrvIterator PrvIterator;
- typedef pkgCache::Package Package;
- typedef pkgCache::Version Version;
+ pkgDepCache &Cache;
typedef bool (pkgOrderList::*DepFunc)(DepIterator D);
// These are the currently selected ordering functions
@@ -48,13 +39,13 @@ class pkgOrderList
// State
Package **End;
Package **List;
- Package **AfterList;
Package **AfterEnd;
string *FileList;
DepIterator Loops[20];
int LoopCount;
int Depth;
unsigned short *Flags;
+ bool Debug;
// Main visit function
bool VisitNode(PkgIterator Pkg);
@@ -122,7 +113,7 @@ class pkgOrderList
int Score(PkgIterator Pkg);
- pkgOrderList(pkgDepCache &Cache);
+ pkgOrderList(pkgDepCache *Cache);
~pkgOrderList();
};
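The other recurring change is that client classes stop re-declaring the pkgCache iterator typedefs and instead derive from pkgCache::Namespace, assumed here to be a typedef-only mixin defined in pkgcache.h (its body is not part of this diff). A sketch of the pattern as a consumer would use it (class and method are invented):

#include <apt-pkg/depcache.h>

class MyOrderingTool : protected pkgCache::Namespace
{
   pkgDepCache &Cache;
   public:
   // PkgIterator resolves through the Namespace base, no local typedef needed
   bool IsKept(PkgIterator Pkg) {return Cache[Pkg].Keep();};
   MyOrderingTool(pkgDepCache *pCache) : Cache(*pCache) {};
};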
diff --git a/apt-pkg/packagemanager.cc b/apt-pkg/packagemanager.cc
index 99710469b..6101b618f 100644
--- a/apt-pkg/packagemanager.cc
+++ b/apt-pkg/packagemanager.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: packagemanager.cc,v 1.25 2000/05/12 04:26:42 jgg Exp $
+// $Id: packagemanager.cc,v 1.26 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Package Manager - Abstracts the package manager
@@ -16,6 +16,7 @@
#ifdef __GNUG__
#pragma implementation "apt-pkg/packagemanager.h"
#endif
+
#include <apt-pkg/packagemanager.h>
#include <apt-pkg/orderlist.h>
#include <apt-pkg/depcache.h>
@@ -24,12 +25,15 @@
#include <apt-pkg/acquire-item.h>
#include <apt-pkg/algorithms.h>
#include <apt-pkg/configuration.h>
+#include <apt-pkg/sptr.h>
+
+#include <apti18n.h>
/*}}}*/
// PM::PackageManager - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
-pkgPackageManager::pkgPackageManager(pkgDepCache &Cache) : Cache(Cache)
+pkgPackageManager::pkgPackageManager(pkgDepCache *pCache) : Cache(*pCache)
{
FileNames = new string[Cache.Head().PackageCount];
List = 0;
@@ -88,7 +92,7 @@ bool pkgPackageManager::GetArchives(pkgAcquire *Owner,pkgSourceList *Sources,
be downloaded. */
bool pkgPackageManager::FixMissing()
{
- pkgProblemResolver Resolve(Cache);
+ pkgProblemResolver Resolve(&Cache);
List->SetFileList(FileNames);
bool Bad = false;
@@ -124,7 +128,7 @@ bool pkgPackageManager::CreateOrderList()
return true;
delete List;
- List = new pkgOrderList(Cache);
+ List = new pkgOrderList(&Cache);
bool NoImmConfigure = _config->FindB("APT::Immediate-Configure",false);
@@ -193,7 +197,8 @@ bool pkgPackageManager::CheckRConflicts(PkgIterator Pkg,DepIterator D,
{
for (;D.end() == false; D++)
{
- if (D->Type != pkgCache::Dep::Conflicts)
+ if (D->Type != pkgCache::Dep::Conflicts &&
+ D->Type != pkgCache::Dep::Obsoletes)
continue;
// The package hasnt been changed
@@ -204,9 +209,9 @@ bool pkgPackageManager::CheckRConflicts(PkgIterator Pkg,DepIterator D,
if (D.ParentPkg() == Pkg || D.ParentVer() != D.ParentPkg().CurrentVer())
continue;
- if (pkgCheckDep(D.TargetVer(),Ver,D->CompareOp) == false)
+ if (Cache.VS().CheckDep(Ver,D->CompareOp,D.TargetVer()) == false)
continue;
-
+
if (EarlyRemove(D.ParentPkg()) == false)
return _error->Error("Reverse conflicts early remove for package '%s' failed",
Pkg.Name());
@@ -220,7 +225,7 @@ bool pkgPackageManager::CheckRConflicts(PkgIterator Pkg,DepIterator D,
that the final configuration is valid. */
bool pkgPackageManager::ConfigureAll()
{
- pkgOrderList OList(Cache);
+ pkgOrderList OList(&Cache);
// Populate the order list
for (pkgOrderList::iterator I = List->begin(); I != List->end(); I++)
@@ -251,14 +256,14 @@ bool pkgPackageManager::ConfigureAll()
of it's dependents. */
bool pkgPackageManager::SmartConfigure(PkgIterator Pkg)
{
- pkgOrderList OList(Cache);
+ pkgOrderList OList(&Cache);
if (DepAdd(OList,Pkg) == false)
return false;
if (OList.OrderConfigure() == false)
return false;
-
+
// Perform the configuring
for (pkgOrderList::iterator I = OList.begin(); I != OList.end(); I++)
{
@@ -288,8 +293,7 @@ bool pkgPackageManager::DepAdd(pkgOrderList &OList,PkgIterator Pkg,int Depth)
return true;
if (List->IsFlag(Pkg,pkgOrderList::UnPacked) == false)
return false;
-
-
+
// Put the package on the list
OList.push_back(Pkg);
OList.Flag(Pkg,pkgOrderList::Added);
@@ -314,7 +318,7 @@ bool pkgPackageManager::DepAdd(pkgOrderList &OList,PkgIterator Pkg,int Depth)
if (Bad == false)
continue;
- Version **VList = D.AllTargets();
+ SPtrArray<Version *> VList = D.AllTargets();
for (Version **I = VList; *I != 0 && Bad == true; I++)
{
VerIterator Ver(Cache,*I);
@@ -332,12 +336,12 @@ bool pkgPackageManager::DepAdd(pkgOrderList &OList,PkgIterator Pkg,int Depth)
if (Cache[Pkg].InstallVer != *I ||
(Cache[Pkg].Keep() == true && Pkg.State() == PkgIterator::NeedsNothing))
continue;
+
if (List->IsFlag(Pkg,pkgOrderList::UnPacked) == true)
Bad = !DepAdd(OList,Pkg,Depth);
if (List->IsFlag(Pkg,pkgOrderList::Configured) == true)
Bad = false;
}
- delete [] VList;
}
if (Bad == true)
@@ -388,11 +392,11 @@ bool pkgPackageManager::EarlyRemove(PkgIterator Pkg)
if (IsEssential == true)
{
if (_config->FindB("APT::Force-LoopBreak",false) == false)
- return _error->Error("This installation run will require temporarily "
- "removing the essential package %s due to a "
- "Conflicts/Pre-Depends loop. This is often bad, "
- "but if you really want to do it, activate the "
- "APT::Force-LoopBreak option.",Pkg.Name());
+ return _error->Error(_("This installation run will require temporarily "
+ "removing the essential package %s due to a "
+ "Conflicts/Pre-Depends loop. This is often bad, "
+ "but if you really want to do it, activate the "
+ "APT::Force-LoopBreak option."),Pkg.Name());
}
bool Res = SmartRemove(Pkg);
@@ -426,7 +430,7 @@ bool pkgPackageManager::SmartUnPack(PkgIterator Pkg)
List->Flag(Pkg,pkgOrderList::UnPacked,pkgOrderList::States);
if (List->IsFlag(Pkg,pkgOrderList::Immediate) == true)
if (SmartConfigure(Pkg) == false)
- return _error->Error("Internal Error, Could not perform immediate configuration");
+ return _error->Error("Internal Error, Could not perform immediate configuration (1) on %s",Pkg.Name());
return true;
}
@@ -443,7 +447,7 @@ bool pkgPackageManager::SmartUnPack(PkgIterator Pkg)
while (End->Type == pkgCache::Dep::PreDepends)
{
// Look for possible ok targets.
- Version **VList = Start.AllTargets();
+ SPtrArray<Version *> VList = Start.AllTargets();
bool Bad = true;
for (Version **I = VList; *I != 0 && Bad == true; I++)
{
@@ -473,8 +477,6 @@ bool pkgPackageManager::SmartUnPack(PkgIterator Pkg)
Bad = !SmartConfigure(Pkg);
}
- delete [] VList;
-
/* If this or element did not match then continue on to the
next or element until a matching element is found*/
if (Bad == true)
@@ -487,11 +489,12 @@ bool pkgPackageManager::SmartUnPack(PkgIterator Pkg)
break;
}
- if (End->Type == pkgCache::Dep::Conflicts)
+ if (End->Type == pkgCache::Dep::Conflicts ||
+ End->Type == pkgCache::Dep::Obsoletes)
{
/* Look for conflicts. Two packages that are both in the install
state cannot conflict so we don't check.. */
- Version **VList = End.AllTargets();
+ SPtrArray<Version *> VList = End.AllTargets();
for (Version **I = VList; *I != 0; I++)
{
VerIterator Ver(Cache,*I);
@@ -504,7 +507,6 @@ bool pkgPackageManager::SmartUnPack(PkgIterator Pkg)
return _error->Error("Internal Error, Could not early remove %s",Pkg.Name());
}
}
- delete [] VList;
}
}
@@ -525,7 +527,7 @@ bool pkgPackageManager::SmartUnPack(PkgIterator Pkg)
// Perform immedate configuration of the package.
if (List->IsFlag(Pkg,pkgOrderList::Immediate) == true)
if (SmartConfigure(Pkg) == false)
- return _error->Error("Internal Error, Could not perform immediate configuration");
+ return _error->Error("Internal Error, Could not perform immediate configuration (2) on %s",Pkg.Name());
return true;
}
diff --git a/apt-pkg/packagemanager.h b/apt-pkg/packagemanager.h
index d8a09f65a..e46f8808b 100644
--- a/apt-pkg/packagemanager.h
+++ b/apt-pkg/packagemanager.h
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: packagemanager.h,v 1.10 1999/07/20 05:53:33 jgg Exp $
+// $Id: packagemanager.h,v 1.11 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Package Manager - Abstracts the package manager
@@ -20,7 +20,6 @@
##################################################################### */
/*}}}*/
-// Header section: pkglib
#ifndef PKGLIB_PACKAGEMANAGER_H
#define PKGLIB_PACKAGEMANAGER_H
@@ -36,7 +35,7 @@ class pkgDepCache;
class pkgSourceList;
class pkgOrderList;
class pkgRecords;
-class pkgPackageManager
+class pkgPackageManager : protected pkgCache::Namespace
{
public:
@@ -47,15 +46,7 @@ class pkgPackageManager
pkgDepCache &Cache;
pkgOrderList *List;
bool Debug;
-
- // Bring some usefull types into the local scope
- typedef pkgCache::PkgIterator PkgIterator;
- typedef pkgCache::VerIterator VerIterator;
- typedef pkgCache::DepIterator DepIterator;
- typedef pkgCache::PrvIterator PrvIterator;
- typedef pkgCache::Version Version;
- typedef pkgCache::Package Package;
-
+
bool DepAdd(pkgOrderList &Order,PkgIterator P,int Depth = 0);
OrderResult OrderInstall();
bool CheckRConflicts(PkgIterator Pkg,DepIterator Dep,const char *Ver);
@@ -71,10 +62,10 @@ class pkgPackageManager
bool SmartRemove(PkgIterator Pkg);
bool EarlyRemove(PkgIterator Pkg);
- // The Actuall installation implementation
- virtual bool Install(PkgIterator /*Pkg*/,string /*File*/) {return false;};
- virtual bool Configure(PkgIterator /*Pkg*/) {return false;};
- virtual bool Remove(PkgIterator /*Pkg*/,bool /*Purge*/=false) {return false;};
+ // The Actual installation implementation
+ virtual bool Install(PkgIterator Pkg,string File) {return false;};
+ virtual bool Configure(PkgIterator Pkg) {return false;};
+ virtual bool Remove(PkgIterator Pkg,bool Purge=false) {return false;};
virtual bool Go() {return true;};
virtual void Reset() {};
@@ -86,7 +77,7 @@ class pkgPackageManager
OrderResult DoInstall();
bool FixMissing();
- pkgPackageManager(pkgDepCache &Cache);
+ pkgPackageManager(pkgDepCache *Cache);
virtual ~pkgPackageManager();
};
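A back end only has to fill in the three Install/Configure/Remove hooks shown above (plus Go()/Reset() if it batches work); pkgDPkgPM, touched earlier in this diff, is the real implementation that shells out to dpkg. A do-nothing sketch against the new pointer-taking constructor (class name invented):

#include <apt-pkg/packagemanager.h>

class pkgNullPM : public pkgPackageManager
{
   protected:
   virtual bool Install(PkgIterator Pkg,string File) {return true;};
   virtual bool Configure(PkgIterator Pkg) {return true;};
   virtual bool Remove(PkgIterator Pkg,bool Purge = false) {return true;};
   public:
   pkgNullPM(pkgDepCache *Cache) : pkgPackageManager(Cache) {};
};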
diff --git a/apt-pkg/pkgcache.cc b/apt-pkg/pkgcache.cc
index 37a9c3aab..aa3e8565e 100644
--- a/apt-pkg/pkgcache.cc
+++ b/apt-pkg/pkgcache.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: pkgcache.cc,v 1.31 1999/12/10 23:40:29 jgg Exp $
+// $Id: pkgcache.cc,v 1.32 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Package Cache - Accessor code for the cache
@@ -24,11 +24,15 @@
#pragma implementation "apt-pkg/pkgcache.h"
#pragma implementation "apt-pkg/cacheiterators.h"
#endif
+
#include <apt-pkg/pkgcache.h>
#include <apt-pkg/version.h>
#include <apt-pkg/error.h>
#include <apt-pkg/strutl.h>
+#include <apt-pkg/configuration.h>
+#include <apti18n.h>
+
#include <string>
#include <sys/stat.h>
#include <unistd.h>
@@ -48,7 +52,7 @@ pkgCache::Header::Header()
whenever the generator changes the minor version should be bumped. */
MajorVersion = 3;
MinorVersion = 5;
- Dirty = true;
+ Dirty = false;
HeaderSz = sizeof(pkgCache::Header);
PackageSz = sizeof(pkgCache::Package);
@@ -68,6 +72,8 @@ pkgCache::Header::Header()
FileList = 0;
StringList = 0;
+ VerSysName = 0;
+ Architecture = 0;
memset(HashTable,0,sizeof(HashTable));
memset(Pools,0,sizeof(Pools));
}
@@ -92,9 +98,10 @@ bool pkgCache::Header::CheckSizes(Header &Against) const
// Cache::pkgCache - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
-pkgCache::pkgCache(MMap &Map) : Map(Map)
+pkgCache::pkgCache(MMap *Map, bool DoMap) : Map(*Map)
{
- ReMap();
+ if (DoMap == true)
+ ReMap();
}
/*}}}*/
// Cache::ReMap - Reopen the cache file /*{{{*/
@@ -113,20 +120,29 @@ bool pkgCache::ReMap()
StringItemP = (StringItem *)Map.Data();
StrP = (char *)Map.Data();
- if (Map.Size() == 0)
- return false;
+ if (Map.Size() == 0 || HeaderP == 0)
+ return _error->Error(_("Empty package cache"));
// Check the header
Header DefHeader;
if (HeaderP->Signature != DefHeader.Signature ||
HeaderP->Dirty == true)
- return _error->Error("The package cache file is corrupted");
+ return _error->Error(_("The package cache file is corrupted"));
if (HeaderP->MajorVersion != DefHeader.MajorVersion ||
HeaderP->MinorVersion != DefHeader.MinorVersion ||
HeaderP->CheckSizes(DefHeader) == false)
- return _error->Error("The package cache file is an incompatible version");
-
+ return _error->Error(_("The package cache file is an incompatible version"));
+
+ // Locate our VS..
+ if (HeaderP->VerSysName == 0 ||
+ (VS = pkgVersioningSystem::GetVS(StrP + HeaderP->VerSysName)) == 0)
+ return _error->Error(_("This APT does not support the Versioning System '%s'"),StrP + HeaderP->VerSysName);
+
+ // Check the architecture
+ if (HeaderP->Architecture == 0 ||
+ _config->Find("APT::Architecture") != StrP + HeaderP->Architecture)
+ return _error->Error(_("The package cache was build for a different architecture"));
return true;
}
/*}}}*/
@@ -168,54 +184,55 @@ pkgCache::PkgIterator pkgCache::FindPkg(string Name)
return PkgIterator(*this,0);
}
/*}}}*/
+// Cache::CompTypeDeb - Return a string describing the compare type /*{{{*/
+// ---------------------------------------------------------------------
+/* This returns a string representation of the dependency compare
+ type in the weird debian style.. */
+const char *pkgCache::CompTypeDeb(unsigned char Comp)
+{
+ const char *Ops[] = {"","<=",">=","<<",">>","=","!="};
+ if ((unsigned)(Comp & 0xF) < 7)
+ return Ops[Comp & 0xF];
+ return "";
+}
+ /*}}}*/
+// Cache::CompType - Return a string describing the compare type /*{{{*/
+// ---------------------------------------------------------------------
+/* This returns a string representation of the dependency compare
+ type */
+const char *pkgCache::CompType(unsigned char Comp)
+{
+ const char *Ops[] = {"","<=",">=","<",">","=","!="};
+ if ((unsigned)(Comp & 0xF) < 7)
+ return Ops[Comp & 0xF];
+ return "";
+}
+ /*}}}*/
+// Cache::DepType - Return a string describing the dep type /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+const char *pkgCache::DepType(unsigned char Type)
+{
+ const char *Types[] = {"",_("Depends"),_("PreDepends"),_("Suggests"),
+ _("Recommends"),_("Conflicts"),_("Replaces"),
+ _("Obsoletes")};
+ if (Type < 8)
+ return Types[Type];
+ return "";
+}
+ /*}}}*/
// Cache::Priority - Convert a priority value to a string /*{{{*/
// ---------------------------------------------------------------------
/* */
const char *pkgCache::Priority(unsigned char Prio)
{
- const char *Mapping[] = {0,"important","required","standard","optional","extra"};
+ const char *Mapping[] = {0,_("important"),_("required"),_("standard"),
+ _("optional"),_("extra")};
if (Prio < _count(Mapping))
return Mapping[Prio];
return 0;
}
/*}}}*/
-// Cache::GetCandidateVer - Returns the Candidate install version /*{{{*/
-// ---------------------------------------------------------------------
-/* The default just returns the highest available version that is not
- a source and automatic */
-pkgCache::VerIterator pkgCache::GetCandidateVer(PkgIterator Pkg,
- bool AllowCurrent)
-{
- /* Not source/not automatic versions cannot be a candidate version
- unless they are already installed */
- VerIterator Last(*this,0);
-
- for (VerIterator I = Pkg.VersionList(); I.end() == false; I++)
- {
- if (Pkg.CurrentVer() == I && AllowCurrent == true)
- return I;
-
- for (VerFileIterator J = I.FileList(); J.end() == false; J++)
- {
- if ((J.File()->Flags & Flag::NotSource) != 0)
- continue;
-
- /* Stash the highest version of a not-automatic source, we use it
- if there is nothing better */
- if ((J.File()->Flags & Flag::NotAutomatic) != 0)
- {
- if (Last.end() == true)
- Last = I;
- continue;
- }
-
- return I;
- }
- }
-
- return Last;
-}
- /*}}}*/
// Bases for iterator classes /*{{{*/
void pkgCache::VerIterator::_dummy() {}
@@ -230,9 +247,9 @@ void pkgCache::PkgIterator::operator ++(int)
// Follow the current links
if (Pkg != Owner->PkgP)
Pkg = Owner->PkgP + Pkg->NextPackage;
-
+
// Follow the hash table
- while (Pkg == Owner->PkgP && HashIndex < (signed)_count(Owner->HeaderP->HashTable))
+ while (Pkg == Owner->PkgP && (HashIndex+1) < (signed)_count(Owner->HeaderP->HashTable))
{
HashIndex++;
Pkg = Owner->PkgP + Owner->HeaderP->HashTable[HashIndex];
@@ -265,7 +282,8 @@ pkgCache::PkgIterator::OkState pkgCache::PkgIterator::State() const
conflicts. */
bool pkgCache::DepIterator::IsCritical()
{
- if (Dep->Type == pkgCache::Dep::Conflicts ||
+ if (Dep->Type == pkgCache::Dep::Conflicts ||
+ Dep->Type == pkgCache::Dep::Obsoletes ||
Dep->Type == pkgCache::Dep::Depends ||
Dep->Type == pkgCache::Dep::PreDepends)
return true;
@@ -280,7 +298,11 @@ bool pkgCache::DepIterator::IsCritical()
 then it is returned. Otherwise the providing list is looked at to
 see if there is one unique providing package; if so it is returned.
Otherwise true is returned and the target package is set. The return
- result indicates whether the node should be expandable */
+ result indicates whether the node should be expandable
+
+ In conjunction with the DepCache the value of Result may not be
+ reliable since the policy may have made it uninstallable. Using
+ AllTargets is better in this case. */
bool pkgCache::DepIterator::SmartTargetPkg(PkgIterator &Result)
{
Result = TargetPkg();
@@ -314,17 +336,19 @@ bool pkgCache::DepIterator::SmartTargetPkg(PkgIterator &Result)
if (PStart.OwnerPkg() != P.OwnerPkg())
break;
}
+
+ Result = PStart.OwnerPkg();
// Check for non dups
if (P.end() != true)
return true;
- Result = PStart.OwnerPkg();
+
return false;
}
/*}}}*/
// DepIterator::AllTargets - Returns the set of all possible targets /*{{{*/
// ---------------------------------------------------------------------
-/* This is a more usefull version of TargetPkg() that follows versioned
+/* This is a more useful version of TargetPkg() that follows versioned
provides. It includes every possible package-version that could satisfy
the dependency. The last item in the list has a 0. The resulting pointer
must be delete [] 'd */
@@ -340,10 +364,11 @@ pkgCache::Version **pkgCache::DepIterator::AllTargets()
// Walk along the actual package providing versions
for (VerIterator I = DPkg.VersionList(); I.end() == false; I++)
{
- if (pkgCheckDep(TargetVer(),I.VerStr(),Dep->CompareOp) == false)
+ if (Owner->VS->CheckDep(I.VerStr(),Dep->CompareOp,TargetVer()) == false)
continue;
- if (Dep->Type == pkgCache::Dep::Conflicts &&
+ if ((Dep->Type == pkgCache::Dep::Conflicts ||
+ Dep->Type == pkgCache::Dep::Obsoletes) &&
ParentPkg() == I.ParentPkg())
continue;
@@ -355,10 +380,11 @@ pkgCache::Version **pkgCache::DepIterator::AllTargets()
// Follow all provides
for (PrvIterator I = DPkg.ProvidesList(); I.end() == false; I++)
{
- if (pkgCheckDep(TargetVer(),I.ProvideVersion(),Dep->CompareOp) == false)
+ if (Owner->VS->CheckDep(I.ProvideVersion(),Dep->CompareOp,TargetVer()) == false)
continue;
- if (Dep->Type == pkgCache::Dep::Conflicts &&
+ if ((Dep->Type == pkgCache::Dep::Conflicts ||
+ Dep->Type == pkgCache::Dep::Obsoletes) &&
ParentPkg() == I.OwnerPkg())
continue;
@@ -383,30 +409,6 @@ pkgCache::Version **pkgCache::DepIterator::AllTargets()
return Res;
}
/*}}}*/
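
Since AllTargets() returns a newly allocated, zero-terminated array, the caller must walk it and release it with delete []. A minimal sketch, assuming a valid DepIterator D and its owning pkgCache object Cache:

    // Enumerate every package-version that could satisfy the dependency D.
    pkgCache::Version **Targets = D.AllTargets();
    for (pkgCache::Version **V = Targets; *V != 0; V++)
    {
       pkgCache::VerIterator Ver(Cache,*V);
       std::cout << Ver.ParentPkg().Name() << ' ' << Ver.VerStr() << std::endl;
    }
    delete [] Targets;
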
-// DepIterator::CompType - Return a string describing the compare type /*{{{*/
-// ---------------------------------------------------------------------
-/* This returns a string representation of the dependency compare
- type */
-const char *pkgCache::DepIterator::CompType()
-{
- const char *Ops[] = {"","<=",">=","<",">","=","!="};
- if ((unsigned)(Dep->CompareOp & 0xF) < 7)
- return Ops[Dep->CompareOp & 0xF];
- return "";
-}
- /*}}}*/
-// DepIterator::DepType - Return a string describing the dep type /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-const char *pkgCache::DepIterator::DepType()
-{
- const char *Types[] = {"","Depends","PreDepends","Suggests",
- "Recommends","Conflicts","Replaces"};
- if (Dep->Type < 7)
- return Types[Dep->Type];
- return "";
-}
- /*}}}*/
// DepIterator::GlobOr - Compute an OR group /*{{{*/
// ---------------------------------------------------------------------
 /* This takes an iterator, iterates past the current dependency grouping
@@ -462,18 +464,6 @@ bool pkgCache::VerIterator::Downloadable() const
return false;
}
/*}}}*/
-// VerIterator::PriorityType - Return a string describing the priority /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-const char *pkgCache::VerIterator::PriorityType()
-{
- const char *Types[] = {"","Important","Required","Standard",
- "Optional","Extra"};
- if (Ver->Priority < 6)
- return Types[Ver->Priority];
- return "";
-}
- /*}}}*/
// VerIterator::Automatic - Check if this version is 'automatic' /*{{{*/
// ---------------------------------------------------------------------
 /* This checks to see if any of the version's files are not NotAutomatic.
@@ -497,13 +487,80 @@ pkgCache::VerFileIterator pkgCache::VerIterator::NewestFile() const
VerFileIterator Highest = Files;
for (; Files.end() == false; Files++)
{
- if (pkgVersionCompare(Files.File().Version(),Highest.File().Version()) > 0)
+ if (Owner->VS->CmpReleaseVer(Files.File().Version(),Highest.File().Version()) > 0)
Highest = Files;
}
return Highest;
}
/*}}}*/
+// VerIterator::RelStr - Release description string /*{{{*/
+// ---------------------------------------------------------------------
+/* This describes the version in a release-centric manner. The output is a
+ list of Label:Version/Archive */
+string pkgCache::VerIterator::RelStr()
+{
+ bool First = true;
+ string Res;
+ for (pkgCache::VerFileIterator I = this->FileList(); I.end() == false; I++)
+ {
+ // Do not print 'not source' entries
+ pkgCache::PkgFileIterator File = I.File();
+ if ((File->Flags & pkgCache::Flag::NotSource) == pkgCache::Flag::NotSource)
+ continue;
+
+ // See if we have already printed this out..
+ bool Seen = false;
+ for (pkgCache::VerFileIterator J = this->FileList(); I != J; J++)
+ {
+ pkgCache::PkgFileIterator File2 = J.File();
+ if (File2->Label == 0 || File->Label == 0)
+ continue;
+
+ if (strcmp(File.Label(),File2.Label()) != 0)
+ continue;
+
+ if (File2->Version == File->Version)
+ {
+ Seen = true;
+ break;
+ }
+ if (File2->Version == 0)
+ break;
+ if (strcmp(File.Version(),File2.Version()) == 0)
+ Seen = true;
+ }
+
+ if (Seen == true)
+ continue;
+
+ if (First == false)
+ Res += ", ";
+ else
+ First = false;
+
+ if (File->Label != 0)
+ Res = Res + File.Label() + ':';
+
+ if (File->Archive != 0)
+ {
+ if (File->Version == 0)
+ Res += File.Archive();
+ else
+ Res = Res + File.Version() + '/' + File.Archive();
+ }
+ else
+ {
+ // No release file, print the host name that this came from
+ if (File->Site == 0 || File.Site()[0] == 0)
+ Res += "localhost";
+ else
+ Res += File.Site();
+ }
+ }
+ return Res;
+}
+ /*}}}*/
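
For a version carried by a normal archive with a Release file, RelStr() yields entries of the form Label:Version/Archive (for example "Debian:2.2/stable"; the values are illustrative), and entries without Release data fall back to the originating site or "localhost". A one-line usage sketch, assuming valid iterators Pkg and Ver:

    // Show each version together with the releases that carry it.
    std::cout << Pkg.Name() << ' ' << Ver.VerStr()
              << " [" << Ver.RelStr() << ']' << std::endl;
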
// PkgFileIterator::IsOk - Checks if the cache is in sync with the file /*{{{*/
// ---------------------------------------------------------------------
/* This stats the file and compares its stats with the ones that were
diff --git a/apt-pkg/pkgcache.h b/apt-pkg/pkgcache.h
index 4d2401570..f2b1c39fa 100644
--- a/apt-pkg/pkgcache.h
+++ b/apt-pkg/pkgcache.h
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: pkgcache.h,v 1.22 1999/07/30 02:54:25 jgg Exp $
+// $Id: pkgcache.h,v 1.23 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Cache - Structure definitions for the cache file
@@ -16,7 +16,6 @@
##################################################################### */
/*}}}*/
-// Header section: pkglib
#ifndef PKGLIB_PKGCACHE_H
#define PKGLIB_PKGCACHE_H
@@ -27,7 +26,8 @@
#include <string>
#include <time.h>
#include <apt-pkg/mmap.h>
-
+
+class pkgVersioningSystem;
class pkgCache
{
public:
@@ -48,18 +48,20 @@ class pkgCache
class PrvIterator;
class PkgFileIterator;
class VerFileIterator;
- friend PkgIterator;
- friend VerIterator;
- friend DepIterator;
- friend PrvIterator;
- friend PkgFileIterator;
- friend VerFileIterator;
+ friend class PkgIterator;
+ friend class VerIterator;
+ friend class DepIterator;
+ friend class PrvIterator;
+ friend class PkgFileIterator;
+ friend class VerFileIterator;
+
+ class Namespace;
// These are all the constants used in the cache structures
struct Dep
{
enum DepType {Depends=1,PreDepends=2,Suggests=3,Recommends=4,
- Conflicts=5,Replaces=6};
+ Conflicts=5,Replaces=6,Obsoletes=7};
enum DepCompareOp {Or=0x10,NoOp=0,LessEq=0x1,GreaterEq=0x2,Less=0x3,
Greater=0x4,Equals=0x5,NotEquals=0x6};
};
@@ -104,7 +106,8 @@ class pkgCache
virtual bool ReMap();
inline bool Sync() {return Map.Sync();};
inline MMap &GetMap() {return Map;};
-
+ inline void *DataEnd() {return ((unsigned char *)Map.Data()) + Map.Size();};
+
// String hashing function (512 range)
inline unsigned long Hash(string S) const {return sHash(S);};
inline unsigned long Hash(const char *S) const {return sHash(S);};
@@ -119,9 +122,16 @@ class pkgCache
inline PkgIterator PkgEnd();
inline PkgFileIterator FileBegin();
inline PkgFileIterator FileEnd();
- VerIterator GetCandidateVer(PkgIterator Pkg,bool AllowCurrent = true);
+
+ // Make me a function
+ pkgVersioningSystem *VS;
+
+ // Converters
+ static const char *CompTypeDeb(unsigned char Comp);
+ static const char *CompType(unsigned char Comp);
+ static const char *DepType(unsigned char Dep);
- pkgCache(MMap &Map);
+ pkgCache(MMap *Map,bool DoMap = true);
virtual ~pkgCache() {};
};
@@ -154,6 +164,8 @@ struct pkgCache::Header
// Offsets
map_ptrloc FileList; // struct PackageFile
map_ptrloc StringList; // struct StringItem
+ map_ptrloc VerSysName; // StringTable
+ map_ptrloc Architecture; // StringTable
unsigned long MaxVerFileSize;
/* Allocation pools, there should be one of these for each structure
@@ -172,9 +184,7 @@ struct pkgCache::Package
// Pointers
map_ptrloc Name; // Stringtable
map_ptrloc VersionList; // Version
- map_ptrloc TargetVer; // Version
map_ptrloc CurrentVer; // Version
- map_ptrloc TargetDist; // StringTable (StringItem)
map_ptrloc Section; // StringTable (StringItem)
// Linked list
@@ -201,6 +211,8 @@ struct pkgCache::PackageFile
map_ptrloc Origin; // Stringtable
map_ptrloc Label; // Stringtable
map_ptrloc Architecture; // Stringtable
+ map_ptrloc Site; // Stringtable
+ map_ptrloc IndexType; // Stringtable
unsigned long Size;
unsigned long Flags;
@@ -249,7 +261,7 @@ struct pkgCache::Dependency
// Specific types of depends
unsigned char Type;
unsigned char CompareOp;
- unsigned short ID;
+ unsigned short ID;
};
struct pkgCache::Provides
@@ -274,8 +286,26 @@ inline pkgCache::PkgIterator pkgCache::PkgBegin()
inline pkgCache::PkgIterator pkgCache::PkgEnd()
{return PkgIterator(*this,PkgP);};
inline pkgCache::PkgFileIterator pkgCache::FileBegin()
- {return PkgFileIterator(*this);};
+ {return PkgFileIterator(*this,PkgFileP + HeaderP->FileList);};
inline pkgCache::PkgFileIterator pkgCache::FileEnd()
{return PkgFileIterator(*this,PkgFileP);};
+// Oh I wish for Real Name Space Support
+class pkgCache::Namespace
+{
+ public:
+
+ typedef pkgCache::PkgIterator PkgIterator;
+ typedef pkgCache::VerIterator VerIterator;
+ typedef pkgCache::DepIterator DepIterator;
+ typedef pkgCache::PrvIterator PrvIterator;
+ typedef pkgCache::PkgFileIterator PkgFileIterator;
+ typedef pkgCache::VerFileIterator VerFileIterator;
+ typedef pkgCache::Version Version;
+ typedef pkgCache::Package Package;
+ typedef pkgCache::Header Header;
+ typedef pkgCache::Dep Dep;
+ typedef pkgCache::Flag Flag;
+};
+
#endif
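
The Namespace helper is meant to be inherited by cache-heavy classes so the iterator typedefs can be used without the pkgCache:: prefix. A small sketch of a hypothetical consumer (CacheWalker is not part of the patch):

    // Deriving from pkgCache::Namespace pulls the iterator typedefs into scope.
    class CacheWalker : public pkgCache::Namespace
    {
       public:
       unsigned long CountVersions(pkgCache &Cache)
       {
          unsigned long Count = 0;
          for (PkgIterator P = Cache.PkgBegin(); P.end() == false; P++)
             for (VerIterator V = P.VersionList(); V.end() == false; V++)
                Count++;
          return Count;
       }
    };
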
diff --git a/apt-pkg/pkgcachegen.cc b/apt-pkg/pkgcachegen.cc
index c3cddd615..2fcccaf4a 100644
--- a/apt-pkg/pkgcachegen.cc
+++ b/apt-pkg/pkgcachegen.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: pkgcachegen.cc,v 1.45 2000/01/14 06:26:36 jgg Exp $
+// $Id: pkgcachegen.cc,v 1.46 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Package Cache Generator - Generator for the cache structure.
@@ -14,15 +14,20 @@
#pragma implementation "apt-pkg/pkgcachegen.h"
#endif
+#define APT_COMPATIBILITY 986
+
#include <apt-pkg/pkgcachegen.h>
#include <apt-pkg/error.h>
#include <apt-pkg/version.h>
#include <apt-pkg/progress.h>
#include <apt-pkg/sourcelist.h>
#include <apt-pkg/configuration.h>
-#include <apt-pkg/deblistparser.h>
#include <apt-pkg/strutl.h>
+#include <apt-pkg/sptr.h>
+#include <apt-pkg/pkgsystem.h>
+#include <apti18n.h>
+
#include <sys/stat.h>
#include <unistd.h>
#include <errno.h>
@@ -33,23 +38,42 @@
// CacheGenerator::pkgCacheGenerator - Constructor /*{{{*/
// ---------------------------------------------------------------------
 /* We set the dirty flag and make sure that it is written to the disk */
-pkgCacheGenerator::pkgCacheGenerator(DynamicMMap &Map,OpProgress &Prog) :
- Map(Map), Cache(Map), Progress(&Prog)
+pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) :
+ Map(*pMap), Cache(pMap,false), Progress(Prog)
{
CurrentFile = 0;
+ memset(UniqHash,0,sizeof(UniqHash));
if (_error->PendingError() == true)
return;
-
+
if (Map.Size() == 0)
{
+ // Setup the map interface..
+ Cache.HeaderP = (pkgCache::Header *)Map.Data();
Map.RawAllocate(sizeof(pkgCache::Header));
+ Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
+
+ // Starting header
*Cache.HeaderP = pkgCache::Header();
+ Cache.HeaderP->VerSysName = Map.WriteString(_system->VS->Label);
+ Cache.HeaderP->Architecture = Map.WriteString(_config->Find("APT::Architecture"));
+ Cache.ReMap();
}
+ else
+ {
+ // Map directly from the existing file
+ Cache.ReMap();
+ Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
+ if (Cache.VS != _system->VS)
+ {
+ _error->Error(_("Cache has an incompatible versioning system"));
+ return;
+ }
+ }
+
Cache.HeaderP->Dirty = true;
Map.Sync(0,sizeof(pkgCache::Header));
- Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
- memset(UniqHash,0,sizeof(UniqHash));
}
/*}}}*/
// CacheGenerator::~pkgCacheGenerator - Destructor /*{{{*/
@@ -86,7 +110,7 @@ bool pkgCacheGenerator::MergeList(ListParser &List,
pkgCache::PkgIterator Pkg;
if (NewPackage(Pkg,PackageName) == false)
- return _error->Error("Error occured while processing %s (NewPackage)",PackageName.c_str());
+ return _error->Error(_("Error occured while processing %s (NewPackage)"),PackageName.c_str());
Counter++;
if (Counter % 100 == 0 && Progress != 0)
Progress->Progress(List.Offset());
@@ -98,7 +122,7 @@ bool pkgCacheGenerator::MergeList(ListParser &List,
if (Version.empty() == true)
{
if (List.UsePackage(Pkg,pkgCache::VerIterator(Cache)) == false)
- return _error->Error("Error occured while processing %s (UsePackage1)",PackageName.c_str());
+ return _error->Error(_("Error occured while processing %s (UsePackage1)"),PackageName.c_str());
continue;
}
@@ -107,7 +131,7 @@ bool pkgCacheGenerator::MergeList(ListParser &List,
int Res = 1;
for (; Ver.end() == false; Last = &Ver->NextVer, Ver++)
{
- Res = pkgVersionCompare(Version.begin(),Version.end(),Ver.VerStr(),
+ Res = Cache.VS->DoCmpVersion(Version.begin(),Version.end(),Ver.VerStr(),
Ver.VerStr() + strlen(Ver.VerStr()));
if (Res >= 0)
break;
@@ -119,10 +143,10 @@ bool pkgCacheGenerator::MergeList(ListParser &List,
if (Res == 0 && Ver->Hash == Hash)
{
if (List.UsePackage(Pkg,Ver) == false)
- return _error->Error("Error occured while processing %s (UsePackage2)",PackageName.c_str());
+ return _error->Error(_("Error occured while processing %s (UsePackage2)"),PackageName.c_str());
if (NewFileVer(Ver,List) == false)
- return _error->Error("Error occured while processing %s (NewFileVer1)",PackageName.c_str());
+ return _error->Error(_("Error occured while processing %s (NewFileVer1)"),PackageName.c_str());
// Read only a single record and return
if (OutVer != 0)
@@ -139,7 +163,7 @@ bool pkgCacheGenerator::MergeList(ListParser &List,
{
for (; Ver.end() == false; Last = &Ver->NextVer, Ver++)
{
- Res = pkgVersionCompare(Version.begin(),Version.end(),Ver.VerStr(),
+ Res = Cache.VS->DoCmpVersion(Version.begin(),Version.end(),Ver.VerStr(),
Ver.VerStr() + strlen(Ver.VerStr()));
if (Res != 0)
break;
@@ -151,13 +175,13 @@ bool pkgCacheGenerator::MergeList(ListParser &List,
Ver->ParentPkg = Pkg.Index();
Ver->Hash = Hash;
if (List.NewVersion(Ver) == false)
- return _error->Error("Error occured while processing %s (NewVersion1)",PackageName.c_str());
+ return _error->Error(_("Error occured while processing %s (NewVersion1)"),PackageName.c_str());
if (List.UsePackage(Pkg,Ver) == false)
- return _error->Error("Error occured while processing %s (UsePackage3)",PackageName.c_str());
+ return _error->Error(_("Error occured while processing %s (UsePackage3)"),PackageName.c_str());
if (NewFileVer(Ver,List) == false)
- return _error->Error("Error occured while processing %s (NewVersion2)",PackageName.c_str());
+ return _error->Error(_("Error occured while processing %s (NewVersion2)"),PackageName.c_str());
// Read only a single record and return
if (OutVer != 0)
@@ -288,7 +312,7 @@ bool pkgCacheGenerator::ListParser::NewDepends(pkgCache::VerIterator Ver,
// Probe the reverse dependency list for a version string that matches
if (Version.empty() == false)
{
-/* for (pkgCache::DepIterator I = Pkg.RevDependsList(); I.end() == false; I++, Hit++)
+/* for (pkgCache::DepIterator I = Pkg.RevDependsList(); I.end() == false; I++)
if (I->Version != 0 && I.TargetVer() == Version)
Dep->Version = I->Version;*/
if (Dep->Version == 0)
@@ -342,7 +366,7 @@ bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator Ver,
Prv->Version = Ver.Index();
Prv->NextPkgProv = Ver->ProvidesList;
Ver->ProvidesList = Prv.Index();
- if (Version.empty() == false && (Prv->Version = WriteString(Version)) == 0)
+ if (Version.empty() == false && (Prv->ProvideVersion = WriteString(Version)) == 0)
return false;
// Locate the target package
@@ -361,13 +385,11 @@ bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator Ver,
// CacheGenerator::SelectFile - Select the current file being parsed /*{{{*/
// ---------------------------------------------------------------------
/* This is used to select which file is to be associated with all newly
- added versions. */
-bool pkgCacheGenerator::SelectFile(string File,unsigned long Flags)
+ added versions. The caller is responsible for setting the IMS fields. */
+bool pkgCacheGenerator::SelectFile(string File,string Site,
+ const pkgIndexFile &Index,
+ unsigned long Flags)
{
- struct stat Buf;
- if (stat(File.c_str(),&Buf) == -1)
- return _error->Errno("stat","Couldn't stat ",File.c_str());
-
// Get some space for the structure
CurrentFile = Cache.PkgFileP + Map.Allocate(sizeof(*CurrentFile));
if (CurrentFile == Cache.PkgFileP)
@@ -375,20 +397,20 @@ bool pkgCacheGenerator::SelectFile(string File,unsigned long Flags)
// Fill it in
CurrentFile->FileName = Map.WriteString(File);
- CurrentFile->Size = Buf.st_size;
- CurrentFile->mtime = Buf.st_mtime;
+ CurrentFile->Site = WriteUniqString(Site);
CurrentFile->NextFile = Cache.HeaderP->FileList;
CurrentFile->Flags = Flags;
CurrentFile->ID = Cache.HeaderP->PackageFileCount;
+ CurrentFile->IndexType = WriteUniqString(Index.GetType()->Label);
PkgFileName = File;
Cache.HeaderP->FileList = CurrentFile - Cache.PkgFileP;
Cache.HeaderP->PackageFileCount++;
-
+
if (CurrentFile->FileName == 0)
return false;
if (Progress != 0)
- Progress->SubProgress(Buf.st_size);
+ Progress->SubProgress(Index.Size());
return true;
}
/*}}}*/
@@ -443,491 +465,279 @@ unsigned long pkgCacheGenerator::WriteUniqString(const char *S,
}
/*}}}*/
-// SrcCacheCheck - Check if the source package cache is uptodate /*{{{*/
+// CheckValidity - Check that a cache is up-to-date /*{{{*/
// ---------------------------------------------------------------------
-/* The source cache is checked against the source list and the files
- on disk, any difference results in a false. */
-bool pkgSrcCacheCheck(pkgSourceList &List)
+/* This just verifies that each file in the list of index files exists,
+ has matching attributes with the cache and the cache does not have
+ any extra files. */
+static bool CheckValidity(string CacheFile,pkgIndexFile **Start,
+ pkgIndexFile **End,MMap **OutMap = 0)
{
- if (_error->PendingError() == true)
- return false;
-
- string CacheFile = _config->FindFile("Dir::Cache::srcpkgcache");
- string ListDir = _config->FindDir("Dir::State::lists");
-
- // Count the number of missing files
- int Missing = 0;
- for (pkgSourceList::const_iterator I = List.begin(); I != List.end(); I++)
- {
- // Only cache deb source types.
- if (I->Type != pkgSourceList::Item::Deb)
- {
- Missing++;
- continue;
- }
-
- string File = ListDir + URItoFileName(I->PackagesURI());
- struct stat Buf;
- if (stat(File.c_str(),&Buf) != 0)
- {
- // Old format file name.. rename it
- if (File[0] == '_' && stat(File.c_str()+1,&Buf) == 0)
- {
- if (rename(File.c_str()+1,File.c_str()) != 0)
- return _error->Errno("rename","Failed to rename %s to %s",
- File.c_str()+1,File.c_str());
- continue;
- }
-
- _error->WarningE("stat","Couldn't stat source package list '%s' (%s)",
- I->PackagesInfo().c_str(),File.c_str());
- Missing++;
- }
- }
-
- // Open the source package cache
- if (FileExists(CacheFile) == false)
+ // No file, certainly invalid
+ if (CacheFile.empty() == true || FileExists(CacheFile) == false)
return false;
+ // Map it
FileFd CacheF(CacheFile,FileFd::ReadOnly);
- if (_error->PendingError() == true)
- {
- _error->Discard();
- return false;
- }
-
- MMap Map(CacheF,MMap::Public | MMap::ReadOnly);
- if (_error->PendingError() == true || Map.Size() == 0)
- {
- _error->Discard();
- return false;
- }
-
+ SPtr<MMap> Map = new MMap(CacheF,MMap::Public | MMap::ReadOnly);
pkgCache Cache(Map);
- if (_error->PendingError() == true)
+ if (_error->PendingError() == true || Map->Size() == 0)
{
_error->Discard();
return false;
}
-
- // They are certianly out of sync
- if (Cache.Head().PackageFileCount != List.size() - Missing)
- return false;
- for (pkgCache::PkgFileIterator F(Cache); F.end() == false; F++)
- {
- // Search for a match in the source list
- bool Bad = true;
- for (pkgSourceList::const_iterator I = List.begin();
- I != List.end(); I++)
+ /* Now we check every index file, see if it is in the cache,
+ verify the IMS data and check that it is on the disk too.. */
+ SPtrArray<bool> Visited = new bool[Cache.HeaderP->PackageFileCount];
+ memset(Visited,0,sizeof(*Visited)*Cache.HeaderP->PackageFileCount);
+ for (; Start != End; Start++)
+ {
+ if ((*Start)->HasPackages() == false)
+ continue;
+
+ if ((*Start)->Exists() == false)
{
- // Only cache deb source types.
- if (I->Type != pkgSourceList::Item::Deb)
- continue;
-
- string File = ListDir + URItoFileName(I->PackagesURI());
- if (F.FileName() == File)
- {
- Bad = false;
- break;
- }
+ _error->WarningE("stat",_("Couldn't stat source package list %s"),
+ (*Start)->Describe().c_str());
+ continue;
}
-
- // Check if the file matches what was cached
- Bad |= !F.IsOk();
- if (Bad == true)
+
+ // FindInCache is also expected to do an IMS check.
+ pkgCache::PkgFileIterator File = (*Start)->FindInCache(Cache);
+ if (File.end() == true)
return false;
+
+ Visited[File->ID] = true;
}
- return true;
-}
- /*}}}*/
-// PkgCacheCheck - Check if the package cache is uptodate /*{{{*/
-// ---------------------------------------------------------------------
-/* This does a simple check of all files used to compose the cache */
-bool pkgPkgCacheCheck(string CacheFile)
-{
- if (_error->PendingError() == true)
- return false;
-
- // Open the source package cache
- if (FileExists(CacheFile) == false)
- return false;
-
- FileFd CacheF(CacheFile,FileFd::ReadOnly);
- if (_error->PendingError() == true)
- {
- _error->Discard();
- return false;
- }
-
- MMap Map(CacheF,MMap::Public | MMap::ReadOnly);
- if (_error->PendingError() == true || Map.Size() == 0)
- {
- _error->Discard();
- return false;
- }
+ for (unsigned I = 0; I != Cache.HeaderP->PackageFileCount; I++)
+ if (Visited[I] == false)
+ return false;
- pkgCache Cache(Map);
if (_error->PendingError() == true)
{
_error->Discard();
return false;
}
-
- // Status files that must be in the cache
- string Status[3];
- Status[0] = _config->FindFile("Dir::State::xstatus");
- Status[1]= _config->FindFile("Dir::State::userstatus");
- Status[2] = _config->FindFile("Dir::State::status");
-
- // Cheack each file
- for (pkgCache::PkgFileIterator F(Cache); F.end() == false; F++)
- {
- if (F.IsOk() == false)
- return false;
-
- // See if this is one of the status files
- for (int I = 0; I != 3; I++)
- if (F.FileName() == Status[I])
- Status[I] = string();
- }
-
- // Make sure all the status files are loaded.
- for (int I = 0; I != 3; I++)
- {
- if (Status[I].empty() == false && FileExists(Status[I]) == true)
- return false;
- }
-
- return true;
-}
- /*}}}*/
-// AddStatusSize - Add the size of the status files /*{{{*/
-// ---------------------------------------------------------------------
-/* This adds the size of all the status files to the size counter */
-bool pkgAddStatusSize(unsigned long &TotalSize)
-{
- // Grab the file names
- string xstatus = _config->FindFile("Dir::State::xstatus");
- string userstatus = _config->FindFile("Dir::State::userstatus");
- string status = _config->FindFile("Dir::State::status");
-
- // Grab the sizes
- struct stat Buf;
- if (stat(xstatus.c_str(),&Buf) == 0)
- TotalSize += Buf.st_size;
- if (stat(userstatus.c_str(),&Buf) == 0)
- TotalSize += Buf.st_size;
- if (stat(status.c_str(),&Buf) != 0)
- return _error->Errno("stat","Couldn't stat the status file %s",status.c_str());
- TotalSize += Buf.st_size;
+ if (OutMap != 0)
+ *OutMap = Map.UnGuard();
return true;
}
/*}}}*/
-// MergeStatus - Add the status files to the cache /*{{{*/
+// ComputeSize - Compute the total size of a bunch of files /*{{{*/
// ---------------------------------------------------------------------
-/* This adds the status files to the map */
-bool pkgMergeStatus(OpProgress &Progress,pkgCacheGenerator &Gen,
- unsigned long &CurrentSize,unsigned long TotalSize)
+/* Size is kind of an abstract notion that is only used for the progress
+ meter */
+static unsigned long ComputeSize(pkgIndexFile **Start,pkgIndexFile **End)
{
- // Grab the file names
- string Status[3];
- Status[0] = _config->FindFile("Dir::State::xstatus");
- Status[1]= _config->FindFile("Dir::State::userstatus");
- Status[2] = _config->FindFile("Dir::State::status");
-
- for (int I = 0; I != 3; I++)
+ unsigned long TotalSize = 0;
+ for (; Start != End; Start++)
{
- // Check if the file exists and it is not the primary status file.
- string File = Status[I];
- if (I != 2 && FileExists(File) == false)
- continue;
-
- FileFd Pkg(File,FileFd::ReadOnly);
- debListParser Parser(Pkg);
- Progress.OverallProgress(CurrentSize,TotalSize,Pkg.Size(),"Reading Package Lists");
- if (_error->PendingError() == true)
- return _error->Error("Problem opening %s",File.c_str());
- CurrentSize += Pkg.Size();
-
- Progress.SubProgress(0,"Local Package State - " + flNotDir(File));
- if (Gen.SelectFile(File,pkgCache::Flag::NotSource) == false)
- return _error->Error("Problem with SelectFile %s",File.c_str());
-
- if (Gen.MergeList(Parser) == false)
- return _error->Error("Problem with MergeList %s",File.c_str());
- Progress.Progress(Pkg.Size());
+ if ((*Start)->HasPackages() == false)
+ continue;
+ TotalSize += (*Start)->Size();
}
-
- return true;
+ return TotalSize;
}
/*}}}*/
-// GenerateSrcCache - Write the source package lists to the map /*{{{*/
+// BuildCache - Merge the list of index files into the cache /*{{{*/
// ---------------------------------------------------------------------
-/* This puts the source package cache into the given generator. */
-bool pkgGenerateSrcCache(pkgSourceList &List,OpProgress &Progress,
- pkgCacheGenerator &Gen,
- unsigned long &CurrentSize,unsigned long &TotalSize)
+/* */
+static bool BuildCache(pkgCacheGenerator &Gen,
+ OpProgress &Progress,
+ unsigned long &CurrentSize,unsigned long TotalSize,
+ pkgIndexFile **Start,pkgIndexFile **End)
{
- string ListDir = _config->FindDir("Dir::State::lists");
-
- // Prepare the progress indicator
- TotalSize = 0;
- struct stat Buf;
- for (pkgSourceList::const_iterator I = List.begin(); I != List.end(); I++)
- {
- string File = ListDir + URItoFileName(I->PackagesURI());
- if (stat(File.c_str(),&Buf) != 0)
- continue;
- TotalSize += Buf.st_size;
- }
-
- if (pkgAddStatusSize(TotalSize) == false)
- return false;
-
- // Generate the pkg source cache
- CurrentSize = 0;
- for (pkgSourceList::const_iterator I = List.begin(); I != List.end(); I++)
+ for (; Start != End; Start++)
{
- // Only cache deb source types.
- if (I->Type != pkgSourceList::Item::Deb)
+ if ((*Start)->HasPackages() == false)
continue;
- string File = ListDir + URItoFileName(I->PackagesURI());
-
- if (FileExists(File) == false)
+ if ((*Start)->Exists() == false)
continue;
+
+ unsigned long Size = (*Start)->Size();
+ Progress.OverallProgress(CurrentSize,TotalSize,Size,_("Reading Package Lists"));
+ CurrentSize += Size;
- FileFd Pkg(File,FileFd::ReadOnly);
- debListParser Parser(Pkg);
- Progress.OverallProgress(CurrentSize,TotalSize,Pkg.Size(),"Reading Package Lists");
- if (_error->PendingError() == true)
- return _error->Error("Problem opening %s",File.c_str());
- CurrentSize += Pkg.Size();
-
- Progress.SubProgress(0,I->PackagesInfo());
- if (Gen.SelectFile(File) == false)
- return _error->Error("Problem with SelectFile %s",File.c_str());
-
- if (Gen.MergeList(Parser) == false)
- return _error->Error("Problem with MergeList %s",File.c_str());
-
- // Check the release file
- string RFile = ListDir + URItoFileName(I->ReleaseURI());
- if (FileExists(RFile) == true)
- {
- FileFd Rel(RFile,FileFd::ReadOnly);
- if (_error->PendingError() == true)
- return false;
- Parser.LoadReleaseInfo(Gen.GetCurFile(),Rel);
- }
- }
+ if ((*Start)->Merge(Gen,Progress) == false)
+ return false;
+ }
return true;
}
/*}}}*/
-// MakeStatusCache - Generates a cache that includes the status files /*{{{*/
+// MakeStatusCache - Construct the status cache /*{{{*/
// ---------------------------------------------------------------------
-/* This copies the package source cache and then merges the status and
- xstatus files into it. */
-bool pkgMakeStatusCache(pkgSourceList &List,OpProgress &Progress)
+/* This makes sure that the status cache (the cache that has all
+ index files from the sources list and all local ones) is ready
+ to be mmaped. If OutMap is not zero then a MMap object representing
+ the cache will be stored there. This is pretty much mandatory if you
+ are using AllowMem. AllowMem lets the function be run as non-root
+ where it builds the cache 'fast' into a memory buffer. */
+bool pkgMakeStatusCache(pkgSourceList &List,OpProgress &Progress,
+ MMap **OutMap,bool AllowMem)
{
unsigned long MapSize = _config->FindI("APT::Cache-Limit",4*1024*1024);
- Progress.OverallProgress(0,1,1,"Reading Package Lists");
+ vector<pkgIndexFile *> Files(List.begin(),List.end());
+ unsigned long EndOfSource = Files.size();
+ if (_system->AddStatusFiles(Files) == false)
+ return false;
+ // Decide if we can write to the files..
string CacheFile = _config->FindFile("Dir::Cache::pkgcache");
- bool SrcOk = pkgSrcCacheCheck(List);
- bool PkgOk = SrcOk && pkgPkgCacheCheck(CacheFile);
-
- // Rebuild the source and package caches
- if (SrcOk == false)
- {
- string SCacheFile = _config->FindFile("Dir::Cache::srcpkgcache");
- FileFd SCacheF(SCacheFile,FileFd::WriteEmpty);
-
- /* Open the pkgcache, we want a new inode here so we do no corrupt
- existing mmaps */
- unlink(CacheFile.c_str());
- FileFd CacheF(CacheFile,FileFd::WriteEmpty);
- DynamicMMap Map(CacheF,MMap::Public,MapSize);
+ string SrcCacheFile = _config->FindFile("Dir::Cache::srcpkgcache");
+
+ // Decide if we can write to the cache
+ bool Writeable = false;
+ if (CacheFile.empty() == false)
+ Writeable = access(flNotFile(CacheFile).c_str(),W_OK) == 0;
+ else
+ if (SrcCacheFile.empty() == false)
+ Writeable = access(flNotFile(SrcCacheFile).c_str(),W_OK) == 0;
+
+ if (Writeable == false && AllowMem == false && CacheFile.empty() == false)
+ return _error->Error(_("Unable to write to %s"),flNotFile(CacheFile).c_str());
+
+ Progress.OverallProgress(0,1,1,_("Reading Package Lists"));
+
+ // Cache is OK, Fin.
+ if (CheckValidity(CacheFile,Files.begin(),Files.end(),OutMap) == true)
+ {
+ Progress.OverallProgress(1,1,1,_("Reading Package Lists"));
+ return true;
+ }
+
+ /* At this point we know we need to reconstruct the package cache,
+ begin. */
+ SPtr<FileFd> CacheF;
+ SPtr<DynamicMMap> Map;
+ if (Writeable == true && CacheFile.empty() == false)
+ {
+ unlink(CacheFile.c_str());
+ CacheF = new FileFd(CacheFile,FileFd::WriteEmpty);
+ Map = new DynamicMMap(*CacheF,MMap::Public,MapSize);
if (_error->PendingError() == true)
return false;
-
- pkgCacheGenerator Gen(Map,Progress);
- unsigned long CurrentSize = 0;
- unsigned long TotalSize = 0;
- if (pkgGenerateSrcCache(List,Progress,Gen,CurrentSize,TotalSize) == false)
- return false;
-
- // Write the src cache
- Gen.GetCache().HeaderP->Dirty = false;
- if (SCacheF.Write(Map.Data(),Map.Size()) == false)
- return _error->Error("IO Error saving source cache");
- Gen.GetCache().HeaderP->Dirty = true;
-
- // Merge in the source caches
- return pkgMergeStatus(Progress,Gen,CurrentSize,TotalSize);
}
-
- if (PkgOk == true)
+ else
{
- Progress.OverallProgress(1,1,1,"Reading Package Lists");
- return true;
+ // Just build it in memory..
+ Map = new DynamicMMap(MMap::Public,MapSize);
}
- // We use the source cache to generate the package cache
- string SCacheFile = _config->FindFile("Dir::Cache::srcpkgcache");
- FileFd SCacheF(SCacheFile,FileFd::ReadOnly);
-
- /* Open the pkgcache, we want a new inode here so we do no corrupt
- existing mmaps */
- unlink(CacheFile.c_str());
- FileFd CacheF(CacheFile,FileFd::WriteEmpty);
- DynamicMMap Map(CacheF,MMap::Public,MapSize);
- if (_error->PendingError() == true)
- return false;
-
- // Preload the map with the source cache
- if (SCacheF.Read((unsigned char *)Map.Data() + Map.RawAllocate(SCacheF.Size()),
- SCacheF.Size()) == false)
- return false;
-
- pkgCacheGenerator Gen(Map,Progress);
-
- // Compute the progress
- unsigned long TotalSize = 0;
- if (pkgAddStatusSize(TotalSize) == false)
- return false;
-
+ // Lets try the source cache.
unsigned long CurrentSize = 0;
- return pkgMergeStatus(Progress,Gen,CurrentSize,TotalSize);
-}
- /*}}}*/
-// MakeStatusCacheMem - Returns a map for the status cache /*{{{*/
-// ---------------------------------------------------------------------
-/* This creates a map object for the status cache. If the process has write
- access to the caches then it is the same as MakeStatusCache, otherwise it
- creates a memory block and puts the cache in there. */
-MMap *pkgMakeStatusCacheMem(pkgSourceList &List,OpProgress &Progress)
-{
- unsigned long MapSize = _config->FindI("APT::Cache-Limit",4*1024*1024);
-
- /* If the cache file is writeable this is just a wrapper for
- MakeStatusCache */
- string CacheFile = _config->FindFile("Dir::Cache::pkgcache");
- bool Writeable = (access(CacheFile.c_str(),W_OK) == 0) ||
- (errno == ENOENT);
-
- if (Writeable == true)
+ unsigned long TotalSize = 0;
+ if (CheckValidity(SrcCacheFile,Files.begin(),
+ Files.begin()+EndOfSource) == true)
{
- if (pkgMakeStatusCache(List,Progress) == false)
- return 0;
-
- // Open the cache file
- FileFd File(_config->FindFile("Dir::Cache::pkgcache"),FileFd::ReadOnly);
- if (_error->PendingError() == true)
- return 0;
+ // Preload the map with the source cache
+ FileFd SCacheF(SrcCacheFile,FileFd::ReadOnly);
+ if (SCacheF.Read((unsigned char *)Map->Data() + Map->RawAllocate(SCacheF.Size()),
+ SCacheF.Size()) == false)
+ return false;
+
+ TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());
- MMap *Map = new MMap(File,MMap::Public | MMap::ReadOnly);
+ // Build the status cache
+ pkgCacheGenerator Gen(Map.Get(),&Progress);
if (_error->PendingError() == true)
- {
- delete Map;
- return 0;
- }
- return Map;
- }
-
- // Mostly from MakeStatusCache..
- Progress.OverallProgress(0,1,1,"Reading Package Lists");
-
- bool SrcOk = pkgSrcCacheCheck(List);
- bool PkgOk = SrcOk && pkgPkgCacheCheck(CacheFile);
-
- // Rebuild the source and package caches
- if (SrcOk == false)
+ return false;
+ if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
+ Files.begin()+EndOfSource,Files.end()) == false)
+ return false;
+ }
+ else
{
- DynamicMMap *Map = new DynamicMMap(MMap::Public,MapSize);
- if (_error->PendingError() == true)
- {
- delete Map;
- return 0;
- }
+ TotalSize = ComputeSize(Files.begin(),Files.end());
- pkgCacheGenerator Gen(*Map,Progress);
- unsigned long CurrentSize = 0;
- unsigned long TotalSize = 0;
- if (pkgGenerateSrcCache(List,Progress,Gen,CurrentSize,TotalSize) == false)
- {
- delete Map;
- return 0;
- }
+ // Build the source cache
+ pkgCacheGenerator Gen(Map.Get(),&Progress);
+ if (_error->PendingError() == true)
+ return false;
+ if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
+ Files.begin(),Files.begin()+EndOfSource) == false)
+ return false;
- // Merge in the source caches
- if (pkgMergeStatus(Progress,Gen,CurrentSize,TotalSize) == false)
+ // Write it back
+ if (Writeable == true && SrcCacheFile.empty() == false)
{
- delete Map;
- return 0;
+ FileFd SCacheF(SrcCacheFile,FileFd::WriteEmpty);
+ if (_error->PendingError() == true)
+ return false;
+ // Write out the main data
+ if (SCacheF.Write(Map->Data(),Map->Size()) == false)
+ return _error->Error(_("IO Error saving source cache"));
+ SCacheF.Sync();
+
+ // Write out the proper header
+ Gen.GetCache().HeaderP->Dirty = false;
+ if (SCacheF.Seek(0) == false ||
+ SCacheF.Write(Map->Data(),sizeof(*Gen.GetCache().HeaderP)) == false)
+ return _error->Error(_("IO Error saving source cache"));
+ SCacheF.Sync();
+ Gen.GetCache().HeaderP->Dirty = true;
}
- return Map;
+ // Build the status cache
+ if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
+ Files.begin()+EndOfSource,Files.end()) == false)
+ return false;
}
- if (PkgOk == true)
+ if (_error->PendingError() == true)
+ return false;
+ if (OutMap != 0)
{
- Progress.OverallProgress(1,1,1,"Reading Package Lists");
-
- // Open the cache file
- FileFd File(_config->FindFile("Dir::Cache::pkgcache"),FileFd::ReadOnly);
- if (_error->PendingError() == true)
- return 0;
-
- MMap *Map = new MMap(File,MMap::Public | MMap::ReadOnly);
- if (_error->PendingError() == true)
+ if (CacheF != 0)
{
- delete Map;
- return 0;
+ delete Map.UnGuard();
+ *OutMap = new MMap(*CacheF,MMap::Public | MMap::ReadOnly);
}
- return Map;
+ else
+ {
+ *OutMap = Map.UnGuard();
+ }
}
- // We use the source cache to generate the package cache
- string SCacheFile = _config->FindFile("Dir::Cache::srcpkgcache");
- FileFd SCacheF(SCacheFile,FileFd::ReadOnly);
- DynamicMMap *Map = new DynamicMMap(MMap::Public,MapSize);
+ return true;
+}
+ /*}}}*/
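
A rough sketch of how a front end might drive the new entry point; OpenCache is a hypothetical helper and assumes the configuration and _system have already been initialised:

    #include <apt-pkg/pkgcachegen.h>
    #include <apt-pkg/sourcelist.h>
    #include <apt-pkg/pkgcache.h>
    #include <apt-pkg/progress.h>
    #include <apt-pkg/error.h>

    // Build (or revalidate) the status cache, map it and wrap it in a pkgCache.
    bool OpenCache(pkgSourceList &List,pkgCache **OutCache)
    {
       OpTextProgress Progress;
       MMap *Map = 0;
       if (pkgMakeStatusCache(List,Progress,&Map,true) == false)
          return false;
       *OutCache = new pkgCache(Map);
       return _error->PendingError() == false;
    }
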
+// MakeOnlyStatusCache - Build a cache with just the status files /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool pkgMakeOnlyStatusCache(OpProgress &Progress,DynamicMMap **OutMap)
+{
+ unsigned long MapSize = _config->FindI("APT::Cache-Limit",4*1024*1024);
+ vector<pkgIndexFile *> Files;
+ unsigned long EndOfSource = Files.size();
+ if (_system->AddStatusFiles(Files) == false)
+ return false;
+
+ SPtr<DynamicMMap> Map;
+ Map = new DynamicMMap(MMap::Public,MapSize);
+ unsigned long CurrentSize = 0;
+ unsigned long TotalSize = 0;
+
+ TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());
+
+ // Build the status cache
+ Progress.OverallProgress(0,1,1,_("Reading Package Lists"));
+ pkgCacheGenerator Gen(Map.Get(),&Progress);
if (_error->PendingError() == true)
- {
- delete Map;
- return 0;
- }
+ return false;
+ if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
+ Files.begin()+EndOfSource,Files.end()) == false)
+ return false;
- // Preload the map with the source cache
- if (SCacheF.Read((unsigned char *)Map->Data() + Map->RawAllocate(SCacheF.Size()),
- SCacheF.Size()) == false)
- {
- delete Map;
- return 0;
- }
-
- pkgCacheGenerator Gen(*Map,Progress);
+ if (_error->PendingError() == true)
+ return false;
+ *OutMap = Map.UnGuard();
- // Compute the progress
- unsigned long TotalSize = 0;
- if (pkgAddStatusSize(TotalSize) == false)
- {
- delete Map;
- return 0;
- }
-
- unsigned long CurrentSize = 0;
- if (pkgMergeStatus(Progress,Gen,CurrentSize,TotalSize) == false)
- {
- delete Map;
- return 0;
- }
-
- return Map;
+ return true;
}
/*}}}*/
diff --git a/apt-pkg/pkgcachegen.h b/apt-pkg/pkgcachegen.h
index f264b314d..7a8c784ff 100644
--- a/apt-pkg/pkgcachegen.h
+++ b/apt-pkg/pkgcachegen.h
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: pkgcachegen.h,v 1.17 1999/07/26 17:46:08 jgg Exp $
+// $Id: pkgcachegen.h,v 1.18 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Package Cache Generator - Generator for the cache structure.
@@ -16,7 +16,6 @@
##################################################################### */
/*}}}*/
-// Header section: pkglib
#ifndef PKGLIB_PKGCACHEGEN_H
#define PKGLIB_PKGCACHEGEN_H
@@ -29,6 +28,7 @@
class pkgSourceList;
class OpProgress;
class MMap;
+class pkgIndexFile;
class pkgCacheGenerator
{
@@ -39,7 +39,7 @@ class pkgCacheGenerator
public:
class ListParser;
- friend ListParser;
+ friend class ListParser;
protected:
@@ -54,32 +54,28 @@ class pkgCacheGenerator
bool NewFileVer(pkgCache::VerIterator &Ver,ListParser &List);
unsigned long NewVersion(pkgCache::VerIterator &Ver,string VerStr,unsigned long Next);
+ public:
+
unsigned long WriteUniqString(const char *S,unsigned int Size);
inline unsigned long WriteUniqString(string S) {return WriteUniqString(S.c_str(),S.length());};
- public:
-
void DropProgress() {Progress = 0;};
- bool SelectFile(string File,unsigned long Flags = 0);
+ bool SelectFile(string File,string Site,pkgIndexFile const &Index,
+ unsigned long Flags = 0);
bool MergeList(ListParser &List,pkgCache::VerIterator *Ver = 0);
inline pkgCache &GetCache() {return Cache;};
inline pkgCache::PkgFileIterator GetCurFile()
{return pkgCache::PkgFileIterator(Cache,CurrentFile);};
- pkgCacheGenerator(DynamicMMap &Map,OpProgress &Progress);
+ pkgCacheGenerator(DynamicMMap *Map,OpProgress *Progress);
~pkgCacheGenerator();
};
-bool pkgSrcCacheCheck(pkgSourceList &List);
-bool pkgPkgCacheCheck(string CacheFile);
-bool pkgMakeStatusCache(pkgSourceList &List,OpProgress &Progress);
-MMap *pkgMakeStatusCacheMem(pkgSourceList &List,OpProgress &Progress);
-
// This is the abstract package list parser class.
class pkgCacheGenerator::ListParser
{
pkgCacheGenerator *Owner;
- friend pkgCacheGenerator;
+ friend class pkgCacheGenerator;
// Some cache items
pkgCache::VerIterator OldDepVer;
@@ -113,8 +109,21 @@ class pkgCacheGenerator::ListParser
virtual ~ListParser() {};
};
-bool pkgMergeStatus(OpProgress &Progress,pkgCacheGenerator &Gen,
- unsigned long &CurrentSize,unsigned long TotalSize);
-bool pkgAddStatusSize(unsigned long &TotalSize);
+bool pkgMakeStatusCache(pkgSourceList &List,OpProgress &Progress,
+ MMap **OutMap = 0,bool AllowMem = false);
+bool pkgMakeOnlyStatusCache(OpProgress &Progress,DynamicMMap **OutMap);
+
+#ifdef APT_COMPATIBILITY
+#if APT_COMPATIBILITY != 986
+#warning "Using APT_COMPATIBILITY"
+#endif
+MMap *pkgMakeStatusCacheMem(pkgSourceList &List,OpProgress &Progress)
+{
+ MMap *Map = 0;
+ if (pkgMakeStatusCache(List,Progress,&Map,true) == false)
+ return 0;
+ return Map;
+}
+#endif
#endif
diff --git a/apt-pkg/pkgrecords.cc b/apt-pkg/pkgrecords.cc
index 5d112a5c1..69aac6622 100644
--- a/apt-pkg/pkgrecords.cc
+++ b/apt-pkg/pkgrecords.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: pkgrecords.cc,v 1.5 1999/02/22 03:30:06 jgg Exp $
+// $Id: pkgrecords.cc,v 1.6 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Package Records - Allows access to complete package description records
@@ -13,9 +13,11 @@
#pragma implementation "apt-pkg/pkgrecords.h"
#endif
#include <apt-pkg/pkgrecords.h>
-#include <apt-pkg/debrecords.h>
+#include <apt-pkg/indexfile.h>
#include <apt-pkg/error.h>
#include <apt-pkg/configuration.h>
+
+#include <apti18n.h>
/*}}}*/
// Records::pkgRecords - Constructor /*{{{*/
@@ -23,25 +25,21 @@
/* This will create the necessary structures to access the status files */
pkgRecords::pkgRecords(pkgCache &Cache) : Cache(Cache), Files(0)
{
- Files = new PkgFile[Cache.HeaderP->PackageFileCount];
+ Files = new Parser *[Cache.HeaderP->PackageFileCount];
+ memset(Files,0,sizeof(*Files)*Cache.HeaderP->PackageFileCount);
+
for (pkgCache::PkgFileIterator I = Cache.FileBegin();
I.end() == false; I++)
{
- // We can not initialize if the cache is out of sync.
- if (I.IsOk() == false)
+ const pkgIndexFile::Type *Type = pkgIndexFile::Type::GetType(I.IndexType());
+ if (Type == 0)
{
- _error->Error("Package file %s is out of sync.",I.FileName());
+ _error->Error(_("Index file type '%s' is not supported"),I.IndexType());
return;
}
-
- // Create the file
- Files[I->ID].File = new FileFd(I.FileName(),FileFd::ReadOnly);
- if (_error->PendingError() == true)
- return;
-
- // Create the parser
- Files[I->ID].Parse = new debRecordParser(*Files[I->ID].File,Cache);
- if (_error->PendingError() == true)
+
+ Files[I->ID] = Type->CreatePkgParser(I);
+ if (Files[I->ID] == 0)
return;
}
}
@@ -51,6 +49,8 @@ pkgRecords::pkgRecords(pkgCache &Cache) : Cache(Cache), Files(0)
/* */
pkgRecords::~pkgRecords()
{
+ for (unsigned I = 0; I != Cache.HeaderP->PackageFileCount; I++)
+ delete Files[I];
delete [] Files;
}
/*}}}*/
@@ -59,18 +59,7 @@ pkgRecords::~pkgRecords()
/* */
pkgRecords::Parser &pkgRecords::Lookup(pkgCache::VerFileIterator const &Ver)
{
- PkgFile &File = Files[Ver.File()->ID];
- File.Parse->Jump(Ver);
-
- return *File.Parse;
-}
- /*}}}*/
-// Records::Pkgfile::~PkgFile - Destructor /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-pkgRecords::PkgFile::~PkgFile()
-{
- delete Parse;
- delete File;
+ Files[Ver.File()->ID]->Jump(Ver);
+ return *Files[Ver.File()->ID];
}
/*}}}*/
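
As a usage sketch, the per-index parsers are reached through Lookup() on a VerFileIterator; ShowShortDesc is a hypothetical helper and assumes the version has at least one associated package file:

    #include <apt-pkg/pkgrecords.h>
    #include <apt-pkg/error.h>
    #include <iostream>

    // Print the short description stored in the index record for a version.
    void ShowShortDesc(pkgCache &Cache,pkgCache::VerIterator Ver)
    {
       pkgRecords Recs(Cache);
       if (_error->PendingError() == true)
          return;
       pkgRecords::Parser &P = Recs.Lookup(Ver.FileList());
       std::cout << Ver.ParentPkg().Name() << " - " << P.ShortDesc() << std::endl;
    }
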
diff --git a/apt-pkg/pkgrecords.h b/apt-pkg/pkgrecords.h
index b5205f1ac..af5fac646 100644
--- a/apt-pkg/pkgrecords.h
+++ b/apt-pkg/pkgrecords.h
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: pkgrecords.h,v 1.4 1999/04/07 05:30:17 jgg Exp $
+// $Id: pkgrecords.h,v 1.5 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Package Records - Allows access to complete package description records
@@ -14,7 +14,6 @@
##################################################################### */
/*}}}*/
-// Header section: pkglib
#ifndef PKGLIB_PKGRECORDS_H
#define PKGLIB_PKGRECORDS_H
@@ -33,18 +32,8 @@ class pkgRecords
private:
pkgCache &Cache;
-
- // List of package files
- struct PkgFile
- {
- FileFd *File;
- Parser *Parse;
-
- PkgFile() : File(0), Parse(0) {};
- ~PkgFile();
- };
- PkgFile *Files;
-
+ Parser **Files;
+
public:
// Lookup function
@@ -62,18 +51,22 @@ class pkgRecords::Parser
virtual bool Jump(pkgCache::VerFileIterator const &Ver) = 0;
public:
- friend pkgRecords;
+ friend class pkgRecords;
// These refer to the archive file for the Version
virtual string FileName() {return string();};
virtual string MD5Hash() {return string();};
virtual string SourcePkg() {return string();};
-
+
// These are some general stats about the package
virtual string Maintainer() {return string();};
virtual string ShortDesc() {return string();};
virtual string LongDesc() {return string();};
-
+ virtual string Name() {return string();};
+
+ // The record in binary form
+ virtual void GetRec(const char *&Start,const char *&Stop) {Start = Stop = 0;};
+
virtual ~Parser() {};
};
diff --git a/apt-pkg/pkgsystem.cc b/apt-pkg/pkgsystem.cc
new file mode 100644
index 000000000..530f69c91
--- /dev/null
+++ b/apt-pkg/pkgsystem.cc
@@ -0,0 +1,45 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: pkgsystem.cc,v 1.2 2001/02/20 07:03:17 jgg Exp $
+/* ######################################################################
+
+ System - Abstraction for running on different systems.
+
+ Basic general structure..
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#ifdef __GNUG__
+#pragma implementation "apt-pkg/pkgsystem.h"
+#endif
+
+#include <apt-pkg/pkgsystem.h>
+#include <apt-pkg/policy.h>
+ /*}}}*/
+
+pkgSystem *_system = 0;
+static pkgSystem *SysList[10];
+pkgSystem **pkgSystem::GlobalList = SysList;
+unsigned long pkgSystem::GlobalListLen = 0;
+
+// System::pkgSystem - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* Add it to the global list.. */
+pkgSystem::pkgSystem()
+{
+ SysList[GlobalListLen] = this;
+ GlobalListLen++;
+}
+ /*}}}*/
+// System::GetSystem - Get the named system /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+pkgSystem *pkgSystem::GetSystem(const char *Label)
+{
+ for (unsigned I = 0; I != GlobalListLen; I++)
+ if (strcmp(SysList[I]->Label,Label) == 0)
+ return SysList[I];
+ return 0;
+}
+ /*}}}*/
diff --git a/apt-pkg/pkgsystem.h b/apt-pkg/pkgsystem.h
new file mode 100644
index 000000000..7d0e42d13
--- /dev/null
+++ b/apt-pkg/pkgsystem.h
@@ -0,0 +1,95 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: pkgsystem.h,v 1.2 2001/02/20 07:03:17 jgg Exp $
+/* ######################################################################
+
+ System - Abstraction for running on different systems.
+
+ Instances of this class can be thought of as factories or meta-classes
+ for a variety of more specialized classes. Together this class and
+ its specialized offspring completely define the environment and how
+ to access resources for a specific system. There are several sub
+ areas that are all orthogonal - each system has a unique combination of
+ these sub areas:
+ - Versioning. Different systems have different ideas on versions.
+ Within a system all sub classes must follow the same versioning
+ rules.
+ - Local tool locking to prevent multiple tools from accessing the
+ same database.
+ - Candidate Version selection policy - this is probably almost always
+ managed using a standard APT class
+ - Actual Package installation
+ * Indication of what kind of binary formats are supported
+ - Selection of local 'status' indexes that make up the pkgCache.
+
+ It is important to note that the handling of index files is not a
+ function of the system. Index files are handled through a separate
+ abstraction - the only requirement is that the index files have the
+ same idea of versioning as the target system.
+
+ Upon startup each supported system instantiates an instance of the
+ pkgSystem class (using a global constructor) which will make itself
+ available to the main APT init routine. That routine will select the
+ proper system and make it the global default.
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef PKGLIB_PKGSYSTEM_H
+#define PKGLIB_PKGSYSTEM_H
+
+#ifdef __GNUG__
+#pragma interface "apt-pkg/pkgsystem.h"
+#endif
+
+#include <apt-pkg/depcache.h>
+#include <vector.h>
+
+class pkgPackageManager;
+class pkgVersioningSystem;
+class Configuration;
+class pkgIndexFile;
+
+class pkgSystem
+{
+ public:
+
+ // Global list of supported systems
+ static pkgSystem **GlobalList;
+ static unsigned long GlobalListLen;
+ static pkgSystem *GetSystem(const char *Label);
+
+ const char *Label;
+ pkgVersioningSystem *VS;
+
+ /* Prevent other programs from touching shared data not covered by
+ other locks (cache or state locks) */
+ virtual bool Lock() = 0;
+ virtual bool UnLock(bool NoErrors = false) = 0;
+
+ /* Various helper classes to interface with specific bits of this
+ environment */
+ virtual pkgPackageManager *CreatePM(pkgDepCache *Cache) const = 0;
+
+ /* Load environment specific configuration and perform any other setup
+ necessary */
+ virtual bool Initialize(Configuration &Cnf) {return true;};
+
+ /* Type is some kind of Globally Unique way of differentiating
+ archive file types.. */
+ virtual bool ArchiveSupported(const char *Type) = 0;
+
+ // Return a list of system index files..
+ virtual bool AddStatusFiles(vector<pkgIndexFile *> &List) = 0;
+
+ /* Evaluate how 'right' we are for this system based on the filesystem
+ etc.. */
+ virtual signed Score(Configuration const &Cnf) {return 0;};
+
+ pkgSystem();
+ virtual ~pkgSystem() {};
+};
+
+// The environment we are operating in.
+extern pkgSystem *_system;
+
+#endif
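
Each concrete system registers itself through a static instance, so after static initialisation the init code can pick the best match by score. A sketch of roughly what such a selection loop could look like (PickSystem is illustrative, not the actual init routine):

    #include <apt-pkg/pkgsystem.h>
    #include <apt-pkg/configuration.h>

    // Choose the registered system that scores highest for this environment.
    pkgSystem *PickSystem(Configuration &Cnf)
    {
       pkgSystem *Best = 0;
       signed BestScore = 0;
       for (unsigned I = 0; I != pkgSystem::GlobalListLen; I++)
       {
          signed Score = pkgSystem::GlobalList[I]->Score(Cnf);
          if (Best == 0 || Score > BestScore)
          {
             Best = pkgSystem::GlobalList[I];
             BestScore = Score;
          }
       }
       return Best;
    }
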
diff --git a/apt-pkg/policy.cc b/apt-pkg/policy.cc
new file mode 100644
index 000000000..029a37df7
--- /dev/null
+++ b/apt-pkg/policy.cc
@@ -0,0 +1,275 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: policy.cc,v 1.2 2001/02/20 07:03:17 jgg Exp $
+/* ######################################################################
+
+ Package Version Policy implementation
+
+ This is just a really simple wrapper around pkgVersionMatch with
+ some added goodies to manage the list of things..
+
+ Priority Table:
+
+ 1000 -> inf = Downgradeable priorities
+ 1000 = The 'no downgrade' pseudo-status file
+ 100 -> 1000 = Standard priorities
+ 990 = Config file override package files
+ 989 = Start for preference auto-priorities
+ 500 = Default package files
+ 100 = The status file
+ 0 -> 100 = NotAutomatic sources like experimental
+ -inf -> 0 = Never selected
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#ifdef __GNUG__
+#pragma implementation "apt-pkg/policy.h"
+#endif
+#include <apt-pkg/policy.h>
+#include <apt-pkg/configuration.h>
+#include <apt-pkg/tagfile.h>
+#include <apt-pkg/strutl.h>
+#include <apt-pkg/error.h>
+#include <apt-pkg/sptr.h>
+
+#include <apti18n.h>
+ /*}}}*/
+
+// Policy::Init - Startup and bind to a cache /*{{{*/
+// ---------------------------------------------------------------------
+/* Set the defaults for operation. The default mode with no loaded policy
+ file matches the V0 policy engine. */
+pkgPolicy::pkgPolicy(pkgCache *Owner) : Pins(0), PFPriority(0), Cache(Owner)
+{
+ PFPriority = new signed short[Owner->Head().PackageFileCount];
+ Pins = new Pin[Owner->Head().PackageCount];
+
+ for (unsigned long I = 0; I != Owner->Head().PackageCount; I++)
+ Pins[I].Type = pkgVersionMatch::None;
+
+ // The config file has a master override.
+ string DefRel = _config->Find("APT::Default-Release");
+ if (DefRel.empty() == false)
+ CreatePin(pkgVersionMatch::Release,"",DefRel,990);
+
+ InitDefaults();
+}
+ /*}}}*/
+// Policy::InitDefaults - Compute the default selections /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool pkgPolicy::InitDefaults()
+{
+ // Initialize the priorities based on the status of the package file
+ for (pkgCache::PkgFileIterator I = Cache->FileBegin(); I != Cache->FileEnd(); I++)
+ {
+ PFPriority[I->ID] = 500;
+ if ((I->Flags & pkgCache::Flag::NotSource) == pkgCache::Flag::NotSource)
+ PFPriority[I->ID] = 100;
+ else
+ if ((I->Flags & pkgCache::Flag::NotAutomatic) == pkgCache::Flag::NotAutomatic)
+ PFPriority[I->ID] = 1;
+ }
+
+ // Apply the defaults..
+ SPtr<bool> Fixed = new bool[Cache->HeaderP->PackageFileCount];
+ memset(Fixed,0,sizeof(*Fixed)*Cache->HeaderP->PackageFileCount);
+ signed Cur = 989;
+ StatusOverride = false;
+ for (vector<Pin>::const_iterator I = Defaults.begin(); I != Defaults.end();
+ I++, Cur--)
+ {
+ pkgVersionMatch Match(I->Data,I->Type);
+ for (pkgCache::PkgFileIterator F = Cache->FileBegin(); F != Cache->FileEnd(); F++)
+ {
+/* hmm?
+ if ((F->Flags & pkgCache::Flag::NotSource) == pkgCache::Flag::NotSource)
+ continue;*/
+
+ if (Match.FileMatch(F) == true && Fixed[F->ID] == false)
+ {
+ if (I->Priority != 0 && I->Priority > 0)
+ Cur = I->Priority;
+
+ if (I->Priority < 0)
+ PFPriority[F->ID] = I->Priority;
+ else
+ PFPriority[F->ID] = Cur;
+
+ if (PFPriority[F->ID] > 1000)
+ StatusOverride = true;
+
+ Fixed[F->ID] = true;
+ }
+ }
+ }
+
+ if (_config->FindB("Debug::pkgPolicy",false) == true)
+ for (pkgCache::PkgFileIterator F = Cache->FileBegin(); F != Cache->FileEnd(); F++)
+ cout << "Prio of " << F.FileName() << ' ' << PFPriority[F->ID] << endl;
+
+ return true;
+}
+ /*}}}*/
+// Policy::GetCandidateVer - Get the candidate install version /*{{{*/
+// ---------------------------------------------------------------------
+/* Evaluate the package pins and the default list to determine what the
+ best package is. */
+pkgCache::VerIterator pkgPolicy::GetCandidateVer(pkgCache::PkgIterator Pkg)
+{
+ const Pin &PPkg = Pins[Pkg->ID];
+
+ // Look for a package pin and evaluate it.
+ signed Max = 0;
+ pkgCache::VerIterator Pref(*Cache);
+ if (PPkg.Type != pkgVersionMatch::None)
+ {
+ pkgVersionMatch Match(PPkg.Data,PPkg.Type);
+ Pref = Match.Find(Pkg);
+ Max = PPkg.Priority;
+ if (PPkg.Priority == 0)
+ Max = 989;
+ }
+
+ /* Falling through to the default version.. Setting Max to zero
+ effectively excludes everything <= 0 which are the non-automatic
+ priorities.. The status file is given a prio of 100 which will exclude
+ not-automatic sources, except in a single shot not-installed mode.
+ The second pseudo-status file is at prio 1000, above which will permit
+ the user to force-downgrade things.
+
+ The user pin is subject to the same priority rules as default
+ selections. Thus there are two ways to create a pin - a pin that
+ tracks the default when the default is taken away, and a permanent
+ pin that stays at that setting.
+ */
+ for (pkgCache::VerIterator Ver = Pkg.VersionList(); Ver.end() == false; Ver++)
+ {
+ for (pkgCache::VerFileIterator VF = Ver.FileList(); VF.end() == false; VF++)
+ {
+ signed Prio = PFPriority[VF.File()->ID];
+ if (Prio > Max)
+ {
+ Pref = Ver;
+ Max = Prio;
+ }
+ }
+
+ if (Pkg.CurrentVer() == Ver && Max < 1000)
+ {
+ /* Elevate our current selection (or the status file itself)
+ to the Pseudo-status priority. */
+ if (Pref.end() == true)
+ Pref = Ver;
+ Max = 1000;
+
+ // Fast path optimize.
+ if (StatusOverride == false)
+ break;
+ }
+ }
+
+ return Pref;
+}
+ /*}}}*/
+// Policy::CreatePin - Create an entry in the pin table.. /*{{{*/
+// ---------------------------------------------------------------------
+/* For performance we have 3 tables: the default table, the main cache
+ table (hashed to the cache) and the unmatched table. A blank package name
+ indicates the pin belongs to the default table. Order of insertion matters
+ here; earlier defaults override later ones. */
+void pkgPolicy::CreatePin(pkgVersionMatch::MatchType Type,string Name,
+ string Data,signed short Priority)
+{
+ pkgCache::PkgIterator Pkg = Cache->FindPkg(Name);
+ Pin *P = 0;
+
+ if (Name.empty() == true)
+ P = Defaults.insert(Defaults.end());
+ else
+ {
+ // Get a spot to put the pin
+ if (Pkg.end() == true)
+ {
+ // Check the unmatched table
+ for (vector<PkgPin>::iterator I = Unmatched.begin();
+ I != Unmatched.end() && P == 0; I++)
+ if (I->Pkg == Name)
+ P = I;
+
+ if (P == 0)
+ P = Unmatched.insert(Unmatched.end());
+ }
+ else
+ {
+ P = Pins + Pkg->ID;
+ }
+ }
+
+ // Set..
+ P->Type = Type;
+ P->Priority = Priority;
+ P->Data = Data;
+}
+ /*}}}*/
+
+// ReadPinFile - Load the pin file into a Policy /*{{{*/
+// ---------------------------------------------------------------------
+/* I'd like to see the preferences file store more than just pin information
+ but right now that is the only stuff I have to store. Later there will
+ have to be some kind of combined super parser to get the data into all
+ the right classes.. */
+bool ReadPinFile(pkgPolicy &Plcy,string File)
+{
+ if (File.empty() == true)
+ File = _config->FindFile("Dir::Etc::Preferences");
+
+ if (FileExists(File) == false)
+ return true;
+
+ FileFd Fd(File,FileFd::ReadOnly);
+ pkgTagFile TF(&Fd);
+ if (_error->PendingError() == true)
+ return false;
+
+ pkgTagSection Tags;
+ while (TF.Step(Tags) == true)
+ {
+ string Name = Tags.FindS("Package");
+ if (Name.empty() == true)
+ return _error->Error(_("Invalid record in the preferences file, no Package header"));
+ if (Name == "*")
+ Name = string();
+
+ const char *Start;
+ const char *End;
+ if (Tags.Find("Pin",Start,End) == false)
+ continue;
+
+ const char *Word = Start;
+ for (; Word != End && isspace(*Word) == 0; Word++);
+
+ // Parse the type..
+ pkgVersionMatch::MatchType Type;
+ if (stringcasecmp(Start,Word,"version") == 0 && Name.empty() == false)
+ Type = pkgVersionMatch::Version;
+ else if (stringcasecmp(Start,Word,"release") == 0)
+ Type = pkgVersionMatch::Release;
+ else if (stringcasecmp(Start,Word,"origin") == 0)
+ Type = pkgVersionMatch::Origin;
+ else
+ {
+ _error->Warning(_("Did not understand pin type %s"),string(Start,Word).c_str());
+ continue;
+ }
+ for (; Word != End && isspace(*Word) != 0; Word++);
+
+ Plcy.CreatePin(Type,Name,string(Word,End),
+ Tags.FindI("Pin-Priority"));
+ }
+
+ Plcy.InitDefaults();
+ return true;
+}
+ /*}}}*/
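
The stanza-to-pin mapping done by ReadPinFile above is easiest to see with a worked example. The sketch below is illustrative only: the preferences stanza, the priority value and the Cache pointer are invented here, not taken from this change.

   // A stanza such as the following in Dir::Etc::Preferences:
   //
   //   Package: *
   //   Pin: release o=Debian,a=unstable
   //   Pin-Priority: 50
   //
   // is parsed by ReadPinFile() into roughly this call; '*' becomes the empty
   // package name (default table) and the first word of Pin picks the type.
   pkgPolicy Plcy(Cache);                    // Cache: an open pkgCache *
   Plcy.CreatePin(pkgVersionMatch::Release,  // "release ..." pin
                  "",                        // applies to all packages
                  "o=Debian,a=unstable",     // remainder of the Pin field
                  50);                       // Pin-Priority
   Plcy.InitDefaults();                      // recompute file priorities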
diff --git a/apt-pkg/policy.h b/apt-pkg/policy.h
new file mode 100644
index 000000000..f89916202
--- /dev/null
+++ b/apt-pkg/policy.h
@@ -0,0 +1,83 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: policy.h,v 1.2 2001/02/20 07:03:17 jgg Exp $
+/* ######################################################################
+
+ Package Version Policy implementation
+
+ This implements the more advanced 'Version 4' APT policy engine. The
+ standard 'Version 0' engine is included inside the DepCache which is
+ its historical location.
+
+ The V4 engine allows the user to completely control all aspects of
+ version selection. There are three primary means to choose a version
+ * Selection by version match
+ * Selection by Release file match
+ * Selection by origin server
+
+ Each package may be 'pinned' with a single criterion, which will ultimately
+ result in the selection of a single version, or no version, for each
+ package.
+
+ Furthermore, the default selection can be influenced by specifying
+ the ordering of package files. The order is derived by reading the
+ package file preferences and assigning a priority to each package
+ file.
+
+ A special flag may be set to indicate if no version should be returned
+ if no matching versions are found, otherwise the default matching
+ rules are used to locate a hit.
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef PKGLIB_POLICY_H
+#define PKGLIB_POLICY_H
+
+#ifdef __GNUG__
+#pragma interface "apt-pkg/policy.h"
+#endif
+
+#include <apt-pkg/depcache.h>
+#include <apt-pkg/versionmatch.h>
+#include <vector>
+
+class pkgPolicy : public pkgDepCache::Policy
+{
+ struct Pin
+ {
+ pkgVersionMatch::MatchType Type;
+ string Data;
+ signed short Priority;
+ Pin() : Type(pkgVersionMatch::None), Priority(0) {};
+ };
+
+ struct PkgPin : Pin
+ {
+ string Pkg;
+ };
+
+ protected:
+
+ Pin *Pins;
+ signed short *PFPriority;
+ vector<Pin> Defaults;
+ vector<PkgPin> Unmatched;
+ pkgCache *Cache;
+ bool StatusOverride;
+
+ public:
+
+ void CreatePin(pkgVersionMatch::MatchType Type,string Pkg,
+ string Data,signed short Priority);
+
+ virtual pkgCache::VerIterator GetCandidateVer(pkgCache::PkgIterator Pkg);
+ virtual bool IsImportantDep(pkgCache::DepIterator Dep) {return pkgDepCache::Policy::IsImportantDep(Dep);};
+ bool InitDefaults();
+
+ pkgPolicy(pkgCache *Owner);
+ virtual ~pkgPolicy() {delete [] PFPriority; delete [] Pins;};
+};
+
+bool ReadPinFile(pkgPolicy &Plcy,string File = "");
+
+#endif
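
A short usage sketch of the new class may help; it assumes an already built pkgCache named Cache and follows the pre-std iostream conventions used elsewhere in this change.

   #include <apt-pkg/policy.h>
   #include <apt-pkg/error.h>
   #include <iostream.h>

   bool ShowCandidate(pkgCache &Cache,const char *Name)
   {
      pkgPolicy Plcy(&Cache);
      if (ReadPinFile(Plcy) == false)        // loads Dir::Etc::Preferences
         return false;

      pkgCache::PkgIterator Pkg = Cache.FindPkg(Name);
      if (Pkg.end() == true)
         return _error->Error("No such package %s",Name);

      pkgCache::VerIterator Ver = Plcy.GetCandidateVer(Pkg);
      if (Ver.end() == false)
         cout << Pkg.Name() << " -> " << Ver.VerStr() << endl;
      return true;
   }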
diff --git a/apt-pkg/sourcelist.cc b/apt-pkg/sourcelist.cc
index 1fdbd834e..da7e38f6e 100644
--- a/apt-pkg/sourcelist.cc
+++ b/apt-pkg/sourcelist.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: sourcelist.cc,v 1.17 1999/10/17 07:30:23 jgg Exp $
+// $Id: sourcelist.cc,v 1.18 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
List of Sources
@@ -18,10 +18,99 @@
#include <apt-pkg/configuration.h>
#include <apt-pkg/strutl.h>
+#include <apti18n.h>
+
#include <fstream.h>
-#include <stdio.h>
-#include <unistd.h>
-#include <sys/stat.h>
+ /*}}}*/
+
+// Global list of Item supported
+static pkgSourceList::Type *ItmList[10];
+pkgSourceList::Type **pkgSourceList::Type::GlobalList = ItmList;
+unsigned long pkgSourceList::Type::GlobalListLen = 0;
+
+// Type::Type - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* Link this to the global list of items*/
+pkgSourceList::Type::Type()
+{
+ ItmList[GlobalListLen] = this;
+ GlobalListLen++;
+}
+ /*}}}*/
+// Type::GetType - Get a specific meta for a given type /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+pkgSourceList::Type *pkgSourceList::Type::GetType(const char *Type)
+{
+ for (unsigned I = 0; I != GlobalListLen; I++)
+ if (strcmp(GlobalList[I]->Name,Type) == 0)
+ return GlobalList[I];
+ return 0;
+}
+ /*}}}*/
+// Type::FixupURI - Normalize the URI and check it.. /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool pkgSourceList::Type::FixupURI(string &URI) const
+{
+ if (URI.empty() == true)
+ return false;
+
+ if (URI.find(':') == string::npos)
+ return false;
+
+ URI = SubstVar(URI,"$(ARCH)",_config->Find("APT::Architecture"));
+
+ // Make sure that the URI is / postfixed
+ if (URI[URI.size() - 1] != '/')
+ URI += '/';
+
+ return true;
+}
+ /*}}}*/
+// Type::ParseLine - Parse a single line /*{{{*/
+// ---------------------------------------------------------------------
+/* This is a generic one that is the 'usual' format for sources.list.
+ Weird types may override this. */
+bool pkgSourceList::Type::ParseLine(vector<pkgIndexFile *> &List,
+ const char *Buffer,
+ unsigned long CurLine,
+ string File) const
+{
+ string URI;
+ string Dist;
+ string Section;
+
+ if (ParseQuoteWord(Buffer,URI) == false)
+ return _error->Error(_("Malformed line %lu in source list %s (URI)"),CurLine,File.c_str());
+ if (ParseQuoteWord(Buffer,Dist) == false)
+ return _error->Error(_("Malformed line %lu in source list %s (dist)"),CurLine,File.c_str());
+
+ if (FixupURI(URI) == false)
+ return _error->Error(_("Malformed line %lu in source list %s (URI parse)"),CurLine,File.c_str());
+
+ // Check for an absolute dists specification.
+ if (Dist.empty() == false && Dist[Dist.size() - 1] == '/')
+ {
+ if (ParseQuoteWord(Buffer,Section) == true)
+ return _error->Error(_("Malformed line %lu in source list %s (Absolute dist)"),CurLine,File.c_str());
+ Dist = SubstVar(Dist,"$(ARCH)",_config->Find("APT::Architecture"));
+ return CreateItem(List,URI,Dist,Section);
+ }
+
+ // Grab the rest of the dists
+ if (ParseQuoteWord(Buffer,Section) == false)
+ return _error->Error(_("Malformed line %lu in source list %s (dist parse)"),CurLine,File.c_str());
+
+ do
+ {
+ if (CreateItem(List,URI,Dist,Section) == false)
+ return false;
+ }
+ while (ParseQuoteWord(Buffer,Section) == true);
+
+ return true;
+}
/*}}}*/
// SourceList::pkgSourceList - Constructors /*{{{*/
@@ -52,7 +141,7 @@ bool pkgSourceList::Read(string File)
// Open the stream for reading
ifstream F(File.c_str(),ios::in | ios::nocreate);
if (!F != 0)
- return _error->Errno("ifstream::ifstream","Opening %s",File.c_str());
+ return _error->Errno("ifstream::ifstream",_("Opening %s"),File.c_str());
List.erase(List.begin(),List.end());
char Buffer[300];
@@ -63,318 +152,59 @@ bool pkgSourceList::Read(string File)
F.getline(Buffer,sizeof(Buffer));
CurLine++;
_strtabexpand(Buffer,sizeof(Buffer));
- _strstrip(Buffer);
+
+
+ char *I;
+ for (I = Buffer; *I != 0 && *I != '#'; I++);
+ *I = 0;
+
+ const char *C = _strstrip(Buffer);
// Comment or blank
- if (Buffer[0] == '#' || Buffer[0] == 0)
+ if (C[0] == '#' || C[0] == 0)
continue;
-
+
// Grok it
- string Type;
- string URI;
- Item Itm;
- const char *C = Buffer;
- if (ParseQuoteWord(C,Type) == false)
- return _error->Error("Malformed line %u in source list %s (type)",CurLine,File.c_str());
- if (ParseQuoteWord(C,URI) == false)
- return _error->Error("Malformed line %u in source list %s (URI)",CurLine,File.c_str());
- if (ParseQuoteWord(C,Itm.Dist) == false)
- return _error->Error("Malformed line %u in source list %s (dist)",CurLine,File.c_str());
- if (Itm.SetType(Type) == false)
- return _error->Error("Malformed line %u in source list %s (type parse)",CurLine,File.c_str());
- if (Itm.SetURI(URI) == false)
- return _error->Error("Malformed line %u in source list %s (URI parse)",CurLine,File.c_str());
-
- // Check for an absolute dists specification.
- if (Itm.Dist.empty() == false && Itm.Dist[Itm.Dist.size() - 1] == '/')
- {
- if (ParseQuoteWord(C,Itm.Section) == true)
- return _error->Error("Malformed line %u in source list %s (Absolute dist)",CurLine,File.c_str());
- Itm.Dist = SubstVar(Itm.Dist,"$(ARCH)",_config->Find("APT::Architecture"));
- List.push_back(Itm);
- continue;
- }
+ string LineType;
+ if (ParseQuoteWord(C,LineType) == false)
+ return _error->Error(_("Malformed line %u in source list %s (type)"),CurLine,File.c_str());
- // Grab the rest of the dists
- if (ParseQuoteWord(C,Itm.Section) == false)
- return _error->Error("Malformed line %u in source list %s (dist parse)",CurLine,File.c_str());
+ Type *Parse = Type::GetType(LineType.c_str());
+ if (Parse == 0)
+ return _error->Error(_("Type '%s' is not known on line %u in source list %s"),LineType.c_str(),CurLine,File.c_str());
- do
- {
- List.push_back(Itm);
- }
- while (ParseQuoteWord(C,Itm.Section) == true);
+ if (Parse->ParseLine(List,C,CurLine,File) == false)
+ return false;
}
return true;
}
/*}}}*/
-// SourceList::Item << - Writes the item to a stream /*{{{*/
-// ---------------------------------------------------------------------
-/* This is not suitable for rebuilding the sourcelist file but it good for
- debugging. */
-ostream &operator <<(ostream &O,pkgSourceList::Item &Itm)
-{
- O << (int)Itm.Type << ' ' << Itm.URI << ' ' << Itm.Dist << ' ' << Itm.Section;
- return O;
-}
- /*}}}*/
-// SourceList::Item::SetType - Sets the distribution type /*{{{*/
+// SourceList::FindIndex - Get the index associated with a file /*{{{*/
// ---------------------------------------------------------------------
/* */
-bool pkgSourceList::Item::SetType(string S)
+bool pkgSourceList::FindIndex(pkgCache::PkgFileIterator File,
+ pkgIndexFile *&Found) const
{
- if (S == "deb")
+ for (const_iterator I = List.begin(); I != List.end(); I++)
{
- Type = Deb;
- return true;
- }
-
- if (S == "deb-src")
- {
- Type = DebSrc;
- return true;
+ if ((*I)->FindInCache(*File.Cache()) == File)
+ {
+ Found = *I;
+ return true;
+ }
}
-
- return false;
-}
- /*}}}*/
-// SourceList::Item::SetURI - Set the URI /*{{{*/
-// ---------------------------------------------------------------------
-/* For simplicity we strip the scheme off the uri */
-bool pkgSourceList::Item::SetURI(string S)
-{
- if (S.empty() == true)
- return false;
-
- if (S.find(':') == string::npos)
- return false;
-
- S = SubstVar(S,"$(ARCH)",_config->Find("APT::Architecture"));
- // Make sure that the URN is / postfixed
- URI = S;
- if (URI[URI.size() - 1] != '/')
- URI += '/';
-
- return true;
-}
- /*}}}*/
-// SourceList::Item::PackagesURI - Returns a URI to the packages file /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-string pkgSourceList::Item::PackagesURI() const
-{
- string Res;
- switch (Type)
- {
- case Deb:
- if (Dist[Dist.size() - 1] == '/')
- {
- if (Dist != "/")
- Res = URI + Dist;
- else
- Res = URI;
- }
- else
- Res = URI + "dists/" + Dist + '/' + Section +
- "/binary-" + _config->Find("APT::Architecture") + '/';
-
- Res += "Packages";
- break;
-
- case DebSrc:
- if (Dist[Dist.size() - 1] == '/')
- Res = URI + Dist;
- else
- Res = URI + "dists/" + Dist + '/' + Section +
- "/source/";
-
- Res += "Sources";
- break;
- };
- return Res;
-}
- /*}}}*/
-// SourceList::Item::PackagesInfo - Shorter version of the URI /*{{{*/
-// ---------------------------------------------------------------------
-/* This is a shorter version that is designed to be < 60 chars or so */
-string pkgSourceList::Item::PackagesInfo() const
-{
- string Res;
- switch (Type)
- {
- case Deb:
- Res += SiteOnly(URI) + ' ';
- if (Dist[Dist.size() - 1] == '/')
- {
- if (Dist != "/")
- Res += Dist;
- }
- else
- Res += Dist + '/' + Section;
-
- Res += " Packages";
- break;
-
- case DebSrc:
- Res += SiteOnly(URI) + ' ';
- if (Dist[Dist.size() - 1] == '/')
- Res += Dist;
- else
- Res += Dist + '/' + Section;
-
- Res += " Sources";
- break;
- };
- return Res;
-}
- /*}}}*/
-// SourceList::Item::ReleaseURI - Returns a URI to the release file /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-string pkgSourceList::Item::ReleaseURI() const
-{
- string Res;
- switch (Type)
- {
- case Deb:
- if (Dist[Dist.size() - 1] == '/')
- {
- if (Dist != "/")
- Res = URI + Dist;
- else
- Res = URI;
- }
- else
- Res = URI + "dists/" + Dist + '/' + Section +
- "/binary-" + _config->Find("APT::Architecture") + '/';
-
- Res += "Release";
- break;
-
- case DebSrc:
- if (Dist[Dist.size() - 1] == '/')
- Res = URI + Dist;
- else
- Res = URI + "dists/" + Dist + '/' + Section +
- "/source/";
-
- Res += "Release";
- break;
- };
- return Res;
-}
- /*}}}*/
-// SourceList::Item::ReleaseInfo - Shorter version of the URI /*{{{*/
-// ---------------------------------------------------------------------
-/* This is a shorter version that is designed to be < 60 chars or so */
-string pkgSourceList::Item::ReleaseInfo() const
-{
- string Res;
- switch (Type)
- {
- case Deb:
- case DebSrc:
- Res += SiteOnly(URI) + ' ';
- if (Dist[Dist.size() - 1] == '/')
- {
- if (Dist != "/")
- Res += Dist;
- }
- else
- Res += Dist + '/' + Section;
-
- Res += " Release";
- break;
- };
- return Res;
-}
- /*}}}*/
-// SourceList::Item::ArchiveInfo - Shorter version of the archive spec /*{{{*/
-// ---------------------------------------------------------------------
-/* This is a shorter version that is designed to be < 60 chars or so */
-string pkgSourceList::Item::ArchiveInfo(pkgCache::VerIterator Ver) const
-{
- string Res;
- switch (Type)
- {
- case DebSrc:
- case Deb:
- Res += SiteOnly(URI) + ' ';
- if (Dist[Dist.size() - 1] == '/')
- {
- if (Dist != "/")
- Res += Dist;
- }
- else
- Res += Dist + '/' + Section;
-
- Res += " ";
- Res += Ver.ParentPkg().Name();
- Res += " ";
- Res += Ver.VerStr();
-
- break;
- };
- return Res;
-}
- /*}}}*/
-// SourceList::Item::ArchiveURI - Returns a URI to the given archive /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-string pkgSourceList::Item::ArchiveURI(string File) const
-{
- string Res;
- switch (Type)
- {
- case Deb:
- case DebSrc:
- Res = URI + File;
- break;
- };
- return Res;
-}
- /*}}}*/
-// SourceList::Item::SourceInfo - Returns an info line for a source /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-string pkgSourceList::Item::SourceInfo(string Pkg,string Ver,string Comp) const
-{
- string Res;
- switch (Type)
- {
- case DebSrc:
- case Deb:
- Res += SiteOnly(URI) + ' ';
- if (Dist[Dist.size() - 1] == '/')
- {
- if (Dist != "/")
- Res += Dist;
- }
- else
- Res += Dist + '/' + Section;
-
- Res += " ";
- Res += Pkg;
- Res += " ";
- Res += Ver;
- if (Comp.empty() == false)
- Res += " (" + Comp + ")";
- break;
- };
- return Res;
+ return false;
}
/*}}}*/
-// SourceList::Item::SiteOnly - Strip off the path part of a URI /*{{{*/
+// SourceList::GetIndexes - Load the index files into the downloader /*{{{*/
// ---------------------------------------------------------------------
/* */
-string pkgSourceList::Item::SiteOnly(string URI) const
+bool pkgSourceList::GetIndexes(pkgAcquire *Owner) const
{
- ::URI U(URI);
- U.User = string();
- U.Password = string();
- U.Path = string();
- U.Port = 0;
- return U;
+ for (const_iterator I = List.begin(); I != List.end(); I++)
+ if ((*I)->GetIndexes(Owner) == false)
+ return false;
+ return true;
}
/*}}}*/
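
To make the parse path above concrete, here is a hedged sketch of feeding one line through it; the sources.list line is an example, and it assumes a "deb" Type has been registered by the deb-specific code elsewhere in this change.

   vector<pkgIndexFile *> List;
   // Read() has already stripped the leading type word from the line.
   const char *Line = "http://ftp.debian.org/debian stable main contrib";

   pkgSourceList::Type *Parse = pkgSourceList::Type::GetType("deb");
   if (Parse == 0 || Parse->ParseLine(List,Line,1,"sources.list") == false)
      return false;
   // ParseLine fixed up the URI with a trailing '/' and called CreateItem()
   // once for "main" and once for "contrib", so List now holds the indexes.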
diff --git a/apt-pkg/sourcelist.h b/apt-pkg/sourcelist.h
index 37accc5ab..3a6b274f6 100644
--- a/apt-pkg/sourcelist.h
+++ b/apt-pkg/sourcelist.h
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: sourcelist.h,v 1.8 1999/04/07 05:30:18 jgg Exp $
+// $Id: sourcelist.h,v 1.9 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
SourceList - Manage a list of sources
@@ -12,18 +12,22 @@
All sources have a type associated with them that defines the layout
of the archive. The exact format of the file is documented in
files.sgml.
+
+ The types are mapped through a list of type definitions which handle
+ the actual construction of the type. After loading a source list all
+ you have is a list of package index files that have the ability
+ to be Acquired.
##################################################################### */
/*}}}*/
-// Header section: pkglib
#ifndef PKGLIB_SOURCELIST_H
#define PKGLIB_SOURCELIST_H
#include <string>
#include <vector>
-#include <iostream.h>
#include <apt-pkg/pkgcache.h>
-
+#include <apt-pkg/indexfile.h>
+
#ifdef __GNUG__
#pragma interface "apt-pkg/sourcelist.h"
#endif
@@ -33,32 +37,35 @@ class pkgSourceList
{
public:
- /* Each item in the source list, each line can have more than one
- item */
- struct Item
+ // List of supported source list types
+ class Type
{
- enum {Deb, DebSrc} Type;
-
- string URI;
- string Dist;
- string Section;
+ public:
- bool SetType(string S);
- bool SetURI(string S);
- string PackagesURI() const;
- string PackagesInfo() const;
- string ReleaseURI() const;
- string ReleaseInfo() const;
- string SourceInfo(string Pkg,string Ver,string Comp) const;
- string SiteOnly(string URI) const;
- string ArchiveInfo(pkgCache::VerIterator Ver) const;
- string ArchiveURI(string File) const;
+ // Global list of Items supported
+ static Type **GlobalList;
+ static unsigned long GlobalListLen;
+ static Type *GetType(const char *Type);
+
+ const char *Name;
+ const char *Label;
+
+ bool FixupURI(string &URI) const;
+ virtual bool ParseLine(vector<pkgIndexFile *> &List,
+ const char *Buffer,
+ unsigned long CurLine,string File) const;
+ virtual bool CreateItem(vector<pkgIndexFile *> &List,string URI,
+ string Dist,string Section) const = 0;
+
+ Type();
+ virtual ~Type() {};
};
- typedef vector<Item>::const_iterator const_iterator;
+
+ typedef vector<pkgIndexFile *>::const_iterator const_iterator;
protected:
- vector<Item> List;
+ vector<pkgIndexFile *> List;
public:
@@ -71,10 +78,12 @@ class pkgSourceList
inline unsigned int size() const {return List.size();};
inline bool empty() const {return List.empty();};
+ bool FindIndex(pkgCache::PkgFileIterator File,
+ pkgIndexFile *&Found) const;
+ bool GetIndexes(pkgAcquire *Owner) const;
+
pkgSourceList();
pkgSourceList(string File);
};
-ostream &operator <<(ostream &O,pkgSourceList::Item &Itm);
-
#endif
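
The self-registering Type list above is the usual static-constructor pattern; the sketch below shows how a new source type could plug in. MyIndexFile stands in for a concrete pkgIndexFile subclass and is not part of this change.

   class debSLTypeExample : public pkgSourceList::Type
   {
      public:

      virtual bool CreateItem(vector<pkgIndexFile *> &List,string URI,
                              string Dist,string Section) const
      {
         List.push_back(new MyIndexFile(URI,Dist,Section));
         return true;
      };

      debSLTypeExample()
      {
         Name = "deb-example";
         Label = "Example index files";
      };
   };

   // A single static instance is enough; the base constructor links it into
   // Type::GlobalList so GetType("deb-example") can find it.
   static debSLTypeExample _apt_Example;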
diff --git a/apt-pkg/srcrecords.cc b/apt-pkg/srcrecords.cc
index 6da902d98..b9df32e92 100644
--- a/apt-pkg/srcrecords.cc
+++ b/apt-pkg/srcrecords.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: srcrecords.cc,v 1.3 1999/10/18 04:15:24 jgg Exp $
+// $Id: srcrecords.cc,v 1.4 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Source Package Records - Allows access to source package records
@@ -17,9 +17,10 @@
#include <apt-pkg/srcrecords.h>
#include <apt-pkg/error.h>
-#include <apt-pkg/configuration.h>
+#include <apt-pkg/sourcelist.h>
#include <apt-pkg/strutl.h>
-#include <apt-pkg/debsrcrecords.h>
+
+#include <apti18n.h>
/*}}}*/
// SrcRecords::pkgSrcRecords - Constructor /*{{{*/
@@ -27,46 +28,28 @@
/* Open all the source index files */
pkgSrcRecords::pkgSrcRecords(pkgSourceList &List) : Files(0), Current(0)
{
- pkgSourceList::const_iterator I = List.begin();
-
- // Count how many items we will need
+ Files = new Parser *[List.end() - List.begin() + 1];
+
unsigned int Count = 0;
+ pkgSourceList::const_iterator I = List.begin();
for (; I != List.end(); I++)
- if (I->Type == pkgSourceList::Item::DebSrc)
+ {
+ Files[Count] = (*I)->CreateSrcParser();
+ if (_error->PendingError() == true)
+ return;
+ if (Files[Count] != 0)
Count++;
-
+ }
+ Files[Count] = 0;
+
// Doesnt work without any source index files
if (Count == 0)
{
- _error->Error("Sorry, you must put some 'source' uris"
- " in your sources.list");
+ _error->Error(_("Sorry, you must put some 'source' URIs"
+ " in your sources.list"));
return;
}
- Files = new Parser *[Count+1];
- memset(Files,0,sizeof(*Files)*(Count+1));
-
- // Create the parser objects
- Count = 0;
- string Dir = _config->FindDir("Dir::State::lists");
- for (I = List.begin(); I != List.end(); I++)
- {
- if (I->Type != pkgSourceList::Item::DebSrc)
- continue;
-
- // Open the file
- FileFd *FD = new FileFd(Dir + URItoFileName(I->PackagesURI()),
- FileFd::ReadOnly);
- if (_error->PendingError() == true)
- {
- delete FD;
- return;
- }
-
- Files[Count] = new debSrcRecordParser(FD,I);
- Count++;
- }
-
Restart();
}
/*}}}*/
@@ -81,6 +64,7 @@ pkgSrcRecords::~pkgSrcRecords()
// Blow away all the parser objects
for (unsigned int Count = 0; Files[Count] != 0; Count++)
delete Files[Count];
+ delete [] Files;
}
/*}}}*/
// SrcRecords::Restart - Restart the search /*{{{*/
@@ -136,4 +120,20 @@ pkgSrcRecords::Parser *pkgSrcRecords::Find(const char *Package,bool SrcOnly)
}
}
/*}}}*/
+// Parser::BuildDepType - Convert a build dep to a string /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+const char *pkgSrcRecords::Parser::BuildDepType(unsigned char Type)
+{
+ const char *fields[] = {"Build-Depends",
+ "Build-Depends-Indep",
+ "Build-Conflicts",
+ "Build-Conflicts-Indep"};
+ if (Type < 4)
+ return fields[Type];
+ else
+ return "";
+}
+ /*}}}*/
+
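The new BuildDepType() helper simply maps the BuildDep enum back to the field name. A hedged usage sketch follows; Rec is any parser returned by Find() and the loop is illustrative.

   vector<pkgSrcRecords::Parser::BuildDepRec> Deps;
   if (Rec->BuildDepends(Deps) == true)
      for (unsigned int I = 0; I != Deps.size(); I++)
         cout << pkgSrcRecords::Parser::BuildDepType(Deps[I].Type) << ": "
              << Deps[I].Package << endl;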
diff --git a/apt-pkg/srcrecords.h b/apt-pkg/srcrecords.h
index 8391349dd..a73499aef 100644
--- a/apt-pkg/srcrecords.h
+++ b/apt-pkg/srcrecords.h
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: srcrecords.h,v 1.5 1999/10/18 03:44:39 jgg Exp $
+// $Id: srcrecords.h,v 1.6 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Source Package Records - Allows access to source package records
@@ -17,9 +17,11 @@
#pragma interface "apt-pkg/srcrecords.h"
#endif
-#include <apt-pkg/fileutl.h>
-#include <apt-pkg/sourcelist.h>
+#include <string>
+#include <vector>
+class pkgSourceList;
+class pkgIndexFile;
class pkgSrcRecords
{
public:
@@ -30,17 +32,30 @@ class pkgSrcRecords
string MD5Hash;
unsigned long Size;
string Path;
+ string Type;
};
// Abstract parser for each source record
class Parser
{
- FileFd *File;
- pkgSourceList::const_iterator SrcItem;
+ protected:
+
+ const pkgIndexFile *iIndex;
public:
- inline pkgSourceList::const_iterator Source() const {return SrcItem;};
+ enum BuildDep {BuildDepend=0x0,BuildDependIndep=0x1,
+ BuildConflict=0x2,BuildConflictIndep=0x3};
+
+ struct BuildDepRec
+ {
+ string Package;
+ string Version;
+ unsigned int Op;
+ unsigned char Type;
+ };
+
+ inline const pkgIndexFile &Index() const {return *iIndex;};
virtual bool Restart() = 0;
virtual bool Step() = 0;
@@ -48,16 +63,19 @@ class pkgSrcRecords
virtual unsigned long Offset() = 0;
virtual string AsStr() = 0;
- virtual string Package() = 0;
- virtual string Version() = 0;
- virtual string Maintainer() = 0;
- virtual string Section() = 0;
- virtual const char **Binaries() = 0;
+ virtual string Package() const = 0;
+ virtual string Version() const = 0;
+ virtual string Maintainer() const = 0;
+ virtual string Section() const = 0;
+ virtual const char **Binaries() = 0; // Ownership does not transfer
+
+ virtual bool BuildDepends(vector<BuildDepRec> &BuildDeps) = 0;
+ static const char *BuildDepType(unsigned char Type);
+
virtual bool Files(vector<pkgSrcRecords::File> &F) = 0;
- Parser(FileFd *File,pkgSourceList::const_iterator SrcItem) : File(File),
- SrcItem(SrcItem) {};
- virtual ~Parser() {delete File;};
+ Parser(const pkgIndexFile *Index) : iIndex(Index) {};
+ virtual ~Parser() {};
};
private:
@@ -78,5 +96,4 @@ class pkgSrcRecords
~pkgSrcRecords();
};
-
#endif
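
A short, hedged sketch of the reworked interface from a caller's point of view; the sources.list path and the package name are examples only.

   pkgSourceList List;
   if (List.Read("/etc/apt/sources.list") == false)
      return false;

   pkgSrcRecords Recs(List);
   if (_error->PendingError() == true)
      return false;

   // Find() steps through every source index looking for a matching record.
   pkgSrcRecords::Parser *Rec = Recs.Find("apt",false);
   if (Rec != 0)
      cout << Rec->Package() << ' ' << Rec->Version() << ' '
           << Rec->Maintainer() << endl;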
diff --git a/apt-pkg/tagfile.cc b/apt-pkg/tagfile.cc
index 97e5c244a..5d1144629 100644
--- a/apt-pkg/tagfile.cc
+++ b/apt-pkg/tagfile.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: tagfile.cc,v 1.25 1999/07/03 06:45:40 jgg Exp $
+// $Id: tagfile.cc,v 1.26 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Fast scanner for RFC-822 type header information
@@ -19,6 +19,8 @@
#include <apt-pkg/error.h>
#include <apt-pkg/strutl.h>
+#include <apti18n.h>
+
#include <string>
#include <stdio.h>
/*}}}*/
@@ -26,16 +28,17 @@
// TagFile::pkgTagFile - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
-pkgTagFile::pkgTagFile(FileFd &Fd,unsigned long Size) : Fd(Fd), Size(Size)
+pkgTagFile::pkgTagFile(FileFd *pFd,unsigned long Size) : Fd(*pFd), Size(Size)
{
Buffer = new char[Size];
Start = End = Buffer;
Left = Fd.Size();
+ TotalSize = Fd.Size();
iOffset = 0;
Fill();
}
/*}}}*/
-// pkgTagFile::~pkgTagFile - Destructor /*{{{*/
+// TagFile::~pkgTagFile - Destructor /*{{{*/
// ---------------------------------------------------------------------
/* */
pkgTagFile::~pkgTagFile()
@@ -54,10 +57,12 @@ bool pkgTagFile::Step(pkgTagSection &Tag)
return false;
if (Tag.Scan(Start,End - Start) == false)
- return _error->Error("Unable to parse package file %s (1)",Fd.Name().c_str());
+ return _error->Error(_("Unable to parse package file %s (1)"),Fd.Name().c_str());
}
Start += Tag.size();
iOffset += Tag.size();
+
+ Tag.Trim();
return true;
}
@@ -118,8 +123,18 @@ bool pkgTagFile::Fill()
that is there */
bool pkgTagFile::Jump(pkgTagSection &Tag,unsigned long Offset)
{
+ // We are within a buffer space of the next hit..
+ if (Offset >= iOffset && iOffset + (End - Start) > Offset)
+ {
+ unsigned long Dist = Offset - iOffset;
+ Start += Dist;
+ iOffset += Dist;
+ return Step(Tag);
+ }
+
+ // Reposition and reload..
iOffset = Offset;
- Left = Fd.Size() - Offset;
+ Left = TotalSize - Offset;
if (Fd.Seek(Offset) == false)
return false;
End = Start = Buffer;
@@ -135,10 +150,7 @@ bool pkgTagFile::Jump(pkgTagSection &Tag,unsigned long Offset)
return false;
if (Tag.Scan(Start,End - Start) == false)
- {
- cout << string(Start,End) << endl;
- return _error->Error("Unable to parse package file %s (2)",Fd.Name().c_str());
- }
+ return _error->Error(_("Unable to parse package file %s (2)"),Fd.Name().c_str());
return true;
}
@@ -148,6 +160,14 @@ bool pkgTagFile::Jump(pkgTagSection &Tag,unsigned long Offset)
/* This looks for the first double new line in the data stream. It also
indexes the tags in the section. This very simple hash function for the
first 3 letters gives very good performance on the debian package files */
+inline static unsigned long AlphaHash(const char *Text, const char *End = 0)
+{
+ unsigned long Res = 0;
+ for (; Text != End && *Text != ':' && *Text != 0; Text++)
+ Res = (unsigned long)(*Text) ^ (Res << 2);
+ return Res & 0xFF;
+}
+
bool pkgTagSection::Scan(const char *Start,unsigned long MaxLength)
{
const char *End = Start + MaxLength;
@@ -164,10 +184,7 @@ bool pkgTagSection::Scan(const char *Start,unsigned long MaxLength)
if (isspace(Stop[0]) == 0)
{
Indexes[TagCount++] = Stop - Section;
- unsigned char A = tolower(Stop[0]) - 'a';
- unsigned char B = tolower(Stop[1]) - 'a';
- unsigned char C = tolower(Stop[3]) - 'a';
- AlphaIndexes[((A + C/3)%26) + 26*((B + C/2)%26)] = TagCount;
+ AlphaIndexes[AlphaHash(Stop,End)] = TagCount;
}
Stop = (const char *)memchr(Stop,'\n',End - Stop);
@@ -191,17 +208,21 @@ bool pkgTagSection::Scan(const char *Start,unsigned long MaxLength)
return false;
}
/*}}}*/
+// TagSection::Trim - Trim off any trailing garbage /*{{{*/
+// ---------------------------------------------------------------------
+/* There should be exactly 1 newline at the end of the buffer, no more. */
+void pkgTagSection::Trim()
+{
+ for (; Stop > Section + 2 && (Stop[-2] == '\n' || Stop[-2] == '\r'); Stop--);
+}
+ /*}}}*/
// TagSection::Find - Locate a tag /*{{{*/
// ---------------------------------------------------------------------
/* This searches the section for a tag that matches the given string. */
-bool pkgTagSection::Find(const char *Tag,const char *&Start,
- const char *&End)
+bool pkgTagSection::Find(const char *Tag,unsigned &Pos) const
{
unsigned int Length = strlen(Tag);
- unsigned char A = tolower(Tag[0]) - 'a';
- unsigned char B = tolower(Tag[1]) - 'a';
- unsigned char C = tolower(Tag[3]) - 'a';
- unsigned int I = AlphaIndexes[((A + C/3)%26) + 26*((B + C/2)%26)];
+ unsigned int I = AlphaIndexes[AlphaHash(Tag)];
if (I == 0)
return false;
I--;
@@ -219,6 +240,39 @@ bool pkgTagSection::Find(const char *Tag,const char *&Start,
for (; isspace(*C) != 0; C++);
if (*C != ':')
continue;
+ Pos = I;
+ return true;
+ }
+
+ Pos = 0;
+ return false;
+}
+ /*}}}*/
+// TagSection::Find - Locate a tag /*{{{*/
+// ---------------------------------------------------------------------
+/* This searches the section for a tag that matches the given string. */
+bool pkgTagSection::Find(const char *Tag,const char *&Start,
+ const char *&End) const
+{
+ unsigned int Length = strlen(Tag);
+ unsigned int I = AlphaIndexes[AlphaHash(Tag)];
+ if (I == 0)
+ return false;
+ I--;
+
+ for (unsigned int Counter = 0; Counter != TagCount; Counter++,
+ I = (I+1)%TagCount)
+ {
+ const char *St;
+ St = Section + Indexes[I];
+ if (strncasecmp(Tag,St,Length) != 0)
+ continue;
+
+ // Make sure the colon is in the right place
+ const char *C = St + Length;
+ for (; isspace(*C) != 0; C++);
+ if (*C != ':')
+ continue;
// Strip off the gunk from the start end
Start = C;
@@ -239,7 +293,7 @@ bool pkgTagSection::Find(const char *Tag,const char *&Start,
// TagSection::FindS - Find a string /*{{{*/
// ---------------------------------------------------------------------
/* */
-string pkgTagSection::FindS(const char *Tag)
+string pkgTagSection::FindS(const char *Tag) const
{
const char *Start;
const char *End;
@@ -251,7 +305,7 @@ string pkgTagSection::FindS(const char *Tag)
// TagSection::FindI - Find an integer /*{{{*/
// ---------------------------------------------------------------------
/* */
-signed int pkgTagSection::FindI(const char *Tag,signed long Default)
+signed int pkgTagSection::FindI(const char *Tag,signed long Default) const
{
const char *Start;
const char *Stop;
@@ -276,7 +330,7 @@ signed int pkgTagSection::FindI(const char *Tag,signed long Default)
// ---------------------------------------------------------------------
/* The bits marked in Flag are masked on/off in Flags */
bool pkgTagSection::FindFlag(const char *Tag,unsigned long &Flags,
- unsigned long Flag)
+ unsigned long Flag) const
{
const char *Start;
const char *Stop;
@@ -294,9 +348,191 @@ bool pkgTagSection::FindFlag(const char *Tag,unsigned long &Flags,
return true;
default:
- _error->Warning("Unknown flag value");
+ _error->Warning("Unknown flag value: %s",string(Start,Stop).c_str());
return true;
}
return true;
}
/*}}}*/
+
+// TFRewrite - Rewrite a control record /*{{{*/
+// ---------------------------------------------------------------------
+/* This writes the control record to the given output stream, rewriting it
+ as necessary. The override map item specifies the rewriting rules to follow.
+ This also takes the time to sort the field list. */
+
+/* The order of this list is taken from dpkg source lib/parse.c the fieldinfos
+ array. */
+static const char *iTFRewritePackageOrder[] = {
+ "Package",
+ "Essential",
+ "Status",
+ "Priority",
+ "Section",
+ "Installed-Size",
+ "Maintainer",
+ "Architecture",
+ "Source",
+ "Version",
+ "Revision", // Obsolete
+ "Config-Version", // Obsolete
+ "Replaces",
+ "Provides",
+ "Depends",
+ "Pre-Depends",
+ "Recommends",
+ "Suggests",
+ "Conflicts",
+ "Conffiles",
+ "Filename",
+ "Size",
+ "MD5Sum",
+ "MSDOS-Filename", // Obsolete
+ "Description",
+ 0};
+static const char *iTFRewriteSourceOrder[] = {"Package",
+ "Source",
+ "Binary",
+ "Version",
+ "Priority",
+ "Section",
+ "Maintainer",
+ "Build-Depends",
+ "Build-Depends-Indep",
+ "Build-Conflicts",
+ "Build-Conflicts-Indep",
+ "Architecture",
+ "Standards-Version",
+ "Format",
+ "Directory",
+ "Files",
+ 0};
+
+/* Two levels of initialization are used because gcc will set the symbol
+ size of an array to the length of the array, causing dynamic relinking
+ errors. Doing this makes the symbol size constant */
+const char **TFRewritePackageOrder = iTFRewritePackageOrder;
+const char **TFRewriteSourceOrder = iTFRewriteSourceOrder;
+
+bool TFRewrite(FILE *Output,pkgTagSection const &Tags,const char *Order[],
+ TFRewriteData *Rewrite)
+{
+ unsigned char Visited[256]; // Bit 1 is Order, Bit 2 is Rewrite
+ for (unsigned I = 0; I != 256; I++)
+ Visited[I] = 0;
+
+ // Set new tag up as necessary.
+ for (unsigned int J = 0; Rewrite != 0 && Rewrite[J].Tag != 0; J++)
+ {
+ if (Rewrite[J].NewTag == 0)
+ Rewrite[J].NewTag = Rewrite[J].Tag;
+ }
+
+ // Write all of the tags, in order.
+ for (unsigned int I = 0; Order[I] != 0; I++)
+ {
+ bool Rewritten = false;
+
+ // See if this is a field that needs to be rewritten
+ for (unsigned int J = 0; Rewrite != 0 && Rewrite[J].Tag != 0; J++)
+ {
+ if (strcasecmp(Rewrite[J].Tag,Order[I]) == 0)
+ {
+ Visited[J] |= 2;
+ if (Rewrite[J].Rewrite != 0 && Rewrite[J].Rewrite[0] != 0)
+ {
+ if (isspace(Rewrite[J].Rewrite[0]))
+ fprintf(Output,"%s:%s\n",Rewrite[J].NewTag,Rewrite[J].Rewrite);
+ else
+ fprintf(Output,"%s: %s\n",Rewrite[J].NewTag,Rewrite[J].Rewrite);
+ }
+
+ Rewritten = true;
+ break;
+ }
+ }
+
+ // See if it is in the fragment
+ unsigned Pos;
+ if (Tags.Find(Order[I],Pos) == false)
+ continue;
+ Visited[Pos] |= 1;
+
+ if (Rewritten == true)
+ continue;
+
+ /* Write out this element, taking a moment to rewrite the tag
+ in case of changes of case. */
+ const char *Start;
+ const char *Stop;
+ Tags.Get(Start,Stop,Pos);
+
+ if (fputs(Order[I],Output) < 0)
+ return _error->Errno("fputs","IO Error to output");
+ Start += strlen(Order[I]);
+ if (fwrite(Start,Stop - Start,1,Output) != 1)
+ return _error->Errno("fwrite","IO Error to output");
+ if (Stop[-1] != '\n')
+ fprintf(Output,"\n");
+ }
+
+ // Now write all the old tags that were missed.
+ for (unsigned int I = 0; I != Tags.Count(); I++)
+ {
+ if ((Visited[I] & 1) == 1)
+ continue;
+
+ const char *Start;
+ const char *Stop;
+ Tags.Get(Start,Stop,I);
+ const char *End = Start;
+ for (; End < Stop && *End != ':'; End++);
+
+ // See if this is a field that needs to be rewritten
+ bool Rewritten = false;
+ for (unsigned int J = 0; Rewrite != 0 && Rewrite[J].Tag != 0; J++)
+ {
+ if (stringcasecmp(Start,End,Rewrite[J].Tag) == 0)
+ {
+ Visited[J] |= 2;
+ if (Rewrite[J].Rewrite != 0 && Rewrite[J].Rewrite[0] != 0)
+ {
+ if (isspace(Rewrite[J].Rewrite[0]))
+ fprintf(Output,"%s:%s\n",Rewrite[J].NewTag,Rewrite[J].Rewrite);
+ else
+ fprintf(Output,"%s: %s\n",Rewrite[J].NewTag,Rewrite[J].Rewrite);
+ }
+
+ Rewritten = true;
+ break;
+ }
+ }
+
+ if (Rewritten == true)
+ continue;
+
+ // Write out this element
+ if (fwrite(Start,Stop - Start,1,Output) != 1)
+ return _error->Errno("fwrite","IO Error to output");
+ if (Stop[-1] != '\n')
+ fprintf(Output,"\n");
+ }
+
+ // Now write all the rewrites that were missed
+ for (unsigned int J = 0; Rewrite != 0 && Rewrite[J].Tag != 0; J++)
+ {
+ if ((Visited[J] & 2) == 2)
+ continue;
+
+ if (Rewrite[J].Rewrite != 0 && Rewrite[J].Rewrite[0] != 0)
+ {
+ if (isspace(Rewrite[J].Rewrite[0]))
+ fprintf(Output,"%s:%s\n",Rewrite[J].NewTag,Rewrite[J].Rewrite);
+ else
+ fprintf(Output,"%s: %s\n",Rewrite[J].NewTag,Rewrite[J].Rewrite);
+ }
+ }
+
+ return true;
+}
+ /*}}}*/
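
A hedged sketch of driving TFRewrite(); the tag values are invented and Tags is assumed to hold a section already scanned by pkgTagFile::Step().

   TFRewriteData Changes[] = {
      {"Filename","pool/main/a/apt/apt_0.5.0_i386.deb",0}, // replace value
      {"Status",0,0},                                      // erase the field
      {0,0,0}};                                            // terminator

   if (TFRewrite(stdout,Tags,TFRewritePackageOrder,Changes) == false)
      return false;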
diff --git a/apt-pkg/tagfile.h b/apt-pkg/tagfile.h
index 84998629d..948adb426 100644
--- a/apt-pkg/tagfile.h
+++ b/apt-pkg/tagfile.h
@@ -1,12 +1,12 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: tagfile.h,v 1.14 1999/07/03 06:45:40 jgg Exp $
+// $Id: tagfile.h,v 1.15 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Fast scanner for RFC-822 type header information
This parser handles Debian package files (and others). Their form is
- RFC-822 type header fields in groups seperated by a blank line.
+ RFC-822 type header fields in groups separated by a blank line.
The parser reads the file and provides methods to step linearly
over it or to jump to a pre-recorded start point and read that record.
@@ -17,7 +17,6 @@
##################################################################### */
/*}}}*/
-// Header section: pkglib
#ifndef PKGLIB_TAGFILE_H
#define PKGLIB_TAGFILE_H
@@ -26,7 +25,8 @@
#endif
#include <apt-pkg/fileutl.h>
-
+#include <stdio.h>
+
class pkgTagSection
{
const char *Section;
@@ -34,7 +34,7 @@ class pkgTagSection
// We have a limit of 256 tags per section.
unsigned short Indexes[256];
- unsigned short AlphaIndexes[26 + 26*26];
+ unsigned short AlphaIndexes[0x100];
unsigned int TagCount;
@@ -43,19 +43,21 @@ class pkgTagSection
inline bool operator ==(const pkgTagSection &rhs) {return Section == rhs.Section;};
inline bool operator !=(const pkgTagSection &rhs) {return Section != rhs.Section;};
- bool Find(const char *Tag,const char *&Start, const char *&End);
- string FindS(const char *Tag);
- signed int FindI(const char *Tag,signed long Default = 0);
+ bool Find(const char *Tag,const char *&Start, const char *&End) const;
+ bool Find(const char *Tag,unsigned &Pos) const;
+ string FindS(const char *Tag) const;
+ signed int FindI(const char *Tag,signed long Default = 0) const;
bool pkgTagSection::FindFlag(const char *Tag,unsigned long &Flags,
- unsigned long Flag);
+ unsigned long Flag) const;
bool Scan(const char *Start,unsigned long MaxLength);
- inline unsigned long size() {return Stop - Section;};
-
- inline unsigned int Count() {return TagCount;};
- inline void Get(const char *&Start,const char *&Stop,unsigned int I)
+ inline unsigned long size() const {return Stop - Section;};
+ void Trim();
+
+ inline unsigned int Count() const {return TagCount;};
+ inline void Get(const char *&Start,const char *&Stop,unsigned int I) const
{Start = Section + Indexes[I]; Stop = Section + Indexes[I+1];}
- inline void GetSection(const char *&Start,const char *&Stop)
+ inline void GetSection(const char *&Start,const char *&Stop) const
{
Start = Section;
Stop = this->Stop;
@@ -73,6 +75,7 @@ class pkgTagFile
unsigned long Left;
unsigned long iOffset;
unsigned long Size;
+ unsigned long TotalSize;
bool Fill();
@@ -82,8 +85,24 @@ class pkgTagFile
inline unsigned long Offset() {return iOffset;};
bool Jump(pkgTagSection &Tag,unsigned long Offset);
- pkgTagFile(FileFd &F,unsigned long Size = 32*1024);
+ pkgTagFile(FileFd *F,unsigned long Size = 32*1024);
~pkgTagFile();
};
+/* This is the list of things to rewrite. The rewriter
+ goes through and changes or adds each of these headers
+ to suit. A zero forces the header to be erased; an empty string
+ causes the old value to be used (the rewrite rule is ignored). */
+struct TFRewriteData
+{
+ const char *Tag;
+ const char *Rewrite;
+ const char *NewTag;
+};
+extern const char **TFRewritePackageOrder;
+extern const char **TFRewriteSourceOrder;
+
+bool TFRewrite(FILE *Output,pkgTagSection const &Tags,const char *Order[],
+ TFRewriteData *Rewrite);
+
#endif
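
The constructor now takes a FileFd pointer; below is a minimal stepping loop as a hedged sketch, assuming the dpkg status file path and using the same error conventions as the rest of this change.

   #include <apt-pkg/tagfile.h>
   #include <apt-pkg/error.h>
   #include <iostream.h>

   FileFd Fd("/var/lib/dpkg/status",FileFd::ReadOnly);
   pkgTagFile TF(&Fd);
   if (_error->PendingError() == true)
      return false;

   pkgTagSection Section;
   while (TF.Step(Section) == true)
      cout << Section.FindS("Package") << ' '
           << Section.FindI("Installed-Size",0) << endl;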
diff --git a/apt-pkg/version.cc b/apt-pkg/version.cc
index 4aad581f8..db340c11f 100644
--- a/apt-pkg/version.cc
+++ b/apt-pkg/version.cc
@@ -1,18 +1,9 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: version.cc,v 1.9 1999/04/19 06:03:09 jgg Exp $
+// $Id: version.cc,v 1.10 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
- Version - Version string
-
- Version comparing is done using the == and < operators. STL's
- function.h provides the remaining set of comparitors. A directly
- callable non-string class version is provided for functions manipulating
- the cache file (esp the sort function).
-
- A version is defined to be equal if a case sensitive compare returns
- that the two strings are the same. For compatibility with the QSort
- function this version returns -1,0,1.
+ Version - Versioning system..
##################################################################### */
/*}}}*/
@@ -26,246 +17,28 @@
#include <stdlib.h>
/*}}}*/
+
+static pkgVersioningSystem *VSList[10];
+pkgVersioningSystem **pkgVersioningSystem::GlobalList = VSList;
+unsigned long pkgVersioningSystem::GlobalListLen = 0;
-// StrToLong - Convert the string between two iterators to a long /*{{{*/
+// pkgVS::pkgVersioningSystem - Constructor /*{{{*/
// ---------------------------------------------------------------------
-/* */
-static unsigned long StrToLong(const char *begin,const char *end)
+/* Link to the global list of versioning systems supported */
+pkgVersioningSystem::pkgVersioningSystem()
{
- char S[40];
- char *I = S;
- for (; begin != end && I < S + 40;)
- *I++ = *begin++;
- *I = 0;
- return strtoul(S,0,10);
+ VSList[GlobalListLen] = this;
+ GlobalListLen++;
}
/*}}}*/
-// VersionCompare (op) - Greater than comparison for versions /*{{{*/
+// pkgVS::GetVS - Find a VS by name /*{{{*/
// ---------------------------------------------------------------------
/* */
-int pkgVersionCompare(const char *A, const char *B)
-{
- return pkgVersionCompare(A,A + strlen(A),B,B + strlen(B));
-}
-int pkgVersionCompare(string A,string B)
-{
- return pkgVersionCompare(A.begin(),A.end(),B.begin(),B.end());
-}
-
- /*}}}*/
-// iVersionCompare - Compare versions /*{{{*/
-// ---------------------------------------------------------------------
-/* This compares a fragment of the version. */
-static int iVersionCompare(const char *A, const char *AEnd, const char *B,
- const char *BEnd)
-{
- if (A >= AEnd && B >= BEnd)
- return 0;
- if (A >= AEnd)
- return -1;
- if (B >= BEnd)
- return 1;
-
- /* Iterate over the whole string
- What this does is to spilt the whole string into groups of
- numeric and non numeric portions. For instance:
- a67bhgs89
- Has 4 portions 'a', '67', 'bhgs', '89'. A more normal:
- 2.7.2-linux-1
- Has '2', '.', '7', '.' ,'-linux-','1' */
- const char *lhs = A;
- const char *rhs = B;
- while (lhs != AEnd && rhs != BEnd)
- {
- // Starting points
- const char *Slhs = lhs;
- const char *Srhs = rhs;
-
- // Compute ending points were we have passed over the portion
- bool Digit = (isdigit(*lhs) > 0?true:false);
- for (;lhs != AEnd && (isdigit(*lhs) > 0?true:false) == Digit; lhs++);
- for (;rhs != BEnd && (isdigit(*rhs) > 0?true:false) == Digit; rhs++);
-
- if (Digit == true)
- {
- // If the lhs has a digit and the rhs does not then <
- if (rhs - Srhs == 0)
- return -1;
-
- // Generate integers from the strings.
- unsigned long Ilhs = StrToLong(Slhs,lhs);
- unsigned long Irhs = StrToLong(Srhs,rhs);
- if (Ilhs != Irhs)
- {
- if (Ilhs > Irhs)
- return 1;
- return -1;
- }
- }
- else
- {
- // They are equal length so do a straight text compare
- for (;Slhs != lhs && Srhs != rhs; Slhs++, Srhs++)
- {
- if (*Slhs != *Srhs)
- {
- /* We need to compare non alpha chars as higher than alpha
- chars (a < !) */
- int lc = *Slhs;
- int rc = *Srhs;
- if (isalpha(lc) == 0) lc += 256;
- if (isalpha(rc) == 0) rc += 256;
- if (lc > rc)
- return 1;
- return -1;
- }
- }
-
- // If the lhs is shorter than the right it is 'less'
- if (lhs - Slhs < rhs - Srhs)
- return -1;
-
- // If the lhs is longer than the right it is 'more'
- if (lhs - Slhs > rhs - Srhs)
- return 1;
- }
- }
-
- // The strings must be equal
- if (lhs == AEnd && rhs == BEnd)
- return 0;
-
- // lhs is shorter
- if (lhs == AEnd)
- return -1;
-
- // rhs is shorter
- if (rhs == BEnd)
- return 1;
-
- // Shouldnt happen
- return 1;
-}
- /*}}}*/
-// VersionCompare - Comparison for versions /*{{{*/
-// ---------------------------------------------------------------------
-/* This fragments the version into E:V-R triples and compares each
- portion seperately. */
-int pkgVersionCompare(const char *A, const char *AEnd, const char *B,
- const char *BEnd)
+pkgVersioningSystem *pkgVersioningSystem::GetVS(const char *Label)
{
- // Strip off the epoch and compare it
- const char *lhs = A;
- const char *rhs = B;
- for (;lhs != AEnd && *lhs != ':'; lhs++);
- for (;rhs != BEnd && *rhs != ':'; rhs++);
- if (lhs == AEnd)
- lhs = A;
- if (rhs == BEnd)
- rhs = B;
-
- // Compare the epoch
- int Res = iVersionCompare(A,lhs,B,rhs);
- if (Res != 0)
- return Res;
-
- // Skip the :
- if (lhs != A)
- lhs++;
- if (rhs != B)
- rhs++;
-
- // Find the last -
- const char *dlhs = AEnd-1;
- const char *drhs = BEnd-1;
- for (;dlhs > lhs && *dlhs != '-'; dlhs--);
- for (;drhs > rhs && *drhs != '-'; drhs--);
-
- if (dlhs == lhs)
- dlhs = AEnd;
- if (drhs == rhs)
- drhs = BEnd;
-
- // Compare the main version
- Res = iVersionCompare(lhs,dlhs,rhs,drhs);
- if (Res != 0)
- return Res;
-
- // Skip the -
- if (dlhs != lhs)
- dlhs++;
- if (drhs != rhs)
- drhs++;
- return iVersionCompare(dlhs,AEnd,drhs,BEnd);
-}
- /*}}}*/
-// CheckDep - Check a single dependency /*{{{*/
-// ---------------------------------------------------------------------
-/* This simply preforms the version comparison and switch based on
- operator. */
-bool pkgCheckDep(const char *DepVer,const char *PkgVer,int Op)
-{
- if (DepVer == 0)
- return true;
- if (PkgVer == 0)
- return false;
-
- // Perform the actuall comparision.
- int Res = pkgVersionCompare(PkgVer,DepVer);
- switch (Op & 0x0F)
- {
- case pkgCache::Dep::LessEq:
- if (Res <= 0)
- return true;
- break;
-
- case pkgCache::Dep::GreaterEq:
- if (Res >= 0)
- return true;
- break;
-
- case pkgCache::Dep::Less:
- if (Res < 0)
- return true;
- break;
-
- case pkgCache::Dep::Greater:
- if (Res > 0)
- return true;
- break;
-
- case pkgCache::Dep::Equals:
- if (Res == 0)
- return true;
- break;
-
- case pkgCache::Dep::NotEquals:
- if (Res != 0)
- return true;
- break;
- }
-
- return false;
-}
- /*}}}*/
-// BaseVersion - Return the upstream version string /*{{{*/
-// ---------------------------------------------------------------------
-/* This strips all the debian specific information from the version number */
-string pkgBaseVersion(const char *Ver)
-{
- // Strip off the bit before the first colon
- const char *I = Ver;
- for (; *I != 0 && *I != ':'; I++);
- if (*I == ':')
- Ver = I + 1;
-
- // Chop off the trailing -
- I = Ver;
- unsigned Last = strlen(Ver);
- for (; *I != 0; I++)
- if (*I == '-')
- Last = I - Ver;
-
- return string(Ver,Last);
+ for (unsigned I = 0; I != GlobalListLen; I++)
+ if (strcmp(VSList[I]->Label,Label) == 0)
+ return VSList[I];
+ return 0;
}
/*}}}*/
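
The lookup above mirrors the source list Type registry: each concrete versioning system links itself in from its constructor. A hedged sketch follows; the "Standard .deb" label is assumed to be the one used by the Debian versioning system added elsewhere in this change.

   pkgVersioningSystem *VS = pkgVersioningSystem::GetVS("Standard .deb");
   if (VS == 0)
      return _error->Error("Cannot locate a versioning system");

   // Negative, zero or positive, just like the old pkgVersionCompare().
   cout << VS->Label << ": CmpVersion(\"1.0\",\"1.0-1\") = "
        << VS->CmpVersion("1.0","1.0-1") << endl;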
diff --git a/apt-pkg/version.h b/apt-pkg/version.h
index 127519583..27e8e1f1b 100644
--- a/apt-pkg/version.h
+++ b/apt-pkg/version.h
@@ -1,16 +1,22 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: version.h,v 1.5 1999/04/19 06:03:09 jgg Exp $
+// $Id: version.h,v 1.6 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
- Version - Version comparison routines
+ Version - Versioning system..
+
+ The versioning system defines how versions are represented and compared,
+ and how dependencies are evaluated. As a general rule versioning
+ systems are not compatible unless specifically allowed by the
+ TestCompatibility query.
- These routines provide some means to compare versions and check
- dependencies.
+ The versioning systems are stored in a global list, but that is just
+ so that they can be queried when someone does 'apt-get -v'.
+ pkgSystem provides the proper means to access the VS for the active
+ system.
##################################################################### */
/*}}}*/
-// Header section: pkglib
#ifndef PKGLIB_VERSION_H
#define PKGLIB_VERSION_H
@@ -20,11 +26,52 @@
#include <string>
-int pkgVersionCompare(const char *A, const char *B);
-int pkgVersionCompare(const char *A, const char *AEnd, const char *B,
- const char *BEnd);
-int pkgVersionCompare(string A,string B);
-bool pkgCheckDep(const char *DepVer,const char *PkgVer,int Op);
-string pkgBaseVersion(const char *Ver);
+class pkgVersioningSystem
+{
+ public:
+ // Global list of VS's
+ static pkgVersioningSystem **GlobalList;
+ static unsigned long GlobalListLen;
+ static pkgVersioningSystem *GetVS(const char *Label);
+
+ const char *Label;
+
+ // Compare versions..
+ virtual int DoCmpVersion(const char *A,const char *Aend,
+ const char *B,const char *Bend) = 0;
+ virtual bool CheckDep(const char *PkgVer,int Op,const char *DepVer) = 0;
+ virtual int DoCmpReleaseVer(const char *A,const char *Aend,
+ const char *B,const char *Bend) = 0;
+ virtual string UpstreamVersion(const char *A) = 0;
+
+ // See if the given VS is compatible with this one..
+ virtual bool TestCompatibility(pkgVersioningSystem const &Against)
+ {return this == &Against;};
+
+ // Shortcuts
+ inline int CmpVersion(const char *A, const char *B)
+ {
+ return DoCmpVersion(A,A+strlen(A),B,B+strlen(B));
+ };
+ inline int CmpVersion(string A,string B)
+ {
+ return DoCmpVersion(A.begin(),A.end(),B.begin(),B.end());
+ };
+ inline int CmpReleaseVer(const char *A, const char *B)
+ {
+ return DoCmpReleaseVer(A,A+strlen(A),B,B+strlen(B));
+ };
+ inline int CmpReleaseVer(string A,string B)
+ {
+ return DoCmpReleaseVer(A.begin(),A.end(),B.begin(),B.end());
+ };
+
+ pkgVersioningSystem();
+ virtual ~pkgVersioningSystem() {};
+};
+
+#ifdef APT_COMPATIBILITY
+#include <apt-pkg/debversion.h>
+#endif
#endif
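
Dependency checks now go through the versioning system as well; a minimal sketch, where Cmp is whatever pkgVersioningSystem the caller obtained (for instance via GetVS) and the versions are illustrative.

   #include <apt-pkg/version.h>
   #include <apt-pkg/pkgcache.h>

   bool Satisfied(pkgVersioningSystem &Cmp)
   {
      // Is an installed 0.5.4 good enough for a '>= 0.5.0' dependency?
      return Cmp.CheckDep("0.5.4",pkgCache::Dep::GreaterEq,"0.5.0");
   }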
diff --git a/apt-pkg/versionmatch.cc b/apt-pkg/versionmatch.cc
new file mode 100644
index 000000000..45cdb117e
--- /dev/null
+++ b/apt-pkg/versionmatch.cc
@@ -0,0 +1,210 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: versionmatch.cc,v 1.2 2001/02/20 07:03:17 jgg Exp $
+/* ######################################################################
+
+ Version Matching
+
+ This module takes a matching string and a type and locates the version
+ record that satisfies the constraint described by the matching string.
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#ifdef __GNUG__
+#pragma implementation "apt-pkg/versionmatch.h"
+#endif
+#include <apt-pkg/versionmatch.h>
+
+#include <apt-pkg/strutl.h>
+#include <apt-pkg/error.h>
+
+#include <stdio.h>
+ /*}}}*/
+
+// VersionMatch::pkgVersionMatch - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* Break up the data string according to the selected type */
+pkgVersionMatch::pkgVersionMatch(string Data,MatchType Type) : Type(Type)
+{
+ if (Type == None || Data.length() < 1)
+ return;
+
+ // Cut up the version representation
+ if (Type == Version)
+ {
+ if (Data.end()[-1] == '*')
+ {
+ VerPrefixMatch = true;
+ VerStr = string(Data.begin(),Data.end()-1);
+ }
+ else
+ VerStr = Data;
+ return;
+ }
+
+ if (Type == Release)
+ {
+ // All empty = match all
+ if (Data == "*")
+ return;
+
+ // Are we a simple specification?
+ const char *I = Data.begin();
+ for (; I < Data.end() && *I != '='; I++);
+ if (I == Data.end())
+ {
+ // Temporary
+ if (isdigit(Data[0]))
+ RelVerStr = Data;
+ else
+ RelArchive = Data;
+
+ if (RelVerStr.empty() == false && RelVerStr.end()[-1] == '*')
+ {
+ RelVerPrefixMatch = true;
+ RelVerStr = string(RelVerStr.begin(),RelVerStr.end()-1);
+ }
+ return;
+ }
+
+ char Spec[300];
+ char *Fragments[20];
+ snprintf(Spec,sizeof(Spec),"%s",Data.c_str());
+ if (TokSplitString(',',Spec,Fragments,
+ sizeof(Fragments)/sizeof(Fragments[0])) == false)
+ {
+ Type = None;
+ return;
+ }
+
+ for (unsigned J = 0; Fragments[J] != 0; J++)
+ {
+ if (strlen(Fragments[J]) < 3)
+ continue;
+
+ if (stringcasecmp(Fragments[J],Fragments[J]+2,"v=") == 0)
+ RelVerStr = Fragments[J]+2;
+ else if (stringcasecmp(Fragments[J],Fragments[J]+2,"o=") == 0)
+ RelOrigin = Fragments[J]+2;
+ else if (stringcasecmp(Fragments[J],Fragments[J]+2,"a=") == 0)
+ RelArchive = Fragments[J]+2;
+ else if (stringcasecmp(Fragments[J],Fragments[J]+2,"l=") == 0)
+ RelLabel = Fragments[J]+2;
+ else if (stringcasecmp(Fragments[J],Fragments[J]+2,"c=") == 0)
+ RelComponent = Fragments[J]+2;
+ }
+
+ if (RelVerStr.empty() == false && RelVerStr.end()[-1] == '*')
+ {
+ RelVerPrefixMatch = true;
+ RelVerStr = string(RelVerStr.begin(),RelVerStr.end()-1);
+ }
+ return;
+ }
+
+ if (Type == Origin)
+ {
+ OrSite = Data;
+ return;
+ }
+}
+ /*}}}*/
+// VersionMatch::MatchVer - Match a version string with prefixing /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool pkgVersionMatch::MatchVer(const char *A,string B,bool Prefix)
+{
+ const char *Ab = A;
+ const char *Ae = Ab + strlen(A);
+
+ // Strings are not a compatible size.
+ if ((unsigned)(Ae - Ab) != B.length() && Prefix == false ||
+ (unsigned)(Ae - Ab) < B.length())
+ return false;
+
+ // Match (leading?)
+ if (stringcasecmp(B.begin(),B.end(),
+ Ab,Ab + B.length()) == 0)
+ return true;
+
+ return false;
+}
+ /*}}}*/
+// VersionMatch::Find - Locate the best match for the select type /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+pkgCache::VerIterator pkgVersionMatch::Find(pkgCache::PkgIterator Pkg)
+{
+ pkgCache::VerIterator Ver = Pkg.VersionList();
+ for (; Ver.end() == false; Ver++)
+ {
+ if (Type == Version)
+ {
+ if (MatchVer(Ver.VerStr(),VerStr,VerPrefixMatch) == true)
+ return Ver;
+ continue;
+ }
+
+ for (pkgCache::VerFileIterator VF = Ver.FileList(); VF.end() == false; VF++)
+ if (FileMatch(VF.File()) == true)
+ return Ver;
+ }
+
+ // This will be Ended by now.
+ return Ver;
+}
+ /*}}}*/
+// VersionMatch::FileMatch - Match against an index file /*{{{*/
+// ---------------------------------------------------------------------
+/* This matcher checks against the release file and the origin location
+ to see if the constraints are met. */
+bool pkgVersionMatch::FileMatch(pkgCache::PkgFileIterator File)
+{
+ if (Type == Release)
+ {
+/* cout << RelVerStr << ',' << RelOrigin << ',' << RelArchive << ',' << RelLabel << endl;
+ cout << File.Version() << ',' << File.Origin() << ',' << File.Archive() << ',' << File.Label() << endl;
+*/
+ if (RelVerStr.empty() == true && RelOrigin.empty() == true &&
+ RelArchive.empty() == true && RelLabel.empty() == true &&
+ RelComponent.empty() == true)
+ return false;
+
+ if (RelVerStr.empty() == false)
+ if (File->Version == 0 ||
+ MatchVer(File.Version(),RelVerStr,RelVerPrefixMatch) == false)
+ return false;
+ if (RelOrigin.empty() == false)
+ if (File->Origin == 0 ||
+ stringcasecmp(RelOrigin,File.Origin()) != 0)
+ return false;
+ if (RelArchive.empty() == false)
+ {
+ if (File->Archive == 0 ||
+ stringcasecmp(RelArchive,File.Archive()) != 0)
+ return false;
+ }
+ if (RelLabel.empty() == false)
+ if (File->Label == 0 ||
+ stringcasecmp(RelLabel,File.Label()) != 0)
+ return false;
+ if (RelComponent.empty() == false)
+ if (File->Component == 0 ||
+ stringcasecmp(RelComponent,File.Component()) != 0)
+ return false;
+ return true;
+ }
+
+ if (Type == Origin)
+ {
+ if (OrSite.empty() == false)
+ if (File->Site == 0 ||
+ OrSite != File.Site())
+ return false;
+ return true;
+ }
+
+ return false;
+}
+ /*}}}*/
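
The size check and stringcasecmp() call in MatchVer() above boil down to a simple rule: the pattern must be a case-insensitive prefix of the candidate, and unless prefix matching was requested (the trailing '*' form) the lengths must also be equal. A standalone sketch of just that rule, independent of APT; only the function name here is invented.

    // Mirrors the comparison semantics of pkgVersionMatch::MatchVer().
    #include <cstring>
    #include <cstdio>
    #include <strings.h>        // strncasecmp (POSIX)

    static bool MatchesVersion(const char *Cand,const char *Pattern,bool Prefix)
    {
       size_t CLen = strlen(Cand), PLen = strlen(Pattern);
       if ((CLen != PLen && Prefix == false) || CLen < PLen)
          return false;                            // incompatible sizes
       return strncasecmp(Cand,Pattern,PLen) == 0; // leading match
    }

    int main()
    {
       printf("%d\n",MatchesVersion("1.2-3","1.2",true));   // 1: "1.2*" matches
       printf("%d\n",MatchesVersion("1.2-3","1.2",false));  // 0: exact required
       printf("%d\n",MatchesVersion("1.2","1.2",false));    // 1: exact match
       return 0;
    }
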
diff --git a/apt-pkg/versionmatch.h b/apt-pkg/versionmatch.h
new file mode 100644
index 000000000..f8f236a2f
--- /dev/null
+++ b/apt-pkg/versionmatch.h
@@ -0,0 +1,69 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: versionmatch.h,v 1.2 2001/02/20 07:03:17 jgg Exp $
+/* ######################################################################
+
+ Version Matching
+
+ This module takes a matching string and a type and locates the version
+ record that satisfies the constraint described by the matching string.
+
+ Version: 1.2*
+ Release: o=Debian,v=2.1*,c=main
+ Release: v=2.1*
+ Release: *
+ Origin: ftp.debian.org
+
+ Release may be a complex type that can specify matches for any of:
+ Version (v= with prefix)
+ Origin (o=)
+ Archive (a=)
+ Label (l=)
+ Component (c=)
+ If there are no equals signs in the string then it is scanned in short
+ form - if it starts with a number it is Version otherwise it is an
+ Archive.
+
+ Release may be a '*' to match all releases.
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef PKGLIB_VERSIONMATCH_H
+#define PKGLIB_VERSIONMATCH_H
+
+#ifdef __GNUG__
+#pragma interface "apt-pkg/versionmatch.h"
+#endif
+
+#include <string>
+#include <apt-pkg/pkgcache.h>
+
+class pkgVersionMatch
+{
+ // Version Matching
+ string VerStr;
+ bool VerPrefixMatch;
+
+ // Release Matching
+ string RelVerStr;
+ bool RelVerPrefixMatch;
+ string RelOrigin;
+ string RelArchive;
+ string RelLabel;
+ string RelComponent;
+
+ // Origin Matching
+ string OrSite;
+
+ public:
+
+ enum MatchType {None = 0,Version,Release,Origin} Type;
+
+ bool MatchVer(const char *A,string B,bool Prefix);
+ bool FileMatch(pkgCache::PkgFileIterator File);
+ pkgCache::VerIterator Find(pkgCache::PkgIterator Pkg);
+
+ pkgVersionMatch(string Data,MatchType Type);
+};
+
+#endif
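
Putting the grammar above to work is a two-step affair: construct a pkgVersionMatch from the user's string and the matching type, then hand it a PkgIterator and let Find() walk the version list. A brief sketch, assuming an already-open cache; the policy/pinning code that will actually drive this class is not part of this excerpt.

    // Sketch: pick the version of a package that comes from the Debian
    // stable archive, using the Release form documented above.
    #include <apt-pkg/versionmatch.h>
    #include <apt-pkg/pkgcache.h>

    pkgCache::VerIterator FindStable(pkgCache::PkgIterator Pkg)
    {
       pkgVersionMatch Match("o=Debian,a=stable",pkgVersionMatch::Release);
       pkgCache::VerIterator Ver = Match.Find(Pkg);
       // Find() returns an ended iterator when nothing satisfies the match.
       return Ver;
    }

A plain version pattern works the same way: pkgVersionMatch("1.2*",pkgVersionMatch::Version) strips the trailing '*' and turns on prefix matching, exactly as the constructor above shows.
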
diff --git a/buildlib/apti18n.h.in b/buildlib/apti18n.h.in
new file mode 100644
index 000000000..ad7f2b585
--- /dev/null
+++ b/buildlib/apti18n.h.in
@@ -0,0 +1 @@
+#define _(x) x
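
This one-line shim defines _() as an identity macro, so message strings throughout the tree can be wrapped for translation now and routed through gettext later without touching the call sites again. The sketch below shows how a generated apti18n.h could eventually look; the USE_NLS switch is an assumption for illustration, not something this file provides.

    // Hypothetical generated apti18n.h: no-op today, gettext when enabled.
    #ifdef USE_NLS                      // assumed configure-provided switch
     #include <libintl.h>
     #define _(x) gettext(x)
    #else
     #define _(x) x                     // identity marker, as in this patch
    #endif

    #include <cstdio>
    int main() {printf("%s\n",_("Hit "));return 0;}
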
diff --git a/buildlib/archtable b/buildlib/archtable
index a5fdb646a..dc511fb31 100644
--- a/buildlib/archtable
+++ b/buildlib/archtable
@@ -1,34 +1,24 @@
# This file contains a table of known architecture strings, with
-# things to map them to. `configure' will take the output of gcc
-# --print-libgcc-file-name, strip off leading directories up to and
-# including gcc-lib, strip off trailing /libgcc.a and trailing version
-# number directory, and then strip off everything after the first
-# hyphen. The idea is that you're left with this bit:
-# $ gcc --print-libgcc-file-name
-# /usr/lib/gcc-lib/i486-linux/2.7.2/libgcc.a
-# ^^^^
-# This is then looked up in the table below, to find out what to map
-# it to. If it isn't found then configure will print a warning and
-# continue. You can override configure's ideas using --with-arch.
-# The third field is the GNU configure architecture to use with
-# this build architecture.
-#
-# This file is mirrored from dpkg.
-#
+# things to map them to. `configure' will take the output of the
+# autoconf canonical macros and look in here. This only deals with architecture
+# (CPU) names.
-i386 i386 i486
-i486 i386 i486
-i586 i386 i486
-i686 i386 i486
-pentium i386 i486
-sparc sparc sparc
-sparc64 sparc64 sparc64
-alpha alpha alpha
-m68k m68k m68k
-arm arm arm
-armv4l arm arm
-powerpc powerpc powerpc
-ppc powerpc powerpc
-mipsel mipsel mipsel
-mips mips mips
-hppa1.1 hppa hppa
+# The left side is a regex for awk
+
+i.86 i386
+pentium i386
+sparc sparc
+sparc64 sparc
+alpha.* alpha
+m68k m68k
+arm.* arm
+powerpc powerpc
+ppc powerpc
+mipsel mipsel
+mipseb mips
+mips mips
+sheb sheb
+shel sh
+sh sh
+hppa.* hppa
+ia64 ia64
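
Instead of parsing gcc's libgcc path, configure now takes the CPU field of the autoconf canonical triplet and runs it through this table, first matching regex wins. The awk pass itself is not part of this diff, so the lookup below is only an illustrative reimplementation (POSIX regexes, a few rows of the table) of what that mapping does.

    // Illustrative only: map a canonical CPU name to APT's common name.
    #include <regex.h>
    #include <cstdio>

    struct ArchEntry {const char *Pattern; const char *Common;};
    static const ArchEntry Table[] = {
       {"i.86","i386"}, {"pentium","i386"}, {"sparc64","sparc"},
       {"sparc","sparc"}, {"alpha.*","alpha"}, {"arm.*","arm"},
       {"powerpc","powerpc"}, {"mipsel","mipsel"}, {"mips","mips"}, {0,0}};

    static const char *MapCPU(const char *CPU)
    {
       for (const ArchEntry *E = Table; E->Pattern != 0; E++)
       {
          regex_t Re;
          if (regcomp(&Re,E->Pattern,REG_EXTENDED|REG_NOSUB) != 0)
             continue;
          int Hit = regexec(&Re,CPU,0,0,0);
          regfree(&Re);
          if (Hit == 0)
             return E->Common;          // first match wins
       }
       return "unknown";                // same catch-all idea as the ostable
    }

    int main()
    {
       printf("%s -> %s\n","i686",MapCPU("i686"));      // i386
       printf("%s -> %s\n","armv4l",MapCPU("armv4l"));  // arm
       return 0;
    }
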
diff --git a/buildlib/config.guess b/buildlib/config.guess
index a28a21411..a2093afa8 100755
--- a/buildlib/config.guess
+++ b/buildlib/config.guess
@@ -1,8 +1,10 @@
#! /bin/sh
# Attempt to guess a canonical system name.
-# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999
+# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
# Free Software Foundation, Inc.
-#
+
+version='2000-09-27'
+
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
@@ -23,8 +25,7 @@
# the same distribution terms that you use for the rest of that program.
# Written by Per Bothner <bothner@cygnus.com>.
-# The master version of this file is at the FSF in /home/gd/gnu/lib.
-# Please send patches to the Autoconf mailing list <autoconf@gnu.org>.
+# Please send patches to <config-patches@gnu.org>.
#
# This script attempts to guess a canonical system name similar to
# config.sub. If it succeeds, it prints the system name on stdout, and
@@ -37,6 +38,46 @@
# (but try to keep the structure clean).
#
+me=`echo "$0" | sed -e 's,.*/,,'`
+
+usage="\
+Usage: $0 [OPTION]
+
+Output the configuration name of this system.
+
+Operation modes:
+ -h, --help print this help, then exit
+ -V, --version print version number, then exit"
+
+help="
+Try \`$me --help' for more information."
+
+# Parse command line
+while test $# -gt 0 ; do
+ case "$1" in
+ --version | --vers* | -V )
+ echo "$version" ; exit 0 ;;
+ --help | --h* | -h )
+ echo "$usage"; exit 0 ;;
+ -- ) # Stop option processing
+ shift; break ;;
+ - ) # Use stdin as input.
+ break ;;
+ -* )
+ exec >&2
+ echo "$me: invalid option $1"
+ echo "$help"
+ exit 1 ;;
+ * )
+ break ;;
+ esac
+done
+
+if test $# != 0; then
+ echo "$me: too many arguments$help" >&2
+ exit 1
+fi
+
# Use $HOST_CC if defined. $CC may point to a cross-compiler
if test x"$CC_FOR_BUILD" = x; then
if test x"$HOST_CC" != x; then
@@ -68,6 +109,43 @@ trap 'rm -f $dummy.c $dummy.o $dummy; exit 1' 1 2 15
# Note: order is significant - the case branches are not exclusive.
case "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" in
+ *:NetBSD:*:*)
+ # Netbsd (nbsd) targets should (where applicable) match one or
+ # more of the tuples: *-*-netbsdelf*, *-*-netbsdaout*,
+ # *-*-netbsdecoff* and *-*-netbsd*. For targets that recently
+ # switched to ELF, *-*-netbsd* would select the old
+ # object file format. This provides both forward
+ # compatibility and a consistent mechanism for selecting the
+ # object file format.
+ # Determine the machine/vendor (is the vendor relevant).
+ case "${UNAME_MACHINE}" in
+ amiga) machine=m68k-unknown ;;
+ arm32) machine=arm-unknown ;;
+ atari*) machine=m68k-atari ;;
+ sun3*) machine=m68k-sun ;;
+ mac68k) machine=m68k-apple ;;
+ macppc) machine=powerpc-apple ;;
+ hp3[0-9][05]) machine=m68k-hp ;;
+ ibmrt|romp-ibm) machine=romp-ibm ;;
+ *) machine=${UNAME_MACHINE}-unknown ;;
+ esac
+ # The Operating System including object format.
+ if echo __ELF__ | $CC_FOR_BUILD -E - 2>/dev/null \
+ | grep __ELF__ >/dev/null
+ then
+ # Once all utilities can be ECOFF (netbsdecoff) or a.out (netbsdaout).
+ # Return netbsd for either. FIX?
+ os=netbsd
+ else
+ os=netbsdelf
+ fi
+ # The OS release
+ release=`echo ${UNAME_RELEASE}|sed -e 's/[-_].*/\./'`
+ # Since CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM:
+ # contains redundant information, the shorter form:
+ # CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM is used.
+ echo "${machine}-${os}${release}"
+ exit 0 ;;
alpha:OSF1:*:*)
if test $UNAME_RELEASE = "V4.0"; then
UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $3}'`
@@ -77,41 +155,51 @@ case "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" in
# A Xn.n version is an unreleased experimental baselevel.
# 1.2 uses "1.2" for uname -r.
cat <<EOF >$dummy.s
+ .data
+\$Lformat:
+ .byte 37,100,45,37,120,10,0 # "%d-%x\n"
+
+ .text
.globl main
+ .align 4
.ent main
main:
- .frame \$30,0,\$26,0
- .prologue 0
- .long 0x47e03d80 # implver $0
- lda \$2,259
- .long 0x47e20c21 # amask $2,$1
- srl \$1,8,\$2
- sll \$2,2,\$2
- sll \$0,3,\$0
- addl \$1,\$0,\$0
- addl \$2,\$0,\$0
- ret \$31,(\$26),1
+ .frame \$30,16,\$26,0
+ ldgp \$29,0(\$27)
+ .prologue 1
+ .long 0x47e03d80 # implver \$0
+ lda \$2,-1
+ .long 0x47e20c21 # amask \$2,\$1
+ lda \$16,\$Lformat
+ mov \$0,\$17
+ not \$1,\$18
+ jsr \$26,printf
+ ldgp \$29,0(\$26)
+ mov 0,\$16
+ jsr \$26,exit
.end main
EOF
$CC_FOR_BUILD $dummy.s -o $dummy 2>/dev/null
if test "$?" = 0 ; then
- ./$dummy
- case "$?" in
- 7)
+ case `./$dummy` in
+ 0-0)
UNAME_MACHINE="alpha"
;;
- 15)
+ 1-0)
UNAME_MACHINE="alphaev5"
;;
- 14)
+ 1-1)
UNAME_MACHINE="alphaev56"
;;
- 10)
+ 1-101)
UNAME_MACHINE="alphapca56"
;;
- 16)
+ 2-303)
UNAME_MACHINE="alphaev6"
;;
+ 2-307)
+ UNAME_MACHINE="alphaev67"
+ ;;
esac
fi
rm -f $dummy.s $dummy
@@ -127,11 +215,8 @@ EOF
echo alpha-dec-winnt3.5
exit 0 ;;
Amiga*:UNIX_System_V:4.0:*)
- echo m68k-cbm-sysv4
+ echo m68k-unknown-sysv4
exit 0;;
- amiga:NetBSD:*:*)
- echo m68k-cbm-netbsd${UNAME_RELEASE}
- exit 0 ;;
amiga:OpenBSD:*:*)
echo m68k-unknown-openbsd${UNAME_RELEASE}
exit 0 ;;
@@ -156,12 +241,12 @@ EOF
wgrisc:OpenBSD:*:*)
echo mipsel-unknown-openbsd${UNAME_RELEASE}
exit 0 ;;
+ *:OS/390:*:*)
+ echo i370-ibm-openedition
+ exit 0 ;;
arm:RISC*:1.[012]*:*|arm:riscix:1.[012]*:*)
echo arm-acorn-riscix${UNAME_RELEASE}
exit 0;;
- arm32:NetBSD:*:*)
- echo arm-unknown-netbsd`echo ${UNAME_RELEASE}|sed -e 's/[-_].*/\./'`
- exit 0 ;;
SR2?01:HI-UX/MPP:*:*)
echo hppa1.1-hitachi-hiuxmpp
exit 0;;
@@ -218,15 +303,12 @@ EOF
aushp:SunOS:*:*)
echo sparc-auspex-sunos${UNAME_RELEASE}
exit 0 ;;
- atari*:NetBSD:*:*)
- echo m68k-atari-netbsd${UNAME_RELEASE}
- exit 0 ;;
atari*:OpenBSD:*:*)
echo m68k-unknown-openbsd${UNAME_RELEASE}
exit 0 ;;
# The situation for MiNT is a little confusing. The machine name
# can be virtually everything (everything which is not
- # "atarist" or "atariste" at least should have a processor
+ # "atarist" or "atariste" at least should have a processor
# > m68000). The system name ranges from "MiNT" over "FreeMiNT"
# to the lowercase version "mint" (or "freemint"). Finally
# the system name "TOS" denotes a system which is actually not
@@ -250,15 +332,9 @@ EOF
*:*MiNT:*:* | *:*mint:*:* | *:*TOS:*:*)
echo m68k-unknown-mint${UNAME_RELEASE}
exit 0 ;;
- sun3*:NetBSD:*:*)
- echo m68k-sun-netbsd${UNAME_RELEASE}
- exit 0 ;;
sun3*:OpenBSD:*:*)
echo m68k-unknown-openbsd${UNAME_RELEASE}
exit 0 ;;
- mac68k:NetBSD:*:*)
- echo m68k-apple-netbsd${UNAME_RELEASE}
- exit 0 ;;
mac68k:OpenBSD:*:*)
echo m68k-unknown-openbsd${UNAME_RELEASE}
exit 0 ;;
@@ -271,9 +347,6 @@ EOF
powerpc:machten:*:*)
echo powerpc-apple-machten${UNAME_RELEASE}
exit 0 ;;
- macppc:NetBSD:*:*)
- echo powerpc-apple-netbsd${UNAME_RELEASE}
- exit 0 ;;
RISC*:Mach:*:*)
echo mips-dec-mach_bsd4.3
exit 0 ;;
@@ -289,6 +362,7 @@ EOF
mips:*:*:UMIPS | mips:*:*:RISCos)
sed 's/^ //' << EOF >$dummy.c
#ifdef __cplusplus
+#include <stdio.h> /* for printf() prototype */
int main (int argc, char *argv[]) {
#else
int main (argc, argv) int argc; char *argv[]; {
@@ -328,15 +402,18 @@ EOF
AViiON:dgux:*:*)
# DG/UX returns AViiON for all architectures
UNAME_PROCESSOR=`/usr/bin/uname -p`
- if [ $UNAME_PROCESSOR = mc88100 -o $UNAME_PROCESSOR = mc88110 ] ; then
- if [ ${TARGET_BINARY_INTERFACE}x = m88kdguxelfx \
- -o ${TARGET_BINARY_INTERFACE}x = x ] ; then
+ if [ $UNAME_PROCESSOR = mc88100 ] || [ $UNAME_PROCESSOR = mc88110 ]
+ then
+ if [ ${TARGET_BINARY_INTERFACE}x = m88kdguxelfx ] || \
+ [ ${TARGET_BINARY_INTERFACE}x = x ]
+ then
echo m88k-dg-dgux${UNAME_RELEASE}
- else
+ else
echo m88k-dg-dguxbcs${UNAME_RELEASE}
+ fi
+ else
+ echo i586-dg-dgux${UNAME_RELEASE}
fi
- else echo i586-dg-dgux${UNAME_RELEASE}
- fi
exit 0 ;;
M88*:DolphinOS:*:*) # DolphinOS (SVR3)
echo m88k-dolphin-sysv3
@@ -402,7 +479,7 @@ EOF
ibmrt:4.4BSD:*|romp-ibm:BSD:*)
echo romp-ibm-bsd4.4
exit 0 ;;
- ibmrt:*BSD:*|romp-ibm:BSD:*) # covers RT/PC NetBSD and
+ ibmrt:*BSD:*|romp-ibm:BSD:*) # covers RT/PC BSD and
echo romp-ibm-bsd${UNAME_RELEASE} # 4.3 with uname added to
exit 0 ;; # report: romp-ibm BSD 4.3
*:BOSX:*:*)
@@ -423,6 +500,8 @@ EOF
9000/[34]?? ) HP_ARCH=m68k ;;
9000/[678][0-9][0-9])
sed 's/^ //' << EOF >$dummy.c
+
+ #define _HPUX_SOURCE
#include <stdlib.h>
#include <unistd.h>
@@ -453,7 +532,7 @@ EOF
exit (0);
}
EOF
- ($CC_FOR_BUILD $dummy.c -o $dummy 2>/dev/null ) && HP_ARCH=`./$dummy`
+ (CCOPTS= $CC_FOR_BUILD $dummy.c -o $dummy 2>/dev/null ) && HP_ARCH=`./$dummy`
rm -f $dummy.c $dummy
esac
HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'`
@@ -547,10 +626,13 @@ EOF
-e y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/
exit 0 ;;
CRAY*TS:*:*:*)
- echo t90-cray-unicos${UNAME_RELEASE}
+ echo t90-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
exit 0 ;;
CRAY*T3E:*:*:*)
- echo alpha-cray-unicosmk${UNAME_RELEASE}
+ echo alpha-cray-unicosmk${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
+ exit 0 ;;
+ CRAY*SV1:*:*:*)
+ echo sv1-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
exit 0 ;;
CRAY-2:*:*:*)
echo cray2-cray-unicos
@@ -563,13 +645,10 @@ EOF
F301:UNIX_System_V:*:*)
echo f301-fujitsu-uxpv`echo $UNAME_RELEASE | sed 's/ .*//'`
exit 0 ;;
- hp3[0-9][05]:NetBSD:*:*)
- echo m68k-hp-netbsd${UNAME_RELEASE}
- exit 0 ;;
hp300:OpenBSD:*:*)
echo m68k-unknown-openbsd${UNAME_RELEASE}
exit 0 ;;
- i?86:BSD/386:*:* | i?86:BSD/OS:*:*)
+ i?86:BSD/386:*:* | i?86:BSD/OS:*:* | *:Ascend\ Embedded/OS:*:*)
echo ${UNAME_MACHINE}-pc-bsdi${UNAME_RELEASE}
exit 0 ;;
sparc*:BSD/OS:*:*)
@@ -579,17 +658,8 @@ EOF
echo ${UNAME_MACHINE}-unknown-bsdi${UNAME_RELEASE}
exit 0 ;;
*:FreeBSD:*:*)
- if test -x /usr/bin/objformat; then
- if test "elf" = "`/usr/bin/objformat`"; then
- echo ${UNAME_MACHINE}-unknown-freebsdelf`echo ${UNAME_RELEASE}|sed -e 's/[-_].*//'`
- exit 0
- fi
- fi
echo ${UNAME_MACHINE}-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`
exit 0 ;;
- *:NetBSD:*:*)
- echo ${UNAME_MACHINE}-unknown-netbsd`echo ${UNAME_RELEASE}|sed -e 's/[-_].*/\./'`
- exit 0 ;;
*:OpenBSD:*:*)
echo ${UNAME_MACHINE}-unknown-openbsd`echo ${UNAME_RELEASE}|sed -e 's/[-_].*/\./'`
exit 0 ;;
@@ -599,6 +669,9 @@ EOF
i*:MINGW*:*)
echo ${UNAME_MACHINE}-pc-mingw32
exit 0 ;;
+ i*:PW*:*)
+ echo ${UNAME_MACHINE}-pc-pw32
+ exit 0 ;;
i*:Windows_NT*:* | Pentium*:Windows_NT*:*)
# How do we know it's Interix rather than the generic POSIX subsystem?
# It also conflicts with pre-2.0 versions of AT&T UWIN. Should we
@@ -617,13 +690,10 @@ EOF
*:GNU:*:*)
echo `echo ${UNAME_MACHINE}|sed -e 's,[-/].*$,,'`-unknown-gnu`echo ${UNAME_RELEASE}|sed -e 's,/.*$,,'`
exit 0 ;;
+ i*86:Minix:*:*)
+ echo ${UNAME_MACHINE}-pc-minix
+ exit 0 ;;
*:Linux:*:*)
- # uname on the ARM produces all sorts of strangeness, and we need to
- # filter it out.
- case "$UNAME_MACHINE" in
- armv*) UNAME_MACHINE=$UNAME_MACHINE ;;
- arm* | sa110*) UNAME_MACHINE="arm" ;;
- esac
# The BFD linker knows what the default object file format is, so
# first see if it will tell us. cd to the root directory to prevent
@@ -636,12 +706,41 @@ EOF
s/ .*//
p'`
case "$ld_supported_emulations" in
- *ia64) echo "${UNAME_MACHINE}-unknown-linux" ; exit 0 ;;
- i?86linux) echo "${UNAME_MACHINE}-pc-linux-gnuaout" ; exit 0 ;;
- i?86coff) echo "${UNAME_MACHINE}-pc-linux-gnucoff" ; exit 0 ;;
- sparclinux) echo "${UNAME_MACHINE}-unknown-linux-gnuaout" ; exit 0 ;;
- armlinux) echo "${UNAME_MACHINE}-unknown-linux-gnuaout" ; exit 0 ;;
- m68klinux) echo "${UNAME_MACHINE}-unknown-linux-gnuaout" ; exit 0 ;;
+ *ia64)
+ echo "${UNAME_MACHINE}-unknown-linux"
+ exit 0
+ ;;
+ i?86linux)
+ echo "${UNAME_MACHINE}-pc-linux-gnuaout"
+ exit 0
+ ;;
+ elf_i?86)
+ TENTATIVE="${UNAME_MACHINE}-pc-linux-gnu"
+ ;;
+ i?86coff)
+ echo "${UNAME_MACHINE}-pc-linux-gnucoff"
+ exit 0
+ ;;
+ sparclinux)
+ echo "${UNAME_MACHINE}-unknown-linux-gnuaout"
+ exit 0
+ ;;
+ armlinux)
+ echo "${UNAME_MACHINE}-unknown-linux-gnuaout"
+ exit 0
+ ;;
+ elf32arm*)
+ echo "${UNAME_MACHINE}-unknown-linux-gnuoldld"
+ exit 0
+ ;;
+ armelf_linux*)
+ echo "${UNAME_MACHINE}-unknown-linux-gnu"
+ exit 0
+ ;;
+ m68klinux)
+ echo "${UNAME_MACHINE}-unknown-linux-gnuaout"
+ exit 0
+ ;;
elf32ppc | elf32ppclinux)
# Determine Lib Version
cat >$dummy.c <<EOF
@@ -669,49 +768,65 @@ EOF
if test "$?" = 0 ; then
LIBC="libc1"
fi
- fi
+ fi
rm -f $dummy.c $dummy
- echo powerpc-unknown-linux-gnu${LIBC} ; exit 0 ;;
+ echo powerpc-unknown-linux-gnu${LIBC}
+ exit 0
+ ;;
+ shelf_linux)
+ echo "${UNAME_MACHINE}-unknown-linux-gnu"
+ exit 0
+ ;;
esac
if test "${UNAME_MACHINE}" = "alpha" ; then
- sed 's/^ //' <<EOF >$dummy.s
- .globl main
- .ent main
- main:
- .frame \$30,0,\$26,0
- .prologue 0
- .long 0x47e03d80 # implver $0
- lda \$2,259
- .long 0x47e20c21 # amask $2,$1
- srl \$1,8,\$2
- sll \$2,2,\$2
- sll \$0,3,\$0
- addl \$1,\$0,\$0
- addl \$2,\$0,\$0
- ret \$31,(\$26),1
- .end main
+ cat <<EOF >$dummy.s
+ .data
+ \$Lformat:
+ .byte 37,100,45,37,120,10,0 # "%d-%x\n"
+
+ .text
+ .globl main
+ .align 4
+ .ent main
+ main:
+ .frame \$30,16,\$26,0
+ ldgp \$29,0(\$27)
+ .prologue 1
+ .long 0x47e03d80 # implver \$0
+ lda \$2,-1
+ .long 0x47e20c21 # amask \$2,\$1
+ lda \$16,\$Lformat
+ mov \$0,\$17
+ not \$1,\$18
+ jsr \$26,printf
+ ldgp \$29,0(\$26)
+ mov 0,\$16
+ jsr \$26,exit
+ .end main
EOF
LIBC=""
$CC_FOR_BUILD $dummy.s -o $dummy 2>/dev/null
if test "$?" = 0 ; then
- ./$dummy
- case "$?" in
- 7)
+ case `./$dummy` in
+ 0-0)
UNAME_MACHINE="alpha"
;;
- 15)
+ 1-0)
UNAME_MACHINE="alphaev5"
;;
- 14)
+ 1-1)
UNAME_MACHINE="alphaev56"
;;
- 10)
+ 1-101)
UNAME_MACHINE="alphapca56"
;;
- 16)
+ 2-303)
UNAME_MACHINE="alphaev6"
;;
+ 2-307)
+ UNAME_MACHINE="alphaev67"
+ ;;
esac
objdump --private-headers $dummy | \
@@ -725,6 +840,7 @@ EOF
elif test "${UNAME_MACHINE}" = "mips" ; then
cat >$dummy.c <<EOF
#ifdef __cplusplus
+#include <stdio.h> /* for printf() prototype */
int main (int argc, char *argv[]) {
#else
int main (argc, argv) int argc; char *argv[]; {
@@ -740,6 +856,24 @@ EOF
EOF
$CC_FOR_BUILD $dummy.c -o $dummy 2>/dev/null && ./$dummy "${UNAME_MACHINE}" && rm $dummy.c $dummy && exit 0
rm -f $dummy.c $dummy
+ elif test "${UNAME_MACHINE}" = "s390"; then
+ echo s390-ibm-linux && exit 0
+ elif test "${UNAME_MACHINE}" = "x86_64"; then
+ echo x86_64-unknown-linux-gnu && exit 0
+ elif test "${UNAME_MACHINE}" = "parisc" -o "${UNAME_MACHINE}" = "hppa"; then
+ # Look for CPU level
+ case `grep '^cpu[^a-z]*:' /proc/cpuinfo 2>/dev/null | cut -d' ' -f2` in
+ PA7*)
+ echo hppa1.1-unknown-linux-gnu
+ ;;
+ PA8*)
+ echo hppa2.0-unknown-linux-gnu
+ ;;
+ *)
+ echo hppa-unknown-linux-gnu
+ ;;
+ esac
+ exit 0;
else
# Either a pre-BFD a.out linker (linux-gnuoldld)
# or one that does not give us useful --help.
@@ -761,6 +895,7 @@ EOF
cat >$dummy.c <<EOF
#include <features.h>
#ifdef __cplusplus
+#include <stdio.h> /* for printf() prototype */
int main (int argc, char *argv[]) {
#else
int main (argc, argv) int argc; char *argv[]; {
@@ -783,6 +918,7 @@ EOF
EOF
$CC_FOR_BUILD $dummy.c -o $dummy 2>/dev/null && ./$dummy "${UNAME_MACHINE}" && rm $dummy.c $dummy && exit 0
rm -f $dummy.c $dummy
+ test x"${TENTATIVE}" != x && echo "${TENTATIVE}" && exit 0
fi ;;
# ptx 4.0 does uname -s correctly, with DYNIX/ptx in there. earlier versions
# are messed up and put the nodename in both sysname and nodename.
@@ -798,19 +934,21 @@ EOF
echo ${UNAME_MACHINE}-pc-sysv4.2uw${UNAME_VERSION}
exit 0 ;;
i?86:*:4.*:* | i?86:SYSTEM_V:4.*:*)
+ UNAME_REL=`echo ${UNAME_RELEASE} | sed 's/\/MP$//'`
if grep Novell /usr/include/link.h >/dev/null 2>/dev/null; then
- echo ${UNAME_MACHINE}-univel-sysv${UNAME_RELEASE}
+ echo ${UNAME_MACHINE}-univel-sysv${UNAME_REL}
else
- echo ${UNAME_MACHINE}-pc-sysv${UNAME_RELEASE}
+ echo ${UNAME_MACHINE}-pc-sysv${UNAME_REL}
fi
exit 0 ;;
i?86:*:5:7*)
- UNAME_REL=`(/bin/uname -X|egrep Release|sed -e 's/.*= //')`
- (/bin/uname -X|egrep i80486 >/dev/null) && UNAME_MACHINE=i486
- (/bin/uname -X|egrep '^Machine.*Pentium' >/dev/null) && UNAME_MACHINE=i586
- (/bin/uname -X|egrep '^Machine.*Pent.*II' >/dev/null) && UNAME_MACHINE=i686
- (/bin/uname -X|egrep '^Machine.*Pentium Pro' >/dev/null) && UNAME_MACHINE=i585
- echo ${UNAME_MACHINE}-${UNAME_SYSTEM}${UNAME_VERSION}-sysv${UNAME_RELEASE}
+ # Fixed at (any) Pentium or better
+ UNAME_MACHINE=i586
+ if [ ${UNAME_SYSTEM} = "UnixWare" ] ; then
+ echo ${UNAME_MACHINE}-sco-sysv${UNAME_RELEASE}uw${UNAME_VERSION}
+ else
+ echo ${UNAME_MACHINE}-pc-sysv${UNAME_RELEASE}
+ fi
exit 0 ;;
i?86:*:3.2:*)
if test -f /usr/options/cb.name; then
@@ -830,7 +968,11 @@ EOF
echo ${UNAME_MACHINE}-pc-sysv32
fi
exit 0 ;;
+ i?86:*DOS:*:*)
+ echo ${UNAME_MACHINE}-pc-msdosdjgpp
+ exit 0 ;;
pc:*:*:*)
+ # Left here for compatibility:
# uname -m prints for DJGPP always 'pc', but it prints nothing about
# the processor, so we play safe by assuming i386.
echo i386-pc-msdosdjgpp
@@ -913,7 +1055,7 @@ EOF
mc68*:A/UX:*:*)
echo m68k-apple-aux${UNAME_RELEASE}
exit 0 ;;
- news*:NEWS-OS:*:6*)
+ news*:NEWS-OS:6*:*)
echo mips-sony-newsos6
exit 0 ;;
R[34]000:*System_V*:*:* | R4000:UNIX_SYSV:*:* | R*000:UNIX_SV:*:*)
@@ -944,6 +1086,38 @@ EOF
*:Rhapsody:*:*)
echo ${UNAME_MACHINE}-apple-rhapsody${UNAME_RELEASE}
exit 0 ;;
+ *:Darwin:*:*)
+ echo `uname -p`-apple-darwin${UNAME_RELEASE}
+ exit 0 ;;
+ *:procnto*:*:* | *:QNX:[0123456789]*:*)
+ if test "${UNAME_MACHINE}" = "x86pc"; then
+ UNAME_MACHINE=pc
+ fi
+ echo `uname -p`-${UNAME_MACHINE}-nto-qnx
+ exit 0 ;;
+ *:QNX:*:4*)
+ echo i386-pc-qnx
+ exit 0 ;;
+ NSR-[KW]:NONSTOP_KERNEL:*:*)
+ echo nsr-tandem-nsk${UNAME_RELEASE}
+ exit 0 ;;
+ BS2000:POSIX*:*:*)
+ echo bs2000-siemens-sysv
+ exit 0 ;;
+ DS/*:UNIX_System_V:*:*)
+ echo ${UNAME_MACHINE}-${UNAME_SYSTEM}-${UNAME_RELEASE}
+ exit 0 ;;
+ *:Plan9:*:*)
+ # "uname -m" is not consistent, so use $cputype instead. 386
+ # is converted to i386 for consistency with other x86
+ # operating systems.
+ if test "$cputype" = "386"; then
+ UNAME_MACHINE=i386
+ else
+ UNAME_MACHINE="$cputype"
+ fi
+ echo ${UNAME_MACHINE}-unknown-plan9
+ exit 0 ;;
esac
#echo '(No uname command or uname output not recognized.)' 1>&2
@@ -1083,6 +1257,47 @@ then
esac
fi
-#echo '(Unable to guess system type)' 1>&2
+cat >&2 <<EOF
+$0: unable to guess system type
+
+The $version version of this script cannot recognize your system type.
+Please download the most up to date version of the config scripts:
+
+ ftp://ftp.gnu.org/pub/gnu/config/
+
+If the version you run ($0) is already up to date, please
+send the following data and any information you think might be
+pertinent to <config-patches@gnu.org> in order to provide the needed
+information to handle your system.
+
+config.guess version = $version
+
+uname -m = `(uname -m) 2>/dev/null || echo unknown`
+uname -r = `(uname -r) 2>/dev/null || echo unknown`
+uname -s = `(uname -s) 2>/dev/null || echo unknown`
+uname -v = `(uname -v) 2>/dev/null || echo unknown`
+
+/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null`
+/bin/uname -X = `(/bin/uname -X) 2>/dev/null`
+
+hostinfo = `(hostinfo) 2>/dev/null`
+/bin/universe = `(/bin/universe) 2>/dev/null`
+/usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null`
+/bin/arch = `(/bin/arch) 2>/dev/null`
+/usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null`
+/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null`
+
+UNAME_MACHINE = ${UNAME_MACHINE}
+UNAME_RELEASE = ${UNAME_RELEASE}
+UNAME_SYSTEM = ${UNAME_SYSTEM}
+UNAME_VERSION = ${UNAME_VERSION}
+EOF
exit 1
+
+# Local variables:
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "version='"
+# time-stamp-format: "%:y-%02m-%02d"
+# time-stamp-end: "'"
+# End:
diff --git a/buildlib/config.h.in b/buildlib/config.h.in
index 6606a84d4..961dc4261 100644
--- a/buildlib/config.h.in
+++ b/buildlib/config.h.in
@@ -16,15 +16,6 @@
/* The number of bytes in a unsigned short. */
#undef SIZEOF_SHORT
-/* Define if we have libgpm. */
-#undef HAVE_LIBGPM
-
-/* Define if we have the SLang library from Davis. */
-#undef HAVE_LIBSLANG
-
-/* Define if we have the X11 windowing system. */
-#undef HAVE_X11
-
/* These two are used by the statvfs shim for glibc2.0 and bsd */
/* Define if we have sys/vfs.h */
#undef HAVE_VFS_H
@@ -38,8 +29,11 @@
/* If there is no socklen_t, define this for the netdb shim */
#undef NEED_SOCKLEN_T_DEFINE
-/* Define the architecture name string */
-#undef ARCHITECTURE
+/* Define the cpu name string */
+#undef COMMON_CPU
+
+/* Define the os name string */
+#undef COMMON_OS
/* The version number string */
#undef VERSION
diff --git a/buildlib/config.sub b/buildlib/config.sub
index e4944414b..4dfe12db0 100755
--- a/buildlib/config.sub
+++ b/buildlib/config.sub
@@ -1,6 +1,10 @@
#! /bin/sh
# Configuration validation subroutine script, version 1.1.
-# Copyright (C) 1991, 92-97, 1998, 1999 Free Software Foundation, Inc.
+# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
+# Free Software Foundation, Inc.
+
+version='2000-09-05'
+
# This file is (in principle) common to ALL GNU software.
# The presence of a machine in this file suggests that SOME GNU software
# can handle that machine. It does not imply ALL GNU software can.
@@ -25,6 +29,8 @@
# configuration script generated by Autoconf, you may include it under
# the same distribution terms that you use for the rest of that program.
+# Please send patches to <config-patches@gnu.org>.
+#
# Configuration subroutine to validate and canonicalize a configuration type.
# Supply the specified configuration type as an argument.
# If it is invalid, we print an error message on stderr and exit with code 1.
@@ -45,30 +51,61 @@
# CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM
# It is wrong to echo any other type of specification.
-if [ x$1 = x ]
-then
- echo Configuration name missing. 1>&2
- echo "Usage: $0 CPU-MFR-OPSYS" 1>&2
- echo "or $0 ALIAS" 1>&2
- echo where ALIAS is a recognized configuration type. 1>&2
- exit 1
-fi
+me=`echo "$0" | sed -e 's,.*/,,'`
-# First pass through any local machine types.
-case $1 in
- *local*)
- echo $1
- exit 0
- ;;
- *)
- ;;
+usage="\
+Usage: $0 [OPTION] CPU-MFR-OPSYS
+ $0 [OPTION] ALIAS
+
+Canonicalize a configuration name.
+
+Operation modes:
+ -h, --help print this help, then exit
+ -V, --version print version number, then exit"
+
+help="
+Try \`$me --help' for more information."
+
+# Parse command line
+while test $# -gt 0 ; do
+ case "$1" in
+ --version | --vers* | -V )
+ echo "$version" ; exit 0 ;;
+ --help | --h* | -h )
+ echo "$usage"; exit 0 ;;
+ -- ) # Stop option processing
+ shift; break ;;
+ - ) # Use stdin as input.
+ break ;;
+ -* )
+ exec >&2
+ echo "$me: invalid option $1"
+ echo "$help"
+ exit 1 ;;
+
+ *local*)
+ # First pass through any local machine types.
+ echo $1
+ exit 0;;
+
+ * )
+ break ;;
+ esac
+done
+
+case $# in
+ 0) echo "$me: missing argument$help" >&2
+ exit 1;;
+ 1) ;;
+ *) echo "$me: too many arguments$help" >&2
+ exit 1;;
esac
# Separate what the user gave into CPU-COMPANY and OS or KERNEL-OS (if any).
# Here we must recognize all the valid KERNEL-OS combinations.
maybe_os=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\2/'`
case $maybe_os in
- linux-gnu*)
+ nto-qnx* | linux-gnu*)
os=-$maybe_os
basic_machine=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'`
;;
@@ -94,7 +131,7 @@ case $os in
-convergent* | -ncr* | -news | -32* | -3600* | -3100* | -hitachi* |\
-c[123]* | -convex* | -sun | -crds | -omron* | -dg | -ultra | -tti* | \
-harris | -dolphin | -highlevel | -gould | -cbm | -ns | -masscomp | \
- -apple)
+ -apple | -axis)
os=
basic_machine=$1
;;
@@ -105,7 +142,7 @@ case $os in
-scout)
;;
-wrs)
- os=vxworks
+ os=-vxworks
basic_machine=$1
;;
-hiux*)
@@ -156,6 +193,10 @@ case $os in
-psos*)
os=-psos
;;
+ -mint | -mint[0-9]*)
+ basic_machine=m68k-atari
+ os=-mint
+ ;;
esac
# Decode aliases for certain CPU-COMPANY combinations.
@@ -163,26 +204,36 @@ case $basic_machine in
# Recognize the basic CPU types without company name.
# Some are omitted here because they have special meanings below.
tahoe | i860 | ia64 | m32r | m68k | m68000 | m88k | ns32k | arc | arm \
- | arme[lb] | pyramid | mn10200 | mn10300 | tron | a29k \
+ | arme[lb] | armv* | pyramid | mn10200 | mn10300 | tron | a29k \
| 580 | i960 | h8300 \
+ | x86 | ppcbe | mipsbe | mipsle | shbe | shle | armbe | armle \
| hppa | hppa1.0 | hppa1.1 | hppa2.0 | hppa2.0w | hppa2.0n \
- | alpha | alphaev[4-7] | alphaev56 | alphapca5[67] \
- | we32k | ns16k | clipper | i370 | sh | powerpc | powerpcle \
+ | hppa64 \
+ | parisc | parisc1.1 | parisc2.0 | parisc64 \
+ | alpha | alphaev[4-8] | alphaev56 | alphapca5[67] \
+ | alphaev6[78] \
+ | we32k | ns16k | clipper | i370 | sh | sh[34] \
+ | powerpc | powerpcle \
| 1750a | dsp16xx | pdp11 | mips16 | mips64 | mipsel | mips64el \
| mips64orion | mips64orionel | mipstx39 | mipstx39el \
| mips64vr4300 | mips64vr4300el | mips64vr4100 | mips64vr4100el \
| mips64vr5000 | miprs64vr5000el | mcore \
| sparc | sparclet | sparclite | sparc64 | sparcv9 | v850 | c4x \
- | thumb | d10v)
+ | thumb | d10v | d30v | fr30 | avr)
basic_machine=$basic_machine-unknown
;;
- m88110 | m680[12346]0 | m683?2 | m68360 | m5200 | z8k | v70 | h8500 | w65)
+ m6811 | m68hc11 | m6812 | m68hc12)
+ # Motorola 68HC11/12.
+ basic_machine=$basic_machine-unknown
+ os=-none
+ ;;
+ m88110 | m680[12346]0 | m683?2 | m68360 | m5200 | z8k | v70 | h8500 | w65 | pj | pjl)
;;
# We use `pc' rather than `unknown'
# because (1) that's what they normally are, and
# (2) the word "unknown" tends to confuse beginning users.
- i[34567]86)
+ i[234567]86 | x86_64)
basic_machine=$basic_machine-pc
;;
# Object if more than one company name word.
@@ -192,13 +243,17 @@ case $basic_machine in
;;
# Recognize the basic CPU types with company name.
# FIXME: clean up the formatting here.
- vax-* | tahoe-* | i[34567]86-* | i860-* | ia64-* | m32r-* | m68k-* | m68000-* \
+ vax-* | tahoe-* | i[234567]86-* | i860-* | ia64-* | m32r-* | m68k-* | m68000-* \
| m88k-* | sparc-* | ns32k-* | fx80-* | arc-* | arm-* | c[123]* \
| mips-* | pyramid-* | tron-* | a29k-* | romp-* | rs6000-* \
| power-* | none-* | 580-* | cray2-* | h8300-* | h8500-* | i960-* \
| xmp-* | ymp-* \
- | hppa-* | hppa1.0-* | hppa1.1-* | hppa2.0-* | hppa2.0w-* | hppa2.0n-* \
- | alpha-* | alphaev[4-7]-* | alphaev56-* | alphapca5[67]-* \
+ | x86-* | ppcbe-* | mipsbe-* | mipsle-* | shbe-* | shle-* | armbe-* | armle-* \
+ | hppa-* | hppa1.0-* | hppa1.1-* | hppa2.0-* | hppa2.0w-* \
+ | hppa2.0n-* | hppa64-* \
+ | parisc-* | parisc1.1-* | parisc2.0-* | parisc64-* \
+ | alpha-* | alphaev[4-8]-* | alphaev56-* | alphapca5[67]-* \
+ | alphaev6[78]-* \
| we32k-* | cydra-* | ns16k-* | pn-* | np1-* | xps100-* \
| clipper-* | orion-* \
| sparclite-* | pdp11-* | sh-* | powerpc-* | powerpcle-* \
@@ -206,9 +261,10 @@ case $basic_machine in
| mips64el-* | mips64orion-* | mips64orionel-* \
| mips64vr4100-* | mips64vr4100el-* | mips64vr4300-* | mips64vr4300el-* \
| mipstx39-* | mipstx39el-* | mcore-* \
- | f301-* | armv*-* | t3e-* \
+ | f301-* | armv*-* | s390-* | sv1-* | t3e-* \
| m88110-* | m680[01234]0-* | m683?2-* | m68360-* | z8k-* | d10v-* \
- | thumb-* | v850-* | d30v-* | tic30-* | c30-* )
+ | thumb-* | v850-* | d30v-* | tic30-* | c30-* | fr30-* \
+ | bs2000-* | tic54x-* | c54x-* | x86_64-*)
;;
# Recognize the various machine names and aliases which stand
# for a CPU type and a company and sometimes even an OS.
@@ -245,14 +301,14 @@ case $basic_machine in
os=-sysv
;;
amiga | amiga-*)
- basic_machine=m68k-cbm
+ basic_machine=m68k-unknown
;;
amigaos | amigados)
- basic_machine=m68k-cbm
+ basic_machine=m68k-unknown
os=-amigaos
;;
amigaunix | amix)
- basic_machine=m68k-cbm
+ basic_machine=m68k-unknown
os=-sysv4
;;
apollo68)
@@ -306,6 +362,9 @@ case $basic_machine in
crds | unos)
basic_machine=m68k-crds
;;
+ cris | cris-* | etrax*)
+ basic_machine=cris-axis
+ ;;
da30 | da30-*)
basic_machine=m68k-da30
;;
@@ -426,7 +485,6 @@ case $basic_machine in
;;
i370-ibm* | ibm*)
basic_machine=i370-ibm
- os=-mvs
;;
# I'm not sure what "Sysv32" means. Should this be sysv3.2?
i[34567]86v32)
@@ -461,6 +519,10 @@ case $basic_machine in
basic_machine=i386-unknown
os=-mingw32
;;
+ i[34567]86-pw32 | pw32)
+ basic_machine=i586-unknown
+ os=-pw32
+ ;;
iris | iris4d)
basic_machine=mips-sgi
case $os in
@@ -489,7 +551,7 @@ case $basic_machine in
miniframe)
basic_machine=m68000-convergent
;;
- *mint | *MiNT)
+ *mint | -mint[0-9]* | *MiNT | *MiNT[0-9]*)
basic_machine=m68k-atari
os=-mint
;;
@@ -507,6 +569,10 @@ case $basic_machine in
mips3*)
basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'`-unknown
;;
+ mmix*)
+ basic_machine=mmix-knuth
+ os=-mmixware
+ ;;
monitor)
basic_machine=m68k-rom68k
os=-coff
@@ -515,6 +581,10 @@ case $basic_machine in
basic_machine=i386-unknown
os=-msdos
;;
+ mvs)
+ basic_machine=i370-ibm
+ os=-mvs
+ ;;
ncr3000)
basic_machine=i486-ncr
os=-sysv4
@@ -524,7 +594,7 @@ case $basic_machine in
os=-netbsd
;;
netwinder)
- basic_machine=armv4l-corel
+ basic_machine=armv4l-rebel
os=-linux
;;
news | news700 | news800 | news900)
@@ -575,6 +645,9 @@ case $basic_machine in
np1)
basic_machine=np1-gould
;;
+ nsr-tandem)
+ basic_machine=nsr-tandem
+ ;;
op50n-* | op60c-*)
basic_machine=hppa1.1-oki
os=-proelf
@@ -607,7 +680,7 @@ case $basic_machine in
pentium | p5 | k5 | k6 | nexen)
basic_machine=i586-pc
;;
- pentiumpro | p6 | 6x86)
+ pentiumpro | p6 | 6x86 | athlon)
basic_machine=i686-pc
;;
pentiumii | pentium2)
@@ -616,7 +689,7 @@ case $basic_machine in
pentium-* | p5-* | k5-* | k6-* | nexen-*)
basic_machine=i586-`echo $basic_machine | sed 's/^[^-]*-//'`
;;
- pentiumpro-* | p6-* | 6x86-*)
+ pentiumpro-* | p6-* | 6x86-* | athlon-*)
basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'`
;;
pentiumii-* | pentium2-*)
@@ -719,6 +792,10 @@ case $basic_machine in
sun386 | sun386i | roadrunner)
basic_machine=i386-sun
;;
+ sv1)
+ basic_machine=sv1-cray
+ os=-unicos
+ ;;
symmetry)
basic_machine=i386-sequent
os=-dynix
@@ -727,6 +804,10 @@ case $basic_machine in
basic_machine=t3e-cray
os=-unicos
;;
+ tic54x | c54x*)
+ basic_machine=tic54x-unknown
+ os=-coff
+ ;;
tx39)
basic_machine=mipstx39-unknown
;;
@@ -828,6 +909,9 @@ case $basic_machine in
we32k)
basic_machine=we32k-att
;;
+ sh3 | sh4)
+ basic_machine=sh-unknown
+ ;;
sparc | sparcv9)
basic_machine=sparc-sun
;;
@@ -908,12 +992,25 @@ case $os in
| -udi* | -eabi* | -lites* | -ieee* | -go32* | -aux* \
| -cygwin* | -pe* | -psos* | -moss* | -proelf* | -rtems* \
| -mingw32* | -linux-gnu* | -uxpv* | -beos* | -mpeix* | -udk* \
- | -interix* | -uwin* | -rhapsody* | -openstep* | -oskit*)
+ | -interix* | -uwin* | -rhapsody* | -darwin* | -opened* \
+ | -openstep* | -oskit* | -conix* | -pw32*)
# Remember, each alternative MUST END IN *, to match a version number.
;;
+ -qnx*)
+ case $basic_machine in
+ x86-* | i[34567]86-*)
+ ;;
+ *)
+ os=-nto$os
+ ;;
+ esac
+ ;;
+ -nto*)
+ os=-nto-qnx
+ ;;
-sim | -es1800* | -hms* | -xray | -os68k* | -none* | -v88r* \
| -windows* | -osx | -abug | -netware* | -os9* | -beos* \
- | -macos* | -mpw* | -magic* | -mon960* | -lnews*)
+ | -macos* | -mpw* | -magic* | -mmixware* | -mon960* | -lnews*)
;;
-mac*)
os=`echo $os | sed -e 's|mac|macos|'`
@@ -927,6 +1024,12 @@ case $os in
-sunos6*)
os=`echo $os | sed -e 's|sunos6|solaris3|'`
;;
+ -opened*)
+ os=-openedition
+ ;;
+ -wince*)
+ os=-wince
+ ;;
-osfrose*)
os=-osfrose
;;
@@ -951,6 +1054,9 @@ case $os in
-ns2 )
os=-nextstep2
;;
+ -nsk)
+ os=-nsk
+ ;;
# Preserve the version number of sinix5.
-sinix5.*)
os=`echo $os | sed -e 's|sinix|sysv|'`
@@ -1013,7 +1119,7 @@ case $basic_machine in
*-acorn)
os=-riscix1.2
;;
- arm*-corel)
+ arm*-rebel)
os=-linux
;;
arm*-semi)
@@ -1187,7 +1293,7 @@ case $basic_machine in
-genix*)
vendor=ns
;;
- -mvs*)
+ -mvs* | -opened*)
vendor=ibm
;;
-ptx*)
@@ -1214,3 +1320,11 @@ case $basic_machine in
esac
echo $basic_machine$os
+exit 0
+
+# Local variables:
+# eval: (add-hook 'write-file-hooks 'time-stamp)
+# time-stamp-start: "version='"
+# time-stamp-format: "%:y-%02m-%02d"
+# time-stamp-end: "'"
+# End:
diff --git a/buildlib/configure.mak b/buildlib/configure.mak
index d1e208d05..957568789 100644
--- a/buildlib/configure.mak
+++ b/buildlib/configure.mak
@@ -13,7 +13,7 @@
BUILDDIR=build
.PHONY: startup
-startup: configure $(BUILDDIR)/config.status $(addprefix $(BUILDDIR)/,$(CONVERTED))
+startup: configure $(BUILDDIR)/config.status $(addprefix $(BUILDDIR)/,$(CONVERTED))
configure: aclocal.m4 configure.in
autoconf
diff --git a/buildlib/copy.mak b/buildlib/copy.mak
index 973c4853b..892b74bc4 100644
--- a/buildlib/copy.mak
+++ b/buildlib/copy.mak
@@ -17,6 +17,8 @@ $(LOCAL)-LIST := $(addprefix $(TO)/,$(SOURCE))
doc: $($(LOCAL)-LIST)
veryclean: veryclean/$(LOCAL)
+MKDIRS += $(dir $($(LOCAL)-LIST))
+
$($(LOCAL)-LIST) : $(TO)/% : %
echo Installing $< to $(@D)
cp $< $(@D)
diff --git a/buildlib/debiandoc.mak b/buildlib/debiandoc.mak
index 5e08bda6c..0c20dc911 100644
--- a/buildlib/debiandoc.mak
+++ b/buildlib/debiandoc.mak
@@ -27,7 +27,7 @@ vpath %.sgml $(SUBDIRS)
$(DOC)/%.html: %.sgml
echo Creating html for $< to $@
-rm -rf $@
- (HERE=`pwd`; cd $(@D) && debiandoc2html $$HERE/$<)
+ (HERE=`pwd`; cd $(@D) && $(DEBIANDOC_HTML) $$HERE/$<)
# Clean rule
.PHONY: veryclean/html/$(LOCAL)
@@ -48,7 +48,7 @@ veryclean: veryclean/text/$(LOCAL)
vpath %.sgml $(SUBDIRS)
$(DOC)/%.text: %.sgml
echo Creating text for $< to $@
- debiandoc2text -O $< > $@
+ $(DEBIANDOC_TEXT) -O $< > $@
# Clean rule
.PHONY: veryclean/text/$(LOCAL)
diff --git a/buildlib/defaults.mak b/buildlib/defaults.mak
index 6e504390c..35a3e71de 100644
--- a/buildlib/defaults.mak
+++ b/buildlib/defaults.mak
@@ -51,7 +51,7 @@ error-all/environment.mak:
error-out-and-die
else
error-all/environment.mak:
- echo Can't find the build directory in $(BUILD_POSSIBLE) -- use BUILD=
+ echo Can not find the build directory in $(BUILD_POSSIBLE) -- use BUILD=
error-out-and-die
endif
@@ -76,8 +76,11 @@ LIBRARY_H = $(BASE)/buildlib/library.mak
DEBIANDOC_H = $(BASE)/buildlib/debiandoc.mak
MANPAGE_H = $(BASE)/buildlib/manpage.mak
PROGRAM_H = $(BASE)/buildlib/program.mak
+PYTHON_H = $(BASE)/buildlib/python.mak
COPY_H = $(BASE)/buildlib/copy.mak
YODL_MANPAGE_H = $(BASE)/buildlib/yodl_manpage.mak
+SGML_MANPAGE_H = $(BASE)/buildlib/sgml_manpage.mak
+FAIL_H = $(BASE)/buildlib/fail.mak
include $(BUILD)/environment.mak
@@ -105,9 +108,12 @@ HEADER_TARGETDIRS+=
CPPFLAGS+= -I$(INCLUDE)
LDFLAGS+= -L$(LIB)
+# Directories to create
+MKDIRS := $(BIN)
+
# Phony rules. Other things hook these by appending to the dependency
# list
-.PHONY: headers library clean veryclean all binary program doc
+.PHONY: headers library clean veryclean all binary program doc dirs
.PHONY: maintainer-clean dist-clean distclean pristine sanity
all: binary doc
binary: library program
@@ -118,7 +124,9 @@ veryclean:
echo Very Clean done for $(SUBDIR)
clean:
echo Clean done for $(SUBDIR)
-
+dirs:
+ mkdir -p $(patsubst %/,%,$(sort $(MKDIRS)))
+
# Header file control. We want all published interface headers to go
# into the build directory from their source dirs. We set up some
# search paths here
diff --git a/buildlib/environment.mak.in b/buildlib/environment.mak.in
index e0f359c54..76d6cda32 100644
--- a/buildlib/environment.mak.in
+++ b/buildlib/environment.mak.in
@@ -5,14 +5,13 @@
CC = @CC@
CPPFLAGS+= @CPPFLAGS@ @DEFS@ -D_REENTRANT
CXX = @CXX@
-CXXFLAGS+= @CXXFLAGS@ @X_CFLAGS@
+CXXFLAGS+= @CXXFLAGS@
NUM_PROCS = @NUM_PROCS@
# Linker stuff
PICFLAGS+= -fPIC -DPIC
LFLAGS+= @LDFLAGS@
LEFLAGS+=
-XLIBS:= @X_LIBS@ @X_PRE_LIBS@ @X11LIB@ @X_EXTRA_LIBS@
SOCKETLIBS:= @SOCKETLIBS@
AR:=@AR@
RANLIB:=@RANLIB@
@@ -24,15 +23,19 @@ INLINEDEPFLAG = -MD
DEBIANDOC_HTML = @DEBIANDOC_HTML@
DEBIANDOC_TEXT = @DEBIANDOC_TEXT@
-# YODL for the man pages
-YODL_MAN = @YODL_MAN@
+# SGML for the man pages
+NSGMLS = @NSGMLS@
+SGMLSPL = @SGMLSPL@
+DOCBOOK2MAN := $(wildcard /usr/lib/perl5/sgmlspl-specs/docbook2man-spec.pl)
# Various library checks
-X11LIB = @X11LIB@
-GPMLIB = @GPMLIB@
-SLANGLIB = @SLANGLIB@
-XPMLIB = @XPMLIB@
PTHREADLIB = @PTHREADLIB@
+PYTHONLIB = @PYTHONLIB@
+PYTHONVER = @PYTHONVER@
+PYTHONPREFIX = @PYTHONPREFIX@
+PYTHONEXECPREFIX = @PYTHONEXECPREFIX@
+PYTHONINCLUDE = @PYTHONINCLUDE@
+DB2LIB = @DB2LIB@
# Shim Headerfile control
HAVE_C9X = @HAVE_C9X@
@@ -41,11 +44,11 @@ NEED_SOCKLEN_T_DEFINE = @NEED_SOCKLEN_T_DEFINE@
# Shared library things
HOST_OS = @host_os@
-ifeq ($(HOST_OS),linux-gnu)
+ifneq ($(words $(filter linux-gnu gnu%,$(HOST_OS))),0)
SONAME_MAGIC=-Wl,-soname -Wl,
LFLAGS_SO=
-else
- # Do not know how to creat shared libraries here.
+else
+ # Do not know how to create shared libraries here.
ONLYSTATICLIBS = yes
endif
diff --git a/buildlib/fail.mak b/buildlib/fail.mak
new file mode 100644
index 000000000..dfc194e1e
--- /dev/null
+++ b/buildlib/fail.mak
@@ -0,0 +1,20 @@
+# -*- make -*-
+
+# This prints a failure message but does not abort the make
+
+# Input
+# $(MESSAGE) - The message to show
+# $(PROGRAM) - The program/library/whatever.
+
+# See defaults.mak for information about LOCAL
+
+LOCAL := $(PROGRAM)
+$(LOCAL)-MSG := $(MESSAGE)
+
+# Install hooks
+program: $(PROGRAM)
+
+.PHONY: $(PROGRAM)
+$(PROGRAM) :
+ echo $($@-MSG)
+
diff --git a/buildlib/library.mak b/buildlib/library.mak
index caf15606b..229479fa2 100644
--- a/buildlib/library.mak
+++ b/buildlib/library.mak
@@ -29,6 +29,9 @@ library: $(LIB)/lib$(LIBRARY).so $(LIB)/lib$(LIBRARY).so.$(MAJOR)
clean: clean/$(LOCAL)
veryclean: veryclean/$(LOCAL)
+# Make Directories
+MKDIRS += $(OBJ) $(DEP) $(LIB) $(dir $($(LOCAL)-HEADERS))
+
# The clean rules
.PHONY: clean/$(LOCAL) veryclean/$(LOCAL)
clean/$(LOCAL):
diff --git a/buildlib/makefile.in b/buildlib/makefile.in
index fbad1ab07..756565f40 100644
--- a/buildlib/makefile.in
+++ b/buildlib/makefile.in
@@ -8,10 +8,6 @@ endif
include environment.mak
SRCDIR=@top_srcdir@
-DIRS:=./docs ./bin ./obj ./include ./scripts
-SUBDIRS:= $(DIRS) ./docs/examples ./bin/methods ./include/apt-pkg \
- ./include/deity ./obj/apt-pkg ./obj/deity ./obj/gui ./obj/cmdline \
- ./obj/test ./obj/methods ./obj/methods/ftp ./scripts/dselect
BUILD:=$(shell pwd)
export BUILD
@@ -27,14 +23,14 @@ maintainer-clean dist-clean pristine sanity distclean:
-rm -rf $(DIRS)
-rm -f config.cache config.log config.status environment.mak makefile
-# This makes any missing directories
+# Create the required directories and build the shims
+# The configure script fills in below, and then we do the right things..
+# This cannot go in the configure script since the directories have not yet
+# been created.. In any event I like the idea that you can change environment.mak
+# and run make dirs and have the shims updated.
.PHONY: dirs
-MISSING_DIRS:= $(filter-out $(wildcard $(SUBDIRS)),$(SUBDIRS))
-dirs:
- @rm -f include/sys
-ifneq ($(words $(MISSING_DIRS)),0)
- @mkdir $(MISSING_DIRS)
-endif
+dirs:
+ $(MAKE) -C $(SRCDIR) -f Makefile $@
ifeq ($(HAVE_C9X),yes)
@rm -f include/inttypes.h > /dev/null 2>&1
else
@@ -51,3 +47,5 @@ ifeq ($(NEED_SOCKLEN_T_DEFINE),yes)
else
@rm -f include/netdb.h > /dev/null 2>&1
endif
+ rm -f include/python
+ ln -sf $(PYTHONINCLUDE) include/python
diff --git a/buildlib/manpage.mak b/buildlib/manpage.mak
index cfa5fc1a2..1f2644c01 100644
--- a/buildlib/manpage.mak
+++ b/buildlib/manpage.mak
@@ -17,6 +17,8 @@ $(LOCAL)-LIST := $(addprefix $(DOC)/,$(SOURCE))
doc: $($(LOCAL)-LIST)
veryclean: veryclean/$(LOCAL)
+MKDIRS += $(DOC)
+
$($(LOCAL)-LIST) : $(DOC)/% : %
echo Installing man page $< to $(@D)
cp $< $(@D)
diff --git a/buildlib/ostable b/buildlib/ostable
new file mode 100644
index 000000000..433efea37
--- /dev/null
+++ b/buildlib/ostable
@@ -0,0 +1,19 @@
+# This file contains a table of known vendor-os strings, with
+# things to map them to. `configure' will take the output of the
+# autoconf canonical macros and look in here. This only deals with
+# OS names. The right side should be a common name, like the ones
+# the arch table generates.
+# The final bit to build the Debian Architecture is done in init.cc
+# The left side is a regex for awk, and the first match is used.
+
+# These are used by Debian
+[^-]*-linux-.* linux
+[^-]*-gnu[^-]* hurd
+
+# These are samples.
+hp-hpux[^-]* hp-ux
+sun-solaris[^-]* solaris
+[^-]*-openbsd[^-]* openbsd
+
+# Catch all
+.* unknown
diff --git a/buildlib/program.mak b/buildlib/program.mak
index 98bea9aa5..a89dc5ea1 100644
--- a/buildlib/program.mak
+++ b/buildlib/program.mak
@@ -24,6 +24,9 @@ program: $(BIN)/$(PROGRAM)
clean: clean/$(LOCAL)
veryclean: veryclean/$(LOCAL)
+# Make Directories
+MKDIRS += $(OBJ) $(DEP) $(BIN)
+
# The clean rules
.PHONY: clean/$(LOCAL) veryclean/$(LOCAL)
clean/$(LOCAL):
diff --git a/buildlib/python.mak b/buildlib/python.mak
new file mode 100644
index 000000000..02345c2d2
--- /dev/null
+++ b/buildlib/python.mak
@@ -0,0 +1,68 @@
+# -*- make -*-
+
+# This creates a python shared module.
+
+# Input
+# $(SOURCE) - The source code to use
+# $(MODULE) - The name of the module without module or .so
+
+# All output is written to .opic files in the build directory to
+# signify the PIC output.
+
+# See defaults.mak for information about LOCAL
+
+# Some local definitions
+LOCAL := $(MODULE)module.so
+$(LOCAL)-OBJS := $(addprefix $(OBJ)/,$(addsuffix .opic,$(notdir $(basename $(SOURCE)))))
+$(LOCAL)-DEP := $(addprefix $(DEP)/,$(addsuffix .opic.d,$(notdir $(basename $(SOURCE)))))
+$(LOCAL)-SLIBS := $(SLIBS)
+$(LOCAL)-MODULE := $(MODULE)
+
+# Install the command hooks
+library: $(LIB)/$(MODULE)module.so
+clean: clean/$(LOCAL)
+veryclean: veryclean/$(LOCAL)
+
+# Make Directories
+MKDIRS += $(OBJ) $(DEP) $(LIB)
+
+# The clean rules
+.PHONY: clean/$(LOCAL) veryclean/$(LOCAL)
+clean/$(LOCAL):
+ -rm -f $($(@F)-OBJS) $($(@F)-DEP)
+veryclean/$(LOCAL): clean/$(LOCAL)
+ -rm -f $($(@F)-HEADERS) $(LIB)/$($(@F)-MODULE)module.so*
+
+# The binary build rule.
+ifdef PYTHONLIB
+ifndef ONLYSTATICLIBS
+$(LIB)/$(MODULE)module.so: $($(LOCAL)-OBJS)
+ -rm -f $(LIB)/$($(@F)-MODULE)module.so* 2> /dev/null
+ echo Building shared Python module $@
+ $(CXX) $(CXXFLAGS) $(LDFLAGS) $(PICFLAGS) $(LFLAGS) $(LFLAGS_SO)\
+ -o $@ -shared \
+ $(filter %.opic,$^) \
+ $($(@F)-SLIBS) $(PYTHONLIB)
+else
+.PHONY: $(LIB)/$(MODULE)module.so
+$(LIB)/$(MODULE)module.so:
+ echo Don't know how to make a python module here, not building $@
+endif # ifndef ONLYSTATICLIBS
+else
+.PHONY: $(LIB)/$(MODULE)module.so
+$(LIB)/$(MODULE)module.so:
+ echo No python support, not building $@
+endif # ifdef PYTHONLIB
+
+# Compilation rules
+vpath %.cc $(SUBDIRS)
+$(OBJ)/%.opic: %.cc
+ echo Compiling $< to $@
+ $(CXX) -c $(INLINEDEPFLAG) $(CPPFLAGS) $(CXXFLAGS) $(PICFLAGS) -o $@ $<
+ $(DoDep)
+
+# Include the dependencies that are available
+The_DFiles = $(wildcard $($(LOCAL)-DEP))
+ifneq ($(words $(The_DFiles)),0)
+include $(The_DFiles)
+endif
diff --git a/buildlib/sgml_manpage.mak b/buildlib/sgml_manpage.mak
new file mode 100644
index 000000000..607ead373
--- /dev/null
+++ b/buildlib/sgml_manpage.mak
@@ -0,0 +1,49 @@
+# -*- make -*-
+
+# This handles man pages in DocBook SGML format. We convert to the respective
+# output in the source directory then copy over to the final dest. This
+# means the SGML tools are only needed if compiling from CVS
+
+# Input
+# $(SOURCE) - The documents to use, in the form foo.sect, ie apt-cache.8
+# the sgml files are called apt-cache.8.sgml
+
+# See defaults.mak for information about LOCAL
+
+# Some local definitions
+ifdef NSGMLS
+ifdef SGMLSPL
+ifdef DOCBOOK2MAN
+
+LOCAL := sgml-manpage-$(firstword $(SOURCE))
+$(LOCAL)-LIST := $(SOURCE)
+
+# Install generation hooks
+doc: $($(LOCAL)-LIST)
+veryclean: veryclean/$(LOCAL)
+
+$($(LOCAL)-LIST) :: % : %.sgml $(INCLUDES)
+ echo Creating man page $@
+ $(NSGMLS) $< | $(SGMLSPL) $(DOCBOOK2MAN)
+
+# Clean rule
+.PHONY: veryclean/$(LOCAL)
+veryclean/$(LOCAL):
+ -rm -rf $($(@F)-LIST)
+
+HAVE_SGML=yes
+endif
+endif
+endif
+
+INCLUDES :=
+
+ifndef HAVE_SGML
+# Strip from the source list any man pages we don't have compiled already
+SOURCE := $(wildcard $(SOURCE))
+endif
+
+# Chain to the manpage rule
+ifneq ($(words $(SOURCE)),0)
+include $(MANPAGE_H)
+endif
diff --git a/buildlib/sizetable b/buildlib/sizetable
index 3bd01298c..911180145 100644
--- a/buildlib/sizetable
+++ b/buildlib/sizetable
@@ -9,14 +9,14 @@
#
# This is used primarily for the MD5 algorithm.
# The format is:-
-# CPU ':' endian sizeof: char, int, short, long
-i386: little 1 4 2 4
-arm: little 1 4 2 4
-alpha: little 1 4 2 8
-mipsel: little 1 4 2 4
-sparc: big 1 4 2 4
-sparc64: big 1 4 2 8
-m68k: big 1 4 2 4
-powerpc: big 1 4 2 4
-mips: big 1 4 2 4
-hppa: big 1 4 2 4
+# CPU endian sizeof: char, int, short, long
+i386 little 1 4 2 4
+arm little 1 4 2 4
+alpha little 1 4 2 8
+mipsel little 1 4 2 4
+sparc big 1 4 2 4
+sparc64 big 1 4 2 8
+m68k big 1 4 2 4
+powerpc big 1 4 2 4
+mips big 1 4 2 4
+hppa big 1 4 2 4
diff --git a/buildlib/staticlibrary.mak b/buildlib/staticlibrary.mak
index 0835fe8a0..ce9259dc0 100644
--- a/buildlib/staticlibrary.mak
+++ b/buildlib/staticlibrary.mak
@@ -24,6 +24,9 @@ library: $($(LOCAL)-LIB)
clean: clean/$(LOCAL)
veryclean: veryclean/$(LOCAL)
+# Make Directories
+MKDIRS += $(OBJ) $(DEP) $(LIB) $(dir $($(LOCAL)-HEADERS))
+
# The clean rules
.PHONY: clean/$(LOCAL) veryclean/$(LOCAL)
clean/$(LOCAL):
diff --git a/cmdline/acqprogress.cc b/cmdline/acqprogress.cc
index e2ef83501..e7b6b9ab8 100644
--- a/cmdline/acqprogress.cc
+++ b/cmdline/acqprogress.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: acqprogress.cc,v 1.20 2000/05/12 04:03:27 jgg Exp $
+// $Id: acqprogress.cc,v 1.21 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Acquire Progress - Command line progress meter
@@ -14,6 +14,8 @@
#include <apt-pkg/strutl.h>
#include <apt-pkg/error.h>
+#include <apti18n.h>
+
#include <stdio.h>
#include <signal.h>
/*}}}*/
@@ -47,7 +49,7 @@ void AcqTextStatus::IMSHit(pkgAcquire::ItemDesc &Itm)
if (Quiet <= 0)
cout << '\r' << BlankLine << '\r';
- cout << "Hit " << Itm.Description;
+ cout << _("Hit ") << Itm.Description;
if (Itm.Owner->FileSize != 0)
cout << " [" << SizeToStr(Itm.Owner->FileSize) << "B]";
cout << endl;
@@ -71,7 +73,7 @@ void AcqTextStatus::Fetch(pkgAcquire::ItemDesc &Itm)
if (Quiet <= 0)
cout << '\r' << BlankLine << '\r';
- cout << "Get:" << Itm.Owner->ID << ' ' << Itm.Description;
+ cout << _("Get:") << Itm.Owner->ID << ' ' << Itm.Description;
if (Itm.Owner->FileSize != 0)
cout << " [" << SizeToStr(Itm.Owner->FileSize) << "B]";
cout << endl;
@@ -102,11 +104,11 @@ void AcqTextStatus::Fail(pkgAcquire::ItemDesc &Itm)
if (Itm.Owner->Status == pkgAcquire::Item::StatDone)
{
- cout << "Ign " << Itm.Description << endl;
+ cout << _("Ign ") << Itm.Description << endl;
}
else
{
- cout << "Err " << Itm.Description << endl;
+ cout << _("Err ") << Itm.Description << endl;
cout << " " << Itm.Owner->ErrorText << endl;
}
@@ -125,11 +127,12 @@ void AcqTextStatus::Stop()
if (Quiet <= 0)
cout << '\r' << BlankLine << '\r' << flush;
-
+
if (FetchedBytes != 0 && _error->PendingError() == false)
- cout << "Fetched " << SizeToStr(FetchedBytes) << "B in " <<
- TimeToStr(ElapsedTime) << " (" << SizeToStr(CurrentCPS) <<
- "B/s)" << endl;
+ ioprintf(cout,_("Fetched %sB in %s (%sB/s)\n"),
+ SizeToStr(FetchedBytes).c_str(),
+ TimeToStr(ElapsedTime).c_str(),
+ SizeToStr(CurrentCPS).c_str());
}
/*}}}*/
// AcqTextStatus::Pulse - Regular event pulse /*{{{*/
@@ -216,7 +219,7 @@ bool AcqTextStatus::Pulse(pkgAcquire *Owner)
// Show something..
if (Shown == false)
- snprintf(S,End-S," [Working]");
+ snprintf(S,End-S,_(" [Working]"));
/* Put in the ETA and cps meter, block off signals to prevent strangeness
during resizing */
@@ -240,7 +243,7 @@ bool AcqTextStatus::Pulse(pkgAcquire *Owner)
}
Buffer[ScreenWidth] = 0;
BlankLine[ScreenWidth] = 0;
- sigprocmask(SIG_UNBLOCK,&OldSigs,0);
+ sigprocmask(SIG_SETMASK,&OldSigs,0);
// Draw the current status
if (strlen(Buffer) == strlen(BlankLine))
@@ -261,9 +264,10 @@ bool AcqTextStatus::Pulse(pkgAcquire *Owner)
bool AcqTextStatus::MediaChange(string Media,string Drive)
{
if (Quiet <= 0)
- cout << '\r' << BlankLine << '\r';
- cout << "Media Change: Please insert the disc labeled '" << Media << "' in "\
- "the drive '" << Drive << "' and press enter" << endl;
+ cout << '\r' << BlankLine << '\r';
+ ioprintf(cout,_("Media Change: Please insert the disc labeled '%s' in "
+ "the drive '%s' and press enter\n"),
+ Media.c_str(),Drive.c_str());
char C = 0;
while (C != '\n' && C != '\r')
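
The pattern behind these hunks is worth spelling out: instead of streaming sentence fragments into cout, each message becomes one complete format string wrapped in _() and printed with ioprintf(), so translators see, and can reorder, the whole sentence. A condensed sketch of that pattern, reusing the helpers from apt-pkg/contrib/strutl.h that the file already includes; the function name here is only illustrative.

    // Sketch of the translatable-message style adopted in acqprogress.cc.
    #include <apt-pkg/strutl.h>   // ioprintf(), SizeToStr(), TimeToStr()
    #include <apti18n.h>          // _() marker (identity macro for now)
    #include <iostream>

    void ReportFetch(double Bytes,unsigned long Seconds,double CPS)
    {
       // One sentence per format string; the values are substituted after
       // translation, so word order is free to change per language.
       ioprintf(std::cout,_("Fetched %sB in %s (%sB/s)\n"),
                SizeToStr(Bytes).c_str(),
                TimeToStr(Seconds).c_str(),
                SizeToStr(CPS).c_str());
    }
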
diff --git a/cmdline/apt-cache.cc b/cmdline/apt-cache.cc
index 5928676de..e7b8637be 100644
--- a/cmdline/apt-cache.cc
+++ b/cmdline/apt-cache.cc
@@ -1,14 +1,12 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: apt-cache.cc,v 1.43 2000/05/12 04:00:59 jgg Exp $
+// $Id: apt-cache.cc,v 1.44 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
apt-cache - Manages the cache files
   apt-cache provides some functions for manipulating the cache files.
- It uses the command line interface common to all the APT tools. The
- only really usefull function right now is dumpavail which is used
- by the dselect method. Everything else is meant as a debug aide.
+ It uses the command line interface common to all the APT tools.
Returns 100 on failure, 0 on success.
@@ -17,7 +15,6 @@
// Include Files /*{{{*/
#include <apt-pkg/error.h>
#include <apt-pkg/pkgcachegen.h>
-#include <apt-pkg/deblistparser.h>
#include <apt-pkg/init.h>
#include <apt-pkg/progress.h>
#include <apt-pkg/sourcelist.h>
@@ -26,7 +23,13 @@
#include <apt-pkg/pkgrecords.h>
#include <apt-pkg/srcrecords.h>
#include <apt-pkg/version.h>
+#include <apt-pkg/policy.h>
+#include <apt-pkg/tagfile.h>
+#include <apt-pkg/algorithms.h>
+#include <apt-pkg/sptr.h>
+
#include <config.h>
+#include <apti18n.h>
#include <iostream.h>
#include <unistd.h>
@@ -37,6 +40,32 @@
pkgCache *GCache = 0;
+// LocalitySort - Sort a version list by package file locality /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+int LocalityCompare(const void *a, const void *b)
+{
+ pkgCache::VerFile *A = *(pkgCache::VerFile **)a;
+ pkgCache::VerFile *B = *(pkgCache::VerFile **)b;
+
+ if (A == 0 && B == 0)
+ return 0;
+ if (A == 0)
+ return 1;
+ if (B == 0)
+ return -1;
+
+ if (A->File == B->File)
+ return A->Offset - B->Offset;
+ return A->File - B->File;
+}
+
+void LocalitySort(pkgCache::VerFile **begin,
+ unsigned long Count,size_t Size)
+{
+ qsort(begin,Count,Size,LocalityCompare);
+}
+ /*}}}*/
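
LocalitySort() exists so that code which has collected a large set of version file records can read them back in the order they sit inside each Packages file, sweeping every file sequentially instead of seeking back and forth. A sketch of the intended call pattern, assuming an open cache; the gathering loop here is schematic, not code from this patch.

    // Gather one VerFile pointer per version, then sort by file locality.
    #include <apt-pkg/pkgcache.h>

    void VisitInFileOrder(pkgCache &Cache)
    {
       pkgCache::VerFile **List =
          new pkgCache::VerFile *[Cache.Head().VersionCount];
       unsigned long N = 0;
       for (pkgCache::PkgIterator P = Cache.PkgBegin(); P.end() == false; P++)
          for (pkgCache::VerIterator V = P.VersionList(); V.end() == false; V++)
             if (V.FileList().end() == false)
                List[N++] = V.FileList();
       LocalitySort(List,N,sizeof(*List));   // qsort on File, then Offset
       // ... process List[0..N-1] via ->File and ->Offset here ...
       delete [] List;
    }
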
// UnMet - Show unmet dependencies /*{{{*/
// ---------------------------------------------------------------------
/* */
@@ -57,9 +86,6 @@ bool UnMet(CommandLine &CmdL)
pkgCache::DepIterator End;
D.GlobOr(Start,End);
-/* cout << "s: Check " << Start.TargetPkg().Name() << ',' <<
- End.TargetPkg().Name() << endl;*/
-
// Skip conflicts and replaces
if (End->Type != pkgCache::Dep::PreDepends &&
End->Type != pkgCache::Dep::Depends &&
@@ -100,8 +126,8 @@ bool UnMet(CommandLine &CmdL)
// Oops, it failed..
if (Header == false)
- cout << "Package " << P.Name() << " version " <<
- V.VerStr() << " has an unmet dep:" << endl;
+ ioprintf(cout,_("Package %s version %s has an unmet dep:\n"),
+ P.Name(),V.VerStr());
Header = true;
// Print out the dep type
@@ -140,26 +166,32 @@ bool DumpPackage(CommandLine &CmdL)
pkgCache::PkgIterator Pkg = Cache.FindPkg(*I);
if (Pkg.end() == true)
{
- _error->Warning("Unable to locate package %s",*I);
+ _error->Warning(_("Unable to locate package %s"),*I);
continue;
}
cout << "Package: " << Pkg.Name() << endl;
- cout << "Versions: ";
+ cout << "Versions: " << endl;
for (pkgCache::VerIterator Cur = Pkg.VersionList(); Cur.end() != true; Cur++)
{
cout << Cur.VerStr();
for (pkgCache::VerFileIterator Vf = Cur.FileList(); Vf.end() == false; Vf++)
cout << "(" << Vf.File().FileName() << ")";
- cout << ',';
+ cout << endl;
}
cout << endl;
cout << "Reverse Depends: " << endl;
for (pkgCache::DepIterator D = Pkg.RevDependsList(); D.end() != true; D++)
- cout << " " << D.ParentPkg().Name() << ',' << D.TargetPkg().Name() << endl;
-
+ {
+ cout << " " << D.ParentPkg().Name() << ',' << D.TargetPkg().Name();
+ if (D->Version != 0)
+ cout << ' ' << D.TargetVer() << endl;
+ else
+ cout << endl;
+ }
+
cout << "Dependencies: " << endl;
for (pkgCache::VerIterator Cur = Pkg.VersionList(); Cur.end() != true; Cur++)
{
@@ -191,15 +223,15 @@ bool DumpPackage(CommandLine &CmdL)
bool Stats(CommandLine &Cmd)
{
pkgCache &Cache = *GCache;
- cout << "Total Package Names : " << Cache.Head().PackageCount << " (" <<
+ cout << _("Total Package Names : ") << Cache.Head().PackageCount << " (" <<
SizeToStr(Cache.Head().PackageCount*Cache.Head().PackageSz) << ')' << endl;
- pkgCache::PkgIterator I = Cache.PkgBegin();
-
+
int Normal = 0;
int Virtual = 0;
int NVirt = 0;
int DVirt = 0;
int Missing = 0;
+ pkgCache::PkgIterator I = Cache.PkgBegin();
for (;I.end() != true; I++)
{
if (I->VersionList != 0 && I->ProvidesList == 0)
@@ -231,20 +263,20 @@ bool Stats(CommandLine &Cmd)
continue;
}
}
- cout << " Normal Packages: " << Normal << endl;
- cout << " Pure Virtual Packages: " << Virtual << endl;
- cout << " Single Virtual Packages: " << DVirt << endl;
- cout << " Mixed Virtual Packages: " << NVirt << endl;
- cout << " Missing: " << Missing << endl;
+ cout << _(" Normal Packages: ") << Normal << endl;
+ cout << _(" Pure Virtual Packages: ") << Virtual << endl;
+ cout << _(" Single Virtual Packages: ") << DVirt << endl;
+ cout << _(" Mixed Virtual Packages: ") << NVirt << endl;
+ cout << _(" Missing: ") << Missing << endl;
- cout << "Total Distinct Versions: " << Cache.Head().VersionCount << " (" <<
+ cout << _("Total Distinct Versions: ") << Cache.Head().VersionCount << " (" <<
SizeToStr(Cache.Head().VersionCount*Cache.Head().VersionSz) << ')' << endl;
- cout << "Total Dependencies: " << Cache.Head().DependsCount << " (" <<
+ cout << _("Total Dependencies: ") << Cache.Head().DependsCount << " (" <<
SizeToStr(Cache.Head().DependsCount*Cache.Head().DependencySz) << ')' << endl;
- cout << "Total Ver/File relations: " << Cache.Head().VerFileCount << " (" <<
+ cout << _("Total Ver/File relations: ") << Cache.Head().VerFileCount << " (" <<
SizeToStr(Cache.Head().VerFileCount*Cache.Head().VerFileSz) << ')' << endl;
- cout << "Total Provides Mappings: " << Cache.Head().ProvidesCount << " (" <<
+ cout << _("Total Provides Mappings: ") << Cache.Head().ProvidesCount << " (" <<
SizeToStr(Cache.Head().ProvidesCount*Cache.Head().ProvidesSz) << ')' << endl;
// String list stats
@@ -254,14 +286,28 @@ bool Stats(CommandLine &Cmd)
I!= Cache.StringItemP; I = Cache.StringItemP + I->NextItem)
{
Count++;
- Size += strlen(Cache.StrP + I->String);
+ Size += strlen(Cache.StrP + I->String) + 1;
}
- cout << "Total Globbed Strings: " << Count << " (" << SizeToStr(Size) << ')' << endl;
-
+ cout << _("Total Globbed Strings: ") << Count << " (" << SizeToStr(Size) << ')' << endl;
+
+ unsigned long DepVerSize = 0;
+ for (pkgCache::PkgIterator P = Cache.PkgBegin(); P.end() == false; P++)
+ {
+ for (pkgCache::VerIterator V = P.VersionList(); V.end() == false; V++)
+ {
+ for (pkgCache::DepIterator D = V.DependsList(); D.end() == false; D++)
+ {
+ if (D->Version != 0)
+ DepVerSize += strlen(D.TargetVer()) + 1;
+ }
+ }
+ }
+ cout << _("Total Dependency Version space: ") << SizeToStr(DepVerSize) << endl;
+
unsigned long Slack = 0;
for (int I = 0; I != 7; I++)
Slack += Cache.Head().Pools[I].ItemSize*Cache.Head().Pools[I].Count;
- cout << "Total Slack space: " << SizeToStr(Slack) << endl;
+ cout << _("Total Slack space: ") << SizeToStr(Slack) << endl;
unsigned long Total = 0;
Total = Slack + Size + Cache.Head().DependsCount*Cache.Head().DependencySz +
@@ -269,39 +315,19 @@ bool Stats(CommandLine &Cmd)
Cache.Head().PackageCount*Cache.Head().PackageSz +
Cache.Head().VerFileCount*Cache.Head().VerFileSz +
Cache.Head().ProvidesCount*Cache.Head().ProvidesSz;
- cout << "Total Space Accounted for: " << SizeToStr(Total) << endl;
+ cout << _("Total Space Accounted for: ") << SizeToStr(Total) << endl;
return true;
}
/*}}}*/
-// Check - Check some things about the cache /*{{{*/
-// ---------------------------------------------------------------------
-/* Debug aide mostly */
-bool Check(CommandLine &Cmd)
-{
- pkgCache &Cache = *GCache;
- pkgCache::PkgIterator Pkg = Cache.PkgBegin();
- for (;Pkg.end() != true; Pkg++)
- {
- if (Pkg.Section() == 0 && Pkg->VersionList != 0)
- cout << "Bad section " << Pkg.Name() << endl;
-
- for (pkgCache::VerIterator Cur = Pkg.VersionList();
- Cur.end() != true; Cur++)
- {
- if (Cur->Priority < 1 || Cur->Priority > 5)
- cout << "Bad prio " << Pkg.Name() << ',' << Cur.VerStr() << " == " << (int)Cur->Priority << endl;
- }
- }
- return true;
-}
- /*}}}*/
// Dump - show everything /*{{{*/
// ---------------------------------------------------------------------
-/* */
+/* This is worthless except for debugging things */
bool Dump(CommandLine &Cmd)
{
pkgCache &Cache = *GCache;
+ cout << "Using Versioning System: " << Cache.VS->Label << endl;
+
for (pkgCache::PkgIterator P = Cache.PkgBegin(); P.end() == false; P++)
{
cout << "Package: " << P.Name() << endl;
@@ -314,9 +340,10 @@ bool Dump(CommandLine &Cmd)
}
}
- for (pkgCache::PkgFileIterator F(Cache); F.end() == false; F++)
+ for (pkgCache::PkgFileIterator F = Cache.FileBegin(); F.end() == false; F++)
{
cout << "File: " << F.FileName() << endl;
+ cout << " Type: " << F.IndexType() << endl;
cout << " Size: " << F->Size << endl;
cout << " ID: " << F->ID << endl;
cout << " Flags: " << F->Flags << endl;
@@ -325,6 +352,7 @@ bool Dump(CommandLine &Cmd)
cout << " Component: " << F.Component() << endl;
cout << " Version: " << F.Version() << endl;
cout << " Origin: " << F.Origin() << endl;
+ cout << " Site: " << F.Site() << endl;
cout << " Label: " << F.Label() << endl;
cout << " Architecture: " << F.Architecture() << endl;
}
@@ -334,52 +362,144 @@ bool Dump(CommandLine &Cmd)
/*}}}*/
// DumpAvail - Print out the available list /*{{{*/
// ---------------------------------------------------------------------
-/* This is needed to make dpkg --merge happy */
+/* This is needed to make dpkg --merge happy.. I spent a bit of time to
+ make this run really fast, perhaps I went a little overboard.. */
bool DumpAvail(CommandLine &Cmd)
{
pkgCache &Cache = *GCache;
- unsigned char *Buffer = new unsigned char[Cache.HeaderP->MaxVerFileSize];
- for (pkgCache::PkgFileIterator I = Cache.FileBegin(); I.end() == false; I++)
- {
- if ((I->Flags & pkgCache::Flag::NotSource) != 0)
+ pkgPolicy Plcy(&Cache);
+ if (ReadPinFile(Plcy) == false)
+ return false;
+
+ pkgCache::VerFile **VFList = new pkgCache::VerFile *[Cache.HeaderP->PackageCount];
+ memset(VFList,0,sizeof(*VFList)*Cache.HeaderP->PackageCount);
+
+ // Map versions that we want to write out onto the VerList array.
+ for (pkgCache::PkgIterator P = Cache.PkgBegin(); P.end() == false; P++)
+ {
+ if (P->VersionList == 0)
continue;
- if (I.IsOk() == false)
+ /* Find the proper version to use. If the policy says there are no
+ possible selections we return the installed version, if available..
+ This prevents dselect from making it obsolete. */
+ pkgCache::VerIterator V = Plcy.GetCandidateVer(P);
+ if (V.end() == true)
{
- delete [] Buffer;
- return _error->Error("Package file %s is out of sync.",I.FileName());
+ if (P->CurrentVer == 0)
+ continue;
+ V = P.CurrentVer();
}
- FileFd PkgF(I.FileName(),FileFd::ReadOnly);
- if (_error->PendingError() == true)
+ pkgCache::VerFileIterator VF = V.FileList();
+ for (; VF.end() == false ; VF++)
+ if ((VF.File()->Flags & pkgCache::Flag::NotSource) == 0)
+ break;
+
+ /* Okay, here we have a bit of a problem.. The policy has selected the
+ currently installed package - however it only exists in the
+ status file.. We need to write out something or dselect will mark
+ the package as obsolete! Thus we emit the status file entry, but
+ below we remove the status line to make it valid for the
+         available file. However! We only do this if there do exist *any*
+ non-source versions of the package - that way the dselect obsolete
+ handling works OK. */
+ if (VF.end() == true)
{
- delete [] Buffer;
- return false;
+ for (pkgCache::VerIterator Cur = P.VersionList(); Cur.end() != true; Cur++)
+ {
+ for (VF = Cur.FileList(); VF.end() == false; VF++)
+ {
+ if ((VF.File()->Flags & pkgCache::Flag::NotSource) == 0)
+ {
+ VF = V.FileList();
+ break;
+ }
+ }
+
+ if (VF.end() == false)
+ break;
+ }
}
+
+ VFList[P->ID] = VF;
+ }
+
+ LocalitySort(VFList,Cache.HeaderP->PackageCount,sizeof(*VFList));
- /* Write all of the records from this package file, we search the entire
- structure to find them */
- for (pkgCache::PkgIterator P = Cache.PkgBegin(); P.end() == false; P++)
+ // Iterate over all the package files and write them out.
+ char *Buffer = new char[Cache.HeaderP->MaxVerFileSize+10];
+ for (pkgCache::VerFile **J = VFList; *J != 0;)
+ {
+ pkgCache::PkgFileIterator File(Cache,(*J)->File + Cache.PkgFileP);
+ if (File.IsOk() == false)
{
- // Find the proper version to use. We should probably use the DepCache.
- pkgCache::VerIterator V = Cache.GetCandidateVer(P,false);
+ _error->Error(_("Package file %s is out of sync."),File.FileName());
+ break;
+ }
- if (V.end() == true || V.FileList().File() != I)
- continue;
+ FileFd PkgF(File.FileName(),FileFd::ReadOnly);
+ if (_error->PendingError() == true)
+ break;
+
+ /* Write all of the records from this package file, since we
+ already did locality sorting we can now just seek through the
+      file in read order. We apply one more optimization here: since there
+      are often small gaps of a few bytes between records (for the \n) we
+      read them into the next buffer and offset a bit.. */
+ unsigned long Pos = 0;
+ for (; *J != 0; J++)
+ {
+ if ((*J)->File + Cache.PkgFileP != File)
+ break;
+ const pkgCache::VerFile &VF = **J;
+
// Read the record and then write it out again.
- if (PkgF.Seek(V.FileList()->Offset) == false ||
- PkgF.Read(Buffer,V.FileList()->Size) == false ||
- write(STDOUT_FILENO,Buffer,V.FileList()->Size) != V.FileList()->Size)
+ unsigned long Jitter = VF.Offset - Pos;
+ if (Jitter > 8)
{
- delete [] Buffer;
- return false;
- }
+ if (PkgF.Seek(VF.Offset) == false)
+ break;
+ Jitter = 0;
+ }
+
+ if (PkgF.Read(Buffer,VF.Size + Jitter) == false)
+ break;
+ Buffer[VF.Size + Jitter] = '\n';
+
+ // See above..
+ if ((File->Flags & pkgCache::Flag::NotSource) == pkgCache::Flag::NotSource)
+ {
+ pkgTagSection Tags;
+ TFRewriteData RW[] = {{"Status",0},{}};
+ const char *Zero = 0;
+ if (Tags.Scan(Buffer+Jitter,VF.Size+1) == false ||
+ TFRewrite(stdout,Tags,&Zero,RW) == false)
+ {
+ _error->Error("Internal Error, Unable to parse a package record");
+ break;
+ }
+ fputc('\n',stdout);
+ }
+ else
+ {
+ if (fwrite(Buffer+Jitter,VF.Size+1,1,stdout) != 1)
+ break;
+ }
+
+ Pos = VF.Offset + VF.Size;
}
+
+ fflush(stdout);
+ if (_error->PendingError() == true)
+ break;
}
- return true;
+ delete [] Buffer;
+ delete [] VFList;
+ return !_error->PendingError();
}
/*}}}*/
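The rewritten DumpAvail above depends on that locality sort: records of each Packages file are visited in increasing offset order, a gap of up to 8 bytes to the next record ('Jitter') is simply read along with it rather than paying for a seek, and status-file-only entries are passed through TFRewrite to drop their Status field. A rough, self-contained sketch of just the seek-or-absorb-the-gap copying loop (the file layout and record table here are made up for the example):

#include <cstdio>
#include <vector>

struct Rec { long Offset; long Size; };

// Copy the given records (already sorted by Offset) from In to stdout,
// seeking only when the gap to the next record exceeds 8 bytes; smaller
// gaps are read and discarded together with the record itself.
static bool CopyRecords(std::FILE *In, const std::vector<Rec> &Recs)
{
   std::vector<char> Buf;
   long Pos = 0;
   for (std::vector<Rec>::size_type I = 0; I != Recs.size(); ++I)
   {
      long Jitter = Recs[I].Offset - Pos;
      if (Jitter < 0 || Jitter > 8)
      {
         if (std::fseek(In, Recs[I].Offset, SEEK_SET) != 0)
            return false;
         Jitter = 0;
      }
      Buf.resize(Recs[I].Size + Jitter + 1);
      if (std::fread(&Buf[0], 1, Recs[I].Size + Jitter, In) !=
          (std::size_t)(Recs[I].Size + Jitter))
         return false;
      Buf[Recs[I].Size + Jitter] = '\n';   // terminate the stanza
      if (std::fwrite(&Buf[Jitter], 1, Recs[I].Size + 1, stdout) !=
          (std::size_t)(Recs[I].Size + 1))
         return false;
      Pos = Recs[I].Offset + Recs[I].Size;
   }
   return true;
}

int main()
{
   std::FILE *In = std::tmpfile();
   if (In == 0)
      return 1;
   std::fputs("Package: a\n\nPackage: b\n", In);
   std::rewind(In);
   Rec R[] = {{0,10},{12,10}};           // two stanzas with a 2-byte gap
   return CopyRecords(In, std::vector<Rec>(R, R + 2)) ? 0 : 1;
}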
// Depends - Print out a dependency tree /*{{{*/
@@ -388,52 +508,76 @@ bool DumpAvail(CommandLine &Cmd)
bool Depends(CommandLine &CmdL)
{
pkgCache &Cache = *GCache;
+ SPtrArray<unsigned> Colours = new unsigned[Cache.Head().PackageCount];
+ memset(Colours,0,sizeof(*Colours)*Cache.Head().PackageCount);
for (const char **I = CmdL.FileList + 1; *I != 0; I++)
{
pkgCache::PkgIterator Pkg = Cache.FindPkg(*I);
if (Pkg.end() == true)
{
- _error->Warning("Unable to locate package %s",*I);
+ _error->Warning(_("Unable to locate package %s"),*I);
continue;
}
-
- pkgCache::VerIterator Ver = Pkg.VersionList();
- if (Ver.end() == true)
- {
- cout << '<' << Pkg.Name() << '>' << endl;
- continue;
- }
-
- cout << Pkg.Name() << endl;
-
- for (pkgCache::DepIterator D = Ver.DependsList(); D.end() == false; D++)
+ Colours[Pkg->ID] = 1;
+ }
+
+ bool Recurse = _config->FindB("APT::Cache::RecurseDepends",false);
+ bool DidSomething;
+ do
+ {
+ DidSomething = false;
+ for (pkgCache::PkgIterator Pkg = Cache.PkgBegin(); Pkg.end() == false; Pkg++)
{
- if ((D->CompareOp & pkgCache::Dep::Or) == pkgCache::Dep::Or)
- cout << " |";
- else
- cout << " ";
+ if (Colours[Pkg->ID] != 1)
+ continue;
+ Colours[Pkg->ID] = 2;
+ DidSomething = true;
- // Show the package
- pkgCache::PkgIterator Trg = D.TargetPkg();
- if (Trg->VersionList == 0)
- cout << D.DepType() << ": <" << Trg.Name() << ">" << endl;
- else
- cout << D.DepType() << ": " << Trg.Name() << endl;
-
- // Display all solutions
- pkgCache::Version **List = D.AllTargets();
- for (pkgCache::Version **I = List; *I != 0; I++)
+ pkgCache::VerIterator Ver = Pkg.VersionList();
+ if (Ver.end() == true)
{
- pkgCache::VerIterator V(Cache,*I);
- if (V != Cache.VerP + V.ParentPkg()->VersionList ||
- V->ParentPkg == D->Package)
- continue;
- cout << " " << V.ParentPkg().Name() << endl;
+ cout << '<' << Pkg.Name() << '>' << endl;
+ continue;
}
- delete [] List;
- }
+
+ cout << Pkg.Name() << endl;
+
+ for (pkgCache::DepIterator D = Ver.DependsList(); D.end() == false; D++)
+ {
+ if ((D->CompareOp & pkgCache::Dep::Or) == pkgCache::Dep::Or)
+ cout << " |";
+ else
+ cout << " ";
+
+ // Show the package
+ pkgCache::PkgIterator Trg = D.TargetPkg();
+ if (Trg->VersionList == 0)
+ cout << D.DepType() << ": <" << Trg.Name() << ">" << endl;
+ else
+ cout << D.DepType() << ": " << Trg.Name() << endl;
+
+ if (Recurse == true)
+ Colours[D.TargetPkg()->ID]++;
+
+ // Display all solutions
+ SPtrArray<pkgCache::Version *> List = D.AllTargets();
+ pkgPrioSortList(Cache,List);
+ for (pkgCache::Version **I = List; *I != 0; I++)
+ {
+ pkgCache::VerIterator V(Cache,*I);
+ if (V != Cache.VerP + V.ParentPkg()->VersionList ||
+ V->ParentPkg == D->Package)
+ continue;
+ cout << " " << V.ParentPkg().Name() << endl;
+
+ if (Recurse == true)
+ Colours[D.ParentPkg()->ID]++;
+ }
+ }
+ }
}
+ while (DidSomething == true);
return true;
}
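The --recurse handling added to Depends above is a small fixed-point colouring: packages named on the command line start at colour 1, each pass prints every colour-1 package, marks it 2 and (when APT::Cache::RecurseDepends is set) colours its dependency targets, and the loop stops once a pass changes nothing. A compact sketch of that loop over a toy dependency map (the graph data and names are invented for the example):

#include <cstdio>
#include <map>
#include <string>
#include <vector>

int main()
{
   // Toy dependency graph: package -> direct dependencies.
   std::map<std::string, std::vector<std::string> > Deps;
   Deps["apt"].push_back("libc6");
   Deps["apt"].push_back("libstdc++6");
   Deps["libstdc++6"].push_back("libc6");
   Deps["libc6"];                            // no dependencies

   // 0 = untouched, 1 = queued for printing, 2 = already printed.
   std::map<std::string, int> Colour;
   Colour["apt"] = 1;                        // the package asked about

   bool DidSomething;
   do
   {
      DidSomething = false;
      for (std::map<std::string, std::vector<std::string> >::iterator P =
              Deps.begin(); P != Deps.end(); ++P)
      {
         if (Colour[P->first] != 1)
            continue;
         Colour[P->first] = 2;
         DidSomething = true;
         std::printf("%s\n", P->first.c_str());
         for (std::vector<std::string>::size_type I = 0;
              I != P->second.size(); ++I)
         {
            std::printf("  Depends: %s\n", P->second[I].c_str());
            if (Colour[P->second[I]] == 0)   // queue it for a later pass
               Colour[P->second[I]] = 1;
         }
      }
   }
   while (DidSomething == true);
   return 0;
}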
@@ -518,7 +662,7 @@ bool Dotty(CommandLine &CmdL)
pkgCache::PkgIterator Pkg = Cache.FindPkg(P);
if (Pkg.end() == true)
{
- _error->Warning("Unable to locate package %s",*I);
+ _error->Warning(_("Unable to locate package %s"),*I);
continue;
}
Show[Pkg->ID] = ToShow;
@@ -569,7 +713,7 @@ bool Dotty(CommandLine &CmdL)
for (pkgCache::VerIterator I = DPkg.VersionList();
I.end() == false && Hit == false; I++)
{
- if (pkgCheckDep(D.TargetVer(),I.VerStr(),D->CompareOp) == true)
+ if (Cache.VS->CheckDep(I.VerStr(),D->CompareOp,D.TargetVer()) == true)
Hit = true;
}
@@ -577,7 +721,7 @@ bool Dotty(CommandLine &CmdL)
for (pkgCache::PrvIterator I = DPkg.ProvidesList();
I.end() == false && Hit == false; I++)
{
- if (pkgCheckDep(D.TargetVer(),I.ProvideVersion(),D->CompareOp) == false)
+ if (Cache.VS->CheckDep(I.ProvideVersion(),D->CompareOp,D.TargetVer()) == false)
Hit = true;
}
@@ -591,7 +735,9 @@ bool Dotty(CommandLine &CmdL)
{
/* If a conflicts does not meet anything in the database
then show the relation but do not recurse */
- if (Hit == false && D->Type == pkgCache::Dep::Conflicts)
+ if (Hit == false &&
+ (D->Type == pkgCache::Dep::Conflicts ||
+ D->Type == pkgCache::Dep::Obsoletes))
{
if (Show[D.TargetPkg()->ID] == None &&
Show[D.TargetPkg()->ID] != ToShow)
@@ -610,6 +756,7 @@ bool Dotty(CommandLine &CmdL)
switch(D->Type)
{
case pkgCache::Dep::Conflicts:
+ case pkgCache::Dep::Obsoletes:
printf("[color=springgreen];\n");
break;
@@ -651,6 +798,8 @@ bool Dotty(CommandLine &CmdL)
/* */
bool DoAdd(CommandLine &CmdL)
{
+ return _error->Error("Unimplemented");
+#if 0
// Make sure there is at least one argument
if (CmdL.FileSize() <= 1)
return _error->Error("You must give at least one file name");
@@ -681,7 +830,7 @@ bool DoAdd(CommandLine &CmdL)
if (_error->PendingError() == true)
return _error->Error("Problem opening %s",*I);
- if (Gen.SelectFile(*I) == false)
+ if (Gen.SelectFile(*I,"") == false)
return _error->Error("Problem with SelectFile");
if (Gen.MergeList(Parser) == false)
@@ -693,6 +842,7 @@ bool DoAdd(CommandLine &CmdL)
Stats(CmdL);
return true;
+#endif
}
/*}}}*/
// DisplayRecord - Displays the complete record for the package /*{{{*/
@@ -712,17 +862,18 @@ bool DisplayRecord(pkgCache::VerIterator V)
// Check and load the package list file
pkgCache::PkgFileIterator I = Vf.File();
if (I.IsOk() == false)
- return _error->Error("Package file %s is out of sync.",I.FileName());
+ return _error->Error(_("Package file %s is out of sync."),I.FileName());
FileFd PkgF(I.FileName(),FileFd::ReadOnly);
if (_error->PendingError() == true)
return false;
// Read the record and then write it out again.
- unsigned char *Buffer = new unsigned char[GCache->HeaderP->MaxVerFileSize];
+ unsigned char *Buffer = new unsigned char[GCache->HeaderP->MaxVerFileSize+1];
+ Buffer[V.FileList()->Size] = '\n';
if (PkgF.Seek(V.FileList()->Offset) == false ||
PkgF.Read(Buffer,V.FileList()->Size) == false ||
- write(STDOUT_FILENO,Buffer,V.FileList()->Size) != V.FileList()->Size)
+ write(STDOUT_FILENO,Buffer,V.FileList()->Size+1) != V.FileList()->Size+1)
{
delete [] Buffer;
return false;
@@ -736,50 +887,107 @@ bool DisplayRecord(pkgCache::VerIterator V)
// Search - Perform a search /*{{{*/
// ---------------------------------------------------------------------
 /* This searches the package names and package descriptions for a pattern */
+struct ExVerFile
+{
+ pkgCache::VerFile *Vf;
+ bool NameMatch;
+};
+
bool Search(CommandLine &CmdL)
{
pkgCache &Cache = *GCache;
bool ShowFull = _config->FindB("APT::Cache::ShowFull",false);
bool NamesOnly = _config->FindB("APT::Cache::NamesOnly",false);
+ unsigned NumPatterns = CmdL.FileSize() -1;
+
+ pkgDepCache::Policy Plcy;
// Make sure there is at least one argument
- if (CmdL.FileSize() != 2)
- return _error->Error("You must give exactly one pattern");
+ if (NumPatterns < 1)
+      return _error->Error(_("You must give at least one search pattern"));
// Compile the regex pattern
- regex_t Pattern;
- if (regcomp(&Pattern,CmdL.FileList[1],REG_EXTENDED | REG_ICASE |
- REG_NOSUB) != 0)
- return _error->Error("Regex compilation error");
+ regex_t *Patterns = new regex_t[NumPatterns];
+ memset(Patterns,0,sizeof(*Patterns)*NumPatterns);
+ for (unsigned I = 0; I != NumPatterns; I++)
+ {
+ if (regcomp(&Patterns[I],CmdL.FileList[I+1],REG_EXTENDED | REG_ICASE |
+ REG_NOSUB) != 0)
+ {
+ for (; I != 0; I--)
+ regfree(&Patterns[I]);
+ return _error->Error("Regex compilation error");
+ }
+ }
// Create the text record parser
pkgRecords Recs(Cache);
if (_error->PendingError() == true)
+ {
+ for (unsigned I = 0; I != NumPatterns; I++)
+ regfree(&Patterns[I]);
return false;
+ }
- // Search package names
- pkgCache::PkgIterator I = Cache.PkgBegin();
- for (;I.end() != true; I++)
+ ExVerFile *VFList = new ExVerFile[Cache.HeaderP->PackageCount+1];
+   memset(VFList,0,sizeof(*VFList)*(Cache.HeaderP->PackageCount+1));
+
+ // Map versions that we want to write out onto the VerList array.
+ for (pkgCache::PkgIterator P = Cache.PkgBegin(); P.end() == false; P++)
{
- // We search against the install version as that makes the most sense..
- pkgCache::VerIterator V = Cache.GetCandidateVer(I);
+ for (unsigned I = 0; I != NumPatterns; I++)
+ {
+ if (regexec(&Patterns[I],P.Name(),0,0,0) == 0)
+ {
+ VFList[P->ID].NameMatch = true;
+ break;
+ }
+ }
+
+      // Doing names only, drop any that don't match..
+ if (NamesOnly == true && VFList[P->ID].NameMatch == false)
+ continue;
+
+ // Find the proper version to use.
+ pkgCache::VerIterator V = Plcy.GetCandidateVer(P);
if (V.end() == true)
continue;
+ VFList[P->ID].Vf = V.FileList();
+ }
+
+ LocalitySort(&VFList->Vf,Cache.HeaderP->PackageCount,sizeof(*VFList));
- pkgRecords::Parser &P = Recs.Lookup(V.FileList());
+ // Iterate over all the version records and check them
+ for (ExVerFile *J = VFList; J->Vf != 0; J++)
+ {
+ pkgRecords::Parser &P = Recs.Lookup(pkgCache::VerFileIterator(Cache,J->Vf));
- if (regexec(&Pattern,I.Name(),0,0,0) == 0 ||
- (NamesOnly == false &&
- regexec(&Pattern,P.LongDesc().c_str(),0,0,0) == 0))
+ bool Match = J->NameMatch;
+ string LongDesc = P.LongDesc();
+ for (unsigned I = 0; I != NumPatterns && Match == false; I++)
+ if (regexec(&Patterns[I],LongDesc.c_str(),0,0,0) == 0)
+ Match = true;
+
+ if (Match == true)
{
if (ShowFull == true)
- DisplayRecord(V);
+ {
+ const char *Start;
+ const char *End;
+ P.GetRec(Start,End);
+ fwrite(Start,End-Start,1,stdout);
+ putc('\n',stdout);
+ }
else
- cout << I.Name() << " - " << P.ShortDesc() << endl;
- }
+ printf("%s - %s\n",P.Name().c_str(),P.ShortDesc().c_str());
+ }
}
- regfree(&Pattern);
+ delete [] VFList;
+ for (unsigned I = 0; I != NumPatterns; I++)
+ regfree(&Patterns[I]);
+ if (ferror(stdout))
+ return _error->Error("Write to stdout failed");
return true;
}
/*}}}*/
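The reworked Search above compiles every command-line argument into its own POSIX regex, records per-package name matches first, and only then walks the locality-sorted candidate records so the long descriptions are read in file order. The pattern-matching half, reduced to a stand-alone program over a made-up package table:

#include <regex.h>
#include <cstdio>

int main(int argc, const char *argv[])
{
   // Patterns come from the command line, e.g.: ./a.out '^lib' editor
   unsigned NumPatterns = argc - 1;
   if (NumPatterns < 1)
      return 1;

   regex_t *Patterns = new regex_t[NumPatterns];
   for (unsigned I = 0; I != NumPatterns; I++)
      if (regcomp(&Patterns[I], argv[I + 1],
                  REG_EXTENDED | REG_ICASE | REG_NOSUB) != 0)
      {
         std::fprintf(stderr, "Regex compilation error\n");
         while (I != 0)
            regfree(&Patterns[--I]);
         delete [] Patterns;
         return 1;
      }

   // Stand-ins for package name / long description pairs.
   const char *Pkgs[][2] = {
      {"nano",   "small, friendly text editor"},
      {"libpng", "PNG library - runtime"},
      {0, 0}};

   // A record matches if any pattern hits either the name or the description.
   for (unsigned P = 0; Pkgs[P][0] != 0; P++)
   {
      bool Match = false;
      for (unsigned I = 0; I != NumPatterns && Match == false; I++)
         if (regexec(&Patterns[I], Pkgs[P][0], 0, 0, 0) == 0 ||
             regexec(&Patterns[I], Pkgs[P][1], 0, 0, 0) == 0)
            Match = true;
      if (Match == true)
         std::printf("%s - %s\n", Pkgs[P][0], Pkgs[P][1]);
   }

   for (unsigned I = 0; I != NumPatterns; I++)
      regfree(&Patterns[I]);
   delete [] Patterns;
   return 0;
}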
@@ -789,16 +997,18 @@ bool Search(CommandLine &CmdL)
bool ShowPackage(CommandLine &CmdL)
{
pkgCache &Cache = *GCache;
+ pkgDepCache::Policy Plcy;
+
for (const char **I = CmdL.FileList + 1; *I != 0; I++)
{
pkgCache::PkgIterator Pkg = Cache.FindPkg(*I);
if (Pkg.end() == true)
{
- _error->Warning("Unable to locate package %s",*I);
+ _error->Warning(_("Unable to locate package %s"),*I);
continue;
}
-
- // Find the proper version to use. We should probably use the DepCache.
+
+ // Find the proper version to use.
if (_config->FindB("APT::Cache::AllVersions","true") == true)
{
pkgCache::VerIterator V;
@@ -810,7 +1020,7 @@ bool ShowPackage(CommandLine &CmdL)
}
else
{
- pkgCache::VerIterator V = Cache.GetCandidateVer(Pkg);
+ pkgCache::VerIterator V = Plcy.GetCandidateVer(Pkg);
if (V.end() == true || V.FileList().end() == true)
continue;
if (DisplayRecord(V) == false)
@@ -873,7 +1083,7 @@ bool ShowSrcPackage(CommandLine &CmdL)
pkgSrcRecords::Parser *Parse;
while ((Parse = SrcRecs.Find(*I,false)) != 0)
- cout << Parse->AsStr();
+      cout << Parse->AsStr() << endl;
}
return true;
}
@@ -886,7 +1096,8 @@ bool GenCaches(CommandLine &Cmd)
OpTextProgress Progress(*_config);
pkgSourceList List;
- List.ReadMainList();
+ if (List.ReadMainList() == false)
+ return false;
return pkgMakeStatusCache(List,Progress);
}
/*}}}*/
@@ -895,44 +1106,42 @@ bool GenCaches(CommandLine &Cmd)
/* */
bool ShowHelp(CommandLine &Cmd)
{
- cout << PACKAGE << ' ' << VERSION << " for " << ARCHITECTURE <<
- " compiled on " << __DATE__ << " " << __TIME__ << endl;
- if (_config->FindB("version") == true)
- return 100;
+ ioprintf(cout,_("%s %s for %s %s compiled on %s %s\n"),PACKAGE,VERSION,
+ COMMON_OS,COMMON_CPU,__DATE__,__TIME__);
- cout << "Usage: apt-cache [options] command" << endl;
- cout << " apt-cache [options] add file1 [file1 ...]" << endl;
- cout << " apt-cache [options] showpkg pkg1 [pkg2 ...]" << endl;
- cout << endl;
- cout << "apt-cache is a low-level tool used to manipulate APT's binary" << endl;
- cout << "cache files stored in " << _config->FindFile("Dir::Cache") << endl;
- cout << "It is not meant for ordinary use only as a debug aide." << endl;
- cout << endl;
- cout << "Commands:" << endl;
- cout << " add - Add an package file to the source cache" << endl;
- cout << " gencaches - Build both the package and source cache" << endl;
- cout << " showpkg - Show some general information for a single package" << endl;
- cout << " stats - Show some basic statistics" << endl;
- cout << " dump - Show the entire file in a terse form" << endl;
- cout << " dumpavail - Print an available file to stdout" << endl;
- cout << " unmet - Show unmet dependencies" << endl;
- cout << " check - Check the cache a bit" << endl;
- cout << " search - Search the package list for a regex pattern" << endl;
- cout << " show - Show a readable record for the package" << endl;
- cout << " depends - Show raw dependency information for a package" << endl;
- cout << " pkgnames - List the names of all packages" << endl;
- cout << " dotty - Generate package graphs for GraphVis" << endl;
- cout << endl;
- cout << "Options:" << endl;
- cout << " -h This help text." << endl;
- cout << " -p=? The package cache. [" << _config->FindFile("Dir::Cache::pkgcache") << ']' << endl;
- cout << " -s=? The source cache. [" << _config->FindFile("Dir::Cache::srcpkgcache") << ']' << endl;
- cout << " -q Disable progress indicator." << endl;
- cout << " -i Show only important deps for the unmet command." << endl;
- cout << " -c=? Read this configuration file" << endl;
- cout << " -o=? Set an arbitary configuration option, eg -o dir::cache=/tmp" << endl;
- cout << "See the apt-cache(8) and apt.conf(5) manual pages for more information." << endl;
- return 100;
+ cout <<
+ _("Usage: apt-cache [options] command\n"
+ " apt-cache [options] add file1 [file1 ...]\n"
+ " apt-cache [options] showpkg pkg1 [pkg2 ...]\n"
+ "\n"
+ "apt-cache is a low-level tool used to manipulate APT's binary\n"
+ "cache files, and query information from them\n"
+ "\n"
+ "Commands:\n"
+   "   add - Add a package file to the source cache\n"
+ " gencaches - Build both the package and source cache\n"
+ " showpkg - Show some general information for a single package\n"
+ " stats - Show some basic statistics\n"
+ " dump - Show the entire file in a terse form\n"
+ " dumpavail - Print an available file to stdout\n"
+ " unmet - Show unmet dependencies\n"
+ " check - Check the cache a bit\n"
+ " search - Search the package list for a regex pattern\n"
+ " show - Show a readable record for the package\n"
+ " depends - Show raw dependency information for a package\n"
+ " pkgnames - List the names of all packages\n"
+ " dotty - Generate package graphs for GraphVis\n"
+ "\n"
+ "Options:\n"
+ " -h This help text.\n"
+ " -p=? The package cache.\n"
+ " -s=? The source cache.\n"
+ " -q Disable progress indicator.\n"
+ " -i Show only important deps for the unmet command.\n"
+ " -c=? Read this configuration file\n"
+   "  -o=? Set an arbitrary configuration option, eg -o dir::cache=/tmp\n"
+ "See the apt-cache(8) and apt.conf(5) manual pages for more information.\n");
+ return true;
}
/*}}}*/
// CacheInitialize - Initialize things for apt-cache /*{{{*/
@@ -955,10 +1164,11 @@ int main(int argc,const char *argv[])
{'q',"quiet","quiet",CommandLine::IntLevel},
{'i',"important","APT::Cache::Important",0},
{'f',"full","APT::Cache::ShowFull",0},
- {'g',"no-generate","APT::Cache::NoGenerate",0},
+ {'g',"generate","APT::Cache::Generate",0},
{'a',"all-versions","APT::Cache::AllVersions",0},
{0,"names-only","APT::Cache::NamesOnly",0},
{0,"all-names","APT::Cache::AllNames",0},
+ {0,"recurse","APT::Cache::RecurseDepends",0},
{'c',"config-file",0,CommandLine::ConfigFile},
{'o',"option",0,CommandLine::ArbItem},
{0,0,0,0}};
@@ -972,7 +1182,6 @@ int main(int argc,const char *argv[])
{"dump",&Dump},
{"dumpavail",&DumpAvail},
{"unmet",&UnMet},
- {"check",&Check},
{"search",&Search},
{"depends",&Depends},
{"dotty",&Dotty},
@@ -984,8 +1193,9 @@ int main(int argc,const char *argv[])
// Parse the command line and initialize the package library
CommandLine CmdL(Args,_config);
- if (pkgInitialize(*_config) == false ||
- CmdL.Parse(argc,argv) == false)
+ if (pkgInitConfig(*_config) == false ||
+ CmdL.Parse(argc,argv) == false ||
+ pkgInitSystem(*_config,_system) == false)
{
_error->DumpErrors();
return 100;
@@ -994,8 +1204,11 @@ int main(int argc,const char *argv[])
// See if the help should be shown
if (_config->FindB("help") == true ||
CmdL.FileSize() == 0)
- return ShowHelp(CmdL);
-
+ {
+ ShowHelp(CmdL);
+ return 0;
+ }
+
// Deal with stdout not being a tty
if (ttyname(STDOUT_FILENO) == 0 && _config->FindI("quiet",0) < 1)
_config->Set("quiet","1");
@@ -1003,7 +1216,7 @@ int main(int argc,const char *argv[])
if (CmdL.DispatchArg(CmdsA,false) == false && _error->PendingError() == false)
{
MMap *Map;
- if (_config->FindB("APT::Cache::NoGenerate",false) == true)
+ if (_config->FindB("APT::Cache::Generate",true) == false)
{
Map = new MMap(*new FileFd(_config->FindFile("Dir::Cache::pkgcache"),
FileFd::ReadOnly),MMap::Public|MMap::ReadOnly);
@@ -1016,12 +1229,12 @@ int main(int argc,const char *argv[])
// Generate it and map it
OpProgress Prog;
- Map = pkgMakeStatusCacheMem(List,Prog);
+ pkgMakeStatusCache(List,Prog,&Map,true);
}
if (_error->PendingError() == false)
{
- pkgCache Cache(*Map);
+ pkgCache Cache(Map);
GCache = &Cache;
if (_error->PendingError() == false)
CmdL.DispatchArg(CmdsB);
diff --git a/cmdline/apt-cdrom.cc b/cmdline/apt-cdrom.cc
index 204cef366..c43029e5e 100644
--- a/cmdline/apt-cdrom.cc
+++ b/cmdline/apt-cdrom.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: apt-cdrom.cc,v 1.35 2000/05/10 06:03:52 jgg Exp $
+// $Id: apt-cdrom.cc,v 1.36 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
APT CDROM - Tool for handling APT's CDROM database.
@@ -19,7 +19,8 @@
#include <apt-pkg/cdromutl.h>
#include <apt-pkg/strutl.h>
#include <config.h>
-
+#include <apti18n.h>
+
#include "indexcopy.h"
#include <iostream>
@@ -119,7 +120,7 @@ bool FindPackages(string CD,vector<string> &List,vector<string> &SList,
break;
if (chdir(CD.c_str()) != 0)
- return _error->Errno("chdir","Unable to change to ",CD.c_str());
+ return _error->Errno("chdir","Unable to change to %s",CD.c_str());
};
closedir(D);
@@ -255,7 +256,7 @@ bool DropRepeats(vector<string> &List,const char *Name)
// ---------------------------------------------------------------------
 /* This takes the list of source list expressed entries and collects
similar ones to form a single entry for each dist */
-bool ReduceSourcelist(string CD,vector<string> &List)
+void ReduceSourcelist(string CD,vector<string> &List)
{
sort(List.begin(),List.end());
@@ -269,8 +270,9 @@ bool ReduceSourcelist(string CD,vector<string> &List)
string::size_type SSpace = (*I).find(' ',Space + 1);
if (SSpace == string::npos)
continue;
-
+
string Word1 = string(*I,Space,SSpace-Space);
+ string Prefix = string(*I,0,Space);
for (vector<string>::iterator J = List.begin(); J != I; J++)
{
// Find a space..
@@ -281,6 +283,8 @@ bool ReduceSourcelist(string CD,vector<string> &List)
if (SSpace2 == string::npos)
continue;
+ if (string(*J,0,Space2) != Prefix)
+ continue;
if (string(*J,Space2,SSpace2-Space2) != Word1)
continue;
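With the Prefix check added above, ReduceSourcelist only merges entries that agree on both the leading word (Prefix) and the word after it (the dist), folding the remaining component words onto one line. As a loose illustration, assuming hypothetical collected entries of the shape '<prefix> <dist> <component>'

   pool-a stable main
   pool-a stable contrib
   pool-b stable main

the first two would collapse into a single 'pool-a stable main contrib' entry, while the third keeps its own line because its prefix differs even though the dist matches.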
@@ -358,7 +362,8 @@ bool WriteSourceList(string Name,vector<string> &List,bool Source)
string File = _config->FindFile("Dir::Etc::sourcelist");
// Open the stream for reading
- ifstream F(File.c_str(),ios::in | ios::nocreate);
+ ifstream F((FileExists(File)?File.c_str():"/dev/null"),
+ ios::in | ios::nocreate);
if (!F != 0)
return _error->Errno("ifstream::ifstream","Opening %s",File.c_str());
@@ -577,6 +582,12 @@ bool DoAdd(CommandLine &)
if (Name.empty() == false)
{
+ // Escape special characters
+ string::iterator J = Name.begin();
+ for (; J != Name.end(); J++)
+ if (*J == '"' || *J == ']' || *J == '[')
+ *J = '_';
+
cout << "Found label '" << Name << "'" << endl;
Database.Set("CD::" + ID + "::Label",Name);
}
@@ -663,37 +674,78 @@ bool DoAdd(CommandLine &)
return true;
}
/*}}}*/
+// DoIdent - Ident a CDROM /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool DoIdent(CommandLine &)
+{
+ // Startup
+ string CDROM = _config->FindDir("Acquire::cdrom::mount","/cdrom/");
+ if (CDROM[0] == '.')
+ CDROM= SafeGetCWD() + '/' + CDROM;
+
+ cout << "Using CD-ROM mount point " << CDROM << endl;
+ cout << "Mounting CD-ROM" << endl;
+ if (MountCdrom(CDROM) == false)
+ return _error->Error("Failed to mount the cdrom.");
+
+ // Hash the CD to get an ID
+ cout << "Identifying.. " << flush;
+ string ID;
+ if (IdentCdrom(CDROM,ID) == false)
+ {
+ cout << endl;
+ return false;
+ }
+
+ cout << '[' << ID << ']' << endl;
+
+ // Read the database
+ Configuration Database;
+ string DFile = _config->FindFile("Dir::State::cdroms");
+ if (FileExists(DFile) == true)
+ {
+ if (ReadConfigFile(Database,DFile) == false)
+ return _error->Error("Unable to read the cdrom database %s",
+ DFile.c_str());
+ }
+ cout << "Stored Label: '" << Database.Find("CD::" + ID) << "'" << endl;
+ return true;
+}
+ /*}}}*/
// ShowHelp - Show the help screen /*{{{*/
// ---------------------------------------------------------------------
/* */
int ShowHelp()
{
- cout << PACKAGE << ' ' << VERSION << " for " << ARCHITECTURE <<
- " compiled on " << __DATE__ << " " << __TIME__ << endl;
+ ioprintf(cout,_("%s %s for %s %s compiled on %s %s\n"),PACKAGE,VERSION,
+ COMMON_OS,COMMON_CPU,__DATE__,__TIME__);
if (_config->FindB("version") == true)
- return 100;
-
- cout << "Usage: apt-cdrom [options] command" << endl;
- cout << endl;
- cout << "apt-cdrom is a tool to add CDROM's to APT's source list. The " << endl;
- cout << "CDROM mount point and device information is taken from apt.conf" << endl;
- cout << "and /etc/fstab." << endl;
- cout << endl;
- cout << "Commands:" << endl;
- cout << " add - Add a CDROM" << endl;
- cout << endl;
- cout << "Options:" << endl;
- cout << " -h This help text" << endl;
- cout << " -d CD-ROM mount point" << endl;
- cout << " -r Rename a recognized CD-ROM" << endl;
- cout << " -m No mounting" << endl;
- cout << " -f Fast mode, don't check package files" << endl;
- cout << " -a Thorough scan mode" << endl;
- cout << " -c=? Read this configuration file" << endl;
- cout << " -o=? Set an arbitary configuration option, eg -o dir::cache=/tmp" << endl;
- cout << "See fstab(5)" << endl;
- return 100;
+ return 0;
+
+ cout <<
+ "Usage: apt-cdrom [options] command\n"
+ "\n"
+   "apt-cdrom is a tool to add CDROMs to APT's source list. The\n"
+ "CDROM mount point and device information is taken from apt.conf\n"
+ "and /etc/fstab.\n"
+ "\n"
+ "Commands:\n"
+ " add - Add a CDROM\n"
+ " ident - Report the identity of a CDROM\n"
+ "\n"
+ "Options:\n"
+ " -h This help text\n"
+ " -d CD-ROM mount point\n"
+ " -r Rename a recognized CD-ROM\n"
+ " -m No mounting\n"
+ " -f Fast mode, don't check package files\n"
+ " -a Thorough scan mode\n"
+ " -c=? Read this configuration file\n"
+   "  -o=? Set an arbitrary configuration option, eg -o dir::cache=/tmp\n"
+ "See fstab(5)\n";
+ return 0;
}
/*}}}*/
@@ -715,12 +767,14 @@ int main(int argc,const char *argv[])
{0,0,0,0}};
CommandLine::Dispatch Cmds[] = {
{"add",&DoAdd},
+ {"ident",&DoIdent},
{0,0}};
// Parse the command line and initialize the package library
CommandLine CmdL(Args,_config);
- if (pkgInitialize(*_config) == false ||
- CmdL.Parse(argc,argv) == false)
+ if (pkgInitConfig(*_config) == false ||
+ CmdL.Parse(argc,argv) == false ||
+ pkgInitSystem(*_config,_system) == false)
{
_error->DumpErrors();
return 100;
diff --git a/cmdline/apt-config.cc b/cmdline/apt-config.cc
index 3de59aee5..327700e1c 100644
--- a/cmdline/apt-config.cc
+++ b/cmdline/apt-config.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: apt-config.cc,v 1.6 1999/06/06 05:52:37 jgg Exp $
+// $Id: apt-config.cc,v 1.7 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
APT Config - Program to manipulate APT configuration files
@@ -19,9 +19,13 @@
#include <apt-pkg/cmndline.h>
#include <apt-pkg/error.h>
#include <apt-pkg/init.h>
-#include "config.h"
+#include <apt-pkg/strutl.h>
+#include <config.h>
+#include <apti18n.h>
+
#include <iostream>
+#include <string>
/*}}}*/
// DoShell - Handle the shell command /*{{{*/
@@ -32,20 +36,16 @@ bool DoShell(CommandLine &CmdL)
for (const char **I = CmdL.FileList + 1; *I != 0; I += 2)
{
if (I[1] == 0 || strlen(I[1]) == 0)
- return _error->Error("Arguments not in pairs");
+ return _error->Error(_("Arguments not in pairs"));
- // Check if the caller has requested a directory path
- if (I[1][strlen(I[1])-1] == '/')
- {
- char S[300];
- strcpy(S,I[1]);
- S[strlen(S)-1] = 0;
- if (_config->Exists(S) == true)
- cout << *I << "=\"" << _config->FindDir(S) << '"' << endl;
- }
+ string key = I[1];
+ if (key.end()[-1] == '/') // old directory format
+ key.append("d");
+
+ if (_config->ExistsAny(key.c_str()))
+ cout << *I << "='" <<
+ SubstVar(_config->FindAny(key.c_str()),"'","'\\''") << '\'' << endl;
- if (_config->Exists(I[1]) == true)
- cout << *I << "=\"" << _config->Find(I[1]) << '"' << endl;
}
return true;
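DoShell above now emits each value single-quoted, using SubstVar to rewrite any embedded ' as '\'' so the output stays safe to pass through the shell's eval, and maps the old trailing-slash directory syntax onto a "d"-suffixed lookup via ExistsAny/FindAny. A tiny stand-alone version of just the quoting step (the helper name and the sample values are invented; apt-config itself reuses SubstVar for this):

#include <iostream>
#include <string>

// Wrap Value in single quotes, rewriting every embedded single quote as
// '\'' so that a shell eval cannot be confused by the configuration value.
static std::string ShellQuote(const std::string &Value)
{
   std::string Out = "'";
   for (std::string::size_type I = 0; I != Value.size(); ++I)
   {
      if (Value[I] == '\'')
         Out += "'\\''";
      else
         Out += Value[I];
   }
   return Out + "'";
}

int main()
{
   // Roughly the shape of what `apt-config shell CACHE Dir::Cache` prints:
   std::cout << "CACHE=" << ShellQuote("/var/cache/apt") << std::endl;
   std::cout << "ODD=" << ShellQuote("it's got a quote") << std::endl;
   return 0;
}

A caller would typically capture that output with something like eval $(apt-config shell CACHE Dir::Cache) and then read $CACHE, which is why the quoting has to be airtight.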
@@ -65,24 +65,25 @@ bool DoDump(CommandLine &CmdL)
/* */
int ShowHelp()
{
- cout << PACKAGE << ' ' << VERSION << " for " << ARCHITECTURE <<
- " compiled on " << __DATE__ << " " << __TIME__ << endl;
+ ioprintf(cout,_("%s %s for %s %s compiled on %s %s\n"),PACKAGE,VERSION,
+ COMMON_OS,COMMON_CPU,__DATE__,__TIME__);
if (_config->FindB("version") == true)
- return 100;
+ return 0;
- cout << "Usage: apt-config [options] command" << endl;
- cout << endl;
- cout << "apt-config is a simple tool to read the APT config file" << endl;
- cout << endl;
- cout << "Commands:" << endl;
- cout << " shell - Shell mode" << endl;
- cout << " dump - Show the configuration" << endl;
- cout << endl;
- cout << "Options:" << endl;
- cout << " -h This help text." << endl;
- cout << " -c=? Read this configuration file" << endl;
- cout << " -o=? Set an arbitary configuration option, eg -o dir::cache=/tmp" << endl;
- return 100;
+ cout <<
+ _("Usage: apt-config [options] command\n"
+ "\n"
+ "apt-config is a simple tool to read the APT config file\n"
+ "\n"
+ "Commands:\n"
+ " shell - Shell mode\n"
+ " dump - Show the configuration\n"
+ "\n"
+ "Options:\n"
+ " -h This help text.\n"
+ " -c=? Read this configuration file\n"
+   "  -o=? Set an arbitrary configuration option, eg -o dir::cache=/tmp\n");
+ return 0;
}
/*}}}*/
@@ -100,8 +101,9 @@ int main(int argc,const char *argv[])
// Parse the command line and initialize the package library
CommandLine CmdL(Args,_config);
- if (pkgInitialize(*_config) == false ||
- CmdL.Parse(argc,argv) == false)
+ if (pkgInitConfig(*_config) == false ||
+ CmdL.Parse(argc,argv) == false ||
+ pkgInitSystem(*_config,_system) == false)
{
_error->DumpErrors();
return 100;
diff --git a/cmdline/apt-get.cc b/cmdline/apt-get.cc
index 3074435e8..3cef602fa 100644
--- a/cmdline/apt-get.cc
+++ b/cmdline/apt-get.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: apt-get.cc,v 1.99 2000/06/05 04:47:30 jgg Exp $
+// $Id: apt-get.cc,v 1.100 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
apt-get - Cover for dpkg
@@ -32,14 +32,16 @@
#include <apt-pkg/sourcelist.h>
#include <apt-pkg/algorithms.h>
#include <apt-pkg/acquire-item.h>
-#include <apt-pkg/dpkgpm.h>
#include <apt-pkg/strutl.h>
#include <apt-pkg/clean.h>
#include <apt-pkg/srcrecords.h>
#include <apt-pkg/version.h>
#include <apt-pkg/cachefile.h>
-
+#include <apt-pkg/sptr.h>
+#include <apt-pkg/versionmatch.h>
+
#include <config.h>
+#include <apti18n.h>
#include "acqprogress.h"
@@ -81,6 +83,7 @@ class CacheFile : public pkgCacheFile
if (pkgCacheFile::Open(Prog,WithLock) == false)
return false;
Sort();
+
return true;
};
CacheFile() : List(0) {};
@@ -92,18 +95,24 @@ class CacheFile : public pkgCacheFile
/* Returns true on a Yes.*/
bool YnPrompt()
{
+ // This needs to be a capital
+ const char *Yes = _("Y");
+
if (_config->FindB("APT::Get::Assume-Yes",false) == true)
{
- c1out << 'Y' << endl;
+ c1out << Yes << endl;
return true;
}
char C = 0;
char Jnk = 0;
- read(STDIN_FILENO,&C,1);
- while (C != '\n' && Jnk != '\n') read(STDIN_FILENO,&Jnk,1);
+ if (read(STDIN_FILENO,&C,1) != 1)
+ return false;
+ while (C != '\n' && Jnk != '\n')
+ if (read(STDIN_FILENO,&Jnk,1) != 1)
+ return false;
- if (!(C == 'Y' || C == 'y' || C == '\n' || C == '\r'))
+ if (!(toupper(C) == *Yes || C == '\n' || C == '\r'))
return false;
return true;
}
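YnPrompt above now pulls its affirmative letter from the translation catalogue (_("Y")), checks the return value of every read() so EOF can no longer spin forever, and accepts that letter in either case or a bare Enter. Condensed to a stand-alone helper (no APT types; the default 'Y' is only a placeholder for the translated letter):

#include <cctype>
#include <unistd.h>

// Return true on "yes": the (localized) affirmative letter in either case,
// or an empty line. Any read error or EOF counts as "no".
static bool YnPrompt(char Yes = 'Y')
{
   char C = 0, Junk = 0;
   if (read(STDIN_FILENO, &C, 1) != 1)
      return false;
   // Drain the rest of the line so the next prompt starts clean.
   while (C != '\n' && Junk != '\n')
      if (read(STDIN_FILENO, &Junk, 1) != 1)
         return false;
   return toupper((unsigned char)C) == Yes || C == '\n' || C == '\r';
}

int main()
{
   return YnPrompt() ? 0 : 1;
}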
@@ -122,7 +131,7 @@ bool AnalPrompt(const char *Text)
/*}}}*/
// ShowList - Show a list /*{{{*/
// ---------------------------------------------------------------------
-/* This prints out a string of space seperated words with a title and
+/* This prints out a string of space separated words with a title and
   a two space indent line wrapped to the current screen width. */
bool ShowList(ostream &out,string Title,string List)
{
@@ -154,10 +163,17 @@ bool ShowList(ostream &out,string Title,string List)
// ---------------------------------------------------------------------
/* This prints out the names of all the packages that are broken along
   with the name of each broken dependency and a quick version
- description. */
+ description.
+
+ The output looks like:
+ Sorry, but the following packages have unmet dependencies:
+ exim: Depends: libc6 (>= 2.1.94) but 2.1.3-10 is to be installed
+ Depends: libldap2 (>= 2.0.2-2) but it is not going to be installed
+ Depends: libsasl7 but it is not going to be installed
+ */
void ShowBroken(ostream &out,CacheFile &Cache,bool Now)
{
- out << "Sorry, but the following packages have unmet dependencies:" << endl;
+ out << _("Sorry, but the following packages have unmet dependencies:") << endl;
for (unsigned J = 0; J < Cache->Head().PackageCount; J++)
{
pkgCache::PkgIterator I(Cache,Cache.List[J]);
@@ -207,34 +223,38 @@ void ShowBroken(ostream &out,CacheFile &Cache,bool Now)
// Show a quick summary of the version requirements
if (Start.TargetVer() != 0)
- out << " (" << Start.CompType() << " " << Start.TargetVer() <<
- ")";
+ out << " (" << Start.CompType() << " " << Start.TargetVer() << ")";
/* Show a summary of the target package if possible. In the case
of virtual packages we show nothing */
pkgCache::PkgIterator Targ = Start.TargetPkg();
if (Targ->ProvidesList == 0)
{
- out << " but ";
+ out << ' ';
pkgCache::VerIterator Ver = Cache[Targ].InstVerIter(Cache);
if (Ver.end() == false)
- out << Ver.VerStr() << (Now?" is installed":" is to be installed");
+ {
+ if (Now == true)
+ ioprintf(out,_("but %s is installed"),Ver.VerStr());
+ else
+ ioprintf(out,_("but %s is to be installed"),Ver.VerStr());
+ }
else
{
if (Cache[Targ].CandidateVerIter(Cache).end() == true)
{
if (Targ->ProvidesList == 0)
- out << "it is not installable";
+ out << _("but it is not installable");
else
- out << "it is a virtual package";
+ out << _("but it is a virtual package");
}
else
- out << (Now?"it is not installed":"it is not going to be installed");
+ out << (Now?_("but it is not installed"):_("but it is not going to be installed"));
}
}
if (Start != End)
- cout << " or";
+ out << _(" or");
out << endl;
if (Start == End)
@@ -260,7 +280,7 @@ void ShowNew(ostream &out,CacheFile &Cache)
List += string(I.Name()) + " ";
}
- ShowList(out,"The following NEW packages will be installed:",List);
+ ShowList(out,_("The following NEW packages will be installed:"),List);
}
/*}}}*/
// ShowDel - Show packages to delete /*{{{*/
@@ -283,7 +303,7 @@ void ShowDel(ostream &out,CacheFile &Cache)
}
}
- ShowList(out,"The following packages will be REMOVED:",List);
+ ShowList(out,_("The following packages will be REMOVED:"),List);
}
/*}}}*/
// ShowKept - Show kept packages /*{{{*/
@@ -303,7 +323,7 @@ void ShowKept(ostream &out,CacheFile &Cache)
List += string(I.Name()) + " ";
}
- ShowList(out,"The following packages have been kept back",List);
+ ShowList(out,_("The following packages have been kept back"),List);
}
/*}}}*/
// ShowUpgraded - Show upgraded packages /*{{{*/
@@ -322,7 +342,26 @@ void ShowUpgraded(ostream &out,CacheFile &Cache)
List += string(I.Name()) + " ";
}
- ShowList(out,"The following packages will be upgraded",List);
+ ShowList(out,_("The following packages will be upgraded"),List);
+}
+ /*}}}*/
+// ShowDowngraded - Show downgraded packages /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool ShowDowngraded(ostream &out,CacheFile &Cache)
+{
+ string List;
+ for (unsigned J = 0; J < Cache->Head().PackageCount; J++)
+ {
+ pkgCache::PkgIterator I(Cache,Cache.List[J]);
+
+ // Not interesting
+ if (Cache[I].Downgrade() == false || Cache[I].NewInstall() == true)
+ continue;
+
+ List += string(I.Name()) + " ";
+ }
+ return ShowList(out,_("The following packages will be DOWNGRADED"),List);
}
/*}}}*/
// ShowHold - Show held but changed packages /*{{{*/
@@ -339,7 +378,7 @@ bool ShowHold(ostream &out,CacheFile &Cache)
List += string(I.Name()) + " ";
}
- return ShowList(out,"The following held packages will be changed:",List);
+ return ShowList(out,_("The following held packages will be changed:"),List);
}
/*}}}*/
// ShowEssential - Show an essential package warning /*{{{*/
@@ -350,14 +389,15 @@ bool ShowHold(ostream &out,CacheFile &Cache)
bool ShowEssential(ostream &out,CacheFile &Cache)
{
string List;
- bool *Added = new bool[Cache->HeaderP->PackageCount];
- for (unsigned int I = 0; I != Cache->HeaderP->PackageCount; I++)
+ bool *Added = new bool[Cache->Head().PackageCount];
+ for (unsigned int I = 0; I != Cache->Head().PackageCount; I++)
Added[I] = false;
for (unsigned J = 0; J < Cache->Head().PackageCount; J++)
{
pkgCache::PkgIterator I(Cache,Cache.List[J]);
- if ((I->Flags & pkgCache::Flag::Essential) != pkgCache::Flag::Essential)
+ if ((I->Flags & pkgCache::Flag::Essential) != pkgCache::Flag::Essential &&
+ (I->Flags & pkgCache::Flag::Important) != pkgCache::Flag::Important)
continue;
// The essential package is being removed
@@ -374,7 +414,7 @@ bool ShowEssential(ostream &out,CacheFile &Cache)
continue;
      // Print out any essential package dependents that are to be removed
- for (pkgDepCache::DepIterator D = I.CurrentVer().DependsList(); D.end() == false; D++)
+ for (pkgCache::DepIterator D = I.CurrentVer().DependsList(); D.end() == false; D++)
{
// Skip everything but depends
if (D->Type != pkgCache::Dep::PreDepends &&
@@ -389,16 +429,15 @@ bool ShowEssential(ostream &out,CacheFile &Cache)
Added[P->ID] = true;
char S[300];
- sprintf(S,"%s (due to %s) ",P.Name(),I.Name());
+ sprintf(S,_("%s (due to %s) "),P.Name(),I.Name());
List += S;
}
}
}
delete [] Added;
- if (List.empty() == false)
- out << "WARNING: The following essential packages will be removed" << endl;
- return ShowList(out,"This should NOT be done unless you know exactly what you are doing!",List);
+ return ShowList(out,_("WARNING: The following essential packages will be removed\n"
+ "This should NOT be done unless you know exactly what you are doing!"),List);
}
/*}}}*/
// Stats - Show some statistics /*{{{*/
@@ -407,6 +446,7 @@ bool ShowEssential(ostream &out,CacheFile &Cache)
void Stats(ostream &out,pkgDepCache &Dep)
{
unsigned long Upgrade = 0;
+ unsigned long Downgrade = 0;
unsigned long Install = 0;
unsigned long ReInstall = 0;
for (pkgCache::PkgIterator I = Dep.PkgBegin(); I.end() == false; I++)
@@ -414,21 +454,32 @@ void Stats(ostream &out,pkgDepCache &Dep)
if (Dep[I].NewInstall() == true)
Install++;
else
+ {
if (Dep[I].Upgrade() == true)
Upgrade++;
+ else
+ if (Dep[I].Downgrade() == true)
+ Downgrade++;
+ }
+
if (Dep[I].Delete() == false && (Dep[I].iFlags & pkgDepCache::ReInstall) == pkgDepCache::ReInstall)
ReInstall++;
}
- out << Upgrade << " packages upgraded, " <<
- Install << " newly installed, ";
+ ioprintf(out,_("%lu packages upgraded, %lu newly installed, "),
+ Upgrade,Install);
+
if (ReInstall != 0)
- out << ReInstall << " reinstalled, ";
- out << Dep.DelCount() << " to remove and " <<
- Dep.KeepCount() << " not upgraded." << endl;
+ ioprintf(out,_("%lu reinstalled, "),ReInstall);
+ if (Downgrade != 0)
+ ioprintf(out,_("%lu downgraded, "),Downgrade);
+ ioprintf(out,_("%lu to remove and %lu not upgraded.\n"),
+ Dep.DelCount(),Dep.KeepCount());
+
if (Dep.BadCount() != 0)
- out << Dep.BadCount() << " packages not fully installed or removed." << endl;
+ ioprintf(out,_("%lu packages not fully installed or removed.\n"),
+ Dep.BadCount());
}
/*}}}*/
@@ -463,7 +514,7 @@ void CacheFile::Sort()
qsort(List,Cache->Head().PackageCount,sizeof(*List),NameComp);
}
/*}}}*/
-// CacheFile::Open - Open the cache file /*{{{*/
+// CacheFile::CheckDeps - Open the cache file /*{{{*/
// ---------------------------------------------------------------------
/* This routine generates the caches and then opens the dependency cache
and verifies that the system is OK. */
@@ -473,39 +524,39 @@ bool CacheFile::CheckDeps(bool AllowBroken)
return false;
// Check that the system is OK
- if (Cache->DelCount() != 0 || Cache->InstCount() != 0)
+ if (DCache->DelCount() != 0 || DCache->InstCount() != 0)
return _error->Error("Internal Error, non-zero counts");
// Apply corrections for half-installed packages
- if (pkgApplyStatus(*Cache) == false)
+ if (pkgApplyStatus(*DCache) == false)
return false;
// Nothing is broken
- if (Cache->BrokenCount() == 0 || AllowBroken == true)
+ if (DCache->BrokenCount() == 0 || AllowBroken == true)
return true;
// Attempt to fix broken things
if (_config->FindB("APT::Get::Fix-Broken",false) == true)
{
- c1out << "Correcting dependencies..." << flush;
- if (pkgFixBroken(*Cache) == false || Cache->BrokenCount() != 0)
+ c1out << _("Correcting dependencies...") << flush;
+ if (pkgFixBroken(*DCache) == false || DCache->BrokenCount() != 0)
{
- c1out << " failed." << endl;
+ c1out << _(" failed.") << endl;
ShowBroken(c1out,*this,true);
- return _error->Error("Unable to correct dependencies");
+ return _error->Error(_("Unable to correct dependencies"));
}
- if (pkgMinimizeUpgrade(*Cache) == false)
- return _error->Error("Unable to minimize the upgrade set");
+ if (pkgMinimizeUpgrade(*DCache) == false)
+ return _error->Error(_("Unable to minimize the upgrade set"));
- c1out << " Done" << endl;
+ c1out << _(" Done") << endl;
}
else
{
- c1out << "You might want to run `apt-get -f install' to correct these." << endl;
+ c1out << _("You might want to run `apt-get -f install' to correct these.") << endl;
ShowBroken(c1out,*this,true);
- return _error->Error("Unmet dependencies. Try using -f.");
+ return _error->Error(_("Unmet dependencies. Try using -f."));
}
return true;
@@ -540,6 +591,7 @@ bool InstallPackages(CacheFile &Cache,bool ShwKept,bool Ask = true,
Fail |= !ShowHold(c1out,Cache);
if (_config->FindB("APT::Get::Show-Upgraded",false) == true)
ShowUpgraded(c1out,Cache);
+ Fail |= !ShowDowngraded(c1out,Cache);
Essential = !ShowEssential(c1out,Cache);
Fail |= Essential;
Stats(c1out,Cache);
@@ -556,8 +608,8 @@ bool InstallPackages(CacheFile &Cache,bool ShwKept,bool Ask = true,
return true;
// No remove flag
- if (Cache->DelCount() != 0 && _config->FindB("APT::Get::No-Remove",false) == true)
- return _error->Error("Packages need to be removed but No Remove was specified.");
+ if (Cache->DelCount() != 0 && _config->FindB("APT::Get::Remove",true) == false)
+ return _error->Error(_("Packages need to be removed but Remove is disabled."));
// Run the simulator ..
if (_config->FindB("APT::Get::Simulate") == true)
@@ -582,7 +634,7 @@ bool InstallPackages(CacheFile &Cache,bool ShwKept,bool Ask = true,
{
Lock.Fd(GetLock(_config->FindDir("Dir::Cache::Archives") + "lock"));
if (_error->PendingError() == true)
- return _error->Error("Unable to lock the download directory");
+ return _error->Error(_("Unable to lock the download directory"));
}
// Create the download object
@@ -592,18 +644,18 @@ bool InstallPackages(CacheFile &Cache,bool ShwKept,bool Ask = true,
// Read the source list
pkgSourceList List;
if (List.ReadMainList() == false)
- return _error->Error("The list of sources could not be read.");
+ return _error->Error(_("The list of sources could not be read."));
// Create the package manager and prepare to download
- pkgDPkgPM PM(Cache);
- if (PM.GetArchives(&Fetcher,&List,&Recs) == false ||
+ SPtr<pkgPackageManager> PM= _system->CreatePM(Cache);
+ if (PM->GetArchives(&Fetcher,&List,&Recs) == false ||
_error->PendingError() == true)
return false;
// Display statistics
- unsigned long FetchBytes = Fetcher.FetchNeeded();
- unsigned long FetchPBytes = Fetcher.PartialPresent();
- unsigned long DebBytes = Fetcher.TotalNeeded();
+ double FetchBytes = Fetcher.FetchNeeded();
+ double FetchPBytes = Fetcher.PartialPresent();
+ double DebBytes = Fetcher.TotalNeeded();
if (DebBytes != Cache->DebSize())
{
c0out << DebBytes << ',' << Cache->DebSize() << endl;
@@ -611,19 +663,20 @@ bool InstallPackages(CacheFile &Cache,bool ShwKept,bool Ask = true,
}
// Number of bytes
- c1out << "Need to get ";
if (DebBytes != FetchBytes)
- c1out << SizeToStr(FetchBytes) << "B/" << SizeToStr(DebBytes) << 'B';
+ ioprintf(c1out,_("Need to get %sB/%sB of archives. "),
+ SizeToStr(FetchBytes).c_str(),SizeToStr(DebBytes).c_str());
else
- c1out << SizeToStr(DebBytes) << 'B';
-
- c1out << " of archives. After unpacking ";
-
+ ioprintf(c1out,_("Need to get %sB of archives. "),
+ SizeToStr(DebBytes).c_str());
+
// Size delta
if (Cache->UsrSize() >= 0)
- c1out << SizeToStr(Cache->UsrSize()) << "B will be used." << endl;
+ ioprintf(c1out,_("After unpacking %sB will be used.\n"),
+ SizeToStr(Cache->UsrSize()).c_str());
else
- c1out << SizeToStr(-1*Cache->UsrSize()) << "B will be freed." << endl;
+ ioprintf(c1out,_("After unpacking %sB will be freed.\n"),
+ SizeToStr(-1*Cache->UsrSize()).c_str());
if (_error->PendingError() == true)
return false;
@@ -638,7 +691,7 @@ bool InstallPackages(CacheFile &Cache,bool ShwKept,bool Ask = true,
return _error->Errno("statvfs","Couldn't determine free space in %s",
OutputDir.c_str());
if (unsigned(Buf.f_bfree) < (FetchBytes - FetchPBytes)/Buf.f_bsize)
- return _error->Error("Sorry, you don't have enough free space in %s to hold all the .debs.",
+ return _error->Error(_("Sorry, you don't have enough free space in %s to hold all the .debs."),
OutputDir.c_str());
}
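The free-space guard above divides the bytes still to fetch by the filesystem block size and compares that against statvfs's f_bfree for the archive directory. The same check in isolation (directory path and byte count are placeholders for this sketch):

#include <sys/statvfs.h>
#include <cstdio>

// Return true if Dir has at least Needed bytes free, false on error or
// when space is short. f_bfree is counted in f_bsize-sized blocks here,
// matching the calculation used above.
static bool EnoughFree(const char *Dir, unsigned long long Needed)
{
   struct statvfs Buf;
   if (statvfs(Dir, &Buf) != 0)
   {
      std::perror("statvfs");
      return false;
   }
   unsigned long long FreeBytes =
      (unsigned long long)Buf.f_bfree * Buf.f_bsize;
   return FreeBytes >= Needed;
}

int main()
{
   const char *Dir = "/var/cache/apt/archives";    // placeholder path
   bool OK = EnoughFree(Dir, 10ULL * 1024 * 1024); // would 10MB fit?
   std::printf("%s: %s\n", Dir, OK ? "enough space" : "not enough space");
   return 0;
}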
@@ -647,20 +700,23 @@ bool InstallPackages(CacheFile &Cache,bool ShwKept,bool Ask = true,
_config->FindB("APT::Get::Assume-Yes",false) == true)
{
if (Fail == true && _config->FindB("APT::Get::Force-Yes",false) == false)
- return _error->Error("There are problems and -y was used without --force-yes");
+ return _error->Error(_("There are problems and -y was used without --force-yes"));
}
if (Essential == true && Saftey == true)
{
if (_config->FindB("APT::Get::Trivial-Only",false) == true)
- return _error->Error("Trivial Only specified but this is not a trivial operation.");
+ return _error->Error(_("Trivial Only specified but this is not a trivial operation."));
- c2out << "You are about to do something potentially harmful" << endl;
- c2out << "To continue type in the phrase 'Yes, I understand this may be bad'" << endl;
- c2out << " ?] " << flush;
- if (AnalPrompt("Yes, I understand this may be bad") == false)
+ const char *Prompt = _("Yes, do as I say!");
+ ioprintf(c2out,
+ _("You are about to do something potentially harmful\n"
+ "To continue type in the phrase '%s'\n"
+ " ?] "),Prompt);
+ c2out << flush;
+ if (AnalPrompt(Prompt) == false)
{
- c2out << "Abort." << endl;
+ c2out << _("Abort.") << endl;
exit(1);
}
}
@@ -670,16 +726,16 @@ bool InstallPackages(CacheFile &Cache,bool ShwKept,bool Ask = true,
if (Ask == true || Fail == true)
{
if (_config->FindB("APT::Get::Trivial-Only",false) == true)
- return _error->Error("Trivial Only specified but this is not a trivial operation.");
+ return _error->Error(_("Trivial Only specified but this is not a trivial operation."));
if (_config->FindI("quiet",0) < 2 &&
_config->FindB("APT::Get::Assume-Yes",false) == false)
{
- c2out << "Do you want to continue? [Y/n] " << flush;
+ c2out << _("Do you want to continue? [Y/n] ") << flush;
if (YnPrompt() == false)
{
- c2out << "Abort." << endl;
+ c2out << _("Abort.") << endl;
exit(1);
}
}
@@ -695,12 +751,17 @@ bool InstallPackages(CacheFile &Cache,bool ShwKept,bool Ask = true,
I->Owner->FileSize << ' ' << I->Owner->MD5Sum() << endl;
return true;
}
+
+ /* Unlock the dpkg lock if we are not going to be doing an install
+ after. */
+ if (_config->FindB("APT::Get::Download-Only",false) == true)
+ _system->UnLock();
// Run it
while (1)
{
bool Transient = false;
- if (_config->FindB("APT::Get::No-Download",false) == true)
+ if (_config->FindB("APT::Get::Download",true) == false)
{
for (pkgAcquire::Item **I = Fetcher.ItemsBegin(); I < Fetcher.ItemsEnd();)
{
@@ -739,8 +800,8 @@ bool InstallPackages(CacheFile &Cache,bool ShwKept,bool Ask = true,
continue;
}
- cerr << "Failed to fetch " << (*I)->DescURI() << endl;
- cerr << " " << (*I)->ErrorText << endl;
+ fprintf(stderr,_("Failed to fetch %s %s\n"),(*I)->DescURI().c_str(),
+ (*I)->ErrorText.c_str());
Failed = true;
}
@@ -748,7 +809,7 @@ bool InstallPackages(CacheFile &Cache,bool ShwKept,bool Ask = true,
'failures' then the user must specify -m. Furthermore, there
is no such thing as a transient error in no-download mode! */
if (Transient == true &&
- _config->FindB("APT::Get::No-Download",false) == true)
+ _config->FindB("APT::Get::Download",true) == false)
{
Transient = false;
Failed = true;
@@ -757,27 +818,28 @@ bool InstallPackages(CacheFile &Cache,bool ShwKept,bool Ask = true,
if (_config->FindB("APT::Get::Download-Only",false) == true)
{
if (Failed == true && _config->FindB("APT::Get::Fix-Missing",false) == false)
- return _error->Error("Some files failed to download");
+ return _error->Error(_("Some files failed to download"));
+ c1out << _("Download complete and in download only mode") << endl;
return true;
}
if (Failed == true && _config->FindB("APT::Get::Fix-Missing",false) == false)
{
- return _error->Error("Unable to fetch some archives, maybe try with --fix-missing?");
+ return _error->Error(_("Unable to fetch some archives, maybe run apt-get update or try with --fix-missing?"));
}
if (Transient == true && Failed == true)
- return _error->Error("--fix-missing and media swapping is not currently supported");
+ return _error->Error(_("--fix-missing and media swapping is not currently supported"));
// Try to deal with missing package files
- if (Failed == true && PM.FixMissing() == false)
+ if (Failed == true && PM->FixMissing() == false)
{
- cerr << "Unable to correct missing packages." << endl;
- return _error->Error("Aborting Install.");
+ cerr << _("Unable to correct missing packages.") << endl;
+ return _error->Error(_("Aborting Install."));
}
- Cache.ReleaseLock();
- pkgPackageManager::OrderResult Res = PM.DoInstall();
+ _system->UnLock();
+ pkgPackageManager::OrderResult Res = PM->DoInstall();
if (Res == pkgPackageManager::Failed || _error->PendingError() == true)
return false;
if (Res == pkgPackageManager::Completed)
@@ -785,8 +847,10 @@ bool InstallPackages(CacheFile &Cache,bool ShwKept,bool Ask = true,
// Reload the fetcher object and loop again for media swapping
Fetcher.Shutdown();
- if (PM.GetArchives(&Fetcher,&List,&Recs) == false)
+ if (PM->GetArchives(&Fetcher,&List,&Recs) == false)
return false;
+
+ _system->Lock();
}
}
/*}}}*/
@@ -804,16 +868,18 @@ bool TryToInstall(pkgCache::PkgIterator Pkg,pkgDepCache &Cache,
Pkg.ProvidesList()->NextProvides == 0)
{
pkgCache::PkgIterator Tmp = Pkg.ProvidesList().OwnerPkg();
- c1out << "Note, selecting " << Tmp.Name() << " instead of " << Pkg.Name() << endl;
+ ioprintf(c1out,_("Note, selecting %s instead of %s\n"),
+ Tmp.Name(),Pkg.Name());
Pkg = Tmp;
}
// Handle the no-upgrade case
- if (_config->FindB("APT::Get::no-upgrade",false) == true &&
+ if (_config->FindB("APT::Get::upgrade",true) == false &&
Pkg->CurrentVer != 0)
{
if (AllowFail == true)
- c1out << "Skipping " << Pkg.Name() << ", it is already installed and no-upgrade is set." << endl;
+ ioprintf(c1out,_("Skipping %s, it is already installed and upgrade is not set.\n"),
+ Pkg.Name());
return true;
}
@@ -821,9 +887,12 @@ bool TryToInstall(pkgCache::PkgIterator Pkg,pkgDepCache &Cache,
pkgDepCache::StateCache &State = Cache[Pkg];
if (Remove == true && Pkg->CurrentVer == 0)
{
+ /* We want to continue searching for regex hits, so we return false here
+ otherwise this is not really an error. */
if (AllowFail == false)
- return false;
- return _error->Error("Package %s is not installed",Pkg.Name());
+ return false;
+ ioprintf(c1out,_("Package %s is not installed, so not removed"),Pkg.Name());
+ return true;
}
if (State.CandidateVer == 0 && Remove == false)
@@ -833,7 +902,8 @@ bool TryToInstall(pkgCache::PkgIterator Pkg,pkgDepCache &Cache,
if (Pkg->ProvidesList != 0)
{
- c1out << "Package " << Pkg.Name() << " is a virtual package provided by:" << endl;
+ ioprintf(c1out,_("Package %s is a virtual package provided by:\n"),
+ Pkg.Name());
pkgCache::PrvIterator I = Pkg.ProvidesList();
for (; I.end() == false; I++)
@@ -844,32 +914,38 @@ bool TryToInstall(pkgCache::PkgIterator Pkg,pkgDepCache &Cache,
{
if (Cache[Pkg].Install() == true && Cache[Pkg].NewInstall() == false)
c1out << " " << Pkg.Name() << " " << I.OwnerVer().VerStr() <<
- " [Installed]"<< endl;
+ _(" [Installed]") << endl;
else
c1out << " " << Pkg.Name() << " " << I.OwnerVer().VerStr() << endl;
}
}
- c1out << "You should explicitly select one to install." << endl;
+ c1out << _("You should explicitly select one to install.") << endl;
}
else
{
- c1out << "Package " << Pkg.Name() << " has no available version, but exists in the database." << endl;
- c1out << "This typically means that the package was mentioned in a dependency and " << endl;
- c1out << "never uploaded, has been obsoleted or is not available with the contents " << endl;
- c1out << "of sources.list" << endl;
+ ioprintf(c1out,
+ _("Package %s has no available version, but exists in the database.\n"
+ "This typically means that the package was mentioned in a dependency and\n"
+ "never uploaded, has been obsoleted or is not available with the contents\n"
+ "of sources.list\n"),Pkg.Name());
string List;
+ SPtrArray<bool> Seen = new bool[Cache.Head().PackageFileCount];
+ memset(Seen,0,Cache.Head().PackageFileCount*sizeof(*Seen));
pkgCache::DepIterator Dep = Pkg.RevDependsList();
for (; Dep.end() == false; Dep++)
{
if (Dep->Type != pkgCache::Dep::Replaces)
continue;
+ if (Seen[Dep.ParentPkg()->ID] == true)
+ continue;
+ Seen[Dep.ParentPkg()->ID] = true;
List += string(Dep.ParentPkg().Name()) + " ";
}
- ShowList(c1out,"However the following packages replace it:",List);
+ ShowList(c1out,_("However the following packages replace it:"),List);
}
- _error->Error("Package %s has no installation candidate",Pkg.Name());
+ _error->Error(_("Package %s has no installation candidate"),Pkg.Name());
return false;
}
@@ -889,14 +965,16 @@ bool TryToInstall(pkgCache::PkgIterator Pkg,pkgDepCache &Cache,
if (_config->FindB("APT::Get::ReInstall",false) == true)
{
if (Pkg->CurrentVer == 0 || Pkg.CurrentVer().Downloadable() == false)
- c1out << "Sorry, re-installation of " << Pkg.Name() << " is not possible, it cannot be downloaded" << endl;
+ ioprintf(c1out,_("Sorry, re-installation of %s is not possible, it cannot be downloaded."),
+ Pkg.Name());
else
Cache.SetReInstall(Pkg,true);
}
else
{
if (AllowFail == true)
- c1out << "Sorry, " << Pkg.Name() << " is already the newest version" << endl;
+ ioprintf(c1out,_("Sorry, %s is already the newest version.\n"),
+ Pkg.Name());
}
}
else
@@ -908,12 +986,139 @@ bool TryToInstall(pkgCache::PkgIterator Pkg,pkgDepCache &Cache,
return true;
}
/*}}}*/
+// TryToChangeVer - Try to change a candidate version /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool TryToChangeVer(pkgCache::PkgIterator Pkg,pkgDepCache &Cache,
+ const char *VerTag,bool IsRel)
+{
+ pkgVersionMatch Match(VerTag,(IsRel == true?pkgVersionMatch::Release:pkgVersionMatch::Version));
+
+ pkgCache::VerIterator Ver = Match.Find(Pkg);
+
+ if (Ver.end() == true)
+ {
+ if (IsRel == true)
+ return _error->Error(_("Release '%s' for '%s' was not found"),
+ VerTag,Pkg.Name());
+ return _error->Error(_("Version '%s' for '%s' was not found"),
+ VerTag,Pkg.Name());
+ }
+
+ if (strcmp(VerTag,Ver.VerStr()) != 0)
+ {
+ ioprintf(c1out,_("Selected version %s (%s) for %s\n"),
+ Ver.VerStr(),Ver.RelStr().c_str(),Pkg.Name());
+ }
+
+ Cache.SetCandidateVersion(Ver);
+ return true;
+}
+ /*}}}*/
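A minimal sketch of driving the same candidate pinning outside DoInstall, using only the calls visible above; the helper name is invented for illustration and the apt-pkg headers already used by this file are assumed:

// Illustrative only: pin the candidate version of a package by version
// string ('pkg=1.2.4-3' style) or by release ('pkg/stable' style).
static bool PinCandidate(pkgDepCache &Cache,const char *Name,
                         const char *Tag,bool IsRel)
{
   pkgCache::PkgIterator Pkg = Cache.FindPkg(Name);
   if (Pkg.end() == true)
      return false;                        // no such package
   pkgVersionMatch Match(Tag,IsRel == true ? pkgVersionMatch::Release :
                                             pkgVersionMatch::Version);
   pkgCache::VerIterator Ver = Match.Find(Pkg);
   if (Ver.end() == true)
      return false;                        // nothing satisfies the tag
   Cache.SetCandidateVersion(Ver);         // same call TryToChangeVer makes
   return true;
}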
+// FindSrc - Find a source record /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+pkgSrcRecords::Parser *FindSrc(const char *Name,pkgRecords &Recs,
+ pkgSrcRecords &SrcRecs,string &Src,
+ pkgDepCache &Cache)
+{
+ // We want to pull the version off the package specification..
+ string VerTag;
+ string TmpSrc = Name;
+ string::size_type Slash = TmpSrc.rfind('=');
+ if (Slash != string::npos)
+ {
+ VerTag = string(TmpSrc.begin() + Slash + 1,TmpSrc.end());
+ TmpSrc = string(TmpSrc.begin(),TmpSrc.begin() + Slash);
+ }
+
+ /* Lookup the version of the package we would install if we were to
+ install a version and determine the source package name, then look
+ in the archive for a source package of the same name. In theory
+ we could stash the version string as well and match that too but
+ today there aren't multi source versions in the archive. */
+ if (_config->FindB("APT::Get::Only-Source") == false &&
+ VerTag.empty() == true)
+ {
+ pkgCache::PkgIterator Pkg = Cache.FindPkg(TmpSrc);
+ if (Pkg.end() == false)
+ {
+ pkgCache::VerIterator Ver = Cache.GetCandidateVer(Pkg);
+ if (Ver.end() == false)
+ {
+ pkgRecords::Parser &Parse = Recs.Lookup(Ver.FileList());
+ Src = Parse.SourcePkg();
+ }
+ }
+ }
+
+ // No source package name..
+ if (Src.empty() == true)
+ Src = TmpSrc;
+
+ // The best hit
+ pkgSrcRecords::Parser *Last = 0;
+ unsigned long Offset = 0;
+ string Version;
+ bool IsMatch = false;
+
+ // If we are matching by version then we need exact matches to be happy
+ if (VerTag.empty() == false)
+ IsMatch = true;
+
+ /* Iterate over all of the hits, which includes the resulting
+ binary packages in the search */
+ pkgSrcRecords::Parser *Parse;
+ SrcRecs.Restart();
+ while ((Parse = SrcRecs.Find(Src.c_str(),false)) != 0)
+ {
+ string Ver = Parse->Version();
+
+ // Skip name mismatches
+ if (IsMatch == true && Parse->Package() != Src)
+ continue;
+
+ if (VerTag.empty() == false)
+ {
+ /* Don't want to fall through because we are doing exact version
+ matching. */
+ if (Cache.VS().CmpVersion(VerTag,Ver) != 0)
+ continue;
+
+ Last = Parse;
+ Offset = Parse->Offset();
+ break;
+ }
+
+ // Newer version or an exact match
+ if (Last == 0 || Cache.VS().CmpVersion(Version,Ver) < 0 ||
+ (Parse->Package() == Src && IsMatch == false))
+ {
+ IsMatch = Parse->Package() == Src;
+ Last = Parse;
+ Offset = Parse->Offset();
+ Version = Ver;
+ }
+ }
+
+ if (Last == 0)
+ return 0;
+
+ if (Last->Jump(Offset) == false)
+ return 0;
+
+ return Last;
+}
+ /*}}}*/
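FindSrc is shared by the source and build-dep commands further down (DoSource and DoBuildDep call it identically). When the argument carries an '=version' suffix the loop above insists on an exact version match; otherwise it keeps the newest source version found, preferring records whose package name matches the requested name exactly.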
// DoUpdate - Update the package lists /*{{{*/
// ---------------------------------------------------------------------
/* */
-bool DoUpdate(CommandLine &)
+bool DoUpdate(CommandLine &CmdL)
{
+ if (CmdL.FileSize() != 1)
+ return _error->Error(_("The update command takes no arguments"));
+
// Get the source list
pkgSourceList List;
if (List.ReadMainList() == false)
@@ -925,7 +1130,7 @@ bool DoUpdate(CommandLine &)
{
Lock.Fd(GetLock(_config->FindDir("Dir::State::Lists") + "lock"));
if (_error->PendingError() == true)
- return _error->Error("Unable to lock the list directory");
+ return _error->Error(_("Unable to lock the list directory"));
}
// Create the download object
@@ -933,13 +1138,8 @@ bool DoUpdate(CommandLine &)
pkgAcquire Fetcher(&Stat);
// Populate it with the source selection
- pkgSourceList::const_iterator I;
- for (I = List.begin(); I != List.end(); I++)
- {
- new pkgAcqIndex(&Fetcher,I);
- if (_error->PendingError() == true)
+ if (List.GetIndexes(&Fetcher) == false)
return false;
- }
// Run it
if (Fetcher.Run() == pkgAcquire::Failed)
@@ -953,8 +1153,8 @@ bool DoUpdate(CommandLine &)
(*I)->Finished();
- cerr << "Failed to fetch " << (*I)->DescURI() << endl;
- cerr << " " << (*I)->ErrorText << endl;
+ fprintf(stderr,_("Failed to fetch %s %s\n"),(*I)->DescURI().c_str(),
+ (*I)->ErrorText.c_str());
Failed = true;
}
@@ -972,7 +1172,8 @@ bool DoUpdate(CommandLine &)
return false;
if (Failed == true)
- return _error->Error("Some index files failed to download, they have been ignored, or old ones used instead.");
+ return _error->Error(_("Some index files failed to download, they have been ignored, or old ones used instead."));
+
return true;
}
/*}}}*/
@@ -990,7 +1191,7 @@ bool DoUpgrade(CommandLine &CmdL)
if (pkgAllUpgrade(Cache) == false)
{
ShowBroken(c1out,Cache,false);
- return _error->Error("Internal Error, AllUpgrade broke stuff");
+ return _error->Error(_("Internal Error, AllUpgrade broke stuff"));
}
return InstallPackages(Cache,true);
@@ -1017,7 +1218,7 @@ bool DoInstall(CommandLine &CmdL)
bool DefRemove = false;
if (strcasecmp(CmdL.FileList[0],"remove") == 0)
DefRemove = true;
-
+
for (const char **I = CmdL.FileList + 1; *I != 0; I++)
{
// Duplicate the string
@@ -1027,8 +1228,10 @@ bool DoInstall(CommandLine &CmdL)
continue;
strcpy(S,*I);
- // See if we are removing the package
+ // See if we are removing and special indicators..
bool Remove = DefRemove;
+ char *VerTag = 0;
+ bool VerIsRel = false;
while (Cache->FindPkg(S).end() == true)
{
// Handle an optional end tag indicating what to do
@@ -1045,6 +1248,23 @@ bool DoInstall(CommandLine &CmdL)
S[--Length] = 0;
continue;
}
+
+ char *Slash = strchr(S,'=');
+ if (Slash != 0)
+ {
+ VerIsRel = false;
+ *Slash = 0;
+ VerTag = Slash + 1;
+ }
+
+ Slash = strchr(S,'/');
+ if (Slash != 0)
+ {
+ VerIsRel = true;
+ *Slash = 0;
+ VerTag = Slash + 1;
+ }
+
break;
}
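These two checks implement the version and release suffixes mentioned in the changelog below: 'pkg=1.2.4-3' requests one exact version, while 'pkg/stable' or 'pkg/2.2' asks for whatever version the named release carries. For example, 'apt-get install hello=1.3-2' or 'apt-get install hello/unstable' (package name and version purely illustrative); TryToChangeVer above then turns the tag into a candidate version.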
@@ -1056,19 +1276,24 @@ bool DoInstall(CommandLine &CmdL)
// Check if the name is a regex
const char *I;
for (I = S; *I != 0; I++)
- if (*I == '.' || *I == '?' || *I == '*')
+ if (*I == '.' || *I == '?' || *I == '*' || *I == '|')
break;
if (*I == 0)
- return _error->Error("Couldn't find package %s",S);
+ return _error->Error(_("Couldn't find package %s"),S);
// Regexs must always be confirmed
ExpectedInst += 1000;
// Compile the regex pattern
regex_t Pattern;
- if (regcomp(&Pattern,S,REG_EXTENDED | REG_ICASE |
- REG_NOSUB) != 0)
- return _error->Error("Regex compilation error");
+ int Res;
+ if ((Res = regcomp(&Pattern,S,REG_EXTENDED | REG_ICASE |
+ REG_NOSUB)) != 0)
+ {
+ char Error[300];
+ regerror(Res,&Pattern,Error,sizeof(Error));
+ return _error->Error(_("Regex compilation error - %s"),Error);
+ }
// Run over the matches
bool Hit = false;
@@ -1077,16 +1302,23 @@ bool DoInstall(CommandLine &CmdL)
if (regexec(&Pattern,Pkg.Name(),0,0,0) != 0)
continue;
+ if (VerTag != 0)
+ if (TryToChangeVer(Pkg,Cache,VerTag,VerIsRel) == false)
+ return false;
+
Hit |= TryToInstall(Pkg,Cache,Fix,Remove,BrokenFix,
ExpectedInst,false);
}
regfree(&Pattern);
if (Hit == false)
- return _error->Error("Couldn't find package %s",S);
+ return _error->Error(_("Couldn't find package %s"),S);
}
else
{
+ if (VerTag != 0)
+ if (TryToChangeVer(Pkg,Cache,VerTag,VerIsRel) == false)
+ return false;
if (TryToInstall(Pkg,Cache,Fix,Remove,BrokenFix,ExpectedInst) == false)
return false;
}
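The regcomp change in this hunk reports the library's own diagnostic through regerror instead of a generic message. A standalone sketch of that POSIX pattern, with an illustrative expression and package name:

#include <regex.h>
#include <stdio.h>

int main()
{
   regex_t Pattern;
   int Res = regcomp(&Pattern,"lib.*-dev",REG_EXTENDED | REG_ICASE | REG_NOSUB);
   if (Res != 0)
   {
      char Error[300];
      regerror(Res,&Pattern,Error,sizeof(Error));  // human-readable message
      fprintf(stderr,"Regex compilation error - %s\n",Error);
      return 1;
   }
   // REG_NOSUB: only whether the name matches matters, not where
   if (regexec(&Pattern,"libc6-dev",0,0,0) == 0)
      printf("match\n");
   regfree(&Pattern);
   return 0;
}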
@@ -1097,10 +1329,10 @@ bool DoInstall(CommandLine &CmdL)
packages */
if (BrokenFix == true && Cache->BrokenCount() != 0)
{
- c1out << "You might want to run `apt-get -f install' to correct these:" << endl;
+ c1out << _("You might want to run `apt-get -f install' to correct these:") << endl;
ShowBroken(c1out,Cache,false);
- return _error->Error("Unmet dependencies. Try 'apt-get -f install' with no packages (or specify a solution).");
+ return _error->Error(_("Unmet dependencies. Try 'apt-get -f install' with no packages (or specify a solution)."));
}
// Call the scored problem resolver
@@ -1111,22 +1343,24 @@ bool DoInstall(CommandLine &CmdL)
// Now we check the state of the packages,
if (Cache->BrokenCount() != 0)
{
- c1out << "Some packages could not be installed. This may mean that you have" << endl;
- c1out << "requested an impossible situation or if you are using the unstable" << endl;
- c1out << "distribution that some required packages have not yet been created" << endl;
- c1out << "or been moved out of Incoming." << endl;
+ c1out <<
+ _("Some packages could not be installed. This may mean that you have\n"
+ "requested an impossible situation or if you are using the unstable\n"
+ "distribution that some required packages have not yet been created\n"
+ "or been moved out of Incoming.") << endl;
if (Packages == 1)
{
c1out << endl;
- c1out << "Since you only requested a single operation it is extremely likely that" << endl;
- c1out << "the package is simply not installable and a bug report against" << endl;
- c1out << "that package should be filed." << endl;
+ c1out <<
+ _("Since you only requested a single operation it is extremely likely that\n"
+ "the package is simply not installable and a bug report against\n"
+ "that package should be filed.") << endl;
}
- c1out << "The following information may help to resolve the situation:" << endl;
+ c1out << _("The following information may help to resolve the situation:") << endl;
c1out << endl;
ShowBroken(c1out,Cache,false);
- return _error->Error("Sorry, broken packages");
+ return _error->Error(_("Sorry, broken packages"));
}
/* Print out a list of packages that are going to be installed extra
@@ -1149,7 +1383,7 @@ bool DoInstall(CommandLine &CmdL)
List += string(I.Name()) + " ";
}
- ShowList(c1out,"The following extra packages will be installed:",List);
+ ShowList(c1out,_("The following extra packages will be installed:"),List);
}
// See if we need to prompt
@@ -1168,15 +1402,15 @@ bool DoDistUpgrade(CommandLine &CmdL)
if (Cache.Open() == false || Cache.CheckDeps() == false)
return false;
- c0out << "Calculating Upgrade... " << flush;
+ c0out << _("Calculating Upgrade... ") << flush;
if (pkgDistUpgrade(*Cache) == false)
{
- c0out << "Failed" << endl;
+ c0out << _("Failed") << endl;
ShowBroken(c1out,Cache,false);
return false;
}
- c0out << "Done" << endl;
+ c0out << _("Done") << endl;
return InstallPackages(Cache,true);
}
@@ -1226,7 +1460,7 @@ bool DoDSelectUpgrade(CommandLine &CmdL)
pkgProblemResolver Fix(Cache);
// Hold back held packages.
- if (_config->FindB("APT::Ingore-Hold",false) == false)
+ if (_config->FindB("APT::Ignore-Hold",false) == false)
{
for (pkgCache::PkgIterator I = Cache->PkgBegin(); I.end() == false; I++)
{
@@ -1273,7 +1507,7 @@ bool DoClean(CommandLine &CmdL)
{
Lock.Fd(GetLock(_config->FindDir("Dir::Cache::Archives") + "lock"));
if (_error->PendingError() == true)
- return _error->Error("Unable to lock the download directory");
+ return _error->Error(_("Unable to lock the download directory"));
}
pkgAcquire Fetcher;
@@ -1306,7 +1540,7 @@ bool DoAutoClean(CommandLine &CmdL)
{
Lock.Fd(GetLock(_config->FindDir("Dir::Cache::Archives") + "lock"));
if (_error->PendingError() == true)
- return _error->Error("Unable to lock the download directory");
+ return _error->Error(_("Unable to lock the download directory"));
}
CacheFile Cache;
@@ -1349,12 +1583,12 @@ bool DoSource(CommandLine &CmdL)
return false;
if (CmdL.FileSize() <= 1)
- return _error->Error("Must specify at least one package to fetch source for");
+ return _error->Error(_("Must specify at least one package to fetch source for"));
// Read the source list
pkgSourceList List;
if (List.ReadMainList() == false)
- return _error->Error("The list of sources could not be read.");
+ return _error->Error(_("The list of sources could not be read."));
// Create the text record parsers
pkgRecords Recs(Cache);
@@ -1373,61 +1607,14 @@ bool DoSource(CommandLine &CmdL)
for (const char **I = CmdL.FileList + 1; *I != 0; I++, J++)
{
string Src;
-
- /* Lookup the version of the package we would install if we were to
- install a version and determine the source package name, then look
- in the archive for a source package of the same name. In theory
- we could stash the version string as well and match that too but
- today there aren't multi source versions in the archive. */
- pkgCache::PkgIterator Pkg = Cache->FindPkg(*I);
- if (Pkg.end() == false)
- {
- pkgCache::VerIterator Ver = Cache->GetCandidateVer(Pkg);
- if (Ver.end() == false)
- {
- pkgRecords::Parser &Parse = Recs.Lookup(Ver.FileList());
- Src = Parse.SourcePkg();
- }
- }
-
- // No source package name..
- if (Src.empty() == true)
- Src = *I;
-
- // The best hit
- pkgSrcRecords::Parser *Last = 0;
- unsigned long Offset = 0;
- string Version;
- bool IsMatch = false;
-
- // Iterate over all of the hits
- pkgSrcRecords::Parser *Parse;
- SrcRecs.Restart();
- while ((Parse = SrcRecs.Find(Src.c_str(),false)) != 0)
- {
- string Ver = Parse->Version();
-
- // Skip name mismatches
- if (IsMatch == true && Parse->Package() != Src)
- continue;
-
- // Newer version or an exact match
- if (Last == 0 || pkgVersionCompare(Version,Ver) < 0 ||
- (Parse->Package() == Src && IsMatch == false))
- {
- IsMatch = Parse->Package() == Src;
- Last = Parse;
- Offset = Parse->Offset();
- Version = Ver;
- }
- }
+ pkgSrcRecords::Parser *Last = FindSrc(*I,Recs,SrcRecs,Src,*Cache);
if (Last == 0)
- return _error->Error("Unable to find a source package for %s",Src.c_str());
+ return _error->Error(_("Unable to find a source package for %s"),Src.c_str());
// Back track
vector<pkgSrcRecords::File> Lst;
- if (Last->Jump(Offset) == false || Last->Files(Lst) == false)
+ if (Last->Files(Lst) == false)
return false;
// Load them into the fetcher
@@ -1435,40 +1622,33 @@ bool DoSource(CommandLine &CmdL)
I != Lst.end(); I++)
{
// Try to guess what sort of file it is we are getting.
- string Comp;
- if (I->Path.find(".dsc") != string::npos)
+ if (I->Type == "dsc")
{
- Comp = "dsc";
Dsc[J].Package = Last->Package();
Dsc[J].Version = Last->Version();
Dsc[J].Dsc = flNotDir(I->Path);
}
- if (I->Path.find(".tar.gz") != string::npos)
- Comp = "tar";
- if (I->Path.find(".diff.gz") != string::npos)
- Comp = "diff";
-
// Diff only mode only fetches .diff files
if (_config->FindB("APT::Get::Diff-Only",false) == true &&
- Comp != "diff")
+ I->Type != "diff")
continue;
// Tar only mode only fetches .tar files
if (_config->FindB("APT::Get::Tar-Only",false) == true &&
- Comp != "tar")
+ I->Type != "tar")
continue;
- new pkgAcqFile(&Fetcher,Last->Source()->ArchiveURI(I->Path),
- I->MD5Hash,I->Size,Last->Source()->SourceInfo(Src,
- Last->Version(),Comp),Src);
+ new pkgAcqFile(&Fetcher,Last->Index().ArchiveURI(I->Path),
+ I->MD5Hash,I->Size,
+ Last->Index().SourceInfo(*Last,*I),Src);
}
}
// Display statistics
- unsigned long FetchBytes = Fetcher.FetchNeeded();
- unsigned long FetchPBytes = Fetcher.PartialPresent();
- unsigned long DebBytes = Fetcher.TotalNeeded();
+ double FetchBytes = Fetcher.FetchNeeded();
+ double FetchPBytes = Fetcher.PartialPresent();
+ double DebBytes = Fetcher.TotalNeeded();
// Check for enough free space
struct statvfs Buf;
@@ -1477,21 +1657,21 @@ bool DoSource(CommandLine &CmdL)
return _error->Errno("statvfs","Couldn't determine free space in %s",
OutputDir.c_str());
if (unsigned(Buf.f_bfree) < (FetchBytes - FetchPBytes)/Buf.f_bsize)
- return _error->Error("Sorry, you don't have enough free space in %s",
+ return _error->Error(_("Sorry, you don't have enough free space in %s"),
OutputDir.c_str());
// Number of bytes
- c1out << "Need to get ";
if (DebBytes != FetchBytes)
- c1out << SizeToStr(FetchBytes) << "B/" << SizeToStr(DebBytes) << 'B';
+ ioprintf(c1out,_("Need to get %sB/%sB of source archives.\n"),
+ SizeToStr(FetchBytes).c_str(),SizeToStr(DebBytes).c_str());
else
- c1out << SizeToStr(DebBytes) << 'B';
- c1out << " of source archives." << endl;
-
+ ioprintf(c1out,_("Need to get %sB of source archives.\n"),
+ SizeToStr(DebBytes).c_str());
+
if (_config->FindB("APT::Get::Simulate",false) == true)
{
for (unsigned I = 0; I != J; I++)
- cout << "Fetch Source " << Dsc[I].Package << endl;
+ ioprintf(cout,_("Fetch Source %s\n"),Dsc[I].Package.c_str());
return true;
}
@@ -1517,16 +1697,19 @@ bool DoSource(CommandLine &CmdL)
(*I)->Complete == true)
continue;
- cerr << "Failed to fetch " << (*I)->DescURI() << endl;
- cerr << " " << (*I)->ErrorText << endl;
+ fprintf(stderr,_("Failed to fetch %s %s\n"),(*I)->DescURI().c_str(),
+ (*I)->ErrorText.c_str());
Failed = true;
}
if (Failed == true)
- return _error->Error("Failed to fetch some archives.");
+ return _error->Error(_("Failed to fetch some archives."));
if (_config->FindB("APT::Get::Download-only",false) == true)
+ {
+ c1out << _("Download complete and in download only mode") << endl;
return true;
-
+ }
+
// Unpack the sources
pid_t Process = ExecFork();
@@ -1534,7 +1717,7 @@ bool DoSource(CommandLine &CmdL)
{
for (unsigned I = 0; I != J; I++)
{
- string Dir = Dsc[I].Package + '-' + pkgBaseVersion(Dsc[I].Version.c_str());
+ string Dir = Dsc[I].Package + '-' + Cache->VS().UpstreamVersion(Dsc[I].Version.c_str());
// Diff only mode only fetches .diff files
if (_config->FindB("APT::Get::Diff-Only",false) == true ||
@@ -1547,7 +1730,8 @@ bool DoSource(CommandLine &CmdL)
if (stat(Dir.c_str(),&Stat) == 0 &&
S_ISDIR(Stat.st_mode) != 0)
{
- c0out << "Skipping unpack of already unpacked source in " << Dir << endl;
+ ioprintf(c0out ,_("Skipping unpack of already unpacked source in %s\n"),
+ Dir.c_str());
}
else
{
@@ -1558,7 +1742,7 @@ bool DoSource(CommandLine &CmdL)
Dsc[I].Dsc.c_str());
if (system(S) != 0)
{
- cerr << "Unpack command '" << S << "' failed." << endl;
+ fprintf(stderr,_("Unpack command '%s' failed.\n"),S);
_exit(1);
}
}
@@ -1575,7 +1759,7 @@ bool DoSource(CommandLine &CmdL)
if (system(S) != 0)
{
- cerr << "Build command '" << S << "' failed." << endl;
+ fprintf(stderr,_("Build command '%s' failed.\n"),S);
_exit(1);
}
}
@@ -1594,58 +1778,216 @@ bool DoSource(CommandLine &CmdL)
}
if (WIFEXITED(Status) == 0 || WEXITSTATUS(Status) != 0)
- return _error->Error("Child process failed");
+ return _error->Error(_("Child process failed"));
+
+ return true;
+}
+ /*}}}*/
+// DoBuildDep - Install/removes packages to satisfy build dependencies /*{{{*/
+// ---------------------------------------------------------------------
+/* This function will look at the build depends list of the given source
+ package and install the necessary packages to make it true, or fail. */
+bool DoBuildDep(CommandLine &CmdL)
+{
+ CacheFile Cache;
+ if (Cache.Open(true) == false)
+ return false;
+
+ if (CmdL.FileSize() <= 1)
+ return _error->Error(_("Must specify at least one package to check builddeps for"));
+
+ // Read the source list
+ pkgSourceList List;
+ if (List.ReadMainList() == false)
+ return _error->Error(_("The list of sources could not be read."));
+ // Create the text record parsers
+ pkgRecords Recs(Cache);
+ pkgSrcRecords SrcRecs(List);
+ if (_error->PendingError() == true)
+ return false;
+
+ // Create the download object
+ AcqTextStatus Stat(ScreenWidth,_config->FindI("quiet",0));
+ pkgAcquire Fetcher(&Stat);
+
+ unsigned J = 0;
+ for (const char **I = CmdL.FileList + 1; *I != 0; I++, J++)
+ {
+ string Src;
+ pkgSrcRecords::Parser *Last = FindSrc(*I,Recs,SrcRecs,Src,*Cache);
+ if (Last == 0)
+ return _error->Error(_("Unable to find a source package for %s"),Src.c_str());
+
+ // Process the build-dependencies
+ vector<pkgSrcRecords::Parser::BuildDepRec> BuildDeps;
+ if (Last->BuildDepends(BuildDeps) == false)
+ return _error->Error(_("Unable to get build-dependency information for %s"),Src.c_str());
+
+ if (BuildDeps.size() == 0)
+ {
+ ioprintf(c1out,_("%s has no build depends.\n"),Src.c_str());
+ continue;
+ }
+
+ // Install the requested packages
+ unsigned int ExpectedInst = 0;
+ vector <pkgSrcRecords::Parser::BuildDepRec>::iterator D;
+ pkgProblemResolver Fix(Cache);
+ for (D = BuildDeps.begin(); D != BuildDeps.end(); D++)
+ {
+ pkgCache::PkgIterator Pkg = Cache->FindPkg((*D).Package);
+ if (Pkg.end() == true)
+ return _error->Error(_("%s dependency on %s cannot be satisfied because the package %s cannot be found"),
+ Last->BuildDepType((*D).Type),Src.c_str(),(*D).Package.c_str());
+ pkgCache::VerIterator IV = (*Cache)[Pkg].InstVerIter(*Cache);
+
+ if ((*D).Type == pkgSrcRecords::Parser::BuildConflict ||
+ (*D).Type == pkgSrcRecords::Parser::BuildConflictIndep)
+ {
+ /* conflict; need to remove if we have an installed version
+ that satisfies the version criteria */
+ if (IV.end() == false &&
+ Cache->VS().CheckDep(IV.VerStr(),(*D).Op,(*D).Version.c_str()) == true)
+ TryToInstall(Pkg,Cache,Fix,true,false,ExpectedInst);
+ }
+ else
+ {
+ /* depends; need to install or upgrade if we don't have the
+ package installed or if the version does not satisfy the
+ build dep. This is complicated by the fact that if we
+ depend on a version lower than what we already have
+ installed it is not clear what should be done; in practice
+ this case should be rare though and right now nothing
+ is done about it :-( */
+ if (IV.end() == true ||
+ Cache->VS().CheckDep(IV.VerStr(),(*D).Op,(*D).Version.c_str()) == false)
+ TryToInstall(Pkg,Cache,Fix,false,false,ExpectedInst);
+ }
+ }
+
+ Fix.InstallProtect();
+ if (Fix.Resolve(true) == false)
+ _error->Discard();
+
+ // Now we check the state of the packages,
+ if (Cache->BrokenCount() != 0)
+ return _error->Error(_("Some broken packages were found while trying to process build-dependencies.\n"
+ "You might want to run `apt-get -f install' to correct these."));
+ }
+
+ if (InstallPackages(Cache, false, true) == false)
+ return _error->Error(_("Failed to process build dependencies"));
return true;
}
/*}}}*/
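Taken together this is the new 'apt-get build-dep <source-package>' command noted in the changelog: each argument is resolved with FindSrc, its Build-Depends/Build-Conflicts records are walked, and the ordinary install/remove machinery (TryToInstall plus the problem resolver) is reused to satisfy them before InstallPackages runs. For example, 'apt-get build-dep hello' (package name illustrative) would install any build dependencies of hello's source record that are missing or too old, and remove installed packages matching a build conflict.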
+// DoMoo - Never Ask, Never Tell /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool DoMoo(CommandLine &CmdL)
+{
+ cout <<
+ " (__) \n"
+ " (oo) \n"
+ " /------\\/ \n"
+ " / | || \n"
+ " * /\\---/\\ \n"
+ " ~~ ~~ \n"
+ "....\"Have you mooed today?\"...\n";
+
+ return true;
+}
+ /*}}}*/
// ShowHelp - Show a help screen /*{{{*/
// ---------------------------------------------------------------------
/* */
bool ShowHelp(CommandLine &CmdL)
{
- cout << PACKAGE << ' ' << VERSION << " for " << ARCHITECTURE <<
- " compiled on " << __DATE__ << " " << __TIME__ << endl;
+ ioprintf(cout,_("%s %s for %s %s compiled on %s %s\n"),PACKAGE,VERSION,
+ COMMON_OS,COMMON_CPU,__DATE__,__TIME__);
+
if (_config->FindB("version") == true)
- return 100;
-
- cout << "Usage: apt-get [options] command" << endl;
- cout << " apt-get [options] install|remove pkg1 [pkg2 ...]" << endl;
- cout << " apt-get [options] source pkg1 [pkg2 ...]" << endl;
- cout << endl;
- cout << "apt-get is a simple command line interface for downloading and" << endl;
- cout << "installing packages. The most frequently used commands are update" << endl;
- cout << "and install." << endl;
- cout << endl;
- cout << "Commands:" << endl;
- cout << " update - Retrieve new lists of packages" << endl;
- cout << " upgrade - Perform an upgrade" << endl;
- cout << " install - Install new packages (pkg is libc6 not libc6.deb)" << endl;
- cout << " remove - Remove packages" << endl;
- cout << " source - Download source archives" << endl;
- cout << " dist-upgrade - Distribution upgrade, see apt-get(8)" << endl;
- cout << " dselect-upgrade - Follow dselect selections" << endl;
- cout << " clean - Erase downloaded archive files" << endl;
- cout << " autoclean - Erase old downloaded archive files" << endl;
- cout << " check - Verify that there are no broken dependencies" << endl;
- cout << endl;
- cout << "Options:" << endl;
- cout << " -h This help text." << endl;
- cout << " -q Loggable output - no progress indicator" << endl;
- cout << " -qq No output except for errors" << endl;
- cout << " -d Download only - do NOT install or unpack archives" << endl;
- cout << " -s No-act. Perform ordering simulation" << endl;
- cout << " -y Assume Yes to all queries and do not prompt" << endl;
- cout << " -f Attempt to continue if the integrity check fails" << endl;
- cout << " -m Attempt to continue if archives are unlocatable" << endl;
- cout << " -u Show a list of upgraded packages as well" << endl;
- cout << " -b Build the source package after fetching it" << endl;
- cout << " -c=? Read this configuration file" << endl;
- cout << " -o=? Set an arbitary configuration option, eg -o dir::cache=/tmp" << endl;
- cout << "See the apt-get(8), sources.list(5) and apt.conf(5) manual" << endl;
- cout << "pages for more information and options." << endl;
- return 100;
+ {
+ cout << _("Supported Modules:") << endl;
+
+ for (unsigned I = 0; I != pkgVersioningSystem::GlobalListLen; I++)
+ {
+ pkgVersioningSystem *VS = pkgVersioningSystem::GlobalList[I];
+ if (_system != 0 && _system->VS == VS)
+ cout << '*';
+ else
+ cout << ' ';
+ cout << "Ver: " << VS->Label << endl;
+
+ /* Print out all the packaging systems that will work with
+ this VS */
+ for (unsigned J = 0; J != pkgSystem::GlobalListLen; J++)
+ {
+ pkgSystem *Sys = pkgSystem::GlobalList[J];
+ if (_system == Sys)
+ cout << '*';
+ else
+ cout << ' ';
+ if (Sys->VS->TestCompatibility(*VS) == true)
+ cout << "Pkg: " << Sys->Label << " (Priority " << Sys->Score(*_config) << ")" << endl;
+ }
+ }
+
+ for (unsigned I = 0; I != pkgSourceList::Type::GlobalListLen; I++)
+ {
+ pkgSourceList::Type *Type = pkgSourceList::Type::GlobalList[I];
+ cout << " S.L: '" << Type->Name << "' " << Type->Label << endl;
+ }
+
+ for (unsigned I = 0; I != pkgIndexFile::Type::GlobalListLen; I++)
+ {
+ pkgIndexFile::Type *Type = pkgIndexFile::Type::GlobalList[I];
+ cout << " Idx: " << Type->Label << endl;
+ }
+
+ return true;
+ }
+
+ cout <<
+ _("Usage: apt-get [options] command\n"
+ " apt-get [options] install|remove pkg1 [pkg2 ...]\n"
+ " apt-get [options] source pkg1 [pkg2 ...]\n"
+ "\n"
+ "apt-get is a simple command line interface for downloading and\n"
+ "installing packages. The most frequently used commands are update\n"
+ "and install.\n"
+ "\n"
+ "Commands:\n"
+ " update - Retrieve new lists of packages\n"
+ " upgrade - Perform an upgrade\n"
+ " install - Install new packages (pkg is libc6 not libc6.deb)\n"
+ " remove - Remove packages\n"
+ " source - Download source archives\n"
+ " build-dep - Configure build-dependencies for source packages\n"
+ " dist-upgrade - Distribution upgrade, see apt-get(8)\n"
+ " dselect-upgrade - Follow dselect selections\n"
+ " clean - Erase downloaded archive files\n"
+ " autoclean - Erase old downloaded archive files\n"
+ " check - Verify that there are no broken dependencies\n"
+ "\n"
+ "Options:\n"
+ " -h This help text.\n"
+ " -q Loggable output - no progress indicator\n"
+ " -qq No output except for errors\n"
+ " -d Download only - do NOT install or unpack archives\n"
+ " -s No-act. Perform ordering simulation\n"
+ " -y Assume Yes to all queries and do not prompt\n"
+ " -f Attempt to continue if the integrity check fails\n"
+ " -m Attempt to continue if archives are unlocatable\n"
+ " -u Show a list of upgraded packages as well\n"
+ " -b Build the source package after fetching it\n"
+ " -c=? Read this configuration file\n"
+     "  -o=? Set an arbitrary configuration option, eg -o dir::cache=/tmp\n"
+ "See the apt-get(8), sources.list(5) and apt.conf(5) manual\n"
+ "pages for more information and options.\n"
+ " This APT has Super Cow Powers.\n");
+ return true;
}
/*}}}*/
// GetInitialize - Initialize things for apt-get /*{{{*/
@@ -1698,10 +2040,12 @@ int main(int argc,const char *argv[])
{'f',"fix-broken","APT::Get::Fix-Broken",0},
{'u',"show-upgraded","APT::Get::Show-Upgraded",0},
{'m',"ignore-missing","APT::Get::Fix-Missing",0},
- {0,"no-download","APT::Get::No-Download",0},
+ {'t',"target-release","APT::Default-Release",CommandLine::HasArg},
+ {'t',"default-release","APT::Default-Release",CommandLine::HasArg},
+ {0,"download","APT::Get::Download",0},
{0,"fix-missing","APT::Get::Fix-Missing",0},
- {0,"ignore-hold","APT::Ingore-Hold",0},
- {0,"no-upgrade","APT::Get::no-upgrade",0},
+ {0,"ignore-hold","APT::Ignore-Hold",0},
+ {0,"upgrade","APT::Get::upgrade",0},
{0,"force-yes","APT::Get::force-yes",0},
{0,"print-uris","APT::Get::Print-URIs",0},
{0,"diff-only","APT::Get::Diff-Only",0},
@@ -1710,7 +2054,8 @@ int main(int argc,const char *argv[])
{0,"list-cleanup","APT::Get::List-Cleanup",0},
{0,"reinstall","APT::Get::ReInstall",0},
{0,"trivial-only","APT::Get::Trivial-Only",0},
- {0,"no-remove","APT::Get::No-Remove",0},
+ {0,"remove","APT::Get::Remove",0},
+ {0,"only-source","APT::Get::Only-Source",0},
{'c',"config-file",0,CommandLine::ConfigFile},
{'o',"option",0,CommandLine::ArbItem},
{0,0,0,0}};
@@ -1720,18 +2065,24 @@ int main(int argc,const char *argv[])
{"remove",&DoInstall},
{"dist-upgrade",&DoDistUpgrade},
{"dselect-upgrade",&DoDSelectUpgrade},
+ {"build-dep",&DoBuildDep},
{"clean",&DoClean},
{"autoclean",&DoAutoClean},
{"check",&DoCheck},
{"source",&DoSource},
+ {"moo",&DoMoo},
{"help",&ShowHelp},
{0,0}};
// Parse the command line and initialize the package library
CommandLine CmdL(Args,_config);
- if (pkgInitialize(*_config) == false ||
- CmdL.Parse(argc,argv) == false)
+ if (pkgInitConfig(*_config) == false ||
+ CmdL.Parse(argc,argv) == false ||
+ pkgInitSystem(*_config,_system) == false)
{
+ if (_config->FindB("version") == true)
+ ShowHelp(CmdL);
+
_error->DumpErrors();
return 100;
}
@@ -1740,7 +2091,10 @@ int main(int argc,const char *argv[])
if (_config->FindB("help") == true ||
_config->FindB("version") == true ||
CmdL.FileSize() == 0)
- return ShowHelp(CmdL);
+ {
+ ShowHelp(CmdL);
+ return 0;
+ }
// Deal with stdout not being a tty
if (ttyname(STDOUT_FILENO) == 0 && _config->FindI("quiet",0) < 1)
@@ -1759,7 +2113,7 @@ int main(int argc,const char *argv[])
signal(SIGPIPE,SIG_IGN);
signal(SIGWINCH,SigWinch);
SigWinch(0);
-
+
// Match the operation
CmdL.DispatchArg(Cmds);
diff --git a/cmdline/apt-sortpkgs.cc b/cmdline/apt-sortpkgs.cc
new file mode 100644
index 000000000..bacaf01dd
--- /dev/null
+++ b/cmdline/apt-sortpkgs.cc
@@ -0,0 +1,201 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: apt-sortpkgs.cc,v 1.2 2001/02/20 07:03:17 jgg Exp $
+/* ######################################################################
+
+ APT Sort Packages - Program to sort Package and Source files
+
+ This program is quite simple, it just sorts the package files by
+ package and sorts the fields inside by the internal APT sort order.
+ Input is taken from a named file and sent to stdout.
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#include <apt-pkg/tagfile.h>
+#include <apt-pkg/error.h>
+#include <apt-pkg/configuration.h>
+#include <apt-pkg/cmndline.h>
+#include <apt-pkg/init.h>
+#include <apt-pkg/strutl.h>
+
+#include <config.h>
+#include <apti18n.h>
+
+#include <vector>
+#include <algorithm>
+
+#include <unistd.h>
+ /*}}}*/
+
+struct PkgName
+{
+ string Name;
+ string Ver;
+ string Arch;
+ unsigned long Offset;
+ unsigned long Length;
+
+ inline int Compare3(const PkgName &x) const
+ {
+ int A = stringcasecmp(Name,x.Name);
+ if (A == 0)
+ {
+ A = stringcasecmp(Ver,x.Ver);
+ if (A == 0)
+ A = stringcasecmp(Arch,x.Arch);
+ }
+ return A;
+ }
+
+ bool operator <(const PkgName &x) const {return Compare3(x) < 0;};
+ bool operator >(const PkgName &x) const {return Compare3(x) > 0;};
+ bool operator ==(const PkgName &x) const {return Compare3(x) == 0;};
+};
+
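For reference, a minimal standalone sketch of the same three-key, case-insensitive ordering driving std::sort; strcasecmp stands in for APT's stringcasecmp and the package data is invented:

#include <algorithm>
#include <string>
#include <strings.h>
#include <vector>

struct Key
{
   std::string Name, Ver, Arch;
   bool operator <(const Key &x) const
   {
      int A = strcasecmp(Name.c_str(),x.Name.c_str());    // name first
      if (A == 0)
         A = strcasecmp(Ver.c_str(),x.Ver.c_str());        // then version
      if (A == 0)
         A = strcasecmp(Arch.c_str(),x.Arch.c_str());      // then architecture
      return A < 0;
   }
};

int main()
{
   std::vector<Key> List = {{"zsh","4.0.2-1","i386"},{"Apt","0.5.0","i386"}};
   std::sort(List.begin(),List.end());     // "Apt" now sorts before "zsh"
   return 0;
}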
+// DoIt - Sort a single file /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool DoIt(string InFile)
+{
+ FileFd Fd(InFile,FileFd::ReadOnly);
+ pkgTagFile Tags(&Fd);
+ if (_error->PendingError() == true)
+ return false;
+
+ // Parse.
+ vector<PkgName> List;
+ pkgTagSection Section;
+ unsigned long Largest = 0;
+ unsigned long Offset = Tags.Offset();
+ bool Source = _config->FindB("APT::SortPkgs::Source",false);
+ while (Tags.Step(Section) == true)
+ {
+ PkgName Tmp;
+
+ /* Fetch the name, auto-detecting if this is a source file or a
+ package file */
+ Tmp.Name = Section.FindS("Package");
+ Tmp.Ver = Section.FindS("Version");
+ Tmp.Arch = Section.FindS("Architecture");
+
+ if (Tmp.Name.empty() == true)
+ return _error->Error(_("Unknown package record!"));
+
+ Tmp.Offset = Offset;
+ Tmp.Length = Section.size();
+ if (Largest < Tmp.Length)
+ Largest = Tmp.Length;
+
+ List.push_back(Tmp);
+
+ Offset = Tags.Offset();
+ }
+ if (_error->PendingError() == true)
+ return false;
+
+ // Sort it
+ sort(List.begin(),List.end());
+
+ const char **Order = TFRewritePackageOrder;
+ if (Source == true)
+ Order = TFRewriteSourceOrder;
+
+ // Emit
+ unsigned char *Buffer = new unsigned char[Largest+1];
+ for (vector<PkgName>::iterator I = List.begin(); I != List.end(); I++)
+ {
+ // Read in the Record.
+ if (Fd.Seek(I->Offset) == false || Fd.Read(Buffer,I->Length) == false)
+ {
+ delete [] Buffer;
+ return false;
+ }
+
+ Buffer[I->Length] = '\n';
+ if (Section.Scan((char *)Buffer,I->Length+1) == false)
+ {
+ delete [] Buffer;
+ return _error->Error("Internal error, failed to scan buffer");
+ }
+
+ // Sort the section
+ if (TFRewrite(stdout,Section,Order,0) == false)
+ {
+ delete [] Buffer;
+ return _error->Error("Internal error, failed to sort fields");
+ }
+
+ fputc('\n',stdout);
+ }
+
+ delete [] Buffer;
+ return true;
+}
+ /*}}}*/
+// ShowHelp - Show the help text /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+int ShowHelp()
+{
+ ioprintf(cout,_("%s %s for %s %s compiled on %s %s\n"),PACKAGE,VERSION,
+ COMMON_OS,COMMON_CPU,__DATE__,__TIME__);
+ if (_config->FindB("version") == true)
+ return 0;
+
+ cout <<
+ _("Usage: apt-sortpkgs [options] file1 [file2 ...]\n"
+ "\n"
+ "apt-sortpkgs is a simple tool to sort package files. The -s option is used\n"
+ "to indicate what kind of file it is.\n"
+ "\n"
+ "Options:\n"
+ " -h This help text\n"
+ " -s Use source file sorting\n"
+ " -c=? Read this configuration file\n"
+     "  -o=? Set an arbitrary configuration option, eg -o dir::cache=/tmp\n");
+
+ return 0;
+}
+ /*}}}*/
+
+int main(unsigned int argc,const char *argv[])
+{
+ CommandLine::Args Args[] = {
+ {'h',"help","help",0},
+ {'v',"version","version",0},
+ {'s',"source","APT::SortPkgs::Source",0},
+ {'c',"config-file",0,CommandLine::ConfigFile},
+ {'o',"option",0,CommandLine::ArbItem},
+ {0,0,0,0}};
+
+ // Parse the command line and initialize the package library
+ CommandLine CmdL(Args,_config);
+ if (pkgInitConfig(*_config) == false ||
+ CmdL.Parse(argc,argv) == false ||
+ pkgInitSystem(*_config,_system) == false)
+ {
+ _error->DumpErrors();
+ return 100;
+ }
+
+ // See if the help should be shown
+ if (_config->FindB("help") == true ||
+ CmdL.FileSize() == 0)
+ return ShowHelp();
+
+ // Match the operation
+ for (unsigned int I = 0; I != CmdL.FileSize(); I++)
+ if (DoIt(CmdL.FileList[I]) == false)
+ break;
+
+ // Print any errors or warnings found during parsing
+ if (_error->empty() == false)
+ {
+ bool Errors = _error->PendingError();
+ _error->DumpErrors();
+ return Errors == true?100:0;
+ }
+
+ return 0;
+}
diff --git a/cmdline/indexcopy.cc b/cmdline/indexcopy.cc
index a8243c085..389d3c547 100644
--- a/cmdline/indexcopy.cc
+++ b/cmdline/indexcopy.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: indexcopy.cc,v 1.5 2000/05/10 06:02:26 jgg Exp $
+// $Id: indexcopy.cc,v 1.6 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Index Copying - Aid for copying and verifying the index files
@@ -107,7 +107,7 @@ bool IndexCopy::CopyPackages(string CDROM,string Name,vector<string> &List)
Pkg.Seek(0);
}
- pkgTagFile Parser(Pkg);
+ pkgTagFile Parser(&Pkg);
if (_error->PendingError() == true)
return false;
@@ -119,9 +119,12 @@ bool IndexCopy::CopyPackages(string CDROM,string Name,vector<string> &List)
TargetF += URItoFileName(S);
if (_config->FindB("APT::CDROM::NoAct",false) == true)
TargetF = "/dev/null";
- FileFd Target(TargetF,FileFd::WriteEmpty);
+ FileFd Target(TargetF,FileFd::WriteEmpty);
+ FILE *TargetFl = fdopen(dup(Target.Fd()),"w");
if (_error->PendingError() == true)
return false;
+ if (TargetFl == 0)
+ return _error->Errno("fdopen","Failed to reopen fd");
// Setup the progress meter
Progress.OverallProgress(CurrentSize,TotalSize,FileSize,
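The copy routine now writes through a stdio FILE handle because RewriteEntry (below) hands the record to TFRewrite, which takes a FILE *. Opening the stream on a dup() of the descriptor means the later fclose() can flush and close it without pulling the file descriptor out from under the still-open FileFd; at least, that is the usual reason for this dup-then-fdopen pattern.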
@@ -140,7 +143,10 @@ bool IndexCopy::CopyPackages(string CDROM,string Name,vector<string> &List)
string File;
unsigned long Size;
if (GetFile(File,Size) == false)
+ {
+ fclose(TargetFl);
return false;
+ }
if (Chop != 0)
File = OrigPath + ChopDirs(File,Chop);
@@ -202,21 +208,13 @@ bool IndexCopy::CopyPackages(string CDROM,string Name,vector<string> &List)
Packages++;
Hits++;
- // Copy it to the target package file
- if (Chop != 0 || Mangled == true)
+ if (RewriteEntry(TargetFl,File) == false)
{
- if (RewriteEntry(Target,File) == false)
- continue;
+ fclose(TargetFl);
+ return false;
}
- else
- {
- const char *Start;
- const char *Stop;
- Section.GetSection(Start,Stop);
- if (Target.Write(Start,Stop-Start) == false)
- return false;
- }
}
+ fclose(TargetFl);
if (Debug == true)
cout << " Processed by using Prefix '" << Prefix << "' and chop " << Chop << endl;
@@ -448,44 +446,6 @@ bool IndexCopy::GrabFirst(string Path,string &To,unsigned int Depth)
return true;
}
/*}}}*/
-// IndexCopy::CopyWithReplace - Copy a section and replace text /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-bool IndexCopy::CopyWithReplace(FileFd &Target,const char *Tag,string New)
-{
- // Mangle the output filename
- const char *Start;
- const char *Stop;
- const char *Filename;
- Section->Find(Tag,Filename,Stop);
-
- /* We need to rewrite the filename field so we emit
- all fields except the filename file and rewrite that one */
- for (unsigned int I = 0; I != Section->Count(); I++)
- {
- Section->Get(Start,Stop,I);
- if (Start <= Filename && Stop > Filename)
- {
- char S[500];
- sprintf(S,"%s: %s\n",Tag,New.c_str());
- if (I + 1 == Section->Count())
- strcat(S,"\n");
- if (Target.Write(S,strlen(S)) == false)
- return false;
- }
- else
- {
- if (Target.Write(Start,Stop-Start) == false)
- return false;
- if (Stop[-1] != '\n')
- if (Target.Write("\n",1) == false)
- return false;
- }
- }
- if (Target.Write("\n",1) == false)
- return false;
-}
- /*}}}*/
// PackageCopy::GetFile - Get the file information from the section /*{{{*/
// ---------------------------------------------------------------------
/* */
@@ -501,9 +461,15 @@ bool PackageCopy::GetFile(string &File,unsigned long &Size)
// PackageCopy::RewriteEntry - Rewrite the entry with a new filename /*{{{*/
// ---------------------------------------------------------------------
/* */
-bool PackageCopy::RewriteEntry(FileFd &Target,string File)
+bool PackageCopy::RewriteEntry(FILE *Target,string File)
{
- return CopyWithReplace(Target,"Filename",File);
+ TFRewriteData Changes[] = {{"Filename",File.c_str()},
+ {}};
+
+ if (TFRewrite(Target,*Section,TFRewritePackageOrder,Changes) == false)
+ return false;
+ fputc('\n',Target);
+ return true;
}
/*}}}*/
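The TFRewriteData array is a list of (tag, new value) pairs terminated by an empty entry. A hedged sketch of the same pattern for a different field; the Maintainer value is invented, and Out/Tags are assumed to be an open FILE * and a parsed pkgTagSection as above:

// Sketch: emit one record with a single field replaced, in the canonical
// Packages field order, followed by the blank line that separates records.
static bool EmitWithMaintainer(FILE *Out,pkgTagSection &Tags)
{
   TFRewriteData Changes[] = {{"Maintainer","APT Team <apt@example.org>"},
                              {}};                 // empty entry ends the list
   if (TFRewrite(Out,Tags,TFRewritePackageOrder,Changes) == false)
      return false;
   fputc('\n',Out);
   return true;
}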
// SourceCopy::GetFile - Get the file information from the section /*{{{*/
@@ -520,7 +486,7 @@ bool SourceCopy::GetFile(string &File,unsigned long &Size)
if (Base.empty() == false && Base[Base.length()-1] != '/')
Base += '/';
- // Iterate over the entire list grabbing each triplet
+ // Read the first file triplet
const char *C = Files.c_str();
string sSize;
string MD5Hash;
@@ -540,9 +506,15 @@ bool SourceCopy::GetFile(string &File,unsigned long &Size)
// SourceCopy::RewriteEntry - Rewrite the entry with a new filename /*{{{*/
// ---------------------------------------------------------------------
/* */
-bool SourceCopy::RewriteEntry(FileFd &Target,string File)
+bool SourceCopy::RewriteEntry(FILE *Target,string File)
{
- return CopyWithReplace(Target,"Directory",
- string(File,0,File.rfind('/')));
+ string Dir(File,0,File.rfind('/'));
+ TFRewriteData Changes[] = {{"Directory",Dir.c_str()},
+ {}};
+
+ if (TFRewrite(Target,*Section,TFRewriteSourceOrder,Changes) == false)
+ return false;
+ fputc('\n',Target);
+ return true;
}
/*}}}*/
diff --git a/cmdline/indexcopy.h b/cmdline/indexcopy.h
index 23be845b7..44a5e7d6b 100644
--- a/cmdline/indexcopy.h
+++ b/cmdline/indexcopy.h
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: indexcopy.h,v 1.1 1999/07/12 02:59:36 jgg Exp $
+// $Id: indexcopy.h,v 1.2 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
Index Copying - Aid for copying and verifying the index files
@@ -12,6 +12,7 @@
#include <vector>
#include <string>
+#include <stdio.h>
class pkgTagSection;
class FileFd;
@@ -28,11 +29,11 @@ class IndexCopy
bool ReconstructChop(unsigned long &Chop,string Dir,string File);
void ConvertToSourceList(string CD,string &Path);
bool GrabFirst(string Path,string &To,unsigned int Depth);
- bool CopyWithReplace(FileFd &Target,const char *Tag,string New);
virtual bool GetFile(string &Filename,unsigned long &Size) = 0;
- virtual bool RewriteEntry(FileFd &Target,string File) = 0;
+ virtual bool RewriteEntry(FILE *Target,string File) = 0;
virtual const char *GetFileName() = 0;
virtual const char *Type() = 0;
+
public:
bool CopyPackages(string CDROM,string Name,vector<string> &List);
@@ -43,7 +44,7 @@ class PackageCopy : public IndexCopy
protected:
virtual bool GetFile(string &Filename,unsigned long &Size);
- virtual bool RewriteEntry(FileFd &Target,string File);
+ virtual bool RewriteEntry(FILE *Target,string File);
virtual const char *GetFileName() {return "Packages";};
virtual const char *Type() {return "Package";};
@@ -55,7 +56,7 @@ class SourceCopy : public IndexCopy
protected:
virtual bool GetFile(string &Filename,unsigned long &Size);
- virtual bool RewriteEntry(FileFd &Target,string File);
+ virtual bool RewriteEntry(FILE *Target,string File);
virtual const char *GetFileName() {return "Sources";};
virtual const char *Type() {return "Source";};
diff --git a/cmdline/makefile b/cmdline/makefile
index 17042c0d2..c0647cead 100644
--- a/cmdline/makefile
+++ b/cmdline/makefile
@@ -32,3 +32,10 @@ SLIBS = -lapt-pkg
LIB_MAKES = apt-pkg/makefile
SOURCE = apt-cdrom.cc indexcopy.cc
include $(PROGRAM_H)
+
+# The apt-sortpkgs program
+PROGRAM=apt-sortpkgs
+SLIBS = -lapt-pkg
+LIB_MAKES = apt-pkg/makefile
+SOURCE = apt-sortpkgs.cc
+include $(PROGRAM_H)
diff --git a/configure.in b/configure.in
index f8356e627..431df2009 100644
--- a/configure.in
+++ b/configure.in
@@ -14,21 +14,20 @@ dnl configure.in correctly and can be run at any time
AC_INIT(configure.in)
AC_CONFIG_AUX_DIR(buildlib)
-AC_CONFIG_HEADER(include/config.h:buildlib/config.h.in)
+AC_CONFIG_HEADER(include/config.h:buildlib/config.h.in include/apti18n.h:buildlib/apti18n.h.in)
dnl -- SET THIS TO THE RELEASE VERSION --
-AC_DEFINE_UNQUOTED(VERSION,"0.3.19")
+AC_DEFINE_UNQUOTED(VERSION,"0.4.11")
AC_DEFINE_UNQUOTED(PACKAGE,"apt")
+dnl Check the archs, we want the target type.
+AC_CANONICAL_SYSTEM
+
dnl Check our C compiler
AC_CHECK_TOOL_PREFIX
AC_PROG_CC
AC_ISC_POSIX
-dnl Check the host arch (build+target not needed... yet)
-AC_CANONICAL_HOST
-AC_CHECK_TOOL_PREFIX dnl recheck, in case the initial guess was wrong
-
dnl Check for other programs
AC_PROG_CXX
AC_PROG_CPP
@@ -43,55 +42,72 @@ AC_SEARCH_LIBS(connect,socket)
SOCKETLIBS="$LIBS"
AC_SUBST(SOCKETLIBS)
LIBS="$SAVE_LIBS"
-
-dnl Section Disabled pending removal of deity widget library -- jgg
-if test "yes" != "yes"; then
- dnl Checks for X11
- AC_PATH_X
- AC_PATH_XTRA
- AC_SUBST(X11LIB)
- X11LIB=
- if test "$no_x" != "yes"; then
- X11LIB="-lX11"
- AC_DEFINE(HAVE_X11)
- dnl Checks for ZPM
- AC_CHECK_LIB(Xpm, XpmLibraryVersion,[AC_DEFINE(HAVE_LIBXPM) XPMLIB="-lXpm"],,$X_LIBS $X_PRE_LIBS $X11LIB $X_EXTRA_LIBS)
- AC_SUBST(XPMLIB)
- if test "$XPMLIB" != "-lXpm"; then
- AC_MSG_ERROR(failed: I need xpm if you are building for X)
- fi
- fi
-
- dnl Checks for Slang
- AC_CHECK_LIB(slang, SLang_Version,[AC_DEFINE(HAVE_LIBSLANG) SLANGLIB="-lslang"])
- AC_SUBST(SLANGLIB)
- dnl Checks for GPM
- AC_ARG_WITH(gpm,[ --without-gpm do not use GPM mouse support])
- if test "$with_gpm" != "no"; then
- AC_CHECK_LIB(gpm, Gpm_Open,[AC_DEFINE(HAVE_LIBGPM) GPMLIB="-lgpm"])
- AC_SUBST(GPMLIB)
- fi
+dnl Checks for pthread -- disabled due to glibc bugs jgg
+dnl AC_CHECK_LIB(pthread, pthread_create,[AC_DEFINE(HAVE_PTHREAD) PTHREADLIB="-lpthread"])
+AC_SUBST(PTHREADLIB)
+dnl if test "$PTHREADLIB" != "-lpthread"; then
+dnl AC_MSG_ERROR(failed: I need posix threads, pthread)
+dnl fi
+
+dnl Find the version of python we are using and ensure the library and header
+dnl are available.. Also get all the paths and options from the python setup
+dnl makefile.
+AC_CACHE_CHECK("python version", ac_cv_ver_python, [ac_cv_ver_python=`python -c 'import sys;print sys.version[[:3]]' 2> /dev/null`])
+AC_CACHE_CHECK("python prefix", ac_cv_prefix_python, [ac_cv_prefix_python=`python -c 'import sys;print sys.prefix' 2> /dev/null`])
+AC_CACHE_CHECK("python exec prefix", ac_cv_execprefix_python, [ac_cv_execprefix_python=`python -c 'import sys;print sys.exec_prefix' 2> /dev/null`])
+if test "x$ac_cv_ver_python" != "x"; then
+
+ cat > pytest << EOF
+include $ac_cv_execprefix_python/lib/python$ac_cv_ver_python/config/Makefile
+.PHONY: libs include
+libs:
+ @echo \$(MODLIBS) \$(LIBS) \$(SYSLIBS)
+include:
+ @echo \$(INCLUDEPY)
+EOF
+ AC_CACHE_CHECK("python libs", ac_cv_libs_python, [ac_cv_libs_python=`make -s -f pytest libs 2> /dev/null`])
+ AC_CACHE_CHECK("python include", ac_cv_include_python, [ac_cv_include_python=`make -s -f pytest include 2> /dev/null`])
+ AC_CHECK_HEADER(python$ac_cv_ver_python/Python.h,
+ [AC_CHECK_LIB(python$ac_cv_ver_python,PyArg_ParseTuple,
+ [AC_DEFINE(HAVE_PYTHONLIB) PYTHONLIB="-lpython$ac_cv_ver_python $ac_cv_libs_python"],[],$ac_cv_libs_python)])
+ AC_SUBST(PYTHONLIB)
+
+ PYTHONVER=$ac_cv_ver_python
+ PYTHONPREFIX=$ac_cv_prefix_python
+ PYTHONEXECPREFIX=$ac_cv_execprefix_python
+ PYTHONINCLUDE=$ac_cv_include_python
+ AC_SUBST(PYTHONVER)
+ AC_SUBST(PYTHONPREFIX)
+ AC_SUBST(PYTHONEXECPREFIX)
+ AC_SUBST(PYTHONINCLUDE)
fi
-dnl Checks for pthread
-#AC_CHECK_LIB(pthread, pthread_create,[AC_DEFINE(HAVE_PTHREAD) PTHREADLIB="-lpthread"])
-AC_SUBST(PTHREADLIB)
-#if test "$PTHREADLIB" != "-lpthread"; then
-# AC_MSG_ERROR(failed: I need posix threads, pthread)
-#fi
+dnl Check for DB2
+AC_CHECK_HEADER(db2/db.h,
+ [AC_CHECK_LIB(db2,db_open,
+ [AC_DEFINE(HAVE_DB2) DB2LIB="-ldb2"])])
+AC_SUBST(DB2LIB)
-dnl Converts the ARCH to be the same as dpkg
+dnl Converts the ARCH to be something singular for this general CPU family
+dnl This is often the dpkg architecture string.
AC_MSG_CHECKING(system architecture)
-archset="`awk \"{ if(\\\$1 == \\\"$host_cpu\\\") print \\\$2 }\" $srcdir/buildlib/archtable`"
+archset="`awk \" ! /^#|^\\\$/ { if(match(\\\"$target_cpu\\\",\\\"^\\\"\\\$1\\\"\\\$\\\")) {print \\\$2; exit}}\" $srcdir/buildlib/archtable`"
if test "x$archset" = "x"; then
AC_MSG_ERROR(failed: use --host=)
fi
AC_MSG_RESULT($archset)
-AC_DEFINE_UNQUOTED(ARCHITECTURE,"$archset")
+AC_DEFINE_UNQUOTED(COMMON_CPU,"$archset")
-dnl We use C9x types if at all possible
-AC_CACHE_CHECK([for C9x integer types],c9x_ints,[
+dnl Get a common name for the host OS - this is primarily only for HURD and is
+dnl non fatal if it fails
+AC_MSG_CHECKING(system OS)
+osset="`awk \" ! /^#|^\\\$/ {if (match(\\\"$target_vendor-$target_os\\\",\\\$1)) {print \\\$2; exit}}\" $srcdir/buildlib/ostable`"
+AC_MSG_RESULT($osset)
+AC_DEFINE_UNQUOTED(COMMON_OS,"$osset")
+
+dnl We use C99 types if at all possible
+AC_CACHE_CHECK([for C99 integer types],c9x_ints,[
AC_TRY_COMPILE([#include <inttypes.h>],
[uint8_t Foo1;uint16_t Foo2;uint32_t Foo3;],
c9x_ints=yes,c9x_ints=no)])
@@ -112,7 +128,7 @@ dnl This is stupid, it should just use the AC macros like it does below
dnl Cross compilers can either get a real C library or preload the cache
dnl with their size values.
changequote(,)
-archline="`grep \"^$archset\" $srcdir/buildlib/sizetable | cut -f 2- -d ' '`"
+archline="`awk \" ! /^#|^\\\$/ {if (match(\\\"$archset\\\",\\\$1)) {print; exit}}\" $srcdir/buildlib/sizetable | cut -f 2- -d ' '`"
if test "x$archline" != "x"; then
changequote([,])
set $archline
@@ -128,6 +144,8 @@ if test "x$archline" != "x"; then
fi
dnl I wonder what AC_C_BIGENDIAN does if you cross compile...
+dnl This is probably bogus, as above we only care if we have to build our own
+dnl C9x types.
if test "$cross_compiling" = "yes" -a "x$archline" = "x"; then
AC_MSG_ERROR(When cross compiling, architecture must be present in sizetable)
fi
@@ -163,12 +181,16 @@ AC_EGREP_HEADER(h_errno, netdb.h, [AC_MSG_RESULT(normal)],
])
dnl Check for debiandoc
-AC_CHECK_PROG(DEBIANDOC_HTML,debiandoc2html,"yes","")
-AC_CHECK_PROG(DEBIANDOC_TEXT,debiandoc2text,"yes","")
+AC_PATH_PROG(DEBIANDOC_HTML,debiandoc2html)
+AC_PATH_PROG(DEBIANDOC_TEXT,debiandoc2text)
+
+dnl Check for the SGML tools needed to build man pages
+AC_PATH_PROG(NSGMLS,nsgmls)
+AC_PATH_PROG(SGMLSPL,sgmlspl)
dnl Check for YODL
-AC_CHECK_PROG(YODL_MAN,yodl2man,"yes","")
+dnl AC_CHECK_PROG(YODL_MAN,yodl2man,"yes","")
ah_NUM_PROCS
-AC_OUTPUT(environment.mak:buildlib/environment.mak.in makefile:buildlib/makefile.in,make dirs)
+AC_OUTPUT(environment.mak:buildlib/environment.mak.in makefile:buildlib/makefile.in,make -s dirs)
diff --git a/debian/apt-utils.dirs b/debian/apt-utils.dirs
new file mode 100644
index 000000000..14f5b95d7
--- /dev/null
+++ b/debian/apt-utils.dirs
@@ -0,0 +1,2 @@
+usr/lib
+usr/bin
diff --git a/debian/changelog b/debian/changelog
index b723440da..5cd0dbfd3 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,12 +1,104 @@
-apt (0.3.19) frozen unstable; urgency=low
-
- * Added --dry-run to match docs. Closes: #65153, #64292
+apt (0.5.0) unstable; urgency=low
+
+ * Fixed an obscure bug with missing final double new lines in
+ package files
+ * Changed the apt-cdrom index copy routine to use the new section
+ rewriter
+ * Added a package file sorter, apt-sortpkgs
+ * Parse obsolete Optional dependencies.
+ * Added Ben's rsh method. Closes: #57794
+ * Added IPv6 FTP support and better DNS rotation support.
+ * Include the server IP in error messages when using a DNS rotation.
+ Closes: #64895
+ * Made most of the byte counters into doubles to prevent 32bit overflow.
+ Closes: #65349
+ * HTTP Authorization. Closes: #61158
+ * Ability to parse and return source index build depends from Randolph.
+ * new 'apt-get build-dep' command from Randolph. Closes: #63982
+ * Added apt-ftparchive, the all-dancing, all-singing FTP archive
+ maintenance program
+ * Allow version specifications with =1.2.4-3 and /2.2 or /stable postfixes
+ in apt-get.
+ * Removed useless internal cruft including the xstatus file.
+ * Fixed config parser bugs. Closes: #67848, #71108
+ * Brain Damaged apt-get config options changed; this does not change the command
+ line interface, except to allow --enable-* to undo a configuration
+ option:
+ No-Remove -> Remove
+ No-Download -> Download
+ No-Upgrade -> Upgrade
+ * Made this fix configurable (DSelect::CheckDir) and default to disabled:
+ * No remove prompt if the archives dir has not changed. Closes: #55709
+ Because it is stupid in the case where no files were downloaded due to
+ a resumed-aborted install, or a full cache! Closes: #65952
* Obscure divide by zero problem. Closes: #64394
* Update sizetable for mips. Closes: #62288
* Fixed a bug with passive FTP connections
* Has sizetable entry for sparc64. Closes: #64869
+ * Escape special characters in the ::Label section of the cdroms.lst
+ * Created apt-utils and python-apt packages
+ * Due to the new policy engine, the available file may contain entries
+ from the status file. These are generated if the package is not obsolete
+ but the policy engine prohibits using the version from the package files.
+ They can be identified by the lack of a Filename field.
+ * The new policy engine. Closes: #66509, #66944, #45122, #45094, #40006,
+ #36223, #33468, #22551
+ * Fixed deb-src line for non-us. Closes: #71501, #71601
+ * Fixes for G++ 2.96, s/friend/friend class/
+ * Fixed mis-documentation of APT::Get::Fix-Missing. Closes: #69269
+ * Confirmed fix for missing new line problem. Closes: #69386
+ * Fixed up dhelp files. Closes: #71312
+ * Added some notes about dselect and offline usage. Closes: #66473, #38316
+ * Lock files on read only file systems are ignored w/ warning.
+ Closes: #61701
+ * apt-get update foo now gives an error! Closes: #42891
+ * Added test for shlibs on hurd. Closes: #71499
+ * Clarified apt-cache document. Closes: #71934
+ * DocBook SGML man pages and some improvements in the text..
+ * sigwinch thing. Closes: #72382
+ * Caching can be turned off by setting the cache file names blank.
+ * Ignores arches it does not know about when autocleaning. Closes: #72862
+ * New function in apt-config to return dirs, files, bools and integers.
+ * Fixed an odd little bug in MarkInstall and fixed it up to handle
+ complex cases involving OR groups and provides.
+ #68754 describes confusing messages which are the result of this.
+ Closes: #63149, #69394, #68754, #77683, #66806, #81486, #78712
+ * Speeling mistake and return code for the 'wicked' resolver error
+ Closes: #72621, #75226, #77464
+ * Solved unable to upgrade libc6 from potato to woody due to 3 package
+ libc6 dependency loop problem.
+ * Leading sources.list spaces. Closes: #76010
+ * Removed a possible infinite loop while processing installations.
+ * Man page updates. Closes: #75411, #75560, #64292, #78469
+ * ReduceSourceList bug. Closes: #76027
+ * --only-source option. Closes: #76320
+ * Typos. Closes: #77812, #77999
+ * Different status messages. Closes: #76652, #78353
+ * /etc/apt/apt.conf.d/ directory for Joey and Matt and pipe protocol 2
+ * OS detection and support for the new pseudo-standard of os-arch for the
+ Architecture string. Also uses regexing. Closes: #39227, #72349
+ * Various i18n stuff. Note that this still needs some i18n wizard
+ to do the last gettextization right. Closes: #62386
+ * Fixed a problem with some odd http servers/proxies that did not return
+ the content size in the header. Closes: #79878, #44379
+ * Little acquire bugs. Closes: #77029, #55820
+ * _POSIX_THREADS may not be defined to anything, just defined..
+ Closes: #78996
+ * Spell Ignore-Hold correctly. Closes: #78042
+ * Unlock the dpkg db if in download only mode. Closes: #84851
+ * Brendan O'Dea's dselect admindir stuff. Closes: #62811
+ * Patch from BenC. Closes: #80810
+ * Single output of some names in lists. Closes: #80498, #43286
+ * Nice message for people who can't read syserror output. Closes: #84734
+ * OR search function. Closes: #82894
+ * User's guide updates. Closes: #82469
+ * The AJ/JoeyH var/state to var/lib transition patch. Closes: #59094
+ * Various CD bugs, again thanks to Greenbush
+ Closes: #80946, #76547, #71810, #70049, #69482
+ * Using potato debhelper. Closes: #57977
+ * I cannot self-terminate. Closes: #74928
- -- Ben Gertzfield <che@debian.org> Fri, 12 May 2000 21:10:54 -0700
+ -- Jason Gunthorpe <jgg@debian.org> Wed, 25 Oct 2000 00:11:06 -0600
apt (0.3.19) frozen unstable; urgency=low
diff --git a/debian/control b/debian/control
index 365c6bef0..00e35ee59 100644
--- a/debian/control
+++ b/debian/control
@@ -3,14 +3,13 @@ Section: admin
Priority: standard
Maintainer: APT Development Team <deity@lists.debian.org>
Standards-Version: 3.1.1
-Build-Depends: debhelper, debiandoc-sgml
+Build-Depends: debhelper, debiandoc-sgml, python-dev, libdb2-dev
Package: apt
Architecture: any
Depends: ${shlibs:Depends}
Priority: standard
-Conflicts: deity
-Replaces: deity, libapt-pkg-doc (<< 0.3.7), libapt-pkg-dev (<< 0.3.7)
+Replaces: libapt-pkg-doc (<< 0.3.7), libapt-pkg-dev (<< 0.3.7)
Provides: libapt-pkg${libapt-pkg:major}
Suggests: dpkg-dev
Description: Advanced front-end for dpkg
@@ -37,3 +36,12 @@ Priority: optional
Description: Documentation for APT development
This package contains documentation for development of the APT
Debian package manipulation program and its libraries.
+
+Package: apt-utils
+Architecture: any
+Depends: ${shlibs:Depends}
+Priority: optional
+Provides: libapt-inst${libapt-inst:major}
+Description: APT utility programs
+ This package contains some infrequently used APT utility programs such
+ as apt-ftparchive and apt-sortpkgs.
diff --git a/debian/dhelp b/debian/dhelp
index e24a7c08d..091c49956 100644
--- a/debian/dhelp
+++ b/debian/dhelp
@@ -3,10 +3,10 @@
<dirtitle>Debian Utilities
<linkname>APT User's Guide
<filename>guide.html/index.html
-<descrip>
+<description>
The APT User's Guide provides an overview of how to use the the APT package
manager, and provides a detailed look at the apt-get tool.
-</descrip>
+</description>
</item>
<item>
@@ -14,8 +14,8 @@ manager, and provides a detailed look at the apt-get tool.
<dirtitle>Debian Utilities
<linkname>APT Offline Usage Guide
<filename>offline.html/index.html
-<descrip>
+<description>
The APT Offline Usage Guide provides detailed instructions and examples
of how to use APT on an unconnected computer.
-</descrip>
+</description>
</item>
diff --git a/debian/dirs b/debian/dirs
index 1551a2083..fd2a36929 100644
--- a/debian/dirs
+++ b/debian/dirs
@@ -3,4 +3,4 @@ usr/lib/apt/methods
usr/lib/dpkg/methods/apt
etc/apt
var/cache/apt/archives/partial
-var/state/apt/lists/partial
+var/lib/apt/lists/partial
diff --git a/debian/libapt-pkg-doc.dhelp b/debian/libapt-pkg-doc.dhelp
index 7458cd09b..7c93c736b 100644
--- a/debian/libapt-pkg-doc.dhelp
+++ b/debian/libapt-pkg-doc.dhelp
@@ -3,7 +3,7 @@
<dirtitle>Development Tools
<linkname>APT Cache Specification
<filename>../apt/cache.html/index.html
-<descrip>
+<description>
The APT Cache Specification describes the complete implementation and
format of the APT Cache file. The APT Cache file is a way for APT to parse
and store a large number of package files for display in the UI. It's primary
@@ -11,7 +11,7 @@ design goal is to make display of a single package in the tree very
fast by pre-linking important things like dependencies and provides.
The specification doubles as documentation for one of the in-memory
structures used by the package library and the APT GUI.
-</descrip>
+</description>
</item>
<item>
@@ -19,11 +19,11 @@ structures used by the package library and the APT GUI.
<dirtitle>Development Tools
<linkname>APT Design Document
<filename>../apt/design.html/index.html
-<descrip>
+<description>
The APT Design Document is an overview of the specifications and design goals
-of the APT project. It also attempts to give a broad description of the
+of the APT project. It also attempts to give a broad description of the
implementation as well.
-</descrip>
+</description>
</item>
<item>
@@ -31,10 +31,10 @@ implementation as well.
<dirtitle>Development Tools
<linkname>DPkg Technical Manual
<filename>../apt/design.html/index.html
-<descrip>
+<description>
The DPkg Technical Manual gives an overview of dpkg's external functions
and describes how it views the world.
-</descrip>
+</description>
</item>
<item>
@@ -42,11 +42,11 @@ and describes how it views the world.
<dirtitle>Development Tools
<linkname>APT Files
<filename>../apt/files.html/index.html
-<descrip>
+<description>
The APT Files document describes the complete implementation and format of
the installed APT directory structure. It also serves as guide to how
APT views the Debian archive.
-</descrip>
+</description>
</item>
<item>
@@ -54,8 +54,8 @@ APT views the Debian archive.
<dirtitle>Development Tools
<linkname>APT Method Interface
<filename>../apt/method.html/index.html
-<descrip>
+<description>
The APT Method Interface document describes the interface that APT uses to
the archive access methods.
-</descrip>
+</description>
</item>
diff --git a/debian/postrm b/debian/postrm
index 625280034..b1bb971d4 100755
--- a/debian/postrm
+++ b/debian/postrm
@@ -13,8 +13,8 @@ case "$1" in
echo -n "Removing APT cache and state files... "
echo -n "/var/cache/apt"
rm -rf /var/cache/apt
- echo -n ", /var/state/apt"
- rm -rf /var/state/apt
+ echo -n ", /var/lib/apt"
+ rm -rf /var/lib/apt
echo ". Done."
esac
diff --git a/debian/preinst b/debian/preinst
new file mode 100755
index 000000000..141db6e45
--- /dev/null
+++ b/debian/preinst
@@ -0,0 +1,28 @@
+#! /bin/sh
+
+# dpkg does this for us while we are upgrading..
+#if [ "$1" = "upgrade" -a -L /var/state/apt -a -e /var/lib/apt -a ! -L /var/state/apt ] && dpkg --compare-versions "$2" ">=" "0.4.10"; then
+# rm /var/state/apt
+#fi
+
+if [ "$1" = "upgrade" -o "$1" = "install" -a "$2" != "" ]; then
+ if [ ! -e /var/lib/apt -a -e /var/state/apt ]; then
+ # upgrading from /var/state/apt using apt.
+ # it's probably running now so we want to ensure /var/state/apt
+ # is still valid afterwards while the upgrade is in progress.
+
+ if [ -x /usr/bin/perl -a -d /var/state/apt -a ! -L /var/state/apt ] &&
+ perl -e 'exit 1 if ((stat("/var/lib"))[0] != (stat("/var/state/apt"))[0])'
+ then
+ # same fs, we can mv it
+ mv /var/state/apt /var/lib/apt
+ ln -s ../lib/apt /var/state/apt
+ # note that this symlink (/var/state/apt) will be removed when
+ # dpkg finishes unpacking the apt we're about to install; this is okay
+ else
+ # scary, let's just symlink it and hope
+ ln -s /var/state/apt /var/lib/apt
+ fi
+ fi
+ touch /var/lib/apt/lists/partial/.delete-me-later || true
+fi
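The perl one-liner in the preinst above only checks that /var/lib and /var/state/apt sit on the same filesystem (identical st_dev) before risking the mv. A rough standalone equivalent of that test, assuming GNU coreutils stat is available and that /var/state/apt still exists (the maintainer script itself relies on perl's stat() instead):

#!/bin/sh
# Same-device check sketch; %d is the device number in GNU stat.
libdev=`stat -c %d /var/lib`
statedev=`stat -c %d /var/state/apt`
if [ "$libdev" = "$statedev" ]; then
    echo "same filesystem: mv /var/state/apt /var/lib/apt and leave a compat symlink"
else
    echo "different filesystems: symlink /var/lib/apt -> /var/state/apt and hope"
fi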
diff --git a/debian/prerm b/debian/prerm
new file mode 100755
index 000000000..64b3635c5
--- /dev/null
+++ b/debian/prerm
@@ -0,0 +1,11 @@
+#! /bin/sh
+
+if [ "$1" = "upgrade" -o "$1" = "failed-upgrade" ] &&
+ dpkg --compare-versions "$2" "<<" 0.4.10
+then
+ if [ ! -d /var/state/apt/ ]; then
+ ln -s /var/lib/apt /var/state/apt
+ touch /var/lib/apt/lists/partial/.delete-me-later
+ fi
+fi
+
diff --git a/debian/python-apt.dirs b/debian/python-apt.dirs
new file mode 100644
index 000000000..621814dff
--- /dev/null
+++ b/debian/python-apt.dirs
@@ -0,0 +1 @@
+usr/lib/python1.5/site-packages
diff --git a/debian/rules b/debian/rules
index d194ba1cf..13912e9f7 100755
--- a/debian/rules
+++ b/debian/rules
@@ -2,8 +2,10 @@
# Made with the aid of dh_make, by Craig Small
# Sample debian/rules that uses debhelper. GNU copyright 1997 by Joey Hess.
# Some lines taken from debmake, by Christoph Lameter.
-# $Id: rules,v 1.23 2000/06/04 05:37:30 doogie Exp $
+# $Id: rules,v 1.24 2001/02/20 07:03:17 jgg Exp $
+# LD_LIBRARY_PATH=pwd/debian/tmp/usr/lib dh_shlibdeps -papt
+# dpkg: /home/jgg/work/apt2/debian/tmp/usr/lib/libapt-pkg.so.2.9 not found.
# For the deb builder, you can run 'debian/rules cvs-build', which does all
# steps nescessary to produce a proper source tarball with the CVS/ removed.
@@ -12,6 +14,12 @@
# create ../upload-<VER>, with all the files needed to be uploaded placed
# in it.
+# See below
+include build/environment.mak
+
+# Default rule
+build:
+
DEB_BUILD_PROG:=debuild -us -uc
APT_DEBVER=$(shell dpkg-parsechangelog |sed -n -e '/^Version:/s/^Version: //p')
APT_CONFVER=$(shell sed -n -e 's/^AC_DEFINE_UNQUOTED(VERSION,"\(.*\)")/\1/p' configure.in)
@@ -31,6 +39,7 @@ ifeq ($(words $(BLD)),0)
override BLD := ./build
endif
+# Rebuild configure.in to have the correct version from the change log
ifneq ($(APT_DEBVER),$(APT_CONFVER))
.PHONY: configure.in
configure.in:
@@ -39,27 +48,46 @@ else
configure.in:
endif
+# APT Programs in apt-utils
+APT_UTILS=ftparchive sortpkgs
+
# Uncomment this to turn on verbose mode.
#export DH_VERBOSE=1
# Find the libapt-pkg major version for use in other control files
-export LIBAPT_MAJOR:=$(shell egrep '^MAJOR=' apt-pkg/makefile |cut -d '=' -f 2)
+export LIBAPTPKG_MAJOR:=$(shell egrep '^MAJOR=' apt-pkg/makefile |cut -d '=' -f 2)
+export LIBAPTINST_MAJOR:=$(shell egrep '^MAJOR=' apt-inst/makefile |cut -d '=' -f 2)
debian/shlibs.local: apt-pkg/makefile
- rm -f $@
- echo "libapt-pkg $(LIBAPT_MAJOR) libapt-pkg$(LIBAPT_MAJOR)" >> $@
- echo "libapt-pkg $(LIBAPT_MAJOR) apt $(APT_DEBVER)" >> $@
+ # We have 3 shlibs.local files: one for 'apt', one for 'apt-utils' and
+ # one for the rest of the packages. This ensures that each package gets
+ # the right overrides.
+ rm -rf $@ $@.apt $@.apt-utils
+ echo "libapt-pkg $(LIBAPTPKG_MAJOR)" > $@.apt
+
+ echo "libapt-pkg $(LIBAPTPKG_MAJOR) libapt-pkg$(LIBAPTPKG_MAJOR)" > $@.apt-utils
+ echo "libapt-inst $(LIBAPTINST_MAJOR)" >> $@.apt-utils
-build: build-stamp
-build-stamp: configure
+ echo "libapt-pkg $(LIBAPTPKG_MAJOR) libapt-pkg$(LIBAPTPKG_MAJOR)" > $@
+ echo "libapt-inst $(LIBAPTINST_MAJOR) libapt-inst$(LIBAPTINST_MAJOR)" >> $@
+
+build: build/build-stamp
+
+# Note that this is unconditionally done first as part of loading environment.mak.
+# The true is needed to force make to reload environment.mak after running
+# configure-stamp. Otherwise we can get stale, invalid, or missing config data.
+build/environment.mak: build/configure-stamp
+ @true
+
+configure: configure.in
+build/configure-stamp: configure
dh_testdir
-mkdir build
(cd build; ../configure)
-# cd build && CXXFLAGS="-g -Wall -D_POSIX_C_SOURCE=199309" ../configure --disable-nls --disable-static --prefix=/usr
-# cd build && make all-hdr
-# cd build && make -s
+ touch $@
+build/build-stamp: build/configure-stamp
# Add here commands to compile the package.
- $(MAKE)
+ $(MAKE) -s
touch $@
clean:
@@ -67,7 +95,8 @@ clean:
# dh_testroot
rm -f build-stamp debian/shlibs.local
rm -rf build
-
+ rm -f debian/shlibs.local.apt debian/shlibs.local.apt-utils
+
# Add here commands to clean up after the build process.
-$(MAKE) clean
-$(MAKE) distclean
@@ -84,7 +113,8 @@ libapt-pkg-doc: build debian/shlibs.local
# libapt-pkg-doc install
#
dh_installdocs -p$@ $(BLD)/docs/cache* $(BLD)/docs/design* $(BLD)/docs/dpkg-tech* \
- $(BLD)/docs/files* $(BLD)/docs/method*
+ $(BLD)/docs/files* $(BLD)/docs/method* \
+ doc/libapt-pkg2_to_3.txt doc/style.txt
-cp -a debian/libapt-pkg-doc.dhelp debian/libapt-pkg-doc/usr/doc/libapt-pkg-doc/.dhelp
-cp -a debian/libapt-pkg-doc.dhelp debian/libapt-pkg-doc/usr/share/doc/libapt-pkg-doc/.dhelp
@@ -102,14 +132,14 @@ libapt-pkg-doc: build debian/shlibs.local
dh_fixperms -p$@
# dh_suidregister -p$@
dh_installdeb -p$@
- dh_gencontrol -p$@ -u -Vlibapt-pkg:major=${LIBAPT_MAJOR}
+ dh_gencontrol -p$@ -u -Vlibapt-pkg:major=$(LIBAPTPKG_MAJOR)
dh_md5sums -p$@
dh_builddeb -p$@
# Build architecture-dependent files here.
-binary-arch: apt libapt-pkg-dev
+binary-arch: apt libapt-pkg-dev apt-utils
apt: build debian/shlibs.local
dh_testdir -p$@
dh_testroot -p$@
@@ -120,6 +150,9 @@ apt: build debian/shlibs.local
#
cp $(BLD)/bin/apt-* debian/tmp/usr/bin/
+ # Remove the bits that are in apt-utils
+ rm $(addprefix debian/tmp/usr/bin/apt-,$(APT_UTILS))
+
# install the shared libs
find $(BLD)/bin/ -type f -name "libapt-pkg.so.*" -exec cp -a "{}" debian/tmp/usr/lib/ \;
find $(BLD)/bin/ -type l -name "libapt-pkg.so.*" -exec cp -a "{}" debian/tmp/usr/lib/ \;
@@ -129,26 +162,27 @@ apt: build debian/shlibs.local
cp $(BLD)/scripts/dselect/* debian/tmp/usr/lib/dpkg/methods/apt/
# Copy the guides
- dh_installdocs -p$@ $(BLD)/docs/guide.text $(BLD)/docs/guide.html \
+ dh_installdocs -p$@ $(BLD)/docs/guide*.text $(BLD)/docs/guide*.html \
$(BLD)/docs/offline.text $(BLD)/docs/offline.html
# One or the other..
-cp -a debian/dhelp debian/tmp/usr/doc/apt/.dhelp
- -cp -a debian/dhelp debian/tmp/usr/share/doc/apt/.dhelp
+ -cp -a debian/dhelp debian/tmp/usr/share/doc/apt/.dhelp
# head -n 500 ChangeLog > debian/ChangeLog
dh_installexamples -p$@ $(BLD)/docs/examples/*
- dh_installmanpages -p$@
+ dh_installmanpages -p$@ apt-ftparchive.1 apt-sortpkgs.1
dh_installchangelogs -p$@
dh_strip -p$@
dh_compress -p$@
dh_fixperms -p$@
dh_installdeb -p$@
- LD_LIBRARY_PATH=`pwd`/debian/tmp/usr/lib dh_shlibdeps -papt
- dh_gencontrol -p$@ -u -Vlibapt-pkg:major=${LIBAPT_MAJOR}
- dh_makeshlibs -m${LIBAPT_MAJOR} -Vlibapt-pkg${LIBAPT_MAJOR} -papt
+ dh_shlibdeps -papt -l`pwd`/debian/tmp/usr/lib -- -Ldebian/shlibs.local.apt
+# LD_LIBRARY_PATH=`pwd`/debian/tmp/usr/lib dh_shlibdeps -papt
+ dh_gencontrol -p$@ -u -Vlibapt-pkg:major=$(LIBAPTPKG_MAJOR)
+ dh_makeshlibs -m$(LIBAPTPKG_MAJOR) -V 'libapt-pkg$(LIBAPTPKG_MAJOR)' -papt
dh_md5sums -p$@
dh_builddeb -p$@
@@ -161,7 +195,8 @@ libapt-pkg-dev: build debian/shlibs.local
# libapt-pkg-dev install
#
cp -a $(BLD)/bin/libapt-pkg.so debian/libapt-pkg-dev/usr/lib/
- #ln -s libapt-pkg.so.${LIBAPT_MAJOR} debian/libapt-pkg-dev/usr/lib/libapt-pkg.so
+ cp -a $(BLD)/bin/libapt-inst.so debian/libapt-pkg-dev/usr/lib/
+ #ln -s libapt-pkg.so.$(LIBAPTPKG_MAJOR) debian/libapt-pkg-dev/usr/lib/libapt-pkg.so
cp $(BLD)/include/apt-pkg/*.h debian/libapt-pkg-dev/usr/include/apt-pkg/
dh_installdocs -p$@
@@ -176,11 +211,39 @@ libapt-pkg-dev: build debian/shlibs.local
dh_fixperms -p$@
# dh_suidregister -p$@
dh_installdeb -p$@
- dh_gencontrol -p$@ -u -Vlibapt-pkg:major=${LIBAPT_MAJOR}
+ dh_gencontrol -p$@ -u -Vlibapt-pkg:major=$(LIBAPTPKG_MAJOR)
+ dh_md5sums -p$@
+ dh_builddeb -p$@
+
+apt-utils: build debian/shlibs.local
+ dh_testdir -p$@
+ dh_testroot -p$@
+ dh_clean -p$@ -k
+ dh_installdirs -p$@
+
+ # install the shared libs
+ find $(BLD)/bin/ -type f -name "libapt-inst.so.*" -exec cp -a "{}" debian/$@/usr/lib/ \;
+ find $(BLD)/bin/ -type l -name "libapt-inst.so.*" -exec cp -a "{}" debian/$@/usr/lib/ \;
+
+ cp $(addprefix $(BLD)/bin/apt-,$(APT_UTILS)) debian/$@/usr/bin/
+ dh_installdocs -p$@
+
+ # Install the man pages..
+ mkdir -p debian/$@/usr/share/man/man1
+ cp doc/apt-sortpkgs.1 doc/apt-ftparchive.1 debian/$@/usr/share/man/man1/
+
+ dh_installchangelogs -p$@
+ dh_strip -p$@
+ dh_compress -p$@
+ dh_fixperms -p$@
+ dh_installdeb -p$@
+ LD_LIBRARY_PATH=`pwd`/debian/tmp/usr/lib:`pwd`/debian/$@/usr/lib dh_shlibdeps -p$@ -- -Ldebian/shlibs.local.apt-utils
+ dh_gencontrol -p$@ -u -Vlibapt-inst:major=$(LIBAPTINST_MAJOR)
+ dh_makeshlibs -m$(LIBAPTINST_MAJOR) -V 'libapt-inst$(LIBAPTINST_MAJOR)' -p$@
dh_md5sums -p$@
dh_builddeb -p$@
-source diff:
+source diff:
@echo >&2 'source and diff are obsolete - use dpkg-source -b'; false
# Update from CVS
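The three-way shlibs override introduced in the rules file above is easier to follow with concrete file contents. The sketch below reproduces what the debian/shlibs.local target writes, using made-up major numbers (3.1 and 1.0) instead of the values grepped out of apt-pkg/makefile and apt-inst/makefile; dh_shlibdeps is then pointed at the per-package file with -L, as in the apt and apt-utils targets.

#!/bin/sh
# Illustration only: the major versions here are hypothetical.
LIBAPTPKG_MAJOR=3.1
LIBAPTINST_MAJOR=1.0
# apt ships libapt-pkg itself, so its entry has no dependency field and
# dh_shlibdeps adds no Depends for it.
echo "libapt-pkg $LIBAPTPKG_MAJOR" > shlibs.local.apt
# apt-utils needs a dependency for libapt-pkg (the libapt-pkgX.Y virtual
# package provided by apt) but ships libapt-inst itself.
echo "libapt-pkg $LIBAPTPKG_MAJOR libapt-pkg$LIBAPTPKG_MAJOR" > shlibs.local.apt-utils
echo "libapt-inst $LIBAPTINST_MAJOR" >> shlibs.local.apt-utils
# Everything else gets dependencies on both library packages.
echo "libapt-pkg $LIBAPTPKG_MAJOR libapt-pkg$LIBAPTPKG_MAJOR" > shlibs.local
echo "libapt-inst $LIBAPTINST_MAJOR libapt-inst$LIBAPTINST_MAJOR" >> shlibs.local
for f in shlibs.local.apt shlibs.local.apt-utils shlibs.local; do
    echo "== $f"; cat "$f"
done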
diff --git a/doc/.cvsignore b/doc/.cvsignore
new file mode 100644
index 000000000..22128776f
--- /dev/null
+++ b/doc/.cvsignore
@@ -0,0 +1,12 @@
+apt-cache.8
+apt-get.8
+apt-cdrom.8
+apt.conf.5
+sources.list.5
+apt-config.8
+apt-sortpkgs.1
+apt-ftparchive.1
+manpage.links
+manpage.refs
+manpage.log
+apt_preferences.5
diff --git a/doc/Bugs b/doc/Bugs
index 332abc592..deb7334db 100644
--- a/doc/Bugs
+++ b/doc/Bugs
@@ -26,7 +26,7 @@
Summary: APT does not provide a way to download packages onto a
removable media for another computer
Status: 0.3.0 has substantially better support for this to the point
- that it is doable by using a seperate configuration file and
+ that it is doable by using a separate configuration file and
the -c option
#27601: srange errors from dselect
Summary: Couldn't locate an archive source
diff --git a/doc/apt-cache.8.sgml b/doc/apt-cache.8.sgml
new file mode 100644
index 000000000..79bfa962f
--- /dev/null
+++ b/doc/apt-cache.8.sgml
@@ -0,0 +1,365 @@
+<!-- -*- mode: sgml; mode: fold -*- -->
+<!doctype refentry PUBLIC "-//OASIS//DTD DocBook V3.1//EN" [
+
+<!ENTITY % aptent SYSTEM "apt.ent">
+%aptent;
+
+]>
+
+<refentry>
+ &apt-docinfo;
+
+ <refmeta>
+ <refentrytitle>apt-cache</>
+ <manvolnum>8</>
+ </refmeta>
+
+ <!-- Man page title -->
+ <refnamediv>
+ <refname>apt-cache</>
+ <refpurpose>APT package handling utility -- cache manipulator</>
+ </refnamediv>
+
+ <!-- Arguments -->
+ <refsynopsisdiv>
+ <cmdsynopsis>
+ <command>apt-cache</>
+ <arg><option>-hvs</></arg>
+ <arg><option>-o=<replaceable/config string/</></arg>
+ <arg><option>-c=<replaceable/file/</></arg>
+ <group choice=req>
+ <arg>add <arg choice="plain" rep="repeat"><replaceable>file</replaceable></arg></arg>
+ <arg>gencaches</>
+ <arg>showpkg <arg choice="plain" rep="repeat"><replaceable>pkg</replaceable></arg></arg>
+ <arg>stats</>
+ <arg>dump</>
+ <arg>dumpavail</>
+ <arg>unmet</>
+ <arg>search <arg choice="plain"><replaceable>regex</replaceable></arg></arg>
+ <arg>show <arg choice="plain" rep="repeat"><replaceable>pkg</replaceable></arg></arg>
+ <arg>showpkg <arg choice="plain" rep="repeat"><replaceable>pkg</replaceable></arg></arg>
+ <arg>depends <arg choice="plain" rep="repeat"><replaceable>pkg</replaceable></arg></arg>
+ <arg>pkgnames <arg choice="plain"><replaceable>prefix</replaceable></arg></arg>
+ <arg>dotty <arg choice="plain" rep="repeat"><replaceable>pkg</replaceable></arg></arg>
+ </group>
+ </cmdsynopsis>
+ </refsynopsisdiv>
+
+ <RefSect1><Title>Description</>
+ <para>
+ <command/apt-cache/ performs a variety of operations on APT's package
+ cache. <command/apt-cache/ does not manipulate the state of the system
+ but does provide operations to search and generate interesting output
+ from the package metadata.
+
+ <para>
+ Unless the <option/-h/, or <option/--help/ option is given one of the
+ above commands must be present.
+
+ <VariableList>
+ <VarListEntry><Term>add</Term>
+ <ListItem><Para>
+ <literal/add/ adds the named package index files to the package cache.
+ </VarListEntry>
+
+ <VarListEntry><Term>gencaches</Term>
+ <ListItem><Para>
+ <literal/gencaches/ performs the same operation as
+ <command/apt-get check/. It builds the source and package caches from
+ the sources in &sources-list; and from <filename>/var/lib/dpkg/status</>.
+ </VarListEntry>
+
+ <VarListEntry><Term>showpkg</Term>
+ <ListItem><Para>
+ <literal/showpkg/ displays information about the packages listed on the
+ command line. Remaining arguments are package names. The available
+ versions and reverse dependencies of each package listed are listed, as
+ well as forward dependencies for each version. Forward (normal)
+ dependencies are those packages upon which the package in question
+ depends; reverse dependencies are those packages that depend upon the
+ package in question. Thus, forward dependencies must be satisfied for a
+ package, but reverse dependencies need not be.
+ For instance, <command>apt-cache showpkg libreadline2</> would produce
+ output similar to the following:
+
+<informalexample><programlisting>
+Package: libreadline2
+Versions: 2.1-12(/var/state/apt/lists/foo_Packages),
+Reverse Depends:
+ libreadlineg2,libreadline2
+ libreadline2-altdev,libreadline2
+Dependencies:
+2.1-12 - libc5 (2 5.4.0-0) ncurses3.0 (0 (null))
+Provides:
+2.1-12 -
+Reverse Provides:
+</programlisting></informalexample>
+
+ <para>
+ Thus it may be seen that libreadline2, version 2.1-12, depends on libc5 and
+ ncurses3.0, which must be installed for libreadline2 to work.
+ In turn, libreadlineg2 and libreadline2-altdev depend on libreadline2. If
+ libreadline2 is installed, libc5 and ncurses3.0 must also be
+ installed; libreadlineg2 and libreadline2-altdev do not have to be
+ installed. For the specific meaning of the remainder of the output it
+ is best to consult the apt source code.
+ </VarListEntry>
+
+ <VarListEntry><Term>stats</Term>
+ <ListItem><Para>
+ <literal/stats/ displays some statistics about the cache.
+ No further arguments are expected. Statistics reported are:
+ <itemizedlist>
+ <listitem><para>
+ <literal/Total package names/ is the number of package names found
+ in the cache.
+ </listitem>
+
+ <listitem><para>
+ <literal/Normal packages/ is the number of regular, ordinary package
+ names; these are packages that bear a one-to-one correspondence between
+ their names and the names used by other packages for them in
+ dependencies. The majority of packages fall into this category.
+ </listitem>
+
+ <listitem><para>
+ <literal/Pure virtual packages/ is the number of packages that exist
+ only as a virtual package name; that is, packages only "provide" the
+ virtual package name, and no package actually uses the name. For
+ instance, "mail-transport-agent" in the Debian GNU/Linux system is a
+ pure virtual package; several packages provide "mail-transport-agent",
+ but there is no package named "mail-transport-agent".
+ </listitem>
+
+ <listitem><para>
+ <literal/Single virtual packages/ is the number of packages with only
+ one package providing a particular virtual package. For example, in the
+ Debian GNU/Linux system, "X11-text-viewer" is a virtual package, but
+ only one package, xless, provides "X11-text-viewer".
+ </listitem>
+
+ <listitem><para>
+ <literal/Mixed virtual packages/ is the number of packages that either
+ provide a particular virtual package or have the virtual package name
+ as the package name. For instance, in the Debian GNU/Linux system,
+ debconf is both an actual package, and provided by the debconf-tiny
+ package.
+ </listitem>
+
+ <listitem><para>
+ <literal/Missing/ is the number of package names that were referenced in
+ a dependency but were not provided by any package. Missing packages may
+ be in evidence if a full distribution is not accessed, or if a package
+ (real or virtual) has been dropped from the distribution. Usually they
+ are referenced from Conflicts statements.
+ </listitem>
+
+ <listitem><para>
+ <literal/Total distinct/ versions is the number of package versions
+ found in the cache; this value is therefore at least equal to the
+ number of total package names. If more than one distribution (both
+ "stable" and "unstable", for instance), is being accessed, this value
+ can be considerably larger than the number of total package names.
+ </listitem>
+
+ <listitem><para>
+ <literal/Total dependencies/ is the number of dependency relationships
+ claimed by all of the packages in the cache.
+ </listitem>
+ </itemizedlist>
+ </VarListEntry>
+
+ <VarListEntry><Term>dump</Term>
+ <ListItem><Para>
+ <literal/dump/ shows a short listing of every package in the cache. It is
+ primarily for debugging.
+ </VarListEntry>
+
+ <VarListEntry><Term>dumpavail</Term>
+ <ListItem><Para>
+ <literal/dumpavail/ prints out an available list to stdout. This is
+ suitable for use with &dpkg; and is used by the &dselect; method.
+ </VarListEntry>
+
+ <VarListEntry><Term>unmet</Term>
+ <ListItem><Para>
+ <literal/unmet/ displays a summary of all unmet dependencies in the
+ package cache.
+ </VarListEntry>
+
+ <VarListEntry><Term>show</Term>
+ <ListItem><Para>
+ <literal/show/ performs a function similar to
+ <command>dpkg --print-avail</>; it displays the package records for the
+ named packages.
+ </VarListEntry>
+
+ <VarListEntry><Term>search</Term>
+ <ListItem><Para>
+ <literal/search/ performs a full text search on all available package
+ files for the regex pattern given. It searches the package names and the
+ descriptions for an occurrence of the string and prints out the package
+ name and the short description. If <option/--full/ is given then output
+ identical to <literal/show/ is produced for each matched package and
+ if <option/--names-only/ is given then the long description is not
+ searched, only the package name is.
+ <para>
+ Separate arguments can be used to specify multiple search patterns that
+ are or'd together.
+ </VarListEntry>
+
+ <VarListEntry><Term>depends</Term>
+ <ListItem><Para>
+ <literal/depends/ shows a listing of each dependency a package has
+ and all the possible other packages that can fulfill that dependency.
+ </VarListEntry>
+
+ <VarListEntry><Term>pkgnames</Term>
+ <ListItem><Para>
+ This command prints the name of each package in the system. The optional
+ argument is a prefix match to filter the name list. The output is suitable
+ for use in a shell tab complete function and is generated
+ extremely quickly. This command is best used with the
+ <option/--generate/ option.
+ </VarListEntry>
+
+ <VarListEntry><Term>dotty</Term>
+ <ListItem><Para>
+ <literal/dotty/ takes a list of packages on the command line and
+ generates output suitable for use by dotty from the
+ <ulink url="http://www.research.att.com/sw/tools/graphviz/">GraphViz</>
+ package. The result will be a set of nodes and edges representing the
+ relationships between the packages. By default the given packages will
+ trace out all dependent packages which can produce a very large graph.
+ This can be turned off by setting the
+ <literal>APT::Cache::GivenOnly</> option.
+
+ <para>
+ The resulting nodes will have several shapes: normal packages are boxes,
+ pure provides are triangles, mixed provides are diamonds,
+ hexagons are missing packages. Orange boxes mean recursion was stopped
+ [leaf packages], blue lines are pre-depends, green lines are conflicts.
+
+ <para>
+ Caution, dotty cannot graph larger sets of packages.
+ </VarListEntry>
+ </VariableList>
+ </RefSect1>
+
+ <RefSect1><Title>Options</>
+ &apt-cmdblurb;
+
+ <VariableList>
+ <VarListEntry><term><option/-p/</><term><option/--pkg-cache/</>
+ <ListItem><Para>
+ Select the file to store the package cache. The package cache is the
+ primary cache used by all operations.
+ Configuration Item: <literal/Dir::Cache::pkgcache/.
+ </VarListEntry>
+
+ <VarListEntry><term><option/-s/</><term><option/--src-cache/</>
+ <ListItem><Para>
+ Select the file to store the source cache. The source is used only by
+ <literal/gencaches/ and it stores a parsed version of the package
+ information from remote sources. When building the package cache the
+ source cache is used to avoid reparsing all of the package files.
+ Configuration Item: <literal/Dir::Cache::srcpkgcache/.
+ </VarListEntry>
+
+ <VarListEntry><term><option/-q/</><term><option/--quiet/</>
+ <ListItem><Para>
+ Quiet; produces output suitable for logging, omitting progress indicators.
+ More q's will produce more quiet up to a maximum of 2. You can also use
+ <option/-q=#/ to set the quiet level, overriding the configuration file.
+ Configuration Item: <literal/quiet/.
+ </VarListEntry>
+
+ <VarListEntry><term><option/-i/</><term><option/--important/</>
+ <ListItem><Para>
+ Print only important deps; for use with unmet, this causes only Depends and
+ Pre-Depends relations to be printed.
+ Configuration Item: <literal/APT::Cache::Important/.
+ </VarListEntry>
+
+ <VarListEntry><term><option/-f/</><term><option/--full/</>
+ <ListItem><Para>
+ Print full package records when searching.
+ Configuration Item: <literal/APT::Cache::ShowFull/.
+ </VarListEntry>
+
+ <VarListEntry><term><option/-a/</><term><option/--all-versions/</>
+ <ListItem><Para>
+ Print full records for all available versions; this is only applicable to
+ the show command.
+ Configuration Item: <literal/APT::Cache::AllVersions/.
+ </VarListEntry>
+
+ <VarListEntry><term><option/-g/</><term><option/--generate/</>
+ <ListItem><Para>
+ Perform automatic package cache regeneration, rather than use the cache
+ as it is. This is the default, to turn it off use <option/--no-generate/.
+ Configuration Item: <literal/APT::Cache::Generate/.
+ </VarListEntry>
+
+ <VarListEntry><term><option/--names-only/</>
+ <ListItem><Para>
+ Only search on the package names, not the long description.
+ Configuration Item: <literal/APT::Cache::NamesOnly/.
+ </VarListEntry>
+
+ <VarListEntry><term><option/--all-names/</>
+ <ListItem><Para>
+ Make <literal/pkgnames/ print all names, including virtual packages
+ and missing dependencies.
+ Configuration Item: <literal/APT::Cache::AllNames/.
+ </VarListEntry>
+
+ <VarListEntry><term><option/--recurse/</>
+ <ListItem><Para>
+ Make <literal/depends/ recursive so that all packages mentioned are
+ printed once.
+ Configuration Item: <literal/APT::Cache::RecurseDepends/.
+ </VarListEntry>
+
+ &apt-commonoptions;
+
+ </VariableList>
+ </RefSect1>
+
+ <RefSect1><Title>Files</>
+ <variablelist>
+ <VarListEntry><term><filename>/etc/apt/sources.list</></term>
+ <ListItem><Para>
+ locations to fetch packages from.
+ Configuration Item: <literal/Dir::Etc::SourceList/.
+ </VarListEntry>
+
+ <VarListEntry><term><filename>&statedir;/lists/</></term>
+ <ListItem><Para>
+ storage area for state information for each package resource specified in
+ &sources-list;
+ Configuration Item: <literal/Dir::State::Lists/.
+ </VarListEntry>
+
+ <VarListEntry><term><filename>&statedir;/lists/partial/</></term>
+ <ListItem><Para>
+ storage area for state information in transit.
+ Configuration Item: <literal/Dir::State::Lists/ (implicit partial).
+ </VarListEntry>
+ </variablelist>
+ </RefSect1>
+
+ <RefSect1><Title>See Also</>
+ <para>
+ &apt-conf;, &sources-list;, &apt-get;
+ </RefSect1>
+
+ <RefSect1><Title>Diagnostics</>
+ <para>
+ <command/apt-cache/ returns zero on normal operation, decimal 100 on error.
+ </RefSect1>
+
+ &manbugs;
+ &manauthor;
+
+</refentry>
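A few illustrative invocations of the commands documented in this man page (not part of the page itself; they assume apt-cache is installed and a cache has been generated, and the output depends entirely on the local package lists):

#!/bin/sh
# pkgnames does a fast prefix match, which is what makes it usable from
# a shell tab-completion function.
apt-cache pkgnames lib | head -n 5
# regex search restricted to package names, as described for --names-only
apt-cache --names-only search '^apt-'
# dependency listing for a single package
apt-cache depends apt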
diff --git a/doc/apt-cache.8.yo b/doc/apt-cache.8.yo
deleted file mode 100644
index 5ce5c48a9..000000000
--- a/doc/apt-cache.8.yo
+++ /dev/null
@@ -1,280 +0,0 @@
-mailto(apt@packages.debian.org)
-manpage(apt-cache)(8)(4 Dec 1998)(apt)()
-manpagename(apt-cache)(APT package handling utility -- cache manipulator)
-
-manpagesynopsis()
-apt-cache command [argument ...]
-
-manpagedescription()
-bf(apt-cache) performs a variety of operations on APT's package cache.
-bf(apt-cache) is seldom called directly; instead its operations are
-performed automatically by the other bf(apt) utilities.
-
-em(command) is one of:
-itemize(
- it() add file1 [file2] [...]
- it() gencaches
- it() showpkg package1 [package2] [...]
- it() stats
- it() dump
- it() dumpavail
- it() unmet
- it() check
- it() search
- it() show
- it() showpkg
- it() depends
- it() pkgnames
- it() dotty
-)
-
-Unless the -h, or --help option is given one of the above commands
-must be present.
-
-startdit()
-dit(bf(add))
-bf(add) adds the names package index files to the package cache.
-
-dit(bf(gencaches))
-bf(gencaches) performs the same opration as bf(apt-get check). It builds
-the source and package caches from thes sources in bf(/etc/apt/sources.list)
-and from bf(/var/lib/dpkg/status).
-
-dit(bf(showpkg))
-bf(showpkg) displays information about the packages listed on the
-command line. Remaining arguments are package names. The available versions
-and reverse dependencies of each package listed are listed, as well as
-forward dependencies for each version. Forward (normal) dependencies
-are those packages upon which the package in question depends; reverse
-dependencies are those packages that depend upon the package in
-question. Thus, forward dependencies must be satisfied for a package,
-but reverse dependencies need not be.
-For instance, bf(apt-cache showpkg libreadline2) would produce output similar
-to the following:
-
-verb(
-Package: libreadline2
-
-Versions:
-
-2.1-12(/var/state/apt/lists/debian.midco.net_debian_dists_slink_main_binary-i386_Packages),
-
-Reverse Depends:
-
- libreadlineg2,libreadline2
-
- libreadline2-altdev,libreadline2
-Dependencies:
-
-2.1-12 - libc5 (2 5.4.0-0) ncurses3.0 (0 (null)) ldso (2 1.9.0-1)
-
-Provides:
-
-2.1-12 -
-
-Reverse Provides:
-)
-
-Thus it may be seen that libreadline2, version 2.1-8, depends on libc5,
-ncurses3.0, and ldso, which must be installed for libreadline2 to work. In
-turn, libreadlineg2 and libreadline2-altdev depend on libreadline2. If
-libreadline2 is installed, libc5, ncurses3.0, and ldso must also be
-installed; libreadlineg2 and libreadline2-altdev do not have to be
-installed. For the specific meaning of the remainder of the output it
-is best to consult the apt source code.
-
-dit(bf(stats))
-bf(stats) displays some statistics about bf(cache).
-No further arguments are expected. Statistics reported are:
-itemize(
- it() bf(Total package names) is the number of package names found in the cache.
-
- it() bf(Normal packages) is the number of regular, ordinary package names; these
- are packages that bear a one-to-one correspondence between their names and
- the names used by other packages for them in dependencies. The majority of
- packages fall into this category.
-
- it() bf(Pure virtual packages) is the number of packages that exist only as
- a virtual package name; that is, packages only "provide" the virtual
- package name, and no package actually uses the name. For instance,
- "mail-transport-agent" in the Debian GNU/Linux system is a pure virtual
- package; several packages provide "mail-transport-agent", but there is no
- package named "mail-transport-agent".
-
- it() bf(Single virtual packages) is the number of packages with only one
- package providing a particular virtual package. For example, in the
- Debian GNU/Linux system, "X11-text-viewer" is a virtual package, but only
- one package, xless, provides "X11-text-viewer".
-
- it() bf(Mixed virtual packages) is the number of packages that either provide
- a particular virtual package or have the virtual package name as the
- package name. For instance, in the Debian GNU/Linux system, e2fsprogs is
- both an actual package, and provided by the e2compr package.
-
- it() bf(Missing) is the number of package names that were referenced in a
- dependency but were not provided by any package. Missing packages may be
- in evidence if a full distribution is not accesssed, or if a package
- (real or virtual) has been dropped from the distribution.
-
- it() bf(Total distinct) versions is the number of package versions found in
- the cache; this value is therefore at least equal to the number of total
- package names. If more than one distribution (both "stable" and "unstable",
- for instance), is being accessed, this value can be considerably larger
- than the number of total package names.
-
- it() bf(Total dependencies) is the number of dependency relationships claimed
- by all of the packages in the cache.
-)
-
-dit(bf(dump))
-bf(dump) shows a short listing of every package in the cache. It is primarily
-for debugging.
-
-dit(bf(dumpavail))
-bf(dumpavail) prints out an available list to stdout. This is suitable for use
-with bf(dpkg) and is used by the bf(dselect) method.
-
-dit(bf(unmet))
-bf(unmet) displays a summary of all unmet dependencies in the package cache.
-
-dit(bf(check))
-bf(check) is a random function for testing certain aspects of the cache.
-Do not use it.
-
-dit(bf(showpkg))
-bf(showpkg) displays a listing of the given package cache structure and some
-related information about it. The list is meant primarily for debugging.
-
-dit(bf(show))
-bf(show) performs a function similar to dpkg --print-avail, it displays
-the package records for the named packages.
-
-dit(bf(search))
-bf(search) performs a full text search on all available package files for
-the pattern given. It searchs the package names and the descriptions for
-an occurance of the string and prints out the package name and the short
-description. If --full is given then output identical to bf(show) is produced
-for each matched package and if --names-only is given then the long
-description is not searched, only the package name is.
-
-dit(bf(depends))
-bf(depends) shows a listing of each dependency a package has and all
-the possible other packages that can fullfill that dependency.
-
-dit(bf(pkgnames))
-This command prints the name of each package in the system. The optional
-argument is a prefix match to filter the name list. The output is suitable
-for use in a shell tab complete function and the output is generated extremly
-quickly. This command is best used with the bf(--no-generate) option.
-
-dit(bf(dotty))
-bf(dotty) Takes a list of packages on the command line and gernerates output
-suitable for use by dotty from the GraphVis
-(http://www.research.att.com/sw/tools/graphviz/) package. The result will be
-a set of nodes and edges representing the relationships between the
-packages. By default the given packages will trace out all dependent packages
-which can produce a very large graph. This can be turned off by setting the
-APT::Cache::GivenOnly option.
-
-The resulting nodes will have several shapse, normal packages are boxes,
-pure provides are triangles, mixed provides are diamonds,
-hexagons are missing packages. Orange boxes mean recursion was stopped
-[leaf packages], blue lines are prre-depends, green lines are conflicts.
-
-Caution, dotty cannot graph larger sets of packages.
-
-enddit()
-
-manpageoptions()
-All command line options may be set using the configuration file, the
-descriptions indicate the configuration option to set. For boolean
-options you can override the config file by using something like bf(-f-),
-bf(--no-f), bf(-f=no) or several other variations.
-
-startdit()
-dit(bf(-h, --help))
-Show a short usage summary.
-
-dit(bf(-v, --version))
-Show the program verison.
-
-dit(bf(-p --pkg-cache))
-Select the file to store the package cache. The package cache is the primary
-cache used by all operations.
-Configuration Item: bf(Dir::Cache::pkgcache).
-
-dit(bf(-s --src-cache))
-Select the file to store the source cache. The source is used only by
-bf(gencaches) and it stores a parsed version of the package information from
-remote sources. When building the package cache the source cache is used
-to advoid reparsing all of the package files.
-Configuration Item: bf(Dir::Cache::srcpkgcache).
-
-dit(bf(-q, --quiet))
-Quiet; produces output suitable for logging, omitting progress indicators.
-More qs will produce more quite up to a maximum of 2. You can also use
-bf(-q=#) to set the quiet level, overriding the configuration file.
-Configuration Item: bf(quiet).
-
-dit(bf(-i --important))
-Print only important deps; for use with unmet causes only em(Depends) and
-em(Pre-Depends) relations to be printed.
-Configuration Item: bf(APT::Cache::Important).
-
-dit(bf(-f --full))
-Print full package records when searching. Configuration Item: bf(APT::Cache::ShowFull).
-
-dit(bf(-a --all-versions))
-Print full records for all available versions, this is only applicable to the
-show command. Configuration Item: bf(APT::Cache::AllVersions)
-
-dit(bf(-g --no-generate))
-Do not perform automatic package cache regeneration, use the cache as it is.
-Configuration Item: bf(APT::Cache::NoGenerate).
-
-dit(bf(--names-only))
-Only search on the package names, not the long description.
-Configuration Item: bf(APT::Cache::NamesOnly).
-
-dit(bf(--all-names))
-Make bf(pkgnames) print all names, including virtual packages and missing
-dependencies. Configuration Item: bf(APT::Cache::AllNames).
-
-dit(bf(-c, --config-file))
-Configuration File; Specify a configuration file to use. bf(apt-get) will
-read the default configuration file and then this configuration file. See
-bf(apt.conf(5)) for syntax information.
-
-dit(bf(-o, --option))
-Set a Configuration Option; This will set an arbitary configuration option.
-The syntax is
-verb(-o Foo::Bar=bar)
-enddit()
-
-manpagefiles()
-itemize(
- it() /etc/apt/sources.list
- locations to fetch packages from
-
- it() /var/state/apt/lists/
- storage area for state information for each package resource specified in
-
- it() /var/state/apt/lists/partial/
- storage area for state information in transit
-)
-
-manpageseealso()
-apt-get(8),
-sources.list(5),
-apt.conf(5)
-
-manpagediagnostics()
-apt-cache returns zero on normal operation, decimal 100 on error.
-
-manpagebugs()
-See http://bugs.debian.org/apt. If you wish to report a
-bug in bf(apt-cache), please see bf(/usr/doc/debian/bug-reporting.txt)
-or the bf(bug(1)) command.
-
-manpageauthor()
-apt-get was written by the APT team <apt@packages.debian.org>.
diff --git a/doc/apt-cdrom.8.sgml b/doc/apt-cdrom.8.sgml
new file mode 100644
index 000000000..414be4c09
--- /dev/null
+++ b/doc/apt-cdrom.8.sgml
@@ -0,0 +1,146 @@
+<!-- -*- mode: sgml; mode: fold -*- -->
+<!doctype refentry PUBLIC "-//OASIS//DTD DocBook V3.1//EN" [
+
+<!ENTITY % aptent SYSTEM "apt.ent">
+%aptent;
+
+]>
+
+<refentry>
+ &apt-docinfo;
+
+ <refmeta>
+ <refentrytitle>apt-cdrom</>
+ <manvolnum>8</>
+ </refmeta>
+
+ <!-- Man page title -->
+ <refnamediv>
+ <refname>apt-cdrom</>
+ <refpurpose>APT CDROM management utility</>
+ </refnamediv>
+
+ <!-- Arguments -->
+ <refsynopsisdiv>
+ <cmdsynopsis>
+ <command>apt-cdrom</>
+ <arg><option>-hvrmfan</></arg>
+ <arg><option>-d=<replaceable/cdrom mount point/</></arg>
+ <arg><option>-o=<replaceable/config string/</></arg>
+ <arg><option>-c=<replaceable/file/</></arg>
+ <group choice=req>
+ <arg>add</>
+ <arg>ident</>
+ </group>
+ </cmdsynopsis>
+ </refsynopsisdiv>
+
+ <RefSect1><Title>Description</>
+ <para>
+ <command/apt-cdrom/ is used to add a new CDROM to APT's list of available
+ sources. <command/apt-cdrom/ takes care of determining the structure of
+ the disc as well as correcting for several possible mis-burns and
+ verifying the index files.
+ <para>
+ It is necessary to use <command/apt-cdrom/ to add CDs to the APT system;
+ it cannot be done by hand. Furthermore, each disk in a multi-CD set must be
+ inserted and scanned separately to account for possible mis-burns.
+ <para>
+ Unless the <option/-h/, or <option/--help/ option is given one of the
+ above commands must be present.
+
+ <VariableList>
+ <VarListEntry><Term>add</Term>
+ <ListItem><Para>
+ <literal/add/ is used to add a new disc to the source list. It will unmount the
+ CDROM device, prompt for a disk to be inserted and then proceed to
+ scan it and copy the index files. If the disc does not have a proper
+ <filename>.disk/</> directory you will be prompted for a descriptive
+ title.
+
+ <para>
+ APT uses a CDROM ID to track which disc is currently in the drive and
+ maintains a database of these IDs in
+ <filename>&statedir;/cdroms.list</>
+ </VarListEntry>
+
+ <VarListEntry><Term>ident</Term>
+ <ListItem><Para>
+ A debugging tool to report the identity of the current disc as well
+ as the stored file name
+ </VarListEntry>
+ </VariableList>
+ </RefSect1>
+
+ <RefSect1><Title>Options</>
+ &apt-cmdblurb;
+
+ <VariableList>
+ <VarListEntry><term><option/-d/</><term><option/--cdrom/</>
+ <ListItem><Para>
+ Mount point; specify the location to mount the cdrom. This mount
+ point must be listed in <filename>/etc/fstab</> and properly configured.
+ Configuration Item: <literal/Acquire::cdrom::mount/.
+ </VarListEntry>
+
+ <VarListEntry><term><option/-r/</><term><option/--rename/</>
+ <ListItem><Para>
+ Rename a disc; change the label of a disk or override the disk's
+ given label. This option will cause <command/apt-cdrom/ to prompt for
+ a new label.
+ Configuration Item: <literal/APT::CDROM::Rename/.
+ </VarListEntry>
+
+ <VarListEntry><term><option/-m/</><term><option/--no-mount/</>
+ <ListItem><Para>
+ No mounting; prevent <command/apt-cdrom/ from mounting and unmounting
+ the mount point.
+ Configuration Item: <literal/APT::CDROM::NoMount/.
+ </VarListEntry>
+
+ <VarListEntry><term><option/-f/</><term><option/--fast/</>
+ <ListItem><Para>
+ Fast Copy; Assume the package files are valid and do not check
+ every package. This option should be used only if
+ <command/apt-cdrom/ has been run on this disc before and did not detect
+ any errors.
+ Configuration Item: <literal/APT::CDROM::Fast/.
+ </VarListEntry>
+
+ <VarListEntry><term><option/-a/</><term><option/--thorough/</>
+ <ListItem><Para>
+ Thorough Package Scan; This option may be needed with some old
+ Debian 1.1/1.2 discs that have Package files in strange places. It
+ takes much longer to scan the CD but will pick them all up.
+ </VarListEntry>
+
+ <VarListEntry><term><option/-n/</>
+ <term><option/--just-print/</>
+ <term><option/--recon/</>
+ <term><option/--no-act/</>
+ <ListItem><Para>
+ No Changes; Do not change the &sources-list; file and do not
+ write index files. Everything is still checked however.
+ Configuration Item: <literal/APT::CDROM::NoAct/.
+ </VarListEntry>
+
+ &apt-commonoptions;
+
+ </VariableList>
+ </RefSect1>
+
+ <RefSect1><Title>See Also</>
+ <para>
+ &apt-conf;, &apt-get;, &sources-list;
+ </RefSect1>
+
+ <RefSect1><Title>Diagnostics</>
+ <para>
+ <command/apt-cdrom/ returns zero on normal operation, decimal 100 on error.
+ </RefSect1>
+
+ &manbugs;
+ &manauthor;
+
+</refentry>
+
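For completeness, the two documented apt-cdrom commands in a typical session; the mount point is only an example and has to match an /etc/fstab entry, and both commands need root and a disc in the drive:

#!/bin/sh
# Scan a newly inserted disc and record its index files
apt-cdrom -d=/cdrom add
# Report the ID and stored name of whatever disc is currently in the drive
apt-cdrom ident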
diff --git a/doc/apt-cdrom.8.yo b/doc/apt-cdrom.8.yo
deleted file mode 100644
index 9d5b598a2..000000000
--- a/doc/apt-cdrom.8.yo
+++ /dev/null
@@ -1,120 +0,0 @@
-mailto(apt@packages.debian.org)
-manpage(apt-cdrom)(8)(4 Dec 1998)(apt)()
-manpagename(apt-cdrom)(APT CDROM managment utility)
-
-manpagesynopsis()
-apt-cdrom command
-
-manpagedescription()
-bf(apt-cdrom) is used to add a new CDROM to APTs list of available sources.
-bf(apt-cdrom) takes care of determining the structure of the disc as well
-as correcting for several possible mis-burns and verifying the index files.
-It is necessary to use bf(apt-cdrom) to add CDs to the APT system, it cannot
-be done by hand. Furthermore each disk in a multi-cd set must be inserted
-and scanned seperately to account for possible mis-burns.
-
-em(command) is one of:
-itemize(
- it() add
-)
-
-Unless the -h, or --help option is given one of the above commands
-must be present.
-
-startdit()
-dit(bf(add))
-bf(add) is used to add a new disc to the source list. It will unmount the
-CDROM device, prompt for a disk to be inserted and then procceed to scan it
-and copy the index files. If the disc does not have a proper bf(.disk/)
-directory you will be prompted for a descriptive title.
-
-APT uses a CDROM ID to track which disc is currently in the drive and
-maintains a database of these IDs in bf(/var/state/apt/cdroms.list)
-
-enddit()
-
-manpageoptions()
-All command line options may be set using the configuration file, the
-descriptions indicate the configuration option to set. For boolean
-options you can override the config file by using something like bf(-f-),
-bf(--no-f), bf(-f=no) or several other variations.
-
-startdit()
-dit(bf(-h, --help))
-Show a short usage summary.
-
-dit(bf(-v, --version))
-Show the program verison.
-
-dit(bf(-d --cdrom))
-Mount point; specify the location to mount the cdrom. This mount point must
-be listed in bf(/etc/fstab) and propely configured.
-Configuration Item: bf(Acquire::cdrom::mount).
-
-dit(bf(-r --rename))
-Rename a disc; change the label of a disk or override the disks given label.
-This option will cause bf(apt-cdrom) to prompt for a new label
-Configuration Item: bf(APT::CDROM::Rename).
-
-dit(bf(-m, --no-mount))
-No mounting; prevent bf(apt-cdrom) from mounting and unmounting the mount
-point.
-Configuration Item: bf(APT::CDROM::NoMount).
-
-dit(bf(-f, --fast))
-Fast Copy; Assume the package files are valid and do not check every package.
-This option should be used only if bf(apt-cdrom) has been run on this disc
-before and did not detect any errors.
-Configuration Item: bf(APT::CDROM::Fast).
-
-dit(bf(-a, --thorough))
-Thorough Package Scan; This option may be needed with some old Debian 1.1/1.2
-burns that have Package files in strange places. It takes much longer to
-scan the CD but will pick them all up.
-
-dit(bf(-n --just-print, --recon, --no-act))
-No Changes; Do not change the sources.list and do not write package files.
-Everything is still checked however.
-Configuration Item: bf(APT::CDROM::NoAct).
-
-dit(bf(-c, --config-file))
-Configuration File; Specify a configuration file to use. bf(apt-get) will
-read the default configuration file and then this configuration file. See
-bf(apt.conf(5)) for syntax information.
-
-dit(bf(-o, --option))
-Set a Configuration Option; This will set an arbitary configuration option.
-The syntax is
-verb(-o Foo::Bar=bar)
-enddit()
-
-manpagefiles()
-itemize(
- it() /etc/apt/sources.list
- locations to fetch packages from
-
- it() /var/state/apt/lists/
- storage area for state information for each package resource specified in
-
- it() /var/state/apt/lists/partial/
- storage area for state information in transit
-
- it() /var/state/apt/cdroms.list
- list of cdrom IDs and names.
-)
-
-manpageseealso()
-apt-get(8),
-sources.list(5),
-apt.conf(5)
-
-manpagediagnostics()
-apt-cdrom returns zero on normal operation, decimal 100 on error.
-
-manpagebugs()
-See http://bugs.debian.org/apt. If you wish to report a
-bug in bf(apt-cache), please see bf(/usr/doc/debian/bug-reporting.txt)
-or the bf(bug(1)) command.
-
-manpageauthor()
-apt-get was written by the APT team <apt@packages.debian.org>.
diff --git a/doc/apt-config.8.sgml b/doc/apt-config.8.sgml
new file mode 100644
index 000000000..0f5324e0a
--- /dev/null
+++ b/doc/apt-config.8.sgml
@@ -0,0 +1,105 @@
+<!-- -*- mode: sgml; mode: fold -*- -->
+<!doctype refentry PUBLIC "-//OASIS//DTD DocBook V3.1//EN" [
+
+<!ENTITY % aptent SYSTEM "apt.ent">
+%aptent;
+
+]>
+
+<refentry>
+ &apt-docinfo;
+
+ <refmeta>
+ <refentrytitle>apt-config</>
+ <manvolnum>8</>
+ </refmeta>
+
+ <!-- Man page title -->
+ <refnamediv>
+ <refname>apt-config</>
+ <refpurpose>APT Configuration Query program</>
+ </refnamediv>
+
+ <!-- Arguments -->
+ <refsynopsisdiv>
+ <cmdsynopsis>
+ <command>apt-config</>
+ <arg><option>-hv</></arg>
+ <arg><option>-o=<replaceable/config string/</></arg>
+ <arg><option>-c=<replaceable/file/</></arg>
+ <group choice=req>
+ <arg>shell</>
+ <arg>dump</>
+ </group>
+ </cmdsynopsis>
+ </refsynopsisdiv>
+
+ <RefSect1><Title>Description</>
+ <para>
+ <command/apt-config/ is an internal program used by various portions of
+ the APT suite to provide consistent configurability. It accesses the main
+ configuration file <filename>/etc/apt/apt.conf</> in a manner that is
+ easy to use by scripted applications.
+ <para>
+ Unless the <option/-h/, or <option/--help/ option is given one of the above commands
+ must be present.
+ </para>
+
+ <VariableList>
+ <VarListEntry><Term>shell</Term>
+ <ListItem><Para>
+ shell is used to access the configuration information from a shell
+ script. It is given pairs of arguments, the first being a shell
+ variable and the second the configuration value to query. As output
+ it lists a series of shell assignment commands for each present value.
+ In a shell script it should be used like:
+ </para>
+
+<informalexample><programlisting>
+OPTS="-f"
+RES=`apt-config shell OPTS MyApp::Options`
+eval $RES
+</programlisting></informalexample>
+
+ <para>
+ This will set the shell environment variable $OPTS to the value of
+ MyApp::Options with a default of <option/-f/.
+
+ <para>
+ The configuration item may be postfixed with a /[fdbi]. f returns file
+ names, d returns directories, b returns true or false and i returns an
+ integer. Each of the returns is normalized and verified internally.
+ </VarListEntry>
+
+ <VarListEntry><Term>dump</Term>
+ <ListItem><Para>
+ Just show the contents of the configuration space.
+ </VarListEntry>
+
+ </VariableList>
+ </RefSect1>
+
+ <RefSect1><Title>Options</>
+ &apt-cmdblurb;
+
+ <VariableList>
+
+ &apt-commonoptions;
+
+ </VariableList>
+ </RefSect1>
+
+ <RefSect1><Title>See Also</>
+ <para>
+ &apt-conf;
+ </RefSect1>
+
+ <RefSect1><Title>Diagnostics</>
+ <para>
+ <command/apt-config/ returns zero on normal operation, decimal 100 on error.
+ </RefSect1>
+
+ &manbugs;
+ &manauthor;
+
+</refentry>
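A short sketch of the /[fdbi] postfix described above; the shell variable names are arbitrary and the printed values depend on the local apt.conf:

#!/bin/sh
# Ask for Dir::Cache as a directory (/d) and APT::Cache::Generate as a bool (/b).
eval `apt-config shell CACHEDIR Dir::Cache/d GENERATE APT::Cache::Generate/b`
echo "cache directory: $CACHEDIR"
echo "generate cache:  $GENERATE"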
diff --git a/doc/apt-config.8.yo b/doc/apt-config.8.yo
deleted file mode 100644
index 809c790c2..000000000
--- a/doc/apt-config.8.yo
+++ /dev/null
@@ -1,86 +0,0 @@
-mailto(apt@packages.debian.org)
-manpage(apt-config)(8)(14 Feb 1999)(apt)()
-manpagename(apt-config)(APT Configuration Query program)
-
-manpagesynopsis()
-apt-config command
-
-manpagedescription()
-bf(apt-config) is an internal program used by various portions of the APT
-suite to provide consistent configurability. It accesses the main configuarion
-file /etc/apt/apt.conf in a manner that is easy to use by scripted
-applications.
-
-em(command) is one of:
-itemize(
- it() shell
- it() dump
-)
-
-Unless the -h, or --help option is given one of the above commands
-must be present.
-
-startdit()
-dit(bf(shell))
-bf(shell) is used to access the configuration information from a shell script.
-It is given pairs of arguments, the first being a shell variable and the
-second the configuration value to query. As output it lists a series of shell
-assignments commands for each present value. In a shell script it should be
-used like:
-
-verb(
-OPTS="-f"
-
-RES=`apt-config shell OPTS MyApp::Options`
-
-eval $RES
-)
-
-This will set the shell environment variable $OPTS to the value of
-MyApp::Options with a default of -f.
-
-If the configuration item to retrieve is prefixed with a / then it will
-be retrieved using filename mode which prepends base paths.
-
-dit(bf(dump))
-Just show the contents of the configuration space.
-
-enddit()
-
-manpageoptions()
-All command line options may be set using the configuration file, the
-descriptions indicate the configuration option to set. For boolean
-options you can override the config file by using something like bf(-f-),
-bf(--no-f), bf(-f=no) or several other variations.
-
-startdit()
-dit(bf(-h, --help))
-Show a short usage summary.
-
-dit(bf(-v, --version))
-Show the program verison.
-
-dit(bf(-c, --config-file))
-Configuration File; Specify a configuration file to use. bf(apt-get) will
-read the default configuration file and then this configuration file. See
-bf(apt.conf(5)) for syntax information.
-
-dit(bf(-o, --option))
-Set a Configuration Option; This will set an arbitary configuration option.
-The syntax is
-verb(-o Foo::Bar=bar)
-enddit()
-
-manpageseealso()
-apt.conf(5)
-
-manpagediagnostics()
-apt-config returns zero on normal operation, decimal 100 on error.
-
-manpagebugs()
-See http://bugs.debian.org/apt. If you wish to report a
-bug in bf(apt-config), please see bf(/usr/doc/debian/bug-reporting.txt)
-or the bf(bug(1)) command.
-
-manpageauthor()
-apt-get was written by the APT team <apt@packages.debian.org>.
diff --git a/doc/apt-ftparchive.1.sgml b/doc/apt-ftparchive.1.sgml
new file mode 100644
index 000000000..f05765650
--- /dev/null
+++ b/doc/apt-ftparchive.1.sgml
@@ -0,0 +1,507 @@
+<!-- -*- mode: sgml; mode: fold -*- -->
+<!doctype refentry PUBLIC "-//OASIS//DTD DocBook V3.1//EN" [
+
+<!ENTITY % aptent SYSTEM "apt.ent">
+%aptent;
+
+]>
+
+<refentry>
+ &apt-docinfo;
+
+ <refmeta>
+ <refentrytitle>apt-ftparchive</>
+ <manvolnum>1</>
+ </refmeta>
+
+ <!-- Man page title -->
+ <refnamediv>
+ <refname>apt-ftparchive</>
+ <refpurpose>Utility to generate index files</>
+ </refnamediv>
+
+ <!-- Arguments -->
+ <refsynopsisdiv>
+ <cmdsynopsis>
+ <command>apt-ftparchive</>
+ <arg><option>-hvdsq</></arg>
+ <arg><option>--md5</></arg>
+ <arg><option>--delink</></arg>
+ <arg><option>--readonly</></arg>
+ <arg><option>--contents</></arg>
+ <arg><option>-o=<replaceable/config string/</></arg>
+ <arg><option>-c=<replaceable/file/</></arg>
+ <group choice=req>
+ <arg>packages<arg choice="plain" rep="repeat"><replaceable>path</replaceable></arg><arg><replaceable>override</replaceable><arg><replaceable>pathprefix</replaceable></arg></arg></arg>
+ <arg>sources<arg choice="plain" rep="repeat"><replaceable>path</replaceable></arg><arg><replaceable>override</replaceable><arg><replaceable>pathprefix</replaceable></arg></arg></arg>
+ <arg>contents <arg choice="plain"><replaceable>path</replaceable></arg></arg>
+ <arg>generate <arg choice="plain"><replaceable>config-file</replaceable></arg> <arg choice="plain" rep="repeat"><replaceable>section</replaceable></arg></arg>
+ <arg>clean <arg choice="plain"><replaceable>config-file</replaceable></arg></arg>
+ </group>
+ </cmdsynopsis>
+ </refsynopsisdiv>
+
+ <RefSect1><Title>Description</>
+ <para>
+ <command/apt-ftparchive/ is the command line tool that generates the index
+ files that APT uses to access a distribution source. The index files should
+ be generated on the origin site based on the content of that site.
+
+ <para>
+ <command/apt-ftparchive/ is a superset of the &dpkg-scanpackages; program,
+   incorporating its entire functionality via the <literal/packages/ command.
+ It also contains a contents file generator, <literal/contents/, and an
+ elaborate means to 'script' the generation process for a complete
+ archive.
+
+ <para>
+ Internally <command/apt-ftparchive/ can make use of binary databases to
+ cache the contents of a .deb file and it does not rely on any external
+ programs aside from &gzip;. When doing a full generate it automatically
+ performs file-change checks and builds the desired compressed output files.
+
+ <para>
+   Unless the <option/-h/ or <option/--help/ option is given, one of the above
+ commands must be present.
+
+ <VariableList>
+ <VarListEntry><term>packages</term>
+ <ListItem><Para>
+ The packages command generates a package file from a directory tree. It
+ takes the given directory and recursively searches it for .deb files,
+ emitting a package record to stdout for each. This command is
+ approximately equivalent to &dpkg-scanpackages;.
+ <para>
+ The option <option/--db/ can be used to specify a binary caching DB.
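+     <para>
+     For instance (the directory and output file names here are only
+     illustrative):
+<informalexample><programlisting>
+# scan a tree of .debs and write the package records to stdout
+apt-ftparchive packages dists/woody/main/binary-i386 > Packages
+</programlisting></informalexample>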
+ </VarListEntry>
+
+ <VarListEntry><term>sources</term>
+ <ListItem><Para>
+ The <literal/sources/ command generates a source index file from a directory tree.
+ It takes the given directory and recursively searches it for .dsc files,
+ emitting a source record to stdout for each. This command is approximately
+ equivalent to &dpkg-scansources;.
+ <para>
+ If an override file is specified then a source override file will be
+     looked for with an extension of .src. The <option/--source-override/ option can be
+ used to change the source override file that will be used.
+ </VarListEntry>
+
+ <VarListEntry><term>contents</term>
+ <ListItem><Para>
+ The <literal/contents/ command generates a contents file from a directory tree. It
+ takes the given directory and recursively searches it for .deb files,
+ and reads the file list from each file. It then sorts and writes to stdout
+ the list of files matched to packages. Directories are not written to
+ the output. If multiple packages own the same file then each package is
+ separated by a comma in the output.
+ <para>
+ The option <option/--db/ can be used to specify a binary caching DB.
+ </VarListEntry>
+
+ <VarListEntry><term>generate</term>
+ <ListItem><Para>
+ The <literal/generate/ command is designed to be runnable from a cron script and
+ builds indexes according to the given config file. The config language
+ provides a flexible means of specifying which index files are built from
+ which directories, as well as providing a simple means of maintaining the
+ required settings.
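+     <para>
+     A cron job might simply run something like the following (the
+     configuration file name is illustrative):
+<informalexample><programlisting>
+apt-ftparchive generate apt-ftparchive.conf
+</programlisting></informalexample>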
+ </VarListEntry>
+
+ <VarListEntry><term>clean</term>
+ <ListItem><Para>
+ The <literal/clean/ command tidies the databases used by the given
+ configuration file by removing any records that are no longer necessary.
+ </VarListEntry>
+ </VariableList>
+
+ </RefSect1>
+
+ <RefSect1><Title>The Generate Configuration</>
+ <para>
+ The <literal/generate/ command uses a configuration file to describe the
+ archives that are going to be generated. It follows the typical ISC
+ configuration format as seen in ISC tools like bind 8 and dhcpd.
+     &apt-conf; contains a description of the syntax. Note that the generate 
+     configuration is parsed in a sectional manner, but &apt-conf; is parsed in a
+     tree manner. This only affects how the scope tag is handled.
+
+ <para>
+     The generate configuration has 4 separate sections, each described below.
+
+ <refsect2><title>Dir Section</>
+ <Para>
+ The <literal/Dir/ section defines the standard directories needed to
+ locate the files required during the generation process. These
+ directories are prepended to certain relative paths defined in later
+     sections to produce a complete absolute path.
+ <VariableList>
+ <VarListEntry><term>ArchiveDir</term>
+ <ListItem><Para>
+ Specifies the root of the FTP archive, in a standard
+ Debian configuration this is the directory that contains the
+ <filename/ls-LR/, and dist nodes.
+ </VarListEntry>
+
+ <VarListEntry><term>OverrideDir</term>
+ <ListItem><Para>
+ Specifies the location of the override files.
+ </VarListEntry>
+
+ <VarListEntry><term>CacheDir</term>
+ <ListItem><Para>
+     Specifies the location of the cache files.
+ </VarListEntry>
+
+ <VarListEntry><term>FileListDir</term>
+ <ListItem><Para>
+ Specifies the location of the file list files,
+ if the <literal/FileList/ setting is used below.
+ </VarListEntry>
+ </VariableList>
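+     <para>
+     A minimal <literal/Dir/ section might look like this; the paths shown
+     are purely illustrative:
+<informalexample><programlisting>
+Dir
+{
+   ArchiveDir "/org/ftp.debian.org/ftp/";
+   OverrideDir "/org/ftp.debian.org/scripts/override/";
+   CacheDir "/org/ftp.debian.org/scripts/cache/";
+};
+</programlisting></informalexample>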
+ </refsect2>
+
+ <refsect2><title>Default Section</>
+ <para>
+ The <literal/Default/ section specifies default values, and settings
+ that control the operation of the generator. Other sections may override
+ these defaults with a per-section setting.
+ <VariableList>
+ <VarListEntry><term>Packages::Compress</term>
+ <ListItem><Para>
+ Sets the default compression schemes to use
+ for the Package index files. It is a string that contains a space
+ separated list of at least one of: '.' (no compression), 'gzip' and
+ 'bzip2'. The default for all compression schemes is '. gzip'.
+ </VarListEntry>
+
+ <VarListEntry><term>Packages::Extensions</term>
+ <ListItem><Para>
+ Sets the default list of file extensions that are package files.
+ This defaults to '.deb'.
+ </VarListEntry>
+
+ <VarListEntry><term>Sources::Compress</term>
+ <ListItem><Para>
+ This is similar to <literal/Packages::Compress/
+ except that it controls the compression for the Sources files.
+ </VarListEntry>
+
+ <VarListEntry><term>Sources::Extensions</term>
+ <ListItem><Para>
+ Sets the default list of file extensions that are source files.
+ This defaults to '.dsc'.
+ </VarListEntry>
+
+ <VarListEntry><term>Contents::Compress</term>
+ <ListItem><Para>
+ This is similar to <literal/Packages::Compress/
+ except that it controls the compression for the Contents files.
+ </VarListEntry>
+
+ <VarListEntry><term>DeLinkLimit</term>
+ <ListItem><Para>
+ Specifies the number of kilobytes to delink (and
+ replace with hard links) per run. This is used in conjunction with the
+ per-section <literal/External-Links/ setting.
+ </VarListEntry>
+
+ <VarListEntry><term>FileMode</term>
+ <ListItem><Para>
+ Specifies the mode of all created index files. It
+ defaults to 0644. All index files are set to this mode with no regard
+ to the umask.
+ </VarListEntry>
+ </VariableList>
+ </refsect2>
+
+ <refsect2><title>TreeDefault Section</>
+ <para>
+ Sets defaults specific to <literal/Tree/ sections. All of these
+ variables are substitution variables and have the strings $(DIST),
+ $(SECTION) and $(ARCH) replaced with their respective values.
+
+ <VariableList>
+ <VarListEntry><term>MaxContentsChange</term>
+ <ListItem><Para>
+ Sets the number of kilobytes of contents
+ files that are generated each day. The contents files are round-robined
+ so that over several days they will all be rebuilt.
+ </VarListEntry>
+
+ <VarListEntry><term>ContentsAge</term>
+ <ListItem><Para>
+ Controls the number of days a contents file is allowed
+ to be checked without changing. If this limit is passed the mtime of the
+ contents file is updated. This case can occur if the package file is
+ changed in such a way that does not result in a new contents file
+     (an override edit, for instance). A hold-off is allowed in hopes that new
+ .debs will be installed, requiring a new file anyhow. The default is 10,
+ the units are in days.
+ </VarListEntry>
+
+ <VarListEntry><term>Directory</term>
+ <ListItem><Para>
+ Sets the top of the .deb directory tree. Defaults to
+ <filename>$(DIST)/$(SECTION)/binary-$(ARCH)/</>
+ </VarListEntry>
+
+ <VarListEntry><term>Packages</term>
+ <ListItem><Para>
+ Sets the output Packages file. Defaults to
+ <filename>$(DIST)/$(SECTION)/binary-$(ARCH)/Packages</>
+ </VarListEntry>
+
+ <VarListEntry><term>Sources</term>
+ <ListItem><Para>
+     Sets the output Sources file. Defaults to 
+ <filename>$(DIST)/$(SECTION)/source/Sources</>
+ </VarListEntry>
+
+ <VarListEntry><term>InternalPrefix</term>
+ <ListItem><Para>
+ Sets the path prefix that causes a symlink to be
+ considered an internal link instead of an external link. Defaults to
+ <filename>$(DIST)/$(SECTION)/</>
+ </VarListEntry>
+
+ <VarListEntry><term>Contents</term>
+ <ListItem><Para>
+ Sets the output Contents file. Defaults to
+ <filename>$(DIST)/Contents-$(ARCH)</>. If this setting causes multiple
+ Packages files to map onto a single Contents file (such as the default)
+ then <command/apt-ftparchive/ will integrate those package files
+ together automatically.
+ </VarListEntry>
+
+ <VarListEntry><term>Contents::Header</term>
+ <ListItem><Para>
+ Sets header file to prepend to the contents output.
+ </VarListEntry>
+
+ <VarListEntry><term>BinCacheDB</term>
+ <ListItem><Para>
+ Sets the binary cache database to use for this
+ section. Multiple sections can share the same database.
+ </VarListEntry>
+
+ <VarListEntry><term>FileList</term>
+ <ListItem><Para>
+     Specifies that instead of walking the directory tree, 
+     <command/apt-ftparchive/ should read the list of files from the given 
+     file. Relative file names are prefixed with the archive directory.
+ </VarListEntry>
+
+ <VarListEntry><term>SourceFileList</term>
+ <ListItem><Para>
+     Specifies that instead of walking the directory tree, 
+     <command/apt-ftparchive/ should read the list of files from the given 
+     file. Relative file names are prefixed with the archive directory. 
+     This is used when processing source indexes.
+ </VarListEntry>
+ </VariableList>
+ </refsect2>
+
+ <refsect2><title>Tree Section</>
+ <para>
+ The <literal/Tree/ section defines a standard Debian file tree which
+ consists of a base directory, then multiple sections in that base
+ directory and finally multiple Architectures in each section. The exact
+ pathing used is defined by the <literal/Directory/ substitution variable.
+ <para>
+ The <literal/Tree/ section takes a scope tag which sets the
+ <literal/$(DIST)/ variable and defines the root of the tree
+ (the path is prefixed by <literal/ArchiveDir/).
+ Typically this is a setting such as <filename>dists/woody</>.
+ <para>
+ All of the settings defined in the <literal/TreeDefault/ section can be
+     used in a <literal/Tree/ section as well as three new variables.
+ <para>
+ When processing a <literal/Tree/ section <command/apt-ftparchive/
+ performs an operation similar to:
+<informalexample><programlisting>
+for i in Sections do
+ for j in Architectures do
+ Generate for DIST=scope SECTION=i ARCH=j
+</programlisting></informalexample>
+
+ <VariableList>
+ <VarListEntry><term>Sections</term>
+ <ListItem><Para>
+ This is a space separated list of sections which appear
+     under the distribution; typically this is something like
+ <literal/main contrib non-free/.
+ </VarListEntry>
+
+ <VarListEntry><term>Architectures</term>
+ <ListItem><Para>
+ This is a space separated list of all the
+     architectures that appear under each section. The special architecture 
+ 'source' is used to indicate that this tree has a source archive.
+ </VarListEntry>
+
+ <VarListEntry><term>BinOverride</term>
+ <ListItem><Para>
+ Sets the binary override file. The override file
+ contains section, priority and maintainer address information.
+ </VarListEntry>
+
+ <VarListEntry><term>SrcOverride</term>
+ <ListItem><Para>
+ Sets the source override file. The override file
+ contains section information.
+ </VarListEntry>
+ </VariableList>
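+     <para>
+     As an illustration, a <literal/Tree/ section for a woody-style archive
+     could look roughly like this (the override file names are only examples):
+<informalexample><programlisting>
+Tree "dists/woody"
+{
+   Sections "main contrib non-free";
+   Architectures "i386 source";
+   BinOverride "override.woody";
+   SrcOverride "override.woody.src";
+};
+</programlisting></informalexample>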
+ </refsect2>
+
+ <refsect2><title>BinDirectory Section</>
+ <para>
+ The <literal/bindirectory/ section defines a binary directory tree
+ with no special structure. The scope tag specifies the location of
+ the binary directory and the settings are similar to the <literal/Tree/
+ section with no substitution variables or
+     <literal>Sections</> or <literal>Architectures</> settings.
+ <VariableList>
+ <VarListEntry><term>Packages</term>
+ <ListItem><Para>
+ Sets the Packages file output.
+ </VarListEntry>
+
+ <VarListEntry><term>SrcPackages</term>
+ <ListItem><Para>
+ Sets the Sources file output. At least one of
+ <literal/Packages/ or <literal/SrcPackages/ is required.
+ </VarListEntry>
+
+ <VarListEntry><term>Contents</term>
+ <ListItem><Para>
+ Sets the Contents file output. (Optional)
+ </VarListEntry>
+
+ <VarListEntry><term>Binoverride</term>
+ <ListItem><Para>
+ Sets the binary override file.
+ </VarListEntry>
+
+ <VarListEntry><term>SrcOverride</term>
+ <ListItem><Para>
+ Sets the source override file.
+ </VarListEntry>
+
+ <VarListEntry><term>BinCacheDB</term>
+ <ListItem><Para>
+ Sets the cache DB.
+ </VarListEntry>
+
+ <VarListEntry><term>PathPrefix</term>
+ <ListItem><Para>
+ Appends a path to all the output paths.
+ </VarListEntry>
+
+ <VarListEntry><term>FileList, SourceFileList</term>
+ <ListItem><Para>
+ Specifies the file list file.
+ </VarListEntry>
+ </VariableList>
+ </refsect2>
+ </RefSect1>
+
+ <RefSect1><Title>The Binary Override File</>
+ <para>
+ The binary override file is fully compatible with &dpkg-scanpackages;. It
+    contains 4 fields separated by spaces. The first field is the package name, 
+    the second is the priority to force that package to, the third is the 
+    section to force that package to and the final field is the maintainer 
+ permutation field.
+ <para>
+ The general form of the maintainer field is:
+ <literallayout>old [// oldn]* => new</literallayout>
+ or simply,
+ <literallayout>new</literallayout>
+ The first form allows a double-slash separated list of old email addresses
+ to be specified. If any of those are found then new is substituted for the
+ maintainer field. The second form unconditionally substitutes the
+ maintainer field.
+ </RefSect1>
+
+ <RefSect1><title>The Source Override File</>
+ <para>
+ The source override file is fully compatible with &dpkg-scansources;. It
+    contains 2 fields separated by spaces. The first field is the source
+ package name, the second is the section to assign it.
+ </RefSect1>
+
+ <RefSect1><Title>Options</>
+ &apt-cmdblurb;
+
+ <VariableList>
+ <VarListEntry><term><option/--md5/</>
+ <ListItem><Para>
+     Generate MD5 sums. This defaults to on; when turned off, the generated 
+     index files will not have MD5Sum fields where possible.
+ Configuration Item: <literal/APT::FTPArchive::MD5/.
+ </VarListEntry>
+
+ <VarListEntry><term><option/-d/</><term><option/--db/</>
+ <ListItem><Para>
+ Use a binary caching DB. This has no effect on the generate command.
+ Configuration Item: <literal/APT::FTPArchive::DB/.
+ </VarListEntry>
+
+ <VarListEntry><term><option/-q/</><term><option/--quiet/</>
+ <ListItem><Para>
+ Quiet; produces output suitable for logging, omitting progress indicators.
+ More q's will produce more quiet up to a maximum of 2. You can also use
+ <option/-q=#/ to set the quiet level, overriding the configuration file.
+ Configuration Item: <literal/quiet/.
+ </VarListEntry>
+
+ <VarListEntry><term><option/--delink/</>
+ <ListItem><Para>
+ Perform Delinking. If the <literal/External-Links/ setting is used then
+ this option actually enables delinking of the files. It defaults to on and
+ can be turned off with <option/--no-delink/.
+ Configuration Item: <literal/APT::FTPArchive::DeLinkAct/.
+ </VarListEntry>
+
+ <VarListEntry><term><option/--contents/</>
+ <ListItem><Para>
+ Perform contents generation. When this option is set and package indexes
+ are being generated with a cache DB then the file listing will also be
+ extracted and stored in the DB for later use. When using the generate
+ command this option also allows the creation of any Contents files. The
+ default is on.
+ Configuration Item: <literal/APT::FTPArchive::Contents/.
+ </VarListEntry>
+
+ <VarListEntry><term><option/-s/</><term><option/--source-override/</>
+ <ListItem><Para>
+ Select the source override file to use with the <literal/sources/ command.
+ Configuration Item: <literal/APT::FTPArchive::SourceOverride/.
+ </VarListEntry>
+
+ <VarListEntry><term><option/--readonly/</>
+ <ListItem><Para>
+ Make the caching databases read only.
+ Configuration Item: <literal/APT::FTPArchive::ReadOnlyDB/.
+ </VarListEntry>
+
+ &apt-commonoptions;
+
+ </VariableList>
+ </RefSect1>
+
+ <RefSect1><Title>See Also</>
+ <para>
+ &apt-conf;
+ </RefSect1>
+
+ <RefSect1><Title>Diagnostics</>
+ <para>
+ <command/apt-ftparchive/ returns zero on normal operation, decimal 100 on error.
+ </RefSect1>
+
+ &manbugs;
+ &manauthor;
+
+</refentry>
diff --git a/doc/apt-get.8.sgml b/doc/apt-get.8.sgml
new file mode 100644
index 000000000..cf35b65b0
--- /dev/null
+++ b/doc/apt-get.8.sgml
@@ -0,0 +1,451 @@
+<!-- -*- mode: sgml; mode: fold -*- -->
+<!doctype refentry PUBLIC "-//OASIS//DTD DocBook V3.1//EN" [
+
+<!ENTITY % aptent SYSTEM "apt.ent">
+%aptent;
+
+]>
+
+<refentry>
+ &apt-docinfo;
+
+ <refmeta>
+ <refentrytitle>apt-get</>
+ <manvolnum>8</>
+ </refmeta>
+
+ <!-- Man page title -->
+ <refnamediv>
+ <refname>apt-get</>
+ <refpurpose>APT package handling utility -- command-line interface</>
+ </refnamediv>
+
+ <!-- Arguments -->
+ <refsynopsisdiv>
+ <cmdsynopsis>
+   <command>apt-get</>
+ <arg><option>-hvs</></arg>
+ <arg><option>-o=<replaceable/config string/</></arg>
+ <arg><option>-c=<replaceable/file/</></arg>
+ <group choice=req>
+ <arg>update</>
+ <arg>upgrade</>
+ <arg>dselect-upgrade</>
+ <arg>install <arg choice="plain" rep="repeat"><replaceable>pkg</replaceable></arg></arg>
+ <arg>remove <arg choice="plain" rep="repeat"><replaceable>pkg</replaceable></arg></arg>
+ <arg>source <arg choice="plain" rep="repeat"><replaceable>pkg</replaceable></arg></arg>
+ <arg>build-dep <arg choice="plain" rep="repeat"><replaceable>pkg</replaceable></arg></arg>
+ <arg>check</>
+ <arg>clean</>
+ <arg>autoclean</>
+ </group>
+ </cmdsynopsis>
+ </refsynopsisdiv>
+
+ <RefSect1><Title>Description</>
+ <para>
+ <command/apt-get/ is the command-line tool for handling packages, and may be
+ considered the user's "back-end" to other tools using the APT library.
+ <para>
+   Unless the <option/-h/ or <option/--help/ option is given, one of the 
+   above commands must be present.
+
+ <VariableList>
+ <VarListEntry><Term>update</Term>
+ <ListItem><Para>
+ <literal/update/ is used to resynchronize the package index files from
+ their sources. The indexes of available packages are fetched from the
+ location(s) specified in <filename>/etc/apt/sources.list</>.
+ For example, when using a Debian archive, this command retrieves and
+ scans the <filename>Packages.gz</> files, so that information about new
+ and updated packages is available. An <literal/update/ should always be
+ performed before an <literal/upgrade/ or <literal/dist-upgrade/. Please
+ be aware that the overall progress meter will be incorrect as the size
+ of the package files cannot be known in advance.
+ </VarListEntry>
+
+ <VarListEntry><Term>upgrade</Term>
+ <ListItem><Para>
+ <literal/upgrade/ is used to install the newest versions of all packages
+ currently installed on the system from the sources enumerated in
+ <filename>/etc/apt/sources.list</>. Packages currently installed with
+ new versions available are retrieved and upgraded; under no circumstances
+ are currently installed packages removed, or packages not already installed
+ retrieved and installed. New versions of currently installed packages that
+ cannot be upgraded without changing the install status of another package
+ will be left at their current version. An <literal/update/ must be
+ performed first so that <command/apt-get/ knows that new versions of packages are
+ available.
+ </VarListEntry>
+
+ <VarListEntry><Term>dselect-upgrade</Term>
+ <ListItem><Para>
+ is used in conjunction with the traditional Debian GNU/Linux packaging
+ front-end, &dselect;. <literal/dselect-upgrade/
+ follows the changes made by &dselect; to the <literal/Status/
+ field of available packages, and performs the actions necessary to realize
+ that state (for instance, the removal of old and the installation of new
+ packages).
+ </VarListEntry>
+
+ <VarListEntry><Term>dist-upgrade</Term>
+ <ListItem><Para>
+ <literal/dist-upgrade/, in addition to performing the function of
+ <literal/upgrade/, also intelligently handles changing dependencies
+ with new versions of packages; <command/apt-get/ has a "smart" conflict
+ resolution system, and it will attempt to upgrade the most important
+ packages at the expense of less important ones if necessary.
+ The <filename>/etc/apt/sources.list</> file contains a list of locations
+ from which to retrieve desired package files.
+ </VarListEntry>
+
+ <VarListEntry><Term>install</Term>
+ <ListItem><Para>
+ <literal/install/ is followed by one or more packages desired for
+ installation. Each package is a package name, not a fully qualified
+ filename (for instance, in a Debian GNU/Linux system, libc6 would be the
+     argument provided, not <filename/libc6_1.9.6-2.deb/). All packages required 
+ by the package(s) specified for installation will also be retrieved and
+ installed. The <filename>/etc/apt/sources.list</> file is used to locate
+ the desired packages. If a hyphen is appended to the package name (with
+ no intervening space), the identified package will be removed if it is
+ installed. Similarly a plus sign can be used to designate a package to
+ isntall. These latter feature may be used to override decisions made by
+ apt-get's conflict resolution system.
+ <para>
+ A specific version of a package can be selected for installation by
+ following the package name with an equals and the version of the package
+ to select. This will cause that version to be located and selected for
+ install. Alternatively a specific distribution can be selected by
+ following the package name with a slash and the version of the
+ distribution or the Archive name (stable, frozen, unstable).
+ <para>
+     Both of the version selection mechanisms can downgrade packages and must 
+ be used with care.
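+     <para>
+     For example (the version and distribution names are only illustrative):
+<informalexample><programlisting>
+apt-get install libc6=1.9.6-2
+apt-get install libc6/stable
+</programlisting></informalexample>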
+ <para>
+ If no package matches the given expression and the expression contains one
+ of '.', '?' or '*' then it is assumed to be a POSIX regex and it is applied
+ to all package names in the database. Any matches are then installed (or
+ removed). Note that matching is done by substring so 'lo.*' matches 'how-lo'
+ and 'lowest'. If this is undesired prefix with a '^' character.
+ </VarListEntry>
+
+ <VarListEntry><Term>remove</Term>
+ <ListItem><Para>
+     <literal/remove/ is identical to <literal/install/ except that packages are 
+ removed instead of installed. If a plus sign is appended to the package
+ name (with no intervening space), the identified package will be
+ installed.
+ </VarListEntry>
+
+ <VarListEntry><Term>source</Term>
+ <ListItem><Para>
+ <literal/source/ causes <command/apt-get/ to fetch source packages. APT
+ will examine the available packages to decide which source package to
+ fetch. It will then find and download into the current directory the
+ newest available version of that source package. Source packages are
+ tracked separately from binary packages via <literal/deb-src/ type lines
+ in the &sources-list; file. This probably will mean that you will not
+ get the same source as the package you have installed or as you could
+     install. If the <option/--compile/ option is specified then the package 
+     will be compiled to a binary .deb using dpkg-buildpackage; if 
+     <option/--download-only/ is specified then the source package will not be unpacked.
+ <para>
+ A specific source version can be retrieved by postfixing the source name
+ with an equals and then the version to fetch, similar to the mechanism
+ used for the package files. This enables exact matching of the source
+ package name and version, implicitly enabling the
+ <literal/APT::Get::Only-Source/ option.
+
+ <para>
+ Note that source packages are not tracked like binary packages, they
+ exist only in the current directory and are similar to downloading source
+ tar balls.
+ </VarListEntry>
+
+ <VarListEntry><Term>build-dep</Term>
+ <ListItem><Para>
+     <literal/build-dep/ causes apt-get to install/remove packages in an 
+     attempt to satisfy the build dependencies for a source package. At 
+     present, build dependencies on virtual packages are satisfied by 
+     choosing a providing package at random.
+ </VarListEntry>
+
+ <VarListEntry><Term>check</Term>
+ <ListItem><Para>
+ <literal/check/ is a diagnostic tool; it updates the package cache and checks
+ for broken dependencies.
+ </VarListEntry>
+
+ <VarListEntry><Term>clean</Term>
+ <ListItem><Para>
+ <literal/clean/ clears out the local repository of retrieved package
+ files. It removes everything but the lock file from
+ <filename>&cachedir;/archives/</> and
+     <filename>&cachedir;/archives/partial/</>. When APT is used as a
+ &dselect; method, <literal/clean/ is run automatically.
+ Those who do not use dselect will likely want to run <literal/apt-get clean/
+ from time to time to free up disk space.
+ </VarListEntry>
+
+ <VarListEntry><Term>autoclean</Term>
+ <ListItem><Para>
+ Like <literal/clean/, <literal/autoclean/ clears out the local
+ repository of retrieved package files. The difference is that it only
+ removes package files that can no longer be downloaded, and are largely
+ useless. This allows a cache to be maintained over a long period without
+ it growing out of control. The configuration option
+ <literal/APT::Clean-Installed/ will prevent installed packages from being
+ erased if it is set off.
+ </VarListEntry>
+ </VariableList>
+ </RefSect1>
+
+ <RefSect1><Title>Options</>
+ &apt-cmdblurb;
+
+ <VariableList>
+ <VarListEntry><term><option/-d/</><term><option/--download-only/</>
+ <ListItem><Para>
+ Download only; package files are only retrieved, not unpacked or installed.
+ Configuration Item: <literal/APT::Get::Download-Only/.
+ </VarListEntry>
+
+ <VarListEntry><term><option/-f/</><term><option/--fix-broken/</>
+ <ListItem><Para>
+ Fix; attempt to correct a system with broken dependencies in
+ place. This option, when used with install/remove, can omit any packages
+     to permit APT to deduce a likely solution. Any packages that are specified 
+     must completely correct the problem. The option is sometimes necessary when 
+ running APT for the first time; APT itself does not allow broken package
+ dependencies to exist on a system. It is possible that a system's
+ dependency structure can be so corrupt as to require manual intervention
+ (which usually means using &dselect; or <command/dpkg --remove/ to eliminate some of
+ the offending packages). Use of this option together with <option/-m/ may produce an
+ error in some situations.
+ Configuration Item: <literal/APT::Get::Fix-Broken/.
+ </VarListEntry>
+
+ <VarListEntry><term><option/-m/</><term><option/--ignore-missing/</>
+ <term><option/--fix-missing/</>
+ <ListItem><Para>
+ Ignore missing packages; If packages cannot be retrieved or fail the
+ integrity check after retrieval (corrupted package files), hold back
+ those packages and handle the result. Use of this option together with
+ <option/-f/ may produce an error in some situations. If a package is
+ selected for installation (particularly if it is mentioned on the
+ command line) and it could not be downloaded then it will be silently
+ held back.
+ Configuration Item: <literal/APT::Get::Fix-Missing/.
+ </VarListEntry>
+
+ <VarListEntry><term><option/--no-download/</>
+ <ListItem><Para>
+ Disables downloading of packages. This is best used with
+ <option/--ignore-missing/ to force APT to use only the .debs it has
+ already downloaded.
+ Configuration Item: <literal/APT::Get::Download/.
+ </VarListEntry>
+
+ <VarListEntry><term><option/-q/</><term><option/--quiet/</>
+ <ListItem><Para>
+ Quiet; produces output suitable for logging, omitting progress indicators.
+ More q's will produce more quiet up to a maximum of 2. You can also use
+ <option/-q=#/ to set the quiet level, overriding the configuration file.
+     Note that quiet level 2 implies <option/-y/; you should never use -qq 
+     without a no-action modifier such as -d, --print-uris or -s, as APT may 
+     decide to do something you did not expect.
+ Configuration Item: <literal/quiet/.
+ </VarListEntry>
+
+ <VarListEntry><term><option/-s/</>
+ <term><option/--simulate/</>
+ <term><option/--just-print/</>
+ <term><option/--dry-run/</>
+ <term><option/--recon/</>
+ <term><option/--no-act/</>
+ <ListItem><Para>
+ No action; perform a simulation of events that would occur but do not
+ actually change the system.
+ Configuration Item: <literal/APT::Get::Simulate/.
+ <para>
+ Simulate prints out
+ a series of lines each one representing a dpkg operation, Configure (Conf),
+ Remove (Remv), Unpack (Inst). Square brackets indicate broken packages with
+ and empty set of square brackets meaning breaks that are of no consequence
+ (rare).
+ </VarListEntry>
+
+ <VarListEntry><term><option/-y/</><term><option/--yes/</>
+ <term><option/--assume-yes/</>
+ <ListItem><Para>
+ Automatic yes to prompts; assume "yes" as answer to all prompts and run
+ non-interactively. If an undesirable situation, such as changing a held
+ package or removing an essential package occurs then <literal/apt-get/
+ will abort.
+ Configuration Item: <literal/APT::Get::Assume-Yes/.
+ </VarListEntry>
+
+ <VarListEntry><term><option/-u/</><term><option/--show-upgraded/</>
+ <ListItem><Para>
+ Show upgraded packages; Print out a list of all packages that are to be
+ upgraded.
+ Configuration Item: <literal/APT::Get::Show-Upgraded/.
+ </VarListEntry>
+
+ <VarListEntry><term><option/-b/</><term><option/--compile/</>
+ <term><option/--build/</>
+ <ListItem><Para>
+ Compile source packages after downloading them.
+ Configuration Item: <literal/APT::Get::Compile/.
+ </VarListEntry>
+
+ <VarListEntry><term><option/--ignore-hold/</>
+ <ListItem><Para>
+ Ignore package Holds; This causes <command/apt-get/ to ignore a hold
+ placed on a package. This may be useful in conjunction with
+ <literal/dist-upgrade/ to override a large number of undesired holds.
+ Configuration Item: <literal/APT::Ignore-Hold/.
+ </VarListEntry>
+
+ <VarListEntry><term><option/--no-upgrade/</>
+ <ListItem><Para>
+ Do not upgrade packages; When used in conjunction with <literal/install/
+ <literal/no-upgrade/ will prevent packages listed from being upgraded
+ if they are already installed.
+ Configuration Item: <literal/APT::Get::Upgrade/.
+ </VarListEntry>
+
+ <VarListEntry><term><option/--force-yes/</>
+ <ListItem><Para>
+ Force yes; This is a dangerous option that will cause apt to continue
+ without prompting if it is doing something potentially harmful. It
+ should not be used except in very special situations. Using
+ <literal/force-yes/ can potentially destroy your system!
+ Configuration Item: <literal/APT::Get::force-yes/.
+ </VarListEntry>
+
+ <VarListEntry><term><option/--print-uris/</>
+ <ListItem><Para>
+     Instead of fetching the files to install, their URIs are printed. Each
+     URI will have the path, the destination file name, the size and the expected 
+     md5 hash. Note that the file name to write to will not always match
+     the file name on the remote site! This also works with the <literal/source/ 
+ command. Configuration Item: <literal/APT::Get::Print-URIs/.
+ </VarListEntry>
+
+ <VarListEntry><term><option/--purge/</>
+ <ListItem><Para>
+ Use purge instead of remove for anything that would be removed.
+ Configuration Item: <literal/APT::Get::Purge/.
+ </VarListEntry>
+
+ <VarListEntry><term><option/--reinstall/</>
+ <ListItem><Para>
+ Re-Install packages that are already installed and at the newest version.
+ Configuration Item: <literal/APT::Get::ReInstall/.
+ </VarListEntry>
+
+ <VarListEntry><term><option/--list-cleanup/</>
+ <ListItem><Para>
+ This option defaults to on, use <literal/--no-list-cleanup/ to turn it
+ off. When on <command/apt-get/ will automatically manage the contents of
+ <filename>&statedir;/lists</> to ensure that obsolete files are erased.
+ The only reason to turn it off is if you frequently change your source
+ list.
+ Configuration Item: <literal/APT::Get::List-Cleanup/.
+ </VarListEntry>
+
+ <VarListEntry><term><option/-t/</>
+ <term><option/--target-release/</>
+ <term><option/--default-release/</>
+ <ListItem><Para>
+     This option controls the default input to the policy engine; it creates
+ a default pin at priority 990 using the specified release string. The
+ preferences file may further override this setting. In short, this option
+ lets you have simple control over which distribution packages will be
+     retrieved from. Some common examples might be 
+ <option>-t '2.1*'</> or <option>-t unstable</>.
+ Configuration Item: <literal/APT::Default-Release/
+ </VarListEntry>
+
+ <VarListEntry><term><option/--trivial-only/</>
+ <ListItem><Para>
+     Only perform operations that are 'trivial'. Logically this can be considered 
+ related to <option/--assume-yes/, where <option/--assume-yes/ will answer
+ yes to any prompt, <option/--trivial-only/ will answer no.
+ Configuration Item: <literal/APT::Get::Trivial-Only/.
+ </VarListEntry>
+
+ <VarListEntry><term><option/--no-remove/</>
+ <ListItem><Para>
+     If any packages are to be removed, apt-get immediately aborts without 
+ prompting.
+ Configuration Item: <literal/APT::Get::Remove/
+ </VarListEntry>
+
+ <VarListEntry><term><option/--only-source/</>
+ <ListItem><Para>
+     Only has meaning for the <literal/source/ command. Indicates that the 
+ given source names are not to be mapped through the binary table.
+ Configuration Item: <literal/APT::Get::Only-Source/
+ </VarListEntry>
+
+ <VarListEntry><term><option/--diff-only/</><term><option/--tar-only/</>
+ <ListItem><Para>
+ Download only the diff or tar file of a source archive.
+ Configuration Item: <literal/APT::Get::Diff-Only/ and
+ <literal/APT::Get::Tar-Only/
+ </VarListEntry>
+ </VariableList>
+ </RefSect1>
+
+ <RefSect1><Title>Files</>
+ <variablelist>
+ <VarListEntry><term><filename>/etc/apt/sources.list</></term>
+ <ListItem><Para>
+ locations to fetch packages from.
+ Configuration Item: <literal/Dir::Etc::SourceList/.
+ </VarListEntry>
+
+ <VarListEntry><term><filename>&cachedir;/archives/</></term>
+ <ListItem><Para>
+     storage area for retrieved package files.
+ Configuration Item: <literal/Dir::Cache::Archives/.
+ </VarListEntry>
+
+ <VarListEntry><term><filename>&cachedir;/archives/partial/</></term>
+ <ListItem><Para>
+     storage area for package files in transit.
+ Configuration Item: <literal/Dir::Cache::Archives/ (implicit partial).
+ </VarListEntry>
+
+ <VarListEntry><term><filename>&statedir;/lists/</></term>
+ <ListItem><Para>
+ storage area for state information for each package resource specified in
+     &sources-list;.
+ Configuration Item: <literal/Dir::State::Lists/.
+ </VarListEntry>
+
+ <VarListEntry><term><filename>&statedir;/lists/partial/</></term>
+ <ListItem><Para>
+ storage area for state information in transit.
+ Configuration Item: <literal/Dir::State::Lists/ (implicit partial).
+ </VarListEntry>
+ </variablelist>
+ </RefSect1>
+
+ <RefSect1><Title>See Also</>
+ <para>
+ &apt-cache;, &dpkg;, &dselect;, &sources-list;, &apt-conf;, The
+ APT users guide in &docdir;.
+ </RefSect1>
+
+ <RefSect1><Title>Diagnostics</>
+ <para>
+ <command/apt-get/ returns zero on normal operation, decimal 100 on error.
+ </RefSect1>
+
+ &manbugs;
+ &manauthor;
+
+</refentry>
diff --git a/doc/apt-get.8.yo b/doc/apt-get.8.yo
deleted file mode 100644
index 4983b2581..000000000
--- a/doc/apt-get.8.yo
+++ /dev/null
@@ -1,302 +0,0 @@
-mailto(apt@packages.debian.org)
-manpage(apt-get)(8)(4 Dec 1998)(apt)()
-manpagename(apt-get)(APT package handling utility -- command-line interface)
-
-manpagesynopsis()
- apt-get [options] [command] [package ...]
-
-manpagedescription()
-
-apt-get is the command-line tool for handling packages, and may be considered
-the user's "back-end" to apt(8).
-
-em(command) is one of:
-itemize(
- it() update
- it() upgrade
- it() dselect-upgrade
- it() dist-upgrade
- it() install package1 [package2] [...]
- it() remove package1 [package2] [...]
- it() source package1 [package2] [...]
- it() check
- it() clean
- it() autoclean
-)
-
-Unless the -h, or --help option is given one of the above commands
-must be present.
-
-startdit()
-dit(bf(update))
-bf(update) is used to resynchronize the package overview files from their
-sources. The overviews of available packages are fetched from the
-location(s) specified in bf(/etc/apt/sources.list).
-For example, when using a Debian archive, this command retrieves and
-scans the bf(Packages.gz) files, so that information about new and updated
-packages is available. An bf(update) should always be performed before an
-bf(upgrade) bf(dist-upgrade). Please be aware that the overall progress
-meter will be incorrect as the size of the package files cannot be known in
-advance.
-
-dit(bf(upgrade))
-bf(upgrade) is used to install the newest versions of all packages currently
-installed on the system from the sources enumerated in
-bf(/etc/apt/sources.list). Packages currently installed with new versions
-available are retrieved and upgraded; under no circumstances are currently
-installed packages removed, or packages not already installed retrieved and
-installed. New versions of currently installed packages that cannot be
-upgraded without changing the install status of another package will be left
-at their current version. An bf(update) must be performed first so that
-bf(apt-get) knows that new versions of packages are available.
-
-dit(bf(dselect-upgrade))
-bf(dselect-upgrade)
-is used in conjunction with the traditional Debian GNU/Linux packaging
-front-end, bf(dselect (8)). bf(dselect-upgrade)
-follows the changes made by bf(dselect) to the em(Status)
-field of available packages, and performs the actions necessary to realize
-that state (for instance, the removal of old and the installation of new
-packages).
-
-dit(bf(dist-upgrade))
-bf(dist-upgrade),in addition to performing the function of bf(upgrade),
-also intelligently handles changing dependencies with new versions of
-packages; bf(apt-get) has a "smart" conflict resolution system, and it will
-attempt to upgrade the most important packages at the expense of less
-important ones if necessary. The bf(/etc/apt/sources.list) file contains a
-list of locations from which to retrieve desired package files.
-
-dit(bf(install))
-bf(install) is followed by one or more em(packages) desired for installation.
-Each em(package) is a package name, not a fully qualified filename
-(for instance, in a Debian GNU/Linux system, em(ldso) would be the argument
-provided, not em(ldso_1.9.6-2.deb)). All packages required by the package(s)
-specified for installation will also be retrieved and installed. The
-bf(/etc/apt/sources.list) file is used to locate the desired packages. If a
-hyphen is appended to the package name (with no intervening space), the
-identified package will be removed if it is installed. This latter feature
-may be used to override decisions made by apt-get's conflict resolution system.
-
-If no package matches the given expression and the expression contains one
-of '.', '?' or '*' then it is assumed to be a POSIX regex and it is applied
-to all package names in the database. Any matches are then installed (or
-removed). Note that matching is done by substring so 'lo*' matches 'how-lo'
-and 'lowest'. If this is undesired prefix with a '^' character.
-
-dit(bf(remove))
-bf(remove) is identical to bf(install) except that packages are removed
-instead of installed. If a plus sign is appended to the package name (with no
-intervening space), the identified package will be installed.
-
-dit(bf(source))
-bf(source) causes apt-get to fetch source packages. APT will examine the
-available packages to decide which source package to fetch. It will then
-find and download into the current directory the newest available version of
-that source package. Source packages are tracked separately from binary
-packages via bf(deb-src) type lines in the bf(/etc/apt/sources.list) file.
-This probably will mean that you will not get the same source as the package
-you have installed or as you could install. If the --compile options is
-specified then the package will be compiled to a binary .deb using
-dpkg-buildpackage, if --download-only is specified then the source package
-will not be unpacked.
-
-Note that source packages are not tracked like binary packages, they exist
-only in the current directory and are similar to downloading source
-tar balls.
-
-dit(bf(check))
-bf(check) is a diagnostic tool; it updates the package cache and checks for
-broken packages.
-
-dit(bf(clean))
-bf(clean) clears out the local repository of retrieved package files. It
-removes everything but the lock file from bf(/var/cache/apt/archives/)
-and bf(/var/cache/apt/archives/partial/).
-When APT is used as a bf(dselect(8)) method, bf(clean) is run automatically.
-Those who do not use dselect will likely want to run code(apt-get clean)
-from time to time to free up disk space.
-
-dit(bf(autoclean))
-Like bf(clean), bf(autoclean) clears out the local repository of retrieved
-package files. The difference is that it only removes package files that
-can no longer be downloaded, and are largely useless. This allows a
-cache to be maintained over a long period without it growing out of
-control.
-
-enddit()
-
-manpageoptions()
-All command line options may be set using the configuration file, the
-descriptions indicate the configuration option to set. For boolean
-options you can override the config file by using something like bf(-f-),
-bf(--no-f), bf(-f=no) or several other variations.
-
-startdit()
-dit(bf(-d, --download-only))
-Download only; package files are only retrieved, not unpacked or installed.
-Configuration Item: bf(APT::Get::Download-Only).
-
-dit(bf(-f, --fix-broken))
-Fix; attempt to correct a system with broken dependencies in
-place. This option, when used with install/remove, can omit any packages
-to permit APT to deduce a likely soltion. Any Package that are specified
-must completly correct the problem. The option is sometimes necessary when
-running APT for the first time; APT itself does not allow broken package
-dependencies to exist on a system. It is possible that a system's
-dependency structure can be so corrupt as to require manual intervention
-(which usually means using dselect or dpkg --remove to eliminate some of
-the offending packages). Use of this option together with -m may produce an
-error in some situations. Configuration Item: bf(APT::Get::Fix-Broken).
-
-dit(bf(-h, --help))
-Help; display a helpful usage message and exits.
-
-dit(bf(-v, --version))
-Show the program version.
-
-dit(bf(-m, --ignore-missing, --fix-missing))
-Ignore missing packages; If packages cannot be retrieved or fail the
-integrity check after retrieval (corrupted package files), hold back
-those packages and handle the result. Use of this option together with
--f may produce an error in some situations. If a package is selected for
-installation (particularly if it is mentioned on the command line) and it
-could not be downloaded then it will be silently held back.
-Configuration Item: bf(APT::Get::ignore-missing).
-
-dit(bf(--no-download))
-Disables downloading of packages. This is best used with --ignore-missing to
-force APT to use only the .debs it has already downloaded.
-Configuration Item: bf(APT::Get::No-Download).
-
-dit(bf(-q, --quiet))
-Quiet; produces output suitable for logging, omitting progress indicators.
-More q's will produce more quiet up to a maximum of 2. You can also use
-bf(-q=#) to set the quiet level, overriding the configuration file. Note that
-quiet level 2 implies -y, you should never use -qq without a no-action
-modifier such as -d, --print-uris or -s as APT may decided to do something
-you did not expect.
-Configuration Item: bf(quiet)
-
-dit(bf(-s, --simulate, --just-print, --dry-run, --recon, --no-act))
-No action; perform a simulation of events that would occur but do not
-actually change the system. Configuration Item: bf(APT::Get::Simulate).
-
-Simulate prints out
-a series of lines each one representing a dpkg operation, Configure (Conf),
-Remove (Remv), Unpack (Inst). Square brackets indicate broken packages with
-and empty set of square brackets meaning breaks that are of no consequence
-(rare).
-
-dit(bf(-y, --yes, --assume-yes))
-Automatic yes to prompts; assume "yes" as answer to all prompts and run
-non-interactively. If an undesirable situation, such as changing a held
-package or removing an essential package occurs then bf(apt-get) will
-abort. Configuration Item: bf(APT::Get::Assume-Yes).
-
-dit(bf(-u, --show-upgraded))
-Show upgraded packages; Print out a list of all packages that are to be
-upgraded. Configuration Item: bf(APT::Get::Show-Upgraded).
-
-dit(bf(-b, --compile, --build))
-Compile source packages after downloading them.
-Configuration Item: bf(APT::Get::Compile).
-
-dit(bf(--ignore-hold))
-Ignore package Holds; This causes bf(apt-get) to ignore a hold placed on
-a package. This may be useful in conjunction with bf(dist-upgrade) to
-override a large number of undesired holds. Configuration Item: bf(APT::Ignore-Hold).
-
-dit(bf(--no-upgrade))
-Do not upgrade packages; When used in conjunction with bf(install)
-bf(no-upgrade) will prevent packages listed from being upgraded if they
-are already installed. Configuration Item: bf(APT::Get::no-upgrade).
-
-dit(bf(--force-yes))
-Force yes; This is a dangerous option that will cause apt to continue without
-prompting if it is doing something potentially harmful. It should not be used
-except in very special situations. Using bf(force-yes) can potentially destroy
-your system! Configuration Item: bf(APT::Get::force-yes).
-
-dit(bf(--print-uris))
-Instead of fetching the files to install their URIs are printed. Each
-URI will have the path, the destination file name, the size and the expected
-md5 hash. Note that the file name to write to will not always match
-the file name on the remote site! This also works with the bf(source)
-command. Configuration Item: bf(APT::Get::Print-URIs).
-
-dit(bf(--purge))
-Use purge instead of remove for anything that would be removed.
-Configuration Item: bf(APT::Get::Purge).
-
-dit(bf(--reinstall))
-Re-Install packages that are already installed and at the newest version.
-
-dit(bf(--list-cleanup))
-This option defaults to on, use bf(--no-list-cleanup) to turn it off.
-When on apt-get will automatically manage the contents of
-/var/state/apt/lists to ensure that obsolete files are erased. The only
-reason to turn it off is if you frequently change your source list.
-Configuration Item: bf(APT::Get::List-Cleanup)
-
-dit(bf(--trivial-only))
-Only perform operations are 'trivial'. Logically this can be considered
-related to --assume-yes, where --assume-yes will answer yes to any prompt,
---trivial-only will answer no. Configuration Item: bf(APT::Get::Trivial-Only)
-
-dit(bf(--no-remove))
-If any packages are to be removed apt-get immediately aborts without
-prompting. Configuration Item: bf(APT::Get::No-Remove)
-
-dit(bf(--diff-only), bf(--tar-only))
-Download only the diff or tar file of a source archive.
-Configuration Item: bf(APT::Get::Diff-Only)
-
-dit(bf(-c, --config-file))
-Configuration File; Specify a configuration file to use. bf(apt-get) will
-read the default configuration file and then this configuration file. See
-bf(apt.conf(5)) for syntax information.
-
-dit(bf(-o, --option))
-Set a Configuration Option; This will set an arbitrary configuration option.
-The syntax is
-verb(-o Foo::Bar=bar)
-enddit()
-
-manpagefiles()
-itemize(
- it() /etc/apt/sources.list
- locations to fetch packages from
-
- it() /var/cache/apt/archives/
- storage area for retrieved package files
-
- it() /var/cache/apt/archives/partial/
- storage area for package files in transit
-
- it() /var/state/apt/lists/
- storage area for state information for each package resource specified in
- the source list
-
- it() /var/state/apt/lists/partial/
- storage area for state information in transit
-)
-
-manpageseealso()
-apt-cache(8),
-dpkg(8),
-dselect(8),
-sources.list(5),
-apt.conf(5),
-The APT Users Guide in /usr/doc/apt/
-
-manpagediagnostics()
-apt-get returns zero on normal operation, decimal 100 on error.
-
-manpagebugs()
-See http://bugs.debian.org/apt. If you wish to report a
-bug in bf(apt-get), please see bf(/usr/doc/debian/bug-reporting.txt)
-or the bf(bug(1)) command.
-
-manpageauthor()
-apt-get was written by the APT team <apt@packages.debian.org>.
diff --git a/doc/apt-sortpkgs.1.sgml b/doc/apt-sortpkgs.1.sgml
new file mode 100644
index 000000000..c939b973f
--- /dev/null
+++ b/doc/apt-sortpkgs.1.sgml
@@ -0,0 +1,73 @@
+<!-- -*- mode: sgml; mode: fold -*- -->
+<!doctype refentry PUBLIC "-//OASIS//DTD DocBook V3.1//EN" [
+
+<!ENTITY % aptent SYSTEM "apt.ent">
+%aptent;
+
+]>
+
+<refentry>
+ &apt-docinfo;
+
+ <refmeta>
+ <refentrytitle>apt-sortpkgs</>
+ <manvolnum>1</>
+ </refmeta>
+
+ <!-- Man page title -->
+ <refnamediv>
+ <refname>apt-sortpkgs</>
+ <refpurpose>Utility to sort package index files</>
+ </refnamediv>
+
+ <!-- Arguments -->
+ <refsynopsisdiv>
+ <cmdsynopsis>
+   <command>apt-sortpkgs</>
+ <arg><option>-hvs</></arg>
+ <arg><option>-o=<replaceable/config string/</></arg>
+ <arg><option>-c=<replaceable/file/</></arg>
+ <arg choice="plain" rep="repeat"><replaceable>file</replaceable></arg>
+ </cmdsynopsis>
+ </refsynopsisdiv>
+
+ <RefSect1><Title>Description</>
+ <para>
+ <command/apt-sortpkgs/ will take an index file (Source index or Package
+ index) and sort the records so that they are ordered by the package name.
+ It will also sort the internal fields of each record according to the
+ internal sorting rules.
+
+ <para>
+   All output is sent to stdout; the input must be a seekable file.
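+   <para>
+   For instance (the file names are illustrative):
+<informalexample><programlisting>
+apt-sortpkgs Packages.unsorted > Packages
+</programlisting></informalexample>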
+ </RefSect1>
+
+ <RefSect1><Title>Options</>
+ &apt-cmdblurb;
+
+ <VariableList>
+ <VarListEntry><term><option/-s/</><term><option/--source/</>
+ <ListItem><Para>
+ Use Source index field ordering.
+ Configuration Item: <literal/APT::SortPkgs::Source/.
+ </VarListEntry>
+
+ &apt-commonoptions;
+
+ </VariableList>
+ </RefSect1>
+
+ <RefSect1><Title>See Also</>
+ <para>
+ &apt-conf;
+ </RefSect1>
+
+ <RefSect1><Title>Diagnostics</>
+ <para>
+ <command/apt-sortpkgs/ returns zero on normal operation, decimal 100 on error.
+ </RefSect1>
+
+ &manbugs;
+ &manauthor;
+
+</refentry>
diff --git a/doc/apt.conf.5.sgml b/doc/apt.conf.5.sgml
new file mode 100644
index 000000000..da40e3df9
--- /dev/null
+++ b/doc/apt.conf.5.sgml
@@ -0,0 +1,407 @@
+<!-- -*- mode: sgml; mode: fold -*- -->
+<!doctype refentry PUBLIC "-//OASIS//DTD DocBook V3.1//EN" [
+
+<!ENTITY % aptent SYSTEM "apt.ent">
+%aptent;
+
+]>
+
+<refentry>
+ &apt-docinfo;
+
+ <refmeta>
+ <refentrytitle>apt.conf</>
+ <manvolnum>5</>
+ </refmeta>
+
+ <!-- Man page title -->
+ <refnamediv>
+ <refname>apt.conf</>
+   <refpurpose>Configuration file for APT</>
+ </refnamediv>
+
+ <RefSect1><Title>Description</>
+ <para>
+ <filename/apt.conf/ is the main configuration file for the APT suite of
+ tools, all tools make use of the configuration file and a common command line
+ parser to provide a uniform environment. When an APT tool starts up it will
+ read the configuration specified by the <envar/APT_CONFIG/ environment
+   variable (if any), then read the files in <literal/Dir::Etc::Parts/, 
+   then read the main configuration file specified by 
+   <literal/Dir::Etc::main/, and finally apply the
+ command line options to override the configuration directives, possibly
+ loading even more config files.
+ <para>
+ The configuration file is organized in a tree with options organized into
+ functional groups. Option specification is given with a double colon
+ notation, for instance <literal/APT::Get::Assume-Yes/ is an option within
+ the APT tool group, for the Get tool. Options do not inherit from their
+ parent groups.
+ <para>
+   Syntactically the configuration language is modeled after what the ISC tools 
+ such as bind and dhcp use. Each line is of the form
+ <literallayout>APT::Get::Assume-Yes "true";</literallayout> The trailing
+ semicolon is required and the quotes are optional. A new scope can be
+ opened with curly braces, like:
+<informalexample><programlisting>
+APT {
+ Get {
+ Assume-Yes "true";
+ Fix-Broken "true";
+ };
+};
+</programlisting></informalexample>
+ with newlines placed to make it more readable. Lists can be created by
+ opening a scope and including a single word enclosed in quotes followed by a
+   semicolon. Multiple entries can be included, each separated by a semicolon.
+<informalexample><programlisting>
+DPkg::Pre-Install-Pkgs {"/usr/sbin/dpkg-preconfigure --apt";};
+</programlisting></informalexample>
+ <para>
+ In general the sample configuration file in
+ <filename>&docdir;/examples/apt.conf</> &configureindex;
+ is a good guide for how it should look.
+ <para>
+ Two specials are allowed, <literal/#include/ and <literal/#clear/.
+ <literal/#include/ will include the given file, unless the filename
+ ends in a slash, in which case the whole directory is included.
+ <literal/#clear/ is used to erase a list of names.
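+ <para>
+ As a brief sketch (the file name and list name are purely illustrative,
+ and the quoting and trailing semicolon simply follow the general rule
+ stated above), the two specials are written like ordinary directives:
+<informalexample><programlisting>
+#include "/etc/apt/local.conf";
+#clear DPkg::Pre-Install-Pkgs;
+</programlisting></informalexample>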
+ <para>
+ All of the APT tools take a -o option which allows an arbitrary configuration
+ directive to be specified on the command line. The syntax is a full option
+ name (<literal/APT::Get::Assume-Yes/ for instance) followed by an equals
+ sign then the new value of the option. Lists can be appended too by adding
+ a trailing :: to the list name.
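+ <para>
+ For instance (the package name and hook path here are purely
+ illustrative), an option can be set and an entry appended to a list from
+ the command line like so:
+<informalexample><programlisting>
+apt-get -o APT::Get::Assume-Yes=true install hello
+apt-get -o DPkg::Pre-Install-Pkgs::=/usr/local/bin/my-hook install hello
+</programlisting></informalexample>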
+ </RefSect1>
+
+ <RefSect1><Title>The APT Group</>
+ <para>
+ This group of options controls general APT behavior as well as holding the
+ options for all of the tools.
+
+ <VariableList>
+ <VarListEntry><Term>Architecture</Term>
+ <ListItem><Para>
+ System Architecture; sets the architecture to use when fetching files and
+ parsing package lists. The internal default is the architecture apt was
+ compiled for.
+ </VarListEntry>
+
+ <VarListEntry><Term>Ignore-Hold</Term>
+ <ListItem><Para>
+ Ignore Held packages; This global option causes the problem resolver to
+ ignore held packages in its decision making.
+ </VarListEntry>
+
+ <VarListEntry><Term>Clean-Installed</Term>
+ <ListItem><Para>
+ Defaults to on. When turned on the autoclean feature will remove from the
+ cache any package which can no longer be downloaded. If turned off then
+ packages that are locally installed are also excluded from cleaning - but
+ note that APT provides no direct means to reinstall them.
+ </VarListEntry>
+
+ <VarListEntry><Term>Immediate-Configure</Term>
+ <ListItem><Para>
+ Disable Immediate Configuration; This dangerous option disables some
+ of APT's ordering code to cause it to make fewer dpkg calls. Doing
+ so may be necessary on some extremely slow single user systems but
+ is very dangerous and may cause package install scripts to fail or worse.
+ Use at your own risk.
+ </VarListEntry>
+
+ <VarListEntry><Term>Force-LoopBreak</Term>
+ <ListItem><Para>
+ Never enable this option unless you -really- know what you are doing. It
+ permits APT to temporarily remove an essential package to break a
+ Conflicts/Conflicts or Conflicts/Pre-Depend loop between two essential
+ packages. SUCH A LOOP SHOULD NEVER EXIST AND IS A GRAVE BUG. This option
+ will work if the essential packages are not tar, gzip, libc, dpkg, bash or
+ anything that those packages depend on.
+ </VarListEntry>
+
+ <VarListEntry><Term>Cache-Limit</Term>
+ <ListItem><Para>
+ APT uses a fixed size memory mapped cache file to store the 'available'
+ information. This sets the size of that cache.
+ </VarListEntry>
+
+ <VarListEntry><Term>Get</Term>
+ <ListItem><Para>
+ The Get subsection controls the &apt-get; tool, please see its
+ documentation for more information about the options here.
+ </VarListEntry>
+
+ <VarListEntry><Term>Cache</Term>
+ <ListItem><Para>
+ The Cache subsection controls the &apt-cache; tool, please see its
+ documentation for more information about the options here.
+ </VarListEntry>
+
+ <VarListEntry><Term>CDROM</Term>
+ <ListItem><Para>
+ The CDROM subsection controls the &apt-cdrom; tool, please see its
+ documentation for more information about the options here.
+ </VarListEntry>
+ </VariableList>
+ </RefSect1>
+
+ <RefSect1><Title>The Acquire Group</>
+ <para>
+ The <literal/Acquire/ group of options controls the download of packages
+ and the URI handlers.
+
+ <VariableList>
+ <VarListEntry><Term>Queue-Mode</Term>
+ <ListItem><Para>
+ Queuing mode; <literal/Queue-Mode/ can be one of <literal/host/ or
+ <literal/access/ which determines how APT parallelizes outgoing
+ connections. <literal/host/ means that one connection per target host
+ will be opened, <literal/access/ means that one connection per URI type
+ will be opened.
+ </VarListEntry>
+
+ <VarListEntry><Term>Retries</Term>
+ <ListItem><Para>
+ Number of retries to perform. If this is non-zero APT will retry failed
+ files the given number of times.
+ </VarListEntry>
+
+ <VarListEntry><Term>Source-Symlinks</Term>
+ <ListItem><Para>
+ Use symlinks for source archives. If set to true then source archives will
+ be symlinked when possible instead of copying. True is the default.
+ </VarListEntry>
+
+ <VarListEntry><Term>http</Term>
+ <ListItem><Para>
+ HTTP URIs; http::Proxy is the default http proxy to use. It is in the
+ standard form of <literal>http://[[user][:pass]@]host[:port]/</>. Per
+ host proxies can also be specified by using the form
+ <literal/http::Proxy::&lt;host&gt;/ with the special keyword <literal/DIRECT/
+ meaning to use no proxies. The <envar/http_proxy/ environment variable
+ will override all settings.
+ <para>
+ Three settings are provided for cache control with HTTP/1.1 compliant
+ proxy caches. <literal/No-Cache/ tells the proxy not to use its cached
+ response under any circumstances. <literal/Max-Age/ is sent only for
+ index files and tells the cache to refresh its object if it is older than
+ the given number of seconds. Debian updates its index files daily so the
+ default is 1 day. <literal/No-Store/ specifies that the cache should never
+ store this request; it is only set for archive files. This may be useful
+ to prevent polluting a proxy cache with very large .deb files. Note:
+ Squid 2.0.2 does not support any of these options.
+ <para>
+ The option <literal/timeout/ sets the timeout timer used by the method;
+ this applies to all things including connection timeout and data timeout.
+ <para>
+ One setting is provided to control the pipeline depth in cases where the
+ remote server is not RFC conforming or buggy (such as Squid 2.0.2).
+ <literal/Acquire::http::Pipeline-Depth/ can be a value from 0 to 5
+ indicating how many outstanding requests APT should send. A value of
+ zero MUST be specified if the remote host does not properly linger
+ on TCP connections - otherwise data corruption will occur. Hosts which
+ require this are in violation of RFC 2068.
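+ <para>
+ A rough sketch of an http block (the proxy host and the values shown are
+ purely illustrative, not defaults) might look like:
+<informalexample><programlisting>
+Acquire::http
+{
+  Proxy "http://proxy.example.org:8080/";
+  Proxy::ftp.debian.org "DIRECT";   // no proxy for this one host
+  Max-Age "86400";                  // one day, for index files
+  Timeout "120";
+  Pipeline-Depth "5";
+};
+</programlisting></informalexample>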
+ </VarListEntry>
+
+ <VarListEntry><Term>ftp</Term>
+ <ListItem><Para>
+ FTP URIs; ftp::Proxy is the default proxy server to use. It is in the
+ standard form of <literal>ftp://[[user][:pass]@]host[:port]/</> and is
+ overridden by the <envar/ftp_proxy/ environment variable. To use an ftp
+ proxy you will have to set the <literal/ftp::ProxyLogin/ script in the
+ configuration file. This entry specifies the commands to send to tell
+ the proxy server what to connect to. Please see
+ &configureindex; for an example of
+ how to do this. The substitution variables available are
+ <literal/$(PROXY_USER)/, <literal/$(PROXY_PASS)/, <literal/$(SITE_USER)/,
+ <literal/$(SITE_PASS)/, <literal/$(SITE)/, and <literal/$(SITE_PORT)/.
+ Each is taken from its respective URI component.
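+ <para>
+ As a rough sketch only (the exact command sequence depends on the proxy
+ in use), a ProxyLogin script built from the substitution variables above
+ could look like:
+<informalexample><programlisting>
+Acquire::ftp::ProxyLogin
+{
+  "USER $(SITE_USER)@$(SITE)";
+  "PASS $(SITE_PASS)";
+};
+</programlisting></informalexample>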
+ <para>
+ The option <literal/timeout/ sets the timeout timer used by the method;
+ this applies to all things including connection timeout and data timeout.
+ <para>
+ Several settings are provided to control passive mode. Generally it is
+ safe to leave passive mode on; it works in nearly every environment.
+ However some situations require that passive mode be disabled and port
+ mode ftp used instead. This can be done globally, for connections that
+ go through a proxy or for a specific host (see the sample config file
+ for examples).
+ <para>
+ It is possible to proxy FTP over HTTP by setting the <envar/ftp_proxy/
+ environment variable to an http url - see the discussion of the http method
+ above for syntax. You cannot set this in the configuration file and it is
+ not recommended to use FTP over HTTP due to its low efficiency.
+ <para>
+ The setting <literal/ForceExtended/ controls the use of RFC2428
+ <literal/EPSV/ and <literal/EPRT/ commands. The default is false, which means
+ these commands are only used if the control connection is IPv6. Setting this
+ to true forces their use even on IPv4 connections. Note that most FTP servers
+ do not support RFC2428.
+ </VarListEntry>
+
+ <VarListEntry><Term>cdrom</Term>
+ <ListItem><Para>
+ CDROM URIs; the only setting for CDROM URIs is the mount point,
+ <literal/cdrom::Mount/ which must be the mount point for the CDROM drive
+ as specified in <filename>/etc/fstab</>. It is possible to provide
+ alternate mount and unmount commands if your mount point cannot be listed
+ in the fstab (such as an SMB mount and old mount packages). The syntax
+ is to put <literallayout>"/cdrom/"::Mount "foo";</literallayout> within
+ the cdrom block. It is important to have the trailing slash. Unmount
+ commands can be specified using UMount.
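+ <para>
+ A sketch of such a block (the mount and unmount commands are placeholders
+ for whatever is appropriate on your system):
+<informalexample><programlisting>
+Acquire::cdrom
+{
+  "/cdrom/"
+  {
+    Mount "smbmount //server/cd /cdrom";
+    UMount "smbumount /cdrom";
+  };
+};
+</programlisting></informalexample>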
+ </VarListEntry>
+ </VariableList>
+ </RefSect1>
+
+ <RefSect1><Title>Directories</>
+ <para>
+ The <literal/Dir::State/ section has directories that pertain to local
+ state information. <literal/lists/ is the directory to place downloaded
+ package lists in and <literal/status/ is the name of the dpkg status file.
+ <literal/preferences/ is the name of the APT preferences file.
+ <literal/Dir::State/ contains the default directory to prefix on all sub
+ items if they do not start with <filename>/</> or <filename>./</>.
+ <para>
+ <literal/Dir::Cache/ contains locations pertaining to local cache
+ information, such as the two package caches <literal/srcpkgcache/ and
+ <literal/pkgcache/ as well as the location to place downloaded archives,
+ <literal/Dir::Cache::archives/. Generation of caches can be turned off
+ by setting their names to be blank. This will slow down startup but
+ save disk space. It is probably preferred to turn off the pkgcache rather
+ than the srcpkgcache. Like <literal/Dir::State/ the default
+ directory is contained in <literal/Dir::Cache/.
+ <para>
+ <literal/Dir::Etc/ contains the location of configuration files,
+ <literal/sourcelist/ gives the location of the sourcelist and
+ <literal/main/ is the default configuration file (setting has no effect,
+ unless it is done from the config file specified by
+ <envar/APT_CONFIG/).
+ <para>
+ The <literal/Dir::Etc::Parts/ setting reads in all the config fragments in
+ lexical order from the directory specified. After this is done then the
+ main config file is loaded.
+ <para>
+ Binary programs are pointed to by <literal/Dir::Bin/. <literal/methods/
+ specifies the location of the method handlers and <literal/gzip/,
+ <literal/dpkg/, <literal/apt-get/, <literal/dpkg-source/,
+ <literal/dpkg-buildpackage/ and <literal/apt-cache/ specify the location
+ of the respective programs.
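+ <para>
+ A minimal sketch of the directory prefixing described above (the values
+ are illustrative rather than authoritative defaults):
+<informalexample><programlisting>
+Dir "/"
+{
+  State "var/lib/apt/"
+  {
+    lists "lists/";                  // becomes /var/lib/apt/lists/
+    status "/var/lib/dpkg/status";   // absolute, so used as-is
+  };
+};
+</programlisting></informalexample>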
+ </RefSect1>
+
+ <RefSect1><Title>APT in DSelect</>
+ <para>
+ When APT is used as a &dselect; method several configuration directives
+ control the default behavior. These are in the <literal/DSelect/ section.
+
+ <VariableList>
+ <VarListEntry><Term>Clean</Term>
+ <ListItem><Para>
+ Cache Clean mode; this value may be one of always, auto, prompt and never.
+ always will remove all archives after they have been downloaded while auto
+ will only remove things that are no longer downloadable (replaced with a
+ new version for instance).
+ </VarListEntry>
+
+ <VarListEntry><Term>Options</Term>
+ <ListItem><Para>
+ The contents of this variable are passed to &apt-get; as command line
+ options when it is run for the install phase.
+ </VarListEntry>
+
+ <VarListEntry><Term>UpdateOptions</Term>
+ <ListItem><Para>
+ The contents of this variable are passed to &apt-get; as command line
+ options when it is run for the update phase.
+ </VarListEntry>
+
+ <VarListEntry><Term>PromptAfterUpdate</Term>
+ <ListItem><Para>
+ If true the [U]pdate operation in &dselect; will always prompt to continue.
+ The default is to prompt only on error.
+ </VarListEntry>
+ </VariableList>
+ </RefSect1>
+
+ <RefSect1><Title>How APT calls dpkg</>
+ <para>
+ Several configuration directives control how APT invokes &dpkg;. These are
+ in the <literal/DPkg/ section.
+
+ <VariableList>
+ <VarListEntry><Term>Options</Term>
+ <ListItem><Para>
+ This is a list of options to pass to dpkg. The options must be specified
+ using the list notation and each list item is passed as a single argument
+ to &dpkg;.
+ </VarListEntry>
+
+ <VarListEntry><Term>Pre-Invoke</Term><Term>Post-Invoke</Term>
+ <ListItem><Para>
+ This is a list of shell commands to run before/after invoking &dpkg;.
+ Like <literal/Options/ this must be specified in list notation. The
+ commands are invoked in order using <filename>/bin/sh</>; should any
+ fail, APT will abort.
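+ <para>
+ For example (the hook scripts named here are purely hypothetical):
+<informalexample><programlisting>
+DPkg::Pre-Invoke {"/usr/local/sbin/apt-pre-hook";};
+DPkg::Post-Invoke {"/usr/local/sbin/apt-post-hook";};
+</programlisting></informalexample>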
+ </VarListEntry>
+
+ <VarListEntry><Term>Pre-Install-Pkgs</Term>
+ <ListItem><Para>
+ This is a list of shell commands to run before invoking dpkg. Like
+ <literal/Options/ this must be specified in list notation. The commands
+ are invoked in order using <filename>/bin/sh</>; should any fail, APT
+ will abort. APT will pass to the commands on standard input the
+ filenames of all .deb files it is going to install, one per line.
+ <para>
+ Version 2 of this protocol dumps more information, including the
+ protocol version, the APT configuration space and the packages, files
+ and versions being changed. Version 2 is enabled by setting
+ <literal/DPkg::Tools::Options::cmd::Version/ to 2. <literal/cmd/ is a
+ command given to <literal/Pre-Install-Pkgs/.
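+ <para>
+ A sketch of how this might look for the dpkg-preconfigure hook shown
+ earlier, assuming <literal/cmd/ is given as the full path of the command:
+<informalexample><programlisting>
+DPkg::Pre-Install-Pkgs {"/usr/sbin/dpkg-preconfigure --apt";};
+DPkg::Tools::Options::/usr/sbin/dpkg-preconfigure::Version "2";
+</programlisting></informalexample>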
+ </VarListEntry>
+
+ <VarListEntry><Term>Run-Directory</Term>
+ <ListItem><Para>
+ APT chdirs to this directory before invoking dpkg; the default is
+ <filename>/</>.
+ </VarListEntry>
+
+ <VarListEntry><Term>Build-Options</Term>
+ <ListItem><Para>
+ These options are passed to &dpkg-buildpackage; when compiling packages.
+ The default is to disable signing and produce all binaries.
+ </VarListEntry>
+ </VariableList>
+ </RefSect1>
+
+ <RefSect1><Title>Debug Options</>
+ <para>
+ Most of the options in the <literal/debug/ section are not interesting to
+ the normal user; however, <literal/Debug::pkgProblemResolver/ shows
+ interesting output about the decisions dist-upgrade makes.
+ <literal/Debug::NoLocking/ disables file locking so APT can do some
+ operations as non-root and <literal/Debug::pkgDPkgPM/ will print out the
+ command line for each dpkg invocation. <literal/Debug::IdentCdrom/ will
+ disable the inclusion of statfs data in CDROM IDs.
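+ <para>
+ Since these are ordinary configuration options they can also be enabled
+ for a single run from the command line, for instance (the command shown
+ is merely an illustration):
+<informalexample><programlisting>
+apt-get -o Debug::pkgProblemResolver=true dist-upgrade
+</programlisting></informalexample>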
+ </RefSect1>
+
+ <RefSect1><Title>Examples</>
+ <para>
+ &configureindex; contains a
+ sample configuration file showing the default values for all possible
+ options.
+ </RefSect1>
+
+ <RefSect1><Title>Files</>
+ <para>
+ <filename>/etc/apt/apt.conf</>
+ </RefSect1>
+
+ <RefSect1><Title>See Also</>
+ <para>
+ &apt-cache; &apt-conf;
+ </RefSect1>
+
+ &manbugs;
+ &manauthor;
+
+</refentry>
diff --git a/doc/apt.conf.5.yo b/doc/apt.conf.5.yo
deleted file mode 100644
index d0759802f..000000000
--- a/doc/apt.conf.5.yo
+++ /dev/null
@@ -1,282 +0,0 @@
-mailto(apt@packages.debian.org)
-manpage(apt.conf)(5)(5 Dec 1998)(apt)()
-manpagename(apt.conf)(configuration file for APT)
-
-manpagedescription()
-bf(apt.conf) is the main configuration file for the APT suite of
-tools, all tools make use of the configuration file and a common command line
-parser to provide a uniform environment. When an APT tool starts up it will
-read bf(/etc/apt/apt.conf), then read the configuration specified by the
-bf($APT_CONFIG) environment variable and then finally apply the command line
-options to override the configuration directives, possibly loading more
-config files.
-
-The configuration file is organized in a tree with options organized into
-functional groups. Option specification is given with a double colon
-notation, for instance em(APT::Get::Assume-Yes) is an option within the
-APT tool group, for the Get tool. Options do not inherit from their parent
-groups.
-
-Syntacticly the configuration language is modeled after what the ISC tools
-such as bind and dhcp use. Each line is of the form
-quote(APT::Get::Assume-Yes "true";) The trailing semicolon is required and
-the quotes are optional. A new em(scope) can be opened with curly braces,
-like:
-verb(APT {
- Get {
- Assume-Yes "true";
- Fix-Broken "true";
- };
-};
-)
-with newlines placed to make
-it more readable. Lists can be created by opening a scope an including a
-single word enclosed in quotes followed by a semicolon.
-In general the sample configuration file in
-em(/usr/doc/apt/examples/apt.conf) and
-em(/usr/doc/apt/examples/configure-index)
-is a good guide for how it should look.
-
-All of the APT tools take a -o option which allows an arbitary configuration
-directive to be specified on the command line. The syntax is a full option
-name (APT::Get::Assume-Yes for instance) followed by an equals sign then the
-new value of the option. Lists can be appended too by adding a trailing ::
-to the list name.
-
-manpagesection(The APT Group)
-This group of options controls general APT behavoir as well as holding the
-options for all of the tools.
-
-startdit()
-dit(bf(Architecture))
-System Architecture; sets the architecture to use when fetching files and
-parsing package lists. The internal default is the architecture apt was
-compiled for.
-
-dit(bf(Ignore-Hold))
-Ignore Held packages; This global options causes the problem resolver to
-ignore held packages in its decision making.
-
-dit(bf(Clean-Installed))
-Defaults to on. When turned on the autoclean feature will remove any pacakge
-which can no longer be downloaded from the cache. If turned off then
-packages that are locally installed are also excluded from cleaning - but
-note that APT provides no direct means to reinstall them.
-
-dit(bf(Immediate-Configure))
-Disable Immedate Configuration; This dangerous option disables some
-of APT's ordering code to cause it to make fewer dpkg calls. Doing
-so may be necessary on some extremely slow single user systems but
-is very dangerous and may cause package install scripts to fail or worse.
-Use at your own risk.
-
-dit(bf(Force-LoopBreak))
-Never Enable this option unless you -really- know what you are doing. It
-permits APT to temporarily remove an essential package to break a
-Conflicts/Conflicts or Conflicts/Pre-Depend loop between two essential
-packages. SUCH A LOOP SHOULD NEVER EXIST AND IS A GRAVE BUG. This option will
-work if the essential packages are not tar, gzip, libc, dpkg, bash or
-anything that those packages depend on.
-
-dit(bf(Cache-Limit))
-APT uses a fixed size memory mapped cache file to store the 'available'
-information. This sets the size of that cache.
-
-dit(bf(Get))
-The Get subsection controls the bf(apt-get(8)) tool, please see its
-documentation for more information about the options here.
-
-dit(bf(Cache))
-The Cache subsection controls the bf(apt-cache(8)) tool, please see its
-documentation for more information about the options here.
-
-dit(bf(CDROM))
-The CDROM subsection controls the bf(apt-cdrom(8)) tool, please see its
-documentation for more information about the options here.
-
-enddit()
-
-manpagesection(The Acquire Group)
-The bf(Acquire) group of options controls the download of packages and the
-URI handlers.
-
-startdit()
-dit(bf(Queue-Mode))
-Queuing mode; bf(Queue-Mode) can be one of bf(host) or bf(access) which
-determins how APT parallelizes outgoing connections. bf(host) means that
-one connection per target host will be opened, bf(access) means that one
-connection per URI type will be opened.
-
-dit(bf(Retries))
-Number of retries to perform. If this is non-zero apt will retry failed
-files the given number of times.
-
-dit(bf(Source-Symlinks))
-Use symlinks for source archives. If set to true then source archives will
-be symlinked when possible instead of copying. True is the default
-
-dit(bf(http))
-HTTP URIs; http::Proxy is the default http proxy to use. It is in the standard
-form of em(http://[[user][:pass]@]host[:port]/). Per host proxies can also
-be specified by using the form http::Proxy::<host> with the special keyword
-em(DIRECT) meaning to use no proxies. The em($http_proxy) environment variable
-will override all settings.
-
-Three settings are provided for cache control with HTTP/1.1 complient proxy
-caches. bf(No-Cache) tells the proxy to not used its cached response under
-any circumstances, bf(Max-Age) is sent only for index files and tells the
-cache to refresh its object if it is older than the given number of seconds.
-Debian updates its index files daily so the default is 1 day. bf(No-Store)
-specifies that the cache should never store this request, it is only
-set for archive files. This may be usefull to prevent polluting a proxy cache
-with very large .deb files. Note: Squid 2.0.2 does not support any of
-these options.
-
-The option bf(timeout) sets the timeout timer used by the method, this
-applies to all things including connection timeout and data timeout.
-
-One setting is provided to control the pipeline depth in cases where the
-remote server is not RFC conforming or buggy (such as Squid 2.0.2)
-Acquire::http::Pipeline-Depth can be a value from 0 to 5 indicating how many
-outstanding requests APT should send.
-
-dit(bf(ftp))
-FTP URis; ftp::Proxy is the default proxy server to use. It is in the
-standard form of em(ftp://[[user][:pass]@]host[:port]/) and is overriden
-by the ftp_proxy environment variable. To use a ftp proxy you will have to
-set the ftp::ProxyLogin script in the configuration file. This entry
-specifies the commands to send to tell the proxy server what to connect
-to. Please see em(/usr/doc/apt/examples/configure-index) for an example of how
-to do this. The subsitution variables available are $(PROXY_USER),
-$(PROXY_PASS), $(SITE_USER), $(SITE_PASS), $(SITE), and $(SITE_PORT).
-Each is taken from it's respective URI component.
-
-The option bf(timeout) sets the timeout timer used by the method, this
-applies to all things including connection timeout and data timeout.
-
-Several settings are provided to control passive mode. Generally it is safe
-to leave passive mode on, it works in nearly every environment. However some
-situations require that passive mode be disabled and port mode ftp used
-instead. This can be done globally, for connections that go through a proxy
-or for a specific host (See the sample config file for examples)
-
-
-It is possible to proxy FTP over HTTP by setting the em(ftp_proxy)
-environment variable to a http url - see the discussion of the http method
-above for syntax. You cannot set this in the configuration file and it is
-not recommended to use FTP over HTTP due to its low efficiency.
-
-dit(bf(cdrom))
-CDROM URIs; the only setting for CDROM URIs is the mount point, cdrom::Mount
-which must be the mount point for the CDROM drive as specified in /etc/fstab.
-It is possible to provide alternate mount and unmount commands if your
-mount point cannot be listed in the fstab (such as an SMB mount). The syntax
-is to put "/cdrom/"::Mount "foo"; within the cdrom block. It is important to
-have the trailing slash. Unmount commands can be specified using UMount.
-
-enddit()
-
-manpagesection(Directories)
-The bf(Dir::State) section has directories that pertain to local state
-information. bf(lists) is the directory to place downloaded package lists
-in and bf(status) is the name of the dpkg status file. bf(Dir::State)
-contains the default directory to prefix on all sub items if they do not
-start with em(/) or em(./). bf(xstatus) and bf(userstatus) are for future
-use.
-
-bf(Dir::Cache) contains locations pertaining to local cache information, such
-as the two package caches bf(srcpkgcache) and bf(pkgcache) as well as the
-location to place downloaded archives, bf(Dir::Cache::archives). Like
-bf(Dir::State) the default directory is contained in bf(Dir::Cache)
-
-bf(Dir::Etc) contains the location of configuration files, bf(sourcelist)
-gives the location of the sourcelist and bf(main) is the default configuration
-file (setting has no effect)
-
-Binary programs are pointed to by bf(Dir::Bin). bf(methods) specifies the
-location of the method handlers and bf(gzip), bf(dpkg), bf(apt-get),
-bf(dpkg-source), bf(dpkg-buildpackage) and
-bf(apt-cache) specify the location of the respective programs.
-
-manpagesection(APT in DSelect)
-When APT is used as a bf(dselect(8)) method several configuration directives
-control the default behavoir. These are in the bf(DSelect) section.
-
-startdit()
-dit(bf(Clean))
-Cache Clean mode; this value may be one of always, auto, prompt and never.
-always will remove all archives after they have been downloaded while auto
-will only remove things that are no longer downloadable (replaced with a new
-version for instance)
-
-dit(bf(Options))
-The contents of this variable is passed to bf(apt-get(8)) as command line
-options when it is run for the install phase.
-
-dit(bf(UpdateOptions))
-The contents of this variable is passed to bf(apt-get(8)) as command line
-options when it is run for the update phase.
-
-dit(bf(PromptAfterUpdate))
-If true the [U]pdate operation in dselect will always prompt to continue.
-The default is to prompt only on error.
-enddit()
-
-manpagesection(How APT calls DPkg)
-Several configuration directives control how APT invokes dpkg. These are in
-the bf(DPkg) section.
-
-startdit()
-dit(bf(Options))
-This is a list of options to pass to dpkg. The options must be specified
-using the list notation and each list item is passed as a single arugment
-to dpkg.
-
-dit(bf(Pre-Invoke), bf(Post-Invoke))
-This is a list of shell commands to run before/after invoking dpkg. Like
-bf(Options) this must be specified in list notation. The commands
-are invoked in order using /bin/sh, should any fail APT will abort.
-
-dit(bf(Pre-Install-Pkgs))
-This is a list of shell commands to run before invoking dpkg. Like
-bf(Options) this must be specified in list notation. The commands
-are invoked in order using /bin/sh, should any fail APT will abort.
-Apt will pass to the commands on standard input the filenames of all
-.deb files it is going to install, one per line.
-
-dit(bf(Run-Directory))
-APT chdirs to this directory before invoking dpkg, the default is /.
-
-dit(bf(Build-Options))
-These options are passed to dpkg-buildpackage when compiling packages,
-the default is to disable signing and produce all binaries.
-
-enddit()
-
-manpagesection(Debug Options)
-Most of the options in the bf(debug) section are not interesting to the
-normal user, however bf(Debug::pkgProblemResolver) shows interesting
-output about the decisions dist-upgrade makes. bf(Debug::NoLocking)
-disables file locking so apt can do some operations as non-root and
-bf(Debug::pkgDPkgPM) will print out the command line for each dpkg
-invokation. bf(Debug::IdentCdrom) will disable the inclusion of statfs
-data in CDROM IDs.
-
-manpagesection(EXAMPLES)
-bf(/usr/doc/apt/examples/configure-index.gz) contains a sample configuration
-file showing the default values for all possible options.
-
-manpagesection(FILES)
-/etc/apt/apt.conf
-
-manpageseealso()
-apt-cache (8),
-apt-get (8)
-
-manpagebugs()
-See http://bugs.debian.org/apt. If you wish to report a
-bug in bf(apt-get), please see bf(/usr/doc/debian/bug-reporting.txt)
-or the bf(bug(1)) command.
-
-manpageauthor()
-apt-get was written by the APT team <apt@packages.debian.org>.
diff --git a/doc/apt.ent b/doc/apt.ent
new file mode 100644
index 000000000..fb1aa3c91
--- /dev/null
+++ b/doc/apt.ent
@@ -0,0 +1,159 @@
+<!-- -*- mode: sgml; mode: fold -*- -->
+
+<!-- Some common paths.. -->
+<!ENTITY docdir "/usr/share/doc/apt/">
+<!ENTITY configureindex "<filename>&docdir;/examples/configure-index</>">
+<!ENTITY aptconfdir "<filename>/etc/apt.conf</>">
+<!ENTITY statedir "/var/lib/apt">
+<!ENTITY cachedir "/var/cache/apt">
+
+<!-- Cross references to other man pages -->
+<!ENTITY apt-conf "
+ <CiteRefEntry>
+ <RefEntryTitle><filename/apt.conf/</RefEntryTitle>
+ <ManVolNum/5/
+ </CiteRefEntry>
+">
+
+<!ENTITY apt-get "
+ <CiteRefEntry>
+ <RefEntryTitle><command/apt-get/</RefEntryTitle>
+ <ManVolNum/8/
+ </CiteRefEntry>
+">
+
+<!ENTITY apt-cdrom "
+ <CiteRefEntry>
+ <RefEntryTitle><command/apt-cdrom/</RefEntryTitle>
+ <ManVolNum/8/
+ </CiteRefEntry>
+">
+
+<!ENTITY apt-cache "
+ <CiteRefEntry>
+ <RefEntryTitle><command/apt-cache/</RefEntryTitle>
+ <ManVolNum/8/
+ </CiteRefEntry>
+">
+
+<!ENTITY sources-list "
+ <CiteRefEntry>
+ <RefEntryTitle><filename/sources.list/</RefEntryTitle>
+ <ManVolNum/5/
+ </CiteRefEntry>
+">
+
+<!ENTITY bug "
+ <CiteRefEntry>
+ <RefEntryTitle><command/bug/</RefEntryTitle>
+ <ManVolNum/1/
+ </CiteRefEntry>
+">
+
+<!ENTITY dpkg "
+ <CiteRefEntry>
+ <RefEntryTitle><command/dpkg/</RefEntryTitle>
+ <ManVolNum/8/
+ </CiteRefEntry>
+">
+
+<!ENTITY dpkg-buildpackage "
+ <CiteRefEntry>
+ <RefEntryTitle><command/dpkg-buildpackage/</RefEntryTitle>
+ <ManVolNum/1/
+ </CiteRefEntry>
+">
+
+<!ENTITY gzip "
+ <CiteRefEntry>
+ <RefEntryTitle><command/gzip/</RefEntryTitle>
+ <ManVolNum/1/
+ </CiteRefEntry>
+">
+
+<!ENTITY dpkg-scanpackages "
+ <CiteRefEntry>
+ <RefEntryTitle><command/dpkg-scanpackages/</RefEntryTitle>
+ <ManVolNum/8/
+ </CiteRefEntry>
+">
+
+<!ENTITY dpkg-scansources "
+ <CiteRefEntry>
+ <RefEntryTitle><command/dpkg-scansources/</RefEntryTitle>
+ <ManVolNum/8/
+ </CiteRefEntry>
+">
+
+<!ENTITY dselect "
+ <CiteRefEntry>
+ <RefEntryTitle><command/dselect/</RefEntryTitle>
+ <ManVolNum/8/
+ </CiteRefEntry>
+">
+
+<!-- Boiler plate docinfo section -->
+<!ENTITY apt-docinfo "
+ <docinfo>
+ <address><email>apt@packages.debian.org</></address>
+ <author><firstname>Jason</> <surname>Gunthorpe</></>
+ <copyright><year>1998-2000</> <holder>Jason Gunthorpe</></>
+ <date>20 September 2000</>
+ </docinfo>
+">
+
+<!-- Boiler plate Bug reporting section -->
+<!ENTITY manbugs "
+ <RefSect1><Title>Bugs</>
+ <para>
+ See the <ulink url='http://bugs.debian.org/apt'>APT bug page</>.
+ If you wish to report a bug in APT, please see
+ <filename>/usr/doc/debian/bug-reporting.txt</> or the &bug; command.
+ </RefSect1>
+">
+
+<!-- Boiler plate Author section -->
+<!ENTITY manauthor "
+ <RefSect1><Title>Author</>
+ <para>
+ APT was written by the APT team <email>apt@packages.debian.org</>.
+ </RefSect1>
+">
+
+<!-- Should be used within the option section of the text to
+ put in the blurb about -h, -v, -c and -o -->
+<!ENTITY apt-commonoptions "
+ <VarListEntry><term><option/-h/</><term><option/--help/</>
+ <ListItem><Para>
+ Show a short usage summary.
+ </VarListEntry>
+
+ <VarListEntry><term><option/-v/</><term><option/--version/</>
+ <ListItem><Para>
+ Show the program version.
+ </VarListEntry>
+
+ <VarListEntry><term><option/-c/</><term><option/--config-file/</>
+ <ListItem><Para>
+ Configuration File; Specify a configuration file to use.
+ The program will read the default configuration file and then this
+ configuration file. See &apt-conf; for syntax information.
+ </VarListEntry>
+
+ <VarListEntry><term><option/-o/</><term><option/--option/</>
+ <ListItem><Para>
+ Set a Configuration Option; This will set an arbitrary configuration
+ option. The syntax is <option>-o Foo::Bar=bar</>.
+ </VarListEntry>
+">
+
+<!-- Boiler plate blurb pointing out that command line options can also
+     be set from the configuration file -->
+<!ENTITY apt-cmdblurb "
+ <para>
+ All command line options may be set using the configuration file; the
+ descriptions indicate the configuration option to set. For boolean
+ options you can override the config file by using something like
+ <option/-f-/, <option/--no-f/, <option/-f=no/ or several other variations.
+ </para>
+">
diff --git a/doc/apt_preferences.5.sgml b/doc/apt_preferences.5.sgml
new file mode 100644
index 000000000..fdac01d37
--- /dev/null
+++ b/doc/apt_preferences.5.sgml
@@ -0,0 +1,227 @@
+<!-- -*- mode: sgml; mode: fold -*- -->
+<!doctype refentry PUBLIC "-//OASIS//DTD DocBook V3.1//EN" [
+
+<!ENTITY % aptent SYSTEM "apt.ent">
+%aptent;
+
+]>
+
+<refentry>
+ &apt-docinfo;
+
+ <refmeta>
+ <refentrytitle>apt_preferences</>
+ <manvolnum>5</>
+ </refmeta>
+
+ <!-- Man page title -->
+ <refnamediv>
+ <refname>apt_preferences</>
+ <refpurpose>Preference control file for APT</>
+ </refnamediv>
+
+ <RefSect1><Title>Description</>
+ <para>
+ The APT preferences file controls various aspects of the APT system.
+ It is meant to be user editable and manipulatable from software. The file
+ consists of a number of records formed like the dpkg status file: blank
+ line separated sections of text, with each line starting with a tag
+ separated from its value by a colon. It is stored in
+ <filename>/etc/apt/preferences</>.
+ </RefSect1>
+
+ <RefSect1><Title>Versioning</>
+ <para>
+ One purpose of the preferences file is to let the user select which version
+ of a package will be installed. This selection can be made in a number of
+ ways that fall into three categories, version, release and origin.
+ <para>
+ Selection by version can be done by exact match or prefix match. The format
+ is <literal/2.1.2/ or <literal/2.2*/ for a prefix match. Matching by prefix
+ can be used to ignore the <literal/r/ in the Debian release versioning, like
+ <literal/2.1r*/ or to ignore Debian specific revisions, <literal/1.1-*/.
+ When matching versions with a prefix the highest matching version will
+ always be picked.
+ <para>
+ Selection by release is more complicated and has three forms. The primary
+ purpose of release selections is to identify a set of packages that match
+ a specific vendor, or release (i.e. Debian 2.1). The first two forms are
+ shortcuts intended for quick command line use. If the first character of the
+ specification is a digit then it is considered to be a release version match,
+ otherwise a release label match. Specifications which contain equals are
+ full release data matches and are a comma separated list of one letter keys
+ followed by an equals sign and then the string. Examples:
+<informalexample><programlisting>
+v=2.1*,o=Debian,c=main
+l=Debian
+a=stable
+</programlisting></informalexample>
+ <para>
+ The data for these matches are taken from the <filename/Release/ files
+ that APT downloads during an <literal/update/. The available keys are:
+ <VariableList>
+ <VarListEntry><term>a= Archive</term>
+ <ListItem><Para>
+ This is the common name we give our archives, such as <literal/stable/ or
+ <literal/unstable/. The special name <literal/now/ is used to designate
+ the set of packages that are currently installed.
+ </VarListEntry>
+
+ <VarListEntry><term>c= Component</term>
+ <ListItem><Para>
+ Refers to the sub-component of the archive, <literal/main/,
+ <literal/contrib/ etc. Component may be omitted if there are no
+ components for this archive.
+ </VarListEntry>
+
+ <VarListEntry><term>v= Version</term>
+ <ListItem><Para>
+ This is a version string with the same properties as in the Packages file.
+ It represents the release level of the archive. Typical Debian release
+ numbers look like <literal/2.1r2/ with the r designating the release of
+ 2.1. New releases are limited to security updates.
+ </VarListEntry>
+
+ <VarListEntry><term>o= Origin</term>
+ <ListItem><Para>
+ This specifies who is providing this archive. In the case of Debian the
+ string will read <literal/Debian/. Other providers may use their own
+ string.
+ </VarListEntry>
+
+ <VarListEntry><term>l= Label</term>
+ <ListItem><Para>
+ This carries the encompassing name of the distribution. For Debian proper
+ this field reads <literal/Debian/. For derived distributions it should
+ contain their proper name.
+ </VarListEntry>
+ </VariableList>
+ <para>
+ The final selection method is by origin. This is simply the site name
+ of the originating package files. The empty string is used for file URIs.
+ <para>
+ Version selection, particularly the latter two methods, is used in many
+ different parts of APT, not just the preferences file.
+ </RefSect1>
+
+ <RefSect1><Title>Candidate Version Policy</>
+ <para>
+ Internally APT maintains a list of all available versions for all packages.
+ If you place multiple releases or vendors in your &sources-list; file then
+ these features are available. By default APT selects the highest version
+ from all automatic sources. Some sources, such as
+ <filename>project/experimental</> are marked Not Automatic - these fall
+ to the bottom of the selection pile.
+ <para>
+ When deciding what version to use APT assigns a priority to each available
+ version of the package. It then does two things: first it selects
+ the highest priority version that is newer than the installed version of the
+ package, then it selects the highest priority version that is older than
+ the installed version. Next, if the older versions have a priority greater
+ than 1000 they are compared with the priority of the upgrade set and the
+ larger becomes the selected result. Otherwise the downgrade versions are
+ ignored and the highest priority of the upgrade set is selected.
+ <para>
+ It is possible to think of the priorities in strata:
+ <VariableList>
+ <VarListEntry><term>1000 and up</term>
+ <ListItem><Para>
+ Downgradable priorities
+ </VarListEntry>
+
+ <VarListEntry><term>1000</term>
+ <ListItem><Para>
+ The downgrade prevention barrier
+ </VarListEntry>
+
+ <VarListEntry><term>100 to 1000</term>
+ <ListItem><Para>
+ Standard priorities. 990 is the priority set by the
+ <option/--target-release/ &apt-get; option. 989 is the start for auto
+ priorities and 500 is the priority of all the default package files.
+ </VarListEntry>
+
+ <VarListEntry><term>100</term>
+ <ListItem><Para>
+ The currently installed version
+ </VarListEntry>
+
+ <VarListEntry><term>0 to 100</term>
+ <ListItem><Para>
+ Non automatic priorities. These are only used if the package
+ is not installed and there is no other version available.
+ </VarListEntry>
+
+ <VarListEntry><term>less than 0</term>
+ <ListItem><Para>
+ The version is never selected.
+ </VarListEntry>
+ </VariableList>
+ <para>
+ Giving a pin a priority greater than 1000 will allow APT to downgrade
+ in order to get to that version.
+ <para>
+ Each package may be pinned to a specific version and each Package file
+ has a priority for every package inside. The highest priority assigned
+ to a package is the one that is used.
+ <para>
+ A package pin looks like this:
+<informalexample><programlisting>
+Package: apt
+Pin: version 0.4.0
+Pin-Priority: 1001
+</programlisting></informalexample>
+ The first line specifies the package, the second gives the Pin specification
+ and the last gives the priority of this pin. The first word of the pin
+ specification may be version, release or origin; the remainder of the field
+ is described in the Versioning section above.
+ <para>
+ A default pin is how the priorities of package files are set. Any number
+ of default pins may be specified; the first matching default will select
+ the priority of the package file. Only release or origin may be used in
+ the Pin specification since they match Package files.
+<informalexample><programlisting>
+Package: *
+Pin: release v=2.1*
+Pin-Priority: 998
+</programlisting></informalexample>
+ <para>
+ If the Pin-Priority field is omitted then the priority defaults to 989 for
+ both cases.
+
+ <RefSect2><title>Interesting Effects</>
+ <para>
+ Due to the downgrade prevention barrier at priority 1000 it is possible
+ that a lower priority version will be selected if the higher priority
+ would cause a downgrade. For instance, if package foo has versions
+ <literal/1.2/, <literal/1.1/ and <literal/1.0/ available, with
+ <literal/1.1/ being the currently installed version and the priorities of
+ each version being 900, 100 and 950 respectively, the winning version will be
+ <literal/1.2/.
+ <para>
+ In practice this is often desired. A user may use a default pin to
+ make the stable distribution the default and then use the
+ <option/--target-release/ option with &apt-get; to select newer versions
+ from unstable. The packages that have been upgraded to unstable will
+ continue to follow the versions that are available in unstable since
+ the stable versions now fall below the downgrade prevention barrier.
+ <para>
+ If this is not desired then a default pin should be used to make unstable
+ have a priority less than 100.
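+ <para>
+ A sketch of such a default pin (the priority value is only illustrative;
+ anything below 100 gives the described effect):
+<informalexample><programlisting>
+Package: *
+Pin: release a=unstable
+Pin-Priority: 50
+</programlisting></informalexample>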
+ <para>
+ Users of third party add-ons such as Helix GNOME can use this mechanism to
+ force the usage of Helix packages, or force the usage of Debian packages
+ by setting the priority of that source sufficiently high. It is even
+ possible to mass downgrade from one set of packages to another by
+ using a priority larger than 1000.
+ </RefSect2>
+ </RefSect1>
+
+ <RefSect1><Title>See Also</>
+ <para>
+ &apt-cache; &apt-conf;
+ </RefSect1>
+
+ &manbugs;
+ &manauthor;
+
+</refentry>
diff --git a/doc/cache.sgml b/doc/cache.sgml
index 6c2307579..7d2334761 100644
--- a/doc/cache.sgml
+++ b/doc/cache.sgml
@@ -4,7 +4,7 @@
<title>APT Cache File Format</title>
<author>Jason Gunthorpe <email>jgg@debian.org</email></author>
-<version>$Id: cache.sgml,v 1.7 1999/05/23 22:55:55 jgg Exp $</version>
+<version>$Id: cache.sgml,v 1.8 2001/02/20 07:03:17 jgg Exp $</version>
<abstract>
This document describes the complete implementation and format of the APT
@@ -140,11 +140,13 @@ This is the first item in the file.
unsigned long VersionCount;
unsigned long DependsCount;
unsigned long PackageFileCount;
- unsigned long MaxVerFileSize;
// Offsets
unsigned long FileList; // PackageFile
unsigned long StringList; // StringItem
+ unsigned long VerSysName; // StringTable
+ unsigned long Architecture; // StringTable
+ unsigned long MaxVerFileSize;
// Allocation pools
struct
@@ -155,7 +157,7 @@ This is the first item in the file.
} Pools[7];
// Package name lookup
- unsigned long HashTable[512]; // Package
+ unsigned long HashTable[2*1024]; // Package
};
</example>
<taglist>
@@ -191,9 +193,15 @@ the client should refuse the load the file.
<tag>DependsCount
<tag>PackageFileCount<item>
These indicate the number of each structure contianed in the cache.
-PackageCount is especially usefull for generating user state structures.
+PackageCount is especially useful for generating user state structures.
See Package::Id for more info.
+<tag>VerSysName<item>
+String representing the versioning system used for this cache.
+
+<tag>Architecture<item>
+Architecture the cache was built against.
+
<tag>MaxVerFileSize<item>
The maximum size of a raw entry from the original Package file
(ie VerFile::Size) is stored here.
@@ -252,9 +260,7 @@ the Header->HashTable.
// Pointers
unsigned long Name; // Stringtable
unsigned long VersionList; // Version
- unsigned long TargetVer; // Version
unsigned long CurrentVer; // Version
- unsigned long TargetDist; // StringTable (StringItem)
unsigned long Section; // StringTable (StringItem)
// Linked lists
@@ -286,17 +292,10 @@ package. In this way multiple versions of a package can be cleanly handled
by the system. Furthermore, this linked list is guarenteed to be sorted
from Highest version to lowest version with no duplicate entries.
-<tag>TargetVer
<tag>CurrentVer<item>
-This is an index (pointer) to the sub version that is being targeted for
-upgrading. CurrentVer is an index to the installed version, either can be
+CurrentVer is an index to the installed version; it can be
0.
-<tag>TargetDist<item>
-This indicates the target distribution. Automatic upgrades should not go
-outside of the specified dist. If it is 0 then the global target dist should
-be used. The string should be contained in the StringItem list.
-
<tag>Section<item>
This indicates the deduced section. It should be "Unknown" or the section
of the last parsed item.
@@ -334,7 +333,7 @@ status file emitter uses this to track which packages have been emitted
already.
<tag>Flags<item>
-Flags are some usefull indicators of the package's state.
+Flags are some useful indicators of the package's state.
</taglist>
@@ -357,6 +356,8 @@ Header.FileList
unsigned long Origin; // Stringtable
unsigned long Label; // Stringtable
unsigned long Architecture; // Stringtable
+ unsigned long Site; // Stringtable
+ unsigned long IndexType; // Stringtable
unsigned long Size;
// Linked list
@@ -381,6 +382,12 @@ Refers the the physical disk file that this PacakgeFile represents.
This is the release information. Please see the files document for a
description of what the release information means.
+<tag>Site<item>
+The site the index file was fetched from.
+
+<tag>IndexType<item>
+A string indicating what sort of index file this is.
+
<tag>Size<item>
Size is provided as a simple check to ensure that the package file has not
been altered.
@@ -622,7 +629,7 @@ this version.
<sect>StringItem
<p>
StringItem is used for generating single instances of strings. Some things
-like Section Name are are usefull to have as unique tags. It is part of
+like Section Name are are useful to have as unique tags. It is part of
a linked list based at Header::StringList.
<example>
struct StringItem
diff --git a/doc/examples/configure-index b/doc/examples/configure-index
index 30ab29219..78171c9ba 100644
--- a/doc/examples/configure-index
+++ b/doc/examples/configure-index
@@ -1,4 +1,4 @@
-// $Id: configure-index,v 1.2 2000/05/13 01:52:59 jgg Exp $
+// $Id: configure-index,v 1.3 2001/02/20 07:03:17 jgg Exp $
/* This file is an index of all APT configuration directives. It should
NOT actually be used as a real config file, though it is a completely
valid file. Most of the options have sane default values, unless
@@ -32,15 +32,15 @@ APT
Fix-Broken "false";
Fix-Missing "false";
Show-Upgraded "false";
- No-Upgrade "false";
+ Upgrade "true";
Print-URIs "false";
Compile "false";
- No-Download "false";
+ Download "true";
Purge "false";
List-Cleanup "true";
ReInstall "false";
Trivial-Only "false";
- No-Remove "false";
+ Remove "true";
};
Cache
@@ -48,6 +48,7 @@ APT
Important "false";
AllVersions "false";
GivenOnly "false";
+ RecruseDepends "false";
};
CDROM
@@ -125,10 +126,10 @@ Acquire
};
// Directory layout
-Dir
+Dir "/"
{
// Location of the state dir
- State "/var/state/apt/"
+ State "var/lib/apt/"
{
lists "lists/";
xstatus "xstatus";
@@ -138,16 +139,17 @@ Dir
};
// Location of the cache dir
- Cache "/var/cache/apt/" {
+ Cache "var/cache/apt/" {
archives "archives/";
srcpkgcache "srcpkgcache.bin";
pkgcache "pkgcache.bin";
};
// Config files
- Etc "/etc/apt/" {
+ Etc "etc/apt/" {
sourcelist "sources.list";
main "apt.conf";
+ preferences "preferences";
};
// Locations of binaries
@@ -169,6 +171,7 @@ DSelect
Options "-f";
UpdateOptions "";
PromptAfterUpdate "no";
+ CheckDir "no";
}
DPkg
@@ -201,6 +204,7 @@ Debug
pkgAcquire "false";
pkgAcquire::Worker "false";
pkgDPkgPM "false";
+ pkgOrderList "false";
pkgInitialize "false"; // This one will dump the configuration space
NoLocking "false";
diff --git a/doc/examples/ftp-archive.conf b/doc/examples/ftp-archive.conf
new file mode 100644
index 000000000..a1866ba2d
--- /dev/null
+++ b/doc/examples/ftp-archive.conf
@@ -0,0 +1,81 @@
+/* This configuration file describes the standard Debian distribution
+ as it once looked */
+
+Dir
+{
+ ArchiveDir "/org/ftp.debian.org/ftp/";
+ OverrideDir "/org/ftp.debian.org/scripts/override/";
+ CacheDir "/org/ftp.debian.org/scripts/cache/";
+};
+
+Default
+{
+ Packages::Compress ". gzip";
+ Sources::Compress "gzip";
+ Contents::Compress "gzip";
+ DeLinkLimit 10000; // 10 Meg delink per day
+ MaxContentsChange 10000; // 10 Meg of new contents files per day
+};
+
+TreeDefault
+{
+ Contents::Header "/org/ftp.debian.org/scripts/masterfiles/Contents.top";
+ BinCacheDB "packages-$(ARCH).db";
+
+ // These are all defaults and are provided for completeness
+ Directory "$(DIST)/$(SECTION)/binary-$(ARCH)/";
+ Packages "$(DIST)/$(SECTION)/binary-$(ARCH)/Packages";
+
+ SrcDirectory "$(DIST)/$(SECTION)/source/";
+ Sources "$(DIST)/$(SECTION)/source/Sources";
+
+ Contents "$(DIST)/Contents-$(ARCH)";
+};
+
+tree "dists/woody"
+{
+ Sections "main contrib non-free";
+ Architectures "alpha arm hurd-i386 i386 m68k powerpc sparc sparc64 source";
+ BinOverride "override.woody.$(SECTION)";
+ SrcOverride "override.woody.$(SECTION).src";
+};
+
+tree "dists/potato"
+{
+ Sections "main contrib non-free";
+ Architectures "alpha arm i386 m68k powerpc sparc source";
+ BinOverride "override.potato.$(SECTION)";
+ SrcOverride "override.woody.$(SECTION).src";
+};
+
+tree "dists/slink"
+{
+ Sections "main contrib non-free";
+ Architectures "alpha i386 m68k sparc source";
+ BinOverride "override.slink.$(SECTION)";
+ SrcOverride "override.woody.$(SECTION).src";
+ External-Links false; // Slink should contain no links outside itself
+};
+
+
+bindirectory "project/experimental"
+{
+ Sources "project/experimental/Sources";
+ Packages "project/experimental/Packages";
+
+ BinOverride "override.experimental";
+ BinCacheDB "packages-experimental.db";
+ SrcOverride "override.experimental.src";
+};
+
+bindirectory "dists/proposed-updates"
+{
+ Packages "project/proposed-updates/Packages";
+ Contents "project/proposed-updates/Contents";
+
+ BinOverride "override.slink.all3";
+ BinOverride "override.slink.all3.src";
+ BinCacheDB "packages-proposed-updates.db";
+};
+
+
diff --git a/doc/examples/sources.list b/doc/examples/sources.list
index ed5ad75ea..9f2343277 100644
--- a/doc/examples/sources.list
+++ b/doc/examples/sources.list
@@ -7,4 +7,4 @@ deb http://security.debian.org stable/updates main contrib non-free
# Uncomment if you want the apt-get source function to work
#deb-src http://http.us.debian.org/debian stable main contrib non-free
-#deb-src http://non-us.debian.org/debian-non-US stable non-US
+#deb-src http://non-us.debian.org/debian-non-US stable/non-US main contrib non-free
diff --git a/doc/files.sgml b/doc/files.sgml
index 2b68cf9bc..6a9f3ed27 100644
--- a/doc/files.sgml
+++ b/doc/files.sgml
@@ -4,7 +4,7 @@
<title>APT Files</title>
<author>Jason Gunthorpe <email>jgg@debian.org</email></author>
-<version>$Id: files.sgml,v 1.7 1999/02/15 06:38:03 jgg Exp $</version>
+<version>$Id: files.sgml,v 1.8 2001/02/20 07:03:17 jgg Exp $</version>
<abstract>
This document describes the complete implementation and format of the
@@ -41,7 +41,7 @@ multiple package files.
<p>
The var directory structure is as follows:
<example>
- /var/state/apt/
+ /var/lib/apt/
lists/
partial/
xstatus
@@ -66,7 +66,7 @@ The var directory structure is as follows:
</example>
<p>
-As is specified in the FHS 2.0 /var/state/apt is used for application
+As is specified in the FHS 2.1 /var/lib/apt is used for application
data that is not expected to be user modified. /var/cache/apt is used
for regeneratable data and is where the package cache and downloaded .debs
go.
@@ -156,7 +156,7 @@ URIs in the source list support a large number of access schemes.
<tag>file<item>
The file scheme allows an arbitary directory in the file system to be
- considered as a debian archive. This is usefull for NFS mounts and
+ considered as a debian archive. This is useful for NFS mounts and
local mirrors/archives.
<example>
file:/var/debian
@@ -183,10 +183,10 @@ from the ascii character set. Examples:
<example>
http://www.debian.org/archive/dists/stable/binary-i386/Packages
-/var/state/apt/lists/www.debian.org_archive_dists_stable_binary-i386_Packages
+/var/lib/apt/lists/www.debian.org_archive_dists_stable_binary-i386_Packages
cdrom:Debian 1.3/debian/Packages
-/var/state/apt/info/Debian%201.3_debian_Packages
+/var/lib/apt/info/Debian%201.3_debian_Packages
</example>
<p>
diff --git a/doc/guide.it.sgml b/doc/guide.it.sgml
new file mode 100644
index 000000000..e251fe053
--- /dev/null
+++ b/doc/guide.it.sgml
@@ -0,0 +1,585 @@
+<!doctype debiandoc system>
+<!-- -*- mode: sgml; mode: fold -*- -->
+<book>
+<title>Guida dell'utente di APT</title>
+
+<author>Jason Gunthorpe <email>jgg@debian.org</email></author>
+<author>Traduzione di Eugenia Franzoni <email>eugenia@linuxcare.com</email>
+</author>
+<version>$Id: guide.it.sgml,v 1.2 2001/02/20 07:03:17 jgg Exp $</version>
+
+<abstract>
+Guida per l'uso del gestore di pacchetti APT.
+</abstract>
+
+<copyright>
+Copyright &copy; Jason Gunthorpe, 1998.
+
+<p>Ed. italiana Copyright &copy; Eugenia Franzoni, 2000.
+
+<p>
+"APT" e questo documento sono software libero, e li si può ridistribuire
+e/o modificare secondo i termini della Licenza Pubblica Generica GNU
+(GPL), pubblicata dalla Free Software Foundation, nella versione 2 o
+(se preferite) qualsiasi versione successiva.
+
+<p>"APT" and this document are free software; you can redistribute them and/or
+modify them under the terms of the GNU General Public License as published
+by the Free Software Foundation; either version 2 of the License, or (at your
+option) any later version.
+
+<p>
+Per ulteriori dettagli sui sistemi GNU/Linux si veda il testo
+completo della licenza nel file
+/usr/doc/copyright/GPL.
+</copyright>
+
+<toc sect>
+
+<!-- General {{{ -->
+<!-- ===================================================================== -->
+<chapt>Descrizione generale
+
+<p>
+Il pacchetto APT al momento contiene due sezioni, il metodo APT
+<prgn>dselect</> e l'interfaccia utente a linea di comando <prgn>apt-get</>;
+entrambi danno modo di installare e rimuovere pacchetti, e di scaricarne
+altri da Internet.
+
+<sect>Anatomia del sistema di pacchettizzazione
+<p>
+Il sistema di pacchettizzazione di Debian contiene un gran numero di
+informazioni associate a ciascun pacchetto, per assicurarsi che si
+integri facilmente ed in maniera pulita nel sistema; la più
+importante di esse è il sistema di dipendenze.
+
+<p>
+Il sistema di dipendenze permette ai singoli programmi di fare uso
+degli elementi condivisi del sistema, quali le librerie; per
+ridurre il numero di elementi che l'utente medio debba installare,
+le porzioni di programmi che non vengono usate spesso vengono poste
+in pacchetti separati. Inoltre, è possibile avere più di una scelta per
+cose quali i programmi di posta elettronica, i server X e così via.
+
+<p>
+Il primo passo per capire il sistema di dipendenze è la
+comprensione del concetto di dipendenza semplice: un pacchetto richiede
+che un altro sia installato insieme ad esso per poter
+funzionare.
+
+
+<p>
+Ad esempio, mail-crypt è un'estensione di emacs che aiuta a criptare le
+mail con PGP. Se PGP non è installato, mail-crypt è inutile, quindi
+mail-crypt ha una dipendenza semplice da PGP. Inoltre, dato che si tratta
+di un'estensione di emacs, mail-crypt dipende anche da emacs, senza il
+quale è totalmente inutile.
+
+<p>
+L'altro tipo di dipendenza importante da capire è la dipendenza di
+conflitto; con questa, un pacchetto che venga installato insieme ad un
+altro pacchetto non funziona, e si hanno seri problemi al sistema.
+Come esempio, si consideri un programma di trasporto della posta,
+quale sendmail, exim o qmail: non è possibile averne due contemporaneamente,
+perché entrambi hanno bisogno di restare in ascolto sulla stessa porta di rete
+per ricevere la posta. Tentare di installarne due danneggerebbe seriamente il
+sistema, quindi ciascun programma di trasporto della posta ha una
+dipendenza di conflitto con tutti gli altri.
+
+<p>
+Come ulteriore complicazione, c'è la possibilità che un pacchetto
+voglia prendere il posto di un altro; ad esempio, exim e sendmail per molte
+cose sono identici, dato che entrambi gestiscono la posta e comprendono
+un'interfaccia comune, quindi il sistema di pacchettizzazione deve dichiarare
+che sono entrambi agenti di trasporto della posta, e che gli altri
+pacchetti a cui serve uno dei due devono dipendere da un pacchetto
+fittizio agente-di-trasporto-della-posta. Quando si modificano
+a mano i pacchetti, questo può portare a moltissima confusione.
+
+<p>
+In ciascun momento una singola dipendenza può essere soddisfatta o meno
+dai pacchetti già installati; APT cerca di risolvere i problemi
+di dipendenze con un buon numero di algoritmi automatici, che aiutano
+a selezionare i pacchetti da installare.
+</sect>
+
+</chapt>
+ <!-- }}} -->
+<!-- apt-get {{{ -->
+<!-- ===================================================================== -->
+<chapt>apt-get
+
+<p>
+<prgn>apt-get</> fornisce un modo semplice di installare i pacchetti dalla
+linea di comando. Diversamente da <prgn>dpkg</>, <prgn>apt-get</> non
+capisce i nomi dei file .deb, ma utilizza il vero nome dei pacchetti,
+e può installare archivi .deb solo da una fonte.
+
+<p>
+La prima <footnote>Se state usando un proxy server http, dovete prima ancora
+impostare la variabile d'ambiente http_proxy; vedere
+sources.list(5).</footnote>
+cosa da fare prima di usare <prgn>apt-get</> è impostare l'elenco dei
+pacchetti dalle fonti in modo che il programma sappia quali pacchetti
+sono disponibili. Lo si fa con <tt>apt-get update</>. Ad esempio,
+
+<p>
+<example>
+# apt-get update
+Get http://ftp.de.debian.org/debian-non-US/ stable/binary-i386/ Packages
+Get http://llug.sep.bnl.gov/debian/ frozen/contrib Packages
+Reading Package Lists... Done
+Building Dependency Tree... Done
+</example>
+
+<p>
+Dopo aver aggiornato l'elenco si possono usare molti comandi:
+<taglist>
+<tag>upgrade<item>
+Upgrade tenterà di fare un aggiornamento indolore del sistema completo,
+senza installare nuovi pacchetti o rimuoverne di esistenti, e senza
+aggiornare un pacchetto che possa rovinarne altri. Upgrade farà un elenco
+di tutti i pacchetti che non avrà potuto aggiornare, cosa che in genere
+significa che questi dipendono da nuovi pacchetti o vanno in conflitto
+con altri. Per forzare la loro installazione si può usare
+<prgn>dselect</> o <tt>apt-get install</>.
+
+<tag>install<item>
+Install viene usato per installare i singoli pacchetti dando il loro nome.
+Il pacchetto viene automaticamente scaricato ed installato, cosa molto utile
+se già se ne conosce il nome e non si vuole entrare in grafica per
+selezionarlo. Al comando si possono passare anche più pacchetti, che saranno
+tutti scaricati. L'installazione automatica cerca di risolvere i problemi
+di dipendenze con gli altri pacchetti elencati, stampa un riassunto e
+chiede conferma se si devono modificare altri pacchetti che non siano quelli
+sulla linea di comando.
+
+<tag>dist-upgrade<item>
+Dist-upgrade fa un aggiornamento completo, progettato in modo da rendere
+semplici gli aggiornamenti tra versioni di Debian. Usa un algoritmo
+sofisticato per determinare il miglior insieme di pacchetti da installare,
+aggiornare e rimuovere per arrivare alla versione più aggiornata
+del sistema possibile. In alcune situazioni può essere vantaggioso usare
+dist-upgrade invece che sprecare tempo a risolvere manualmente le
+dipendenze con <prgn>dselect</>. Una volta completato dist-upgrade, si può
+usare <prgn>dselect</> per installare eventuali pacchetti che sono stati
+tralasciati.
+
+<p>
+È importante controllare attentamente cosa intende fare dist-upgrade,
+dato che le sue decisioni a volte possono essere abbastanza sorprendenti.
+</taglist>
+
+<p>
+<prgn>apt-get</> ha diverse opzioni a linea di comando, che vengono
+documentate dettagliatamente nella sua pagina man,
+<manref name="apt-get" section="8">. L'opzione più utile è
+<tt>-d</>, che non installa i file scaricati: se il sistema deve
+scaricare un gran numero di pacchetti, non è bene farglieli installare
+subito, in caso dovesse andare male qualcosa. Dopo aver usato <tt>-d</>,
+gli archivi scaricati possono essere installati semplicemente dando di
+nuovo lo stesso comando senza l'opzione <tt>-d</>.
+
+</chapt>
+ <!-- }}} -->
+<!-- DSelect {{{ -->
+<!-- ===================================================================== -->
+<chapt>DSelect
+<p>
+Il metodo APT di <prgn>dselect</> fornisce tutte le funzionalità di APT
+all'interno dell'interfaccia grafica di selezione dei pacchetti
+<prgn>dselect</>. <prgn>dselect</> viene usato per selezionare i pacchetti
+da installare o rimuovere, ed APT li installa.
+
+<p>
+Per abilitare il metodo APT dovete selezionare [A]ccess in <prgn>dselect</>
+e scegliere il metodo APT; vi verrà chiesto un insieme di fonti
+(<em>Sources</>), cioè di posti da cui scaricare gli archivi.
+Tali fonti possono essere siti Internet remoti, mirror locali di Debian
+o CDROM; ciascuna di esse può fornire una parte dell'archivio Debian,
+ed APT le combinerà insieme in un set completo di pacchetti. Se avete un
+CDROM è una buona idea indicare quello per primo, e poi i mirror, in modo
+da avere accesso alle ultime versioni; APT userà in questo modo automaticamente
+i pacchetti sul CDROM prima di scaricarli da Internet.
+
+<p>
+<example>
+ Set up a list of distribution source locations
+
+ Please give the base URL of the debian distribution.
+ The access schemes I know about are: http file
+
+ For example:
+ file:/mnt/debian,
+ ftp://ftp.debian.org/debian,
+ http://ftp.de.debian.org/debian,
+
+
+ URL [http://llug.sep.bnl.gov/debian]:
+</example>
+
+<p>
+La configurazione delle fonti inizia chiedendo la base dell'archivio Debian,
+propone come default un mirror HTTP, e poi chiede la distribuzione
+da scaricare.
+
+<p>
+<example>
+ Please give the distribution tag to get or a path to the
+ package file ending in a /. The distribution
+ tags are typically something like: stable unstable frozen non-US
+
+ Distribution [stable]:
+</example>
+
+<p>
+La distribuzione (``distribution'') fa riferimento alla versione Debian
+dell'archivio: <em>stable</> è l'ultima rilasciata, ed <em>unstable</>
+è quella di sviluppo. <em>non-US</> è disponibile solo su alcuni mirror,
+e contiene dei pacchetti in cui viene usata della tecnologia di criptazione
+o altre cose che non possano essere esportate dagli Stati Uniti; importare
+questi pacchetti negli US è però legale.
+
+<p>
+<example>
+ Please give the components to get
+ The components are typically something like: main contrib non-free
+
+ Components [main contrib non-free]:
+</example>
+
+<p>
+L'elenco dei componenti (``components'') si riferisce alla lista di
+sotto-distribuzioni da scaricare. Ciascuna distribuzione viene divisa in
+base al copyright del software: la main contiene pacchetti la cui licenza
+soddisfa le DFSG, mentre contrib e non-free contengono software che ha
+diverse restrizioni sull'uso e sulla distribuzione.
+
+<p>
+Si può inserire un qualsiasi numero di fonti, e lo script di
+configurazione continuerà a chiedere fino a che non avrete specificato tutti
+gli elementi che volete.
+
+<p>
+Prima di cominciare ad usare <prgn>dselect</> è necessario aggiornare
+l'elenco dei pacchetti disponibili selezionando [U]pdate dal menù:
+si tratta di un sovrainsieme di ciò che fa <tt>apt-get update</>,
+che rende l'informazione scaricata disponibile a
+<prgn>dselect</>. [U]pdate deve essere fatto anche se prima è stato dato
+<tt>apt-get update</>.
+
+<p>
+Si può a questo punto continuare selezionando i pacchetti desiderati
+usando [S]elect e poi installando con [I]nstall. Se si usa il metodo APT,
+i comandi [C]onfig e [R]emove non hanno significato, dato che entrambe le
+fasi sono contenute in [I]nstall.
+
+<p>
+Per default APT rimuoverà automaticamente i file dei pacchetti (.deb) una volta
+che questi saranno stati installati con successo. Per modificare questo
+comportamento, si inserisca <tt>Dselect::clean "prompt";</> in /etc/apt/apt.conf.
+
+</chapt>
+ <!-- }}} -->
+<!-- The Interfaces {{{ -->
+<!-- ===================================================================== -->
+<chapt>L'interfaccia
+
+<p>
+Entrambi i metodi, <prgn>dselect</> APT ed <prgn>apt-get</>, condividono la
+stessa interfaccia; si tratta di un sistema semplice che indica in genere
+cosa sta per fare, e poi lo fa.
+<footnote>
+Il metodo <prgn>dselect</> è in realtà un insieme di script di wrapper ad
+<prgn>apt-get</>. Il metodo fornisce delle funzionalità maggiori del
+solo <prgn>apt-get</>.
+</footnote>
+Dopo la stampa di un riassunto delle operazioni che saranno fatte,
+APT stampa dei messaggi informativi sullo stato del sistema, in modo che
+possiate avere davanti agli occhi a quale punto dell'operazione si trova,
+e quanto ancora si deve aspettare.
+
+<!-- ===================================================================== -->
+<sect>Avvio
+
+<p>
+Prima di ciascuna operazione, eccetto l'aggiornamento della lista, APT
+compie alcune operazioni per prepararsi, oltre a dei controlli dello
+stato del sistema. In qualsiasi momento le stesse operazioni possono essere
+fatte con <tt>apt-get check</>
+<p>
+<example>
+# apt-get check
+Reading Package Lists... Done
+Building Dependency Tree... Done
+</example>
+
+<p>
+La prima cosa che fa è leggere tutti i file dei pacchetti in memoria,
+usando uno schema di caching in modo da rendere la stessa operazione più
+veloce la seconda volta che la si fa. Se alcuni dei file dei pacchetti
+non vengono trovati, sono ignorati e viene stampato un avvertimento
+all'uscita di apt-get.
+
+<p>
+L'operazione finale consiste in un'analisi dettagliata delle
+dipendenze del sistema: viene controllato che tutte le dipendenze dei
+singoli pacchetti installati o non scompattati siano soddisfatte.
+Se vengono individuati dei problemi, viene stampato un resoconto,
+ed <prgn>apt-get</> esce senza eseguire alcuna operazione.
+
+<p>
+<example>
+# apt-get check
+Reading Package Lists... Done
+Building Dependency Tree... Done
+You might want to run `apt-get -f install' to correct these.
+Sorry, but the following packages have unmet dependencies:
+ 9fonts: Depends: xlib6g but it is not installed
+ uucp: Depends: mailx but it is not installed
+ blast: Depends: xlib6g (>= 3.3-5) but it is not installed
+ adduser: Depends: perl-base but it is not installed
+ aumix: Depends: libgpmg1 but it is not installed
+ debiandoc-sgml: Depends: sgml-base but it is not installed
+ bash-builtins: Depends: bash (>= 2.01) but 2.0-3 is installed
+ cthugha: Depends: svgalibg1 but it is not installed
+ Depends: xlib6g (>= 3.3-5) but it is not installed
+ libreadlineg2: Conflicts:libreadline2 (<< 2.1-2.1)
+</example>
+
+<p>
+In questo esempio il sistema ha molti problemi, tra cui uno piuttosto serio
+con la libreadlineg2. Per ciascun pacchetto che ha dipendenze non soddisfatte,
+viene stampata una linea che indica il pacchetto che crea il problema e
+quali problemi ci sono. Viene inclusa inoltre una breve spiegazione
+del perché il pacchetto ha un problema di dipendenze.
+
+<p>
+Ci sono due modi in cui un sistema possa arrivare in uno stato problematico
+di questo genere: il primo è causato dal fatto che <prgn>dpkg</> possa
+mancare alcune relazioni sottili tra pacchetti durante un aggiornamento
+del sistema<footnote>APT considera comunque tutte le dipendenze note,
+e cerca di prevenire problemi ai pacchetti</footnote>; il secondo è possibile
+se l'installazione di un pacchetto fallisce, ed in questo caso è possibile
+che un pacchetto venga scompattato senza che tutti quelli da cui dipende
+siano stati installati.
+
+<p>
+La seconda possibilità è meno seria della prima, dato che APT gestisce
+l'ordine di installazione dei pacchetti; in entrambi i casi l'opzione
+<tt>-f</> di <prgn>apt-get</> gli farà trovare una soluzione e lo farà
+continuare. Il metodo APT di <prgn>dselect</> comprende sempre l'opzione
+<tt>-f</> per permettere di configurare facilmente anche i pacchetti con
+script errati.
+
+<p>
+Se viene usata però l'opzione <tt>-f</> per correggere un sistema in uno
+stato molto problematico, è possibile che anche con l'opzione il programma
+fallisca, subito o durante la sequenza di installazione. In entrambi i casi
+è necessario usare dpkg a mano (probabilmente usando delle opzioni
+di forzatura) per correggere quanto basta per poter fare continuare APT.
+</sect>
+
+<!-- ===================================================================== -->
+<sect>Il resoconto sullo stato
+
+<p>
+Prima di procedere, <prgn>apt-get</> presenterà un resoconto delle operazioni
+che sta per fare. In genere tale resoconto varierà con il tipo di operazioni
+da fare, ma ci sono alcuni elementi comuni: in tutti i casi gli elenchi
+dipendono dallo stato finale delle cose, e tengono conto dell'opzione
+<tt>-f</> e di altre attività rilevanti per il comando da eseguire.
+
+<sect1>L'elenco dei pacchetti Extra
+<p>
+<example>
+The following extra packages will be installed:
+ libdbd-mysql-perl xlib6 zlib1 xzx libreadline2 libdbd-msql-perl
+ mailpgp xdpkg fileutils pinepgp zlib1g xlib6g perl-base
+ bin86 libgdbm1 libgdbmg1 quake-lib gmp2 bcc xbuffy
+ squake pgp-i python-base debmake ldso perl libreadlineg2
+ ssh
+</example>
+
+<p>
+L'elenco dei pacchetti Extra mostra tutti i pacchetti che verranno installati
+o aggiornati oltre a quelli indicati sulla linea di comando. Viene generato
+solo per il comando <tt>install</>. I pacchetti elencati sono spesso il
+risultato di un'operazione di auto installazione (Auto Install).
+</sect1>
+
+<sect1>I pacchetti da rimuovere
+<p>
+<example>
+The following packages will be REMOVED:
+ xlib6-dev xpat2 tk40-dev xkeycaps xbattle xonix
+ xdaliclock tk40 tk41 xforms0.86 ghostview xloadimage xcolorsel
+ xadmin xboard perl-debug tkined xtetris libreadline2-dev perl-suid
+ nas xpilot xfig
+</example>
+
+<p>
+L'elenco dei pacchetti da rimuovere (Remove) indica tutti i pacchetti che
+verranno rimossi dal sistema. Può essere mostrato per una qualsiasi delle
+operazioni, e deve sempre essere esaminato attentamente per assicurarsi
+che non venga eliminato qualcosa di importante. Con l'opzione <tt>-f</>
+è particolarmente probabile che vengano eliminati dei pacchetti, ed in questo
+caso va fatta estrema attenzione. La lista può contenere dei pacchetti
+che verranno rimossi perché sono già rimossi parzialmente, forse a causa
+di un'installazione non terminata correttamente.
+</sect1>
+
+<sect1>L'elenco dei nuovi pacchetti installati
+<p>
+<example>
+The following NEW packages will be installed:
+ zlib1g xlib6g perl-base libgdbmg1 quake-lib gmp2 pgp-i python-base
+</example>
+
+<p>
+L'elenco dei nuovi pacchetti installati (New) è semplicemente un appunto
+su quello che accadrà. I pacchetti nell'elenco non sono al momento installati
+nel sistema, ma lo saranno alla fine delle operazioni di APT.
+</sect1>
+
+<sect1>L'elenco dei pacchetti trattenuti
+<p>
+<example>
+The following packages have been kept back
+ compface man-db tetex-base msql libpaper svgalib1
+ gs snmp arena lynx xpat2 groff xscreensaver
+</example>
+
+<p>
+In ogni caso in cui il sistema viene aggiornato nel suo insieme, c'è la
+possibilità che non possano venire installate nuove versioni di alcuni
+pacchetti, dato che potrebbero richiedere l'installazione di pacchetti non
+presenti nel sistema, o entrare in conflitto con altri già presenti.
+In questo caso, il pacchetto viene elencato nella lista di quelli
+trattenuti (Kept Back). Il miglior modo per far aggiornare i pacchetti
+elencati in questa lista è installarli esplicitamente con <tt>apt-get install</>
+o usare <prgn>dselect</> per risolverne i problemi.
+</sect1>
+
+<sect1>Messaggi di attenzione sui pacchetti trattenuti
+<p>
+<example>
+The following held packages will be changed:
+ cvs
+</example>
+
+<p>
+A volte si può richiedere ad APT di installare un pacchetto
+che è stato trattenuto; in questi casi viene stampato un messaggio di
+attenzione, che avverte che il pacchetto verrà modificato. Questo
+dovrebbe accadere solo durante operazioni di dist-upgrade o di install.
+</sect1>
+
+<sect1>Resoconto finale
+<p>
+Infine, APT stamperà un riassunto di tutte le modifiche che accadranno.
+
+<p>
+<example>
+206 packages upgraded, 8 newly installed, 23 to remove and 51 not upgraded.
+12 packages not fully installed or removed.
+Need to get 65.7M/66.7M of archives. After unpacking 26.5M will be used.
+</example>
+
+<p>
+La prima linea del riassunto è semplicemente una versione ridotta di tutte
+le liste, ed include il numero di aggiornamenti -- cioè dei pacchetti
+già installati per cui sono disponibili nuove versioni. La seconda
+linea indica il numero di pacchetti con problemi di configurazione,
+probabilmente in conseguenza di un'installazione non andata a buon fine.
+La linea finale indica i requisiti di spazio dell'installazione: i primi
+due numeri indicano rispettivamente il numero di byte da trasferire da
+posizioni remote e la dimensione totale di tutti gli archivi necessari
+per l'installazione. Il numero successivo
+indica la differenza in dimensione tra i pacchetti già installati
+e quelli che lo saranno, ed è approssimativamente equivalente allo spazio
+richiesto in /usr dopo l'installazione. Se si stanno rimuovendo dei
+pacchetti, il valore può indicare lo spazio che verrà liberato.
+
+<p>
+Si possono generare altri resoconti usando l'opzione -u per mostrare
+i pacchetti da aggiornare, ma sono simili all'esempio precedente.
+</sect>
+
+<!-- ===================================================================== -->
+<sect>La visualizzazione dello stato
+<p>
+Durante il download degli archivi e dei file dei pacchetti, APT
+stampa una serie di messaggi di stato.
+
+<p>
+<example>
+# apt-get update
+Get:1 http://ftp.de.debian.org/debian-non-US/ stable/non-US/ Packages
+Get:2 http://llug.sep.bnl.gov/debian/ frozen/contrib Packages
+Hit http://llug.sep.bnl.gov/debian/ frozen/main Packages
+Get:4 http://ftp.de.debian.org/debian-non-US/ unstable/binary-i386/ Packages
+Get:5 http://llug.sep.bnl.gov/debian/ frozen/non-free Packages
+11% [5 frozen/non-free `Waiting for file' 0/32.1k 0%] 2203b/s 1m52s
+</example>
+
+<p>
+Le linee che cominciano con <em>Get</> vengono stampate quando APT inizia
+a scaricare un file, e l'ultima linea indica il progresso dell'operazione.
+Il primo valore in percentuale indica la percentuale totale di tutti i file;
+dato che la dimensione dei file Package non è nota, purtroppo a volte
+<tt>apt-get update</> fa una stima poco accurata.
+
+<p>
+La sezione successiva della linea di stato viene ripetuta una volta per
+ciascuna fase del download, ed indica l'operazione in corso, insieme
+ad alcune informazioni utili su cosa stia accadendo. A volte questa
+sezione contiene solamente <em>Forking</>, che significa che il sistema
+operativo sta caricando il modulo. La prima parola dopo la parentesi quadra
+aperta è il nome breve dell'oggetto che si sta scaricando, che per gli archivi
+è il nome del pacchetto.
+
+<p>
+All'interno delle virgolette c'è una stringa informativa, che indica il
+progresso della fase di negoziazione del download. Tipicamente comincia con
+<em>Connecting</>, procede con <em>Waiting for file</> e poi con
+<em>Downloading</> o <em>Resuming</>. Il valore finale è il numero di byte
+che sono stati scaricati dal sito remoto: una volta cominciato il
+download viene rappresentato come <tt>102/10.2k</>, che indica che
+sono stati scaricati 102 byte di 10.2 kilobyte. La dimensione totale
+viene sempre espressa in notazione a quattro cifre, per risparmiare
+spazio. Dopo la dimensione viene indicato un indicatore
+progressivo della percentuale del file. Il penultimo elemento è la velocità
+istantanea media, che viene aggiornata ogni 5 secondi, e riflette la
+velocità di trasferimento dei dati in quel periodo. Infine, viene
+visualizzato il tempo stimato per il trasferimento, che viene aggiornato
+periodicamente e riflette il tempo necessario per completare tutte le
+operazioni alla velocità di trasferimento mostrata.
+
+<p>
+La visualizzazione dello stato viene aggiornata ogni mezzo secondo per
+fornire un feedback costante del processo di download, e le linee Get
+scorrono indietro quando viene cominciato il download di un nuovo file.
+Dato che la visualizzazione dello stato viene costantemente
+aggiornata, non è adatta per essere registrata in un file; per non
+visualizzarla si può usare l'opzione <tt>-q</>.
+</sect>
+
+<!-- ===================================================================== -->
+<sect>Dpkg
+
+<p>
+APT usa <prgn>dpkg</> per installare gli archivi e passerà all'interfaccia
+<prgn>dpkg</> una volta finito il download.
+<prgn>dpkg</> porrà anche alcune domande durante la manipolazione dei
+pacchetti, ed i pacchetti stessi
+potranno farne altre. Prima di ciascuna domanda viene
+proposta una descrizione di quello che sta per chiedere, e le domande
+sono troppo diverse per poter essere discusse in maniera completa in questa
+occasione.
+</sect>
+
+</chapt>
+ <!-- }}} -->
+
+</book>
diff --git a/doc/guide.sgml b/doc/guide.sgml
index 67efbc93f..1d6923aad 100644
--- a/doc/guide.sgml
+++ b/doc/guide.sgml
@@ -4,7 +4,7 @@
<title>APT User's Guide</title>
<author>Jason Gunthorpe <email>jgg@debian.org</email></author>
-<version>$Id: guide.sgml,v 1.2 1998/11/23 01:15:59 jgg Exp $</version>
+<version>$Id: guide.sgml,v 1.3 2001/02/20 07:03:17 jgg Exp $</version>
<abstract>
This document provides an overview of how to use the the APT package manager.
@@ -55,9 +55,9 @@ of a simple dependency. The meaning of a simple dependency is that a package
requires another package to be installed at the same time to work properly.
<p>
-For instance, mail-crypt is an emacs extension that aids in encrypting email
-with PGP. Without PGP installed mail-crypt is useless, so mail-crypt has a
-simple dependency on PGP. Also, because it is an emacs extension it has a
+For instance, mailcrypt is an emacs extension that aids in encrypting email
+with GPG. Without GPG installed mailcrypt is useless, so mailcrypt has a
+simple dependency on GPG. Also, because it is an emacs extension it has a
simple dependency on emacs, without emacs it is completely useless.
<p>
@@ -280,7 +280,7 @@ how much is left to do.
<p>
Before all operations, except update, APT performs a number of actions to
prepare its internal state. It also does some checks of the systems state.
-At any time these operations can be performed by running <tt>apt-get chec</>
+At any time these operations can be performed by running <tt>apt-get check</>.
<p>
<example>
# apt-get check
@@ -295,7 +295,7 @@ is run. If some of the package files are not found then they will be ignored
and a warning will be printed when apt-get exits.
<p>
-The final operation performs a detailed analysis of the systems dependencies.
+The final operation performs a detailed analysis of the system's dependencies.
It checks every dependency of every installed or unpacked package and considers
if it is ok. Should this find a problem then a report will be printed out and
<prgn>apt-get</> will refuse to run.
@@ -328,7 +328,7 @@ problem is also included.
<p>
There are two ways a system can get into a broken state like this. The
-first is caused by <prgn>dpkg missing</> some subtle relationships between
+first is caused by <prgn>dpkg</> missing some subtle relationships between
packages when performing upgrades. <footnote>APT however considers all known
dependencies and attempts to prevent broken packages</footnote>. The second is
if a package installation fails during an operation. In this situation a
@@ -337,7 +337,7 @@ package may have been unpacked without its dependents being installed.
<p>
The second situation is much less serious than the first because APT places
certain assurances on the order that packages are installed. In both cases
-supplying the <tt>-f</> option to <prgn>atp-get</> will cause APT to deduce a
+supplying the <tt>-f</> option to <prgn>apt-get</> will cause APT to deduce a
possible solution to the problem and then continue on. The APT <prgn>dselect</>
method always supplies the <tt>-f</> option to allow for easy continuation
of failed maintainer scripts.
@@ -476,7 +476,7 @@ to upgrade, they are similar to the previous examples.
<sect>The Status Display
<p>
During the download of archives and package files APT prints out a series of
-status messages,
+status messages.
<p>
<example>
@@ -499,7 +499,7 @@ inaccuracies.
<p>
The next section of the status line is repeated once for each dowload thread
-and indicates the operation being performed and some usefull information
+and indicates the operation being performed and some useful information
about what is happening. Sometimes this section will simply read <em>Forking</>
which means the OS is loading the download module. The first word after the [
is the fetch number as shown on the history lines. The next word
@@ -511,7 +511,7 @@ Inside of the single quote is an informative string indicating the progress
of the negotiation phase of the download. Typically it progresses from
<em>Connecting</> to <em>Waiting for file</> to <em>Downloading</> or
<em>Resuming</>. The final value is the number of bytes downloaded from the
-remote site. Once the download beings this is represented as <tt>102/10.2k</>
+remote site. Once the download begins this is represented as <tt>102/10.2k</>
indicating that 102 bytes have been fetched and 10.2 kilobytes is expected.
The total size is always shown in 4 figure notation to preserve space. After
the size display is a percent meter for the file itself.
@@ -535,7 +535,7 @@ status display.
<p>
APT uses <prgn>dpkg</> for installing the archives and will switch
over to the <prgn>dpkg</> interface once downloading is completed.
-<prgn>dpkg</> will also as a number of questions as it processes the packages
+<prgn>dpkg</> will also ask a number of questions as it processes the packages
and the packages themselves may also ask several questions. Before each
question there is usually a description of what it is asking and the
questions are too varied to discuss completely here.
diff --git a/doc/libapt-pkg2_to_3.txt b/doc/libapt-pkg2_to_3.txt
new file mode 100644
index 000000000..c1f71f9f2
--- /dev/null
+++ b/doc/libapt-pkg2_to_3.txt
@@ -0,0 +1,89 @@
+libapt-pkg v2 to v3 incorporates several source-incompatible changes that
+people need to be aware of. Many of these changes are done so that most old
+source will continue to function, but perhaps at reduced functionality.
+
+* pkgDepCache is no longer self-initializing; you have to call the Init
+  method separately after constructing it. Users of pkgCacheFile do not
+  need to worry about this.
+* GetCandidateVer/etc is gone from the pkgCache. It exists only in the
+ DepCache and is just an inline around the new Policy class
+* TargetVer/TargetDist have been eliminated. Nothing should have been using
+ these.
+* There is a policy class. The v0 policy engine which has been used since
+  APT 0.0.0 is instantiated by the DepCache by default. However, pkgCacheFile
+  constructs and initializes the new v4 engine. People accessing GetCandidateVer
+  outside of a CacheFile/DepCache will need to instantiate and
+  initialize a policy engine on their own.
+* All byte counters are now doubles to avoid 4G wraparound. The compiler
+  should generate warnings on any incorrect use of these.
+* The PriorityType/CompType/DepType functions have been moved out of the
+  iterators and turned into static functions of pkgCache - inline stubs
+  are left in the iterators.
+* The deb dependency element parser has been made into a static function
+ of the list parser and enhanced to optionally understand architecture
+ restrictions.
+* TagSections no longer include the trailing \n. This means that the
+ Offset/Length of a package record in the version structure also does not
+ include the trailing \n.
+* GenCaches::SelectFile accepts a site parameter now too.
+* Global version compare functions are gone. If you
+  #define APT_COMPATABILITY 1
+  they will come back as they were before. Code should be updated to
+  call the compare functions through the VersioningSystem (VS) referenced
+  by the Cache or _system structures.
+* Initialization is now two stage (define APT_COMPATABILITY..) The first
+  stage, pkgInitConfig, is called before commandline parsing, and
+  pkgInitSystem is called after. This gives the user the opportunity to
+  override default settings from the config files before startup has been
+  finalized (see the sketch after this list).
+* pkgSourceList has been gutted. All the junk that was in there before is
+  cleaned up and put in the pkgIndexFile class. There is very little API
+  correspondence here.
+* pkgMakeStatusCacheMem is gone, pkgMakeStatusCache does the same thing if
+  you set the AllowMem flag. Also, you can get a copy of the map used to
+  store the cache to avoid having to remap it in the calling code. A bunch
+  of other cache related functions are gone, but nobody should have been using
+  them in the first place!
+* Downloading the 'Package' and 'Source' index files is different, use
+ the GetIndexes call in SourceList.
+* SourceRecords::Parser::Source is gone, replaced with Index which does
+ much the same thing.
+* DynamicMMap has changed slightly; nobody should care.
+* pkgMakeOnlyStatusCache exists, which creates a really small cache that
+ only contains the status file, and in memory.
+* The pkgRecords stuff is changed to abstract through the index file list
+ (should be transparent largely)
+* Locking is handled differently, there is no dpkg lock class, the _system
+ class provides Lock/UnLock methods
+* pkgDepCache is no longer a subclass of pkgCache, it aggregates it now. Some
+  compatibility functions are provided that make this transition fairly
+  easy.
+* The following functions have had minor argument changes:
+ - pkgSimulate(pkgDepCache &Cache);
+ + pkgSimulate(pkgDepCache *Cache);
+
+ - pkgProblemResolver(pkgDepCache &Cache);
+ + pkgProblemResolver(pkgDepCache *Cache);
+
+ - pkgDepCache(MMap &Map,Policy *Plcy = 0);
+ + pkgDepCache(pkgCache *Cache,Policy *Plcy = 0);
+
+ - pkgOrderList(pkgDepCache &Cache);
+ + pkgOrderList(pkgDepCache *Cache);
+
+ - pkgPackageManager(pkgDepCache &Cache);
+ + pkgPackageManager(pkgDepCache *Cache);
+
+ - pkgCache(MMap &Map,bool DoMap = true);
+ + pkgCache(MMap *Map,bool DoMap = true);
+
+ - pkgCacheGenerator(DynamicMMap &Map,OpProgress &Progress);
+ + pkgCacheGenerator(DynamicMMap *Map,OpProgress *Progress);
+
+ - pkgTagFile(FileFd &F,unsigned long Size = 32*1024);
+ + pkgTagFile(FileFd *F,unsigned long Size = 32*1024);
+
+* Configuration class is const-correct
+* The legacy ability to create a PkgFileIterator that started at Begin
+ is gone, everyone should be using FileBegin().
+* There is a new dependency relation called obsoletes that is similar to
+  conflicts.
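+
+As a rough, non-authoritative sketch of the new two-stage startup (only
+pkgInitConfig, pkgInitSystem and the usual _config/_system/_error globals are
+taken from the notes above; the command table is trimmed and the error
+handling simplified), a v3 tool now begins roughly like this:
+
+   #include <apt-pkg/init.h>
+   #include <apt-pkg/configuration.h>
+   #include <apt-pkg/pkgsystem.h>
+   #include <apt-pkg/cmndline.h>
+   #include <apt-pkg/error.h>
+
+   int main(int argc, const char *argv[])
+   {
+      CommandLine::Args Args[] = {
+         {'c',"config-file",0,CommandLine::ConfigFile},
+         {'o',"option",0,CommandLine::ArbItem},
+         {0,0,0,0}};
+
+      // Stage 1: read the default configuration and the config files
+      if (pkgInitConfig(*_config) == false)
+         return 100;
+
+      // Command line options can now override the config files before the
+      // packaging system is chosen
+      CommandLine CmdL(Args,_config);
+      if (CmdL.Parse(argc,argv) == false)
+         return 100;
+
+      // Stage 2: pick and initialize the packaging system (_system)
+      if (pkgInitSystem(*_config,_system) == false)
+      {
+         _error->DumpErrors();
+         return 100;
+      }
+
+      return 0;
+   }
+
+The point of the split is that anything set on the command line is already in
+_config by the time pkgInitSystem decides which packaging system to construct.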
diff --git a/doc/makefile b/doc/makefile
index a8f95b70e..45cb7a878 100644
--- a/doc/makefile
+++ b/doc/makefile
@@ -5,16 +5,18 @@ SUBDIR=doc
# Bring in the default rules
include ../buildlib/defaults.mak
-# SGML Documents
-SOURCE = dpkg-tech.sgml design.sgml files.sgml guide.sgml cache.sgml \
- method.sgml offline.sgml
+# Debian Doc SGML Documents
+SOURCE = dpkg-tech.sgml design.sgml files.sgml guide.sgml guide.it.sgml \
+ cache.sgml method.sgml offline.sgml
include $(DEBIANDOC_H)
# Man pages
-SOURCE = apt-cache.8 apt-get.8 apt-cdrom.8 apt.conf.5 sources.list.5 apt-config.8
-include $(YODL_MANPAGE_H)
+SOURCE = apt-cache.8 apt-get.8 apt-cdrom.8 apt.conf.5 sources.list.5 \
+ apt-config.8 apt-sortpkgs.1 apt-ftparchive.1 apt_preferences.5
+INCLUDES = apt.ent
+include $(SGML_MANPAGE_H)
# Examples
-SOURCE = examples/apt.conf examples/sources.list examples/configure-index
+SOURCE = examples/apt.conf examples/sources.list examples/configure-index
TO = $(DOC)
include $(COPY_H)
diff --git a/doc/offline.sgml b/doc/offline.sgml
index 9a664ac3d..0db89f12e 100644
--- a/doc/offline.sgml
+++ b/doc/offline.sgml
@@ -4,7 +4,7 @@
<title>Using APT Offline</title>
<author>Jason Gunthorpe <email>jgg@debian.org</email></author>
-<version>$Id: offline.sgml,v 1.2 2000/02/13 07:20:47 jgg Exp $</version>
+<version>$Id: offline.sgml,v 1.3 2001/02/20 07:03:17 jgg Exp $</version>
<abstract>
This document describes how to use APT in a non-networked environment,
@@ -43,7 +43,7 @@ SuperDisk disc. These discs are not large enough to store the entire Debian
archive but can easily fit a subset large enough for most users. The idea
is to use APT to generate a list of packages that are required and then fetch
them onto the disc using another machine with good connectivity. It is
-even Possible to use another Debian machine with APT or to use a completely
+even possible to use another Debian machine with APT or to use a completely
different OS and a download tool like wget.
<p>
@@ -124,7 +124,7 @@ More details can be seen by examining the apt.conf man page and the sample
configuration file in <em>/usr/doc/apt/examples/apt.conf</em>.
<p>
-On the Debian machine the first thing to do is mount the disc and copy
+On the remote Debian machine the first thing to do is mount the disc and copy
<em>/var/lib/dpkg/status</em> to it. You will also need to create the directories
outlined in the Overview, <em>archives/partial/</em> and <em>lists/partial/</em>
Then take the disc to the remote machine and configure the sources.list.
@@ -139,8 +139,9 @@ On the remote machine execute the following:
</example>
The dist-upgrade command can be replaced with any-other standard APT commands,
-you can even use an APT front end such as <em>gnome-apt</em> [still in
-development].
+particularly dselect-upgrade. You can even use an APT front end such as
+<em>dselect</em>. However, this presents a problem in communicating your
+selections back to the local computer.
<p>
Now the disc contains all of the index files and archives needed to upgrade
@@ -158,6 +159,13 @@ the Debian machine. Take the disc back and run:
It is necessary for proper function to re-specify the status file to be the
local one. This is very important!
+<p>
+If you are using dselect you can do the very risky operation of copying
+disc/status to /var/lib/dpkg/status so that any selections you made on the
+remote machine are updated. I highly recommend that people only make selections
+on the local machine - but this may not always be possible. DO NOT copy
+the status file if dpkg or APT have been run in the mean time!!
+
</sect>
<!-- }}} -->
@@ -193,6 +201,10 @@ merely use the standard APT commands to generate the file list.
# awk '{print "wget -O " $2 " " $1}' < uris > /disc/wget-script
</example>
+Any command other than dist-upgrade could be used here, including
+dselect-upgrade.
+
+<p>
The /disc/wget-script file will now contain a list of wget commands to execute
in order to fetch the necessary archives. This script should be run with the
current directory as the disc's mount point so as to save the output on the
diff --git a/doc/sources.list.5.sgml b/doc/sources.list.5.sgml
new file mode 100644
index 000000000..d630e12fd
--- /dev/null
+++ b/doc/sources.list.5.sgml
@@ -0,0 +1,199 @@
+<!-- -*- mode: sgml; mode: fold -*- -->
+<!doctype refentry PUBLIC "-//OASIS//DTD DocBook V3.1//EN" [
+
+<!ENTITY % aptent SYSTEM "apt.ent">
+%aptent;
+
+]>
+
+<refentry>
+ &apt-docinfo;
+
+ <refmeta>
+ <refentrytitle>sources.list</>
+ <manvolnum>5</>
+ </refmeta>
+
+ <!-- Man page title -->
+ <refnamediv>
+ <refname>sources.list</>
+ <refpurpose>Package resource list for APT</>
+ </refnamediv>
+
+ <RefSect1><Title>Description</>
+ <para>
+ The package resource list is used to locate archives of the package
+ distribution system in use on the system. At this time, this manual page
+ documents only the packaging system used by the Debian GNU/Linux system.
+ This control file is located in <filename>/etc/apt/sources.list</>.
+ <para>
+ The source list is designed to support any number of active sources and a
+ variety of source media. The file lists one source per line, with the
+ most preferred source listed first. The format of each line is:
+ <literal/type uri args/. The first item, <literal/type/, determines the
+ format for <literal/args/. <literal/uri/ is a Universal Resource Identifier
+ (URI), which is a superset of the more specific and well-known Universal
+ Resource Locator, or URL. The rest of the line can be marked as a comment
+ by using a #.
+ </RefSect1>
+
+ <RefSect1><Title>The deb and deb-src types</>
+ <para>
+ The <literal/deb/ type describes a typical two-level Debian archive,
+ <filename>distribution/component</>. Typically, <literal/distribution/ is
+ one of <literal/stable/, <literal/unstable/, or
+ <literal/frozen/, while component is one of <literal/main/,
+ <literal/contrib/, <literal/non-free/, or <literal/non-us/. The
+ <literal/deb-src/ type describes a debian distribution's source code in
+ the same form as the <literal/deb/ type. A <literal/deb-src/ line is
+ required to fetch source indexes.
+ <para>
+ The format for a <filename/sources.list/ entry using the <literal/deb/
+ and <literal/deb-src/ types is:
+ <literallayout>deb uri distribution [component1] [component2] [...]</literallayout>
+ <para>
+ The URI for the <literal/deb/ type must specify the base of the Debian
+ distribution, from which APT will find the information it needs.
+ <literal/distribution/ can specify an exact path, in which case the
+ components must be omitted and <literal/distribution/ must end with a
+ slash (/). This is useful for when only a particular sub-section of the
+ archive denoted by the URI is of interest. If <literal/distribution/ does
+ not specify an exact path, at least one <literal/component/ must be present.
+ <para>
+ <literal/distribution/ may also contain a variable, <literal/$(ARCH)/,
+ which expands to the Debian architecture (i386, m68k, powerpc, ...)
+ used on the system. This permits architecture-independent
+ <filename/sources.list/ files to be used. In general this is only of
+ interest when specifying an exact path; otherwise <literal/APT/ will
+ automatically generate a URI with the current architecture.
+ <para>
+ Since only one distribution can be specified per line it may be necessary
+ to have multiple lines for the same URI, if a subset of all available
+ distributions or components at that location is desired.
+ APT will sort the URI list after it has generated a complete set
+ internally, and will collapse multiple references to the same Internet
+ host, for instance, into a single connection, so that it does not
+ inefficiently establish an FTP connection, close it, do something else,
+ and then re-establish a connection to that same host. This feature is
+ useful for accessing busy FTP sites with limits on the number of
+ simultaneous anonymous users. APT also parallelizes connections to
+ different hosts to more effectively deal with sites with low bandwidth.
+ <para>
+ It is important to list sources in order of preference, with the most
+ preferred source listed first. Typically this will result in sorting
+ by speed from fastest to slowest (CD-ROM followed by hosts on a local
+ network, followed by distant Internet hosts, for example).
+ <para>
+ Some examples:
+ <literallayout>
+deb http://http.us.debian.org/debian stable main contrib non-free
+deb http://http.us.debian.org/debian dists/stable-updates/
+ </literallayout>
+ </RefSect1>
+
+ <RefSect1><title>URI specification</title>
+ <para>
+ The currently recognized URI types are cdrom, file, http, and ftp.
+ <VariableList>
+ <VarListEntry><term>file</term>
+ <ListItem><Para>
+ The file scheme allows an arbitrary directory in the file system to be
+ considered an archive. This is useful for NFS mounts and local mirrors or
+ archives.
+ </VarListEntry>
+
+ <VarListEntry><term>cdrom</term>
+ <ListItem><Para>
+ The cdrom scheme allows APT to use a local CDROM drive with media
+ swapping. Use the &apt-cdrom; program to create cdrom entries in the
+ source list.
+ </VarListEntry>
+
+ <VarListEntry><term>http</term>
+ <ListItem><Para>
+ The http scheme specifies an HTTP server for the archive. If an environment
+ variable <EnVar/http_proxy/ is set with the format
+ http://server:port/, the proxy server specified in
+ <EnVar/http_proxy/ will be used. Users of authenticated HTTP/1.1 proxies
+ may use a string of the format http://user:pass@server:port/.
+ Note that this is an insecure method of authentication.
+ </VarListEntry>
+
+ <VarListEntry><term>ftp</term>
+ <ListItem><Para>
+ The ftp scheme specifies an FTP server for the archive. APT's FTP behavior
+ is highly configurable; for more information see the
+ &apt-conf; manual page. Please note that an ftp proxy can be specified
+ by using the <EnVar/ftp_proxy/ environment variable. It is possible to
+ specify an http proxy (http proxy servers often understand ftp URLs) using
+ this method and ONLY this method. FTP proxies using http specified in the
+ configuration file will be ignored.
+ </VarListEntry>
+
+ <VarListEntry><term>copy</term>
+ <ListItem><Para>
+ The copy scheme is identical to the file scheme except that packages are
+ copied into the cache directory instead of used directly at their location.
+ This is useful for people using a zip disk to copy files around with APT.
+ </VarListEntry>
+
+ <VarListEntry><term>rsh</term><term>ssh</term>
+ <ListItem><Para>
+ The rsh/ssh method invokes rsh/ssh to connect to a remote host
+ as a given user and access the files. No password authentication is
+ possible; prior arrangements with RSA keys or rhosts must have been made.
+ Access to files on the remote host uses the standard <command/find/ and <command/dd/
+ commands to perform the file transfers.
+ </VarListEntry>
+ </VariableList>
+ </RefSect1>
+
+ <RefSect1><title>Examples</title>
+ <para>
+ Uses the archive stored locally (or NFS mounted) at /home/jason/debian
+ for stable/main, stable/contrib, and stable/non-free.
+ <literallayout>deb file:/home/jason/debian stable main contrib non-free</literallayout>
+ <para>
+ As above, except this uses the unstable (development) distribution.
+ <literallayout>deb file:/home/jason/debian unstable main contrib non-free</literallayout>
+ <para>
+ Source line for the above
+ <literallayout>deb-src file:/home/jason/debian unstable main contrib non-free</literallayout>
+ <para>
+ Uses HTTP to access the archive at archive.debian.org, and uses only the
+ hamm/main area.
+ <literallayout>deb http://archive.debian.org/debian-archive hamm main</literallayout>
+ <para>
+ Uses FTP to access the archive at ftp.debian.org, under the debian
+ directory, and uses only the stable/contrib area.
+ <literallayout>deb ftp://ftp.debian.org/debian stable contrib</literallayout>
+ <para>
+ Uses FTP to access the archive at ftp.debian.org, under the debian
+ directory, and uses only the unstable/contrib area. If this line appears as
+ well as the one in the previous example in <filename/sources.list/,
+ a single FTP session will be used for both resource lines.
+ <literallayout>deb ftp://ftp.debian.org/debian unstable contrib</literallayout>
+ <para>
+ Uses HTTP to access the archive at nonus.debian.org, under the debian-non-US
+ directory.
+ <literallayout>deb http://nonus.debian.org/debian-non-US stable/non-US main contrib non-free</literallayout>
+ <para>
+ Uses HTTP to access the archive at nonus.debian.org, under the
+ debian-non-US directory, and uses only files found under
+ <filename>unstable/binary-i386</> on i386 machines,
+ <filename>unstable/binary-m68k</> on m68k, and so
+ forth for other supported architectures. [Note this example only
+ illustrates how to use the substitution variable; non-us is no longer
+ structured like this]
+ <literallayout>deb http://ftp.de.debian.org/debian-non-US unstable/binary-$(ARCH)/</literallayout>
+ </RefSect1>
+
+ <RefSect1><Title>See Also</>
+ <para>
+ &apt-cache; &apt-conf;
+ </RefSect1>
+
+ &manbugs;
+ &manauthor;
+
+</refentry>
diff --git a/doc/sources.list.5.yo b/doc/sources.list.5.yo
deleted file mode 100644
index 2a501f776..000000000
--- a/doc/sources.list.5.yo
+++ /dev/null
@@ -1,148 +0,0 @@
-mailto(apt@packages.debian.org)
-manpage(sources.list)(5)(5 Dec 1998)(apt)()
-manpagename(sources.list)(package resource list for APT)
-
-manpagedescription()
-The package resource list is used to locate archives of the package
-distribution system in use on the system. At this time, this manual page
-documents only the packaging system used by the Debian GNU/Linux system.
-
-The source list is designed to support any number of active sources and a
-variety of source media. The file lists one source per line, with the
-most preferred source listed first. The format of each line is:
-em(type uri args) The first item, em(type), determines the format for
-em(args). em(uri) is a Universal Resource Identifier (URI), which is a
-superset of the more specific and well-known Universal Resource Locator, or
-URL.
-
-manpagesection(The deb and deb-src types)
-The bf(deb) type describes a typical two-level Debian archive,
-em(distribution/component). Typically, em(distribution) is one of
-em(stable), em(unstable), or em(frozen), while component is one of
-em(main), em(contrib), em(non-free), or em(non-us). The bf(deb-src) type
-describes a debian distribution's source code in the same form as the bf(deb)
-type. A bf(deb-src) line is required to fetch source indexes.
-The format for a bf(sources.list) entry using the em(deb) and em(deb-src)
-types are:
-verb(deb uri distribution [component1] [componenent2] [...])
-The URI for the em(deb) type must specify the base of the Debian distribution,
-from which bf(APT) will find the information it needs. em(distribution)
-can specify an exact path, in which case the em(component)s
-must be omitted and bf(distribution) must end with a slash (/). This is
-useful for when only a particular sub-section of the archive denoted by the
-URI is of interest. If bf(distribution) does not specify an exact path, at
-least one bf(component) must be present.
-
-bf(distribution) may also contain a variable, bf($(ARCH)),
-which expands to the Debian architecture (i386, m68k, powerpc, ...)
-used on the system. This permits archiecture-independent
-bf(sources.list) files to be used. In general this is only of interest
-when specifying an exact path, bf(APT) will automatically generate a URI
-with the current architecture otherwise.
-
-Since only one distribution can be specified per line it may be necessary
-to have multiple lines for the same URI, if a subset of all available
-distributions or components at that location is desired.
-bf(APT) will sort the URI list after it has generated a complete set
-internally, and will collapse multiple references to the same Internet host,
-for instance, into a single connection, so that it does not inefficiently
-establish an FTP connection, close it, do something else, and then
-re-establish a connection to that same host. This feature is useful
-for accessing busy FTP sites with limits on the number of simultaneous
-anonymous users. bf(APT) also parallizes connections to different hosts
-to more effectively deal with sites with low bandwidth.
-
-It is important to list sources in order of preference, with the most
-preferred source listed first. Typically this will result in sorting
-by speed from fastest to slowest (CD-ROM followed by hosts on a local
-network, followed by distant Internet hosts, for example).
-
-Some examples:
-verb(deb http://http.us.debian.org/debian stable main contrib non-free)
-verb(deb http://http.us.debian.org/debian dists/stable-updates)
-
-manpagesection(URI specification)
-The currently recognized URI types are cdrom, file, http, and ftp.
-
-startdit()
-dit(bf(file))
-The file scheme allows an arbitrary directory in the file system to be
-considered an archive. This is useful for NFS mounts and local mirrors or
-archives.
-
-dit(bf(cdrom))
-The cdrom scheme allows bf(APT) to use a local CDROM drive with media
-swapping. Use the bf(apt-cdrom(8)) program to create cdrom entires in the
-source list.
-
-dit(bf(http))
-The http scheme specifies an HTTP server for the archive. If an environment
-variable bf($http_proxy) is set with the format
-bf(http://server:port/), the proxy server specified in
-bf($http_proxy) will be used. Users of authenticated HTTP/1.1 proxies may
-use a string of the format bf(http://user:pass@server:port/)
-Note that this is an insecure method of authentication.
-
-dit(bf(ftp))
-The ftp scheme specifies an FTP server for the archive. APT's FTP behavior
-is highly configurable; for more information see the
-bf(apt.conf(5)) manual page. Please note that a ftp proxy can be specified
-by using the ftp_proxy environment variable. It is possible to specify a http
-proxy (http proxy servers often understand ftp urls) using this method and
-ONLY this method. ftp proxies using http specified in the configuration
-file will be ignored.
-
-dit(bf(copy))
-The copy scheme is identical to the file scheme except that packages are
-copied into the cache directory instead of used directly at their location.
-This is usefull for people using a zip disk to copy files around with APT.
-
-enddit()
-
-manpagesection(EXAMPLES)
-Uses the archive stored locally (or NFS mounted) at /home/jason/debian
-for stable/main, stable/contrib, and stable/non-free.
-quote("deb file:/home/jason/debian stable main contrib non-free")
-
-As above, except this uses the unstable (development) distribution.
-quote("deb file:/home/jason/debian unstable main contrib non-free")
-
-Source line for the above
-quote("deb-src file:/home/jason/debian unstable main contrib non-free")
-
-Uses HTTP to access the archive at archive.debian.org, and uses only the
-hamm/main area.
-quote("deb http://archive.debian.org/debian-archive hamm main")
-
-Uses FTP to access the archive at ftp.debian.org, under the debian
-directory, and uses only the stable/contrib area.
-quote("deb ftp://ftp.debian.org/debian stable contrib")
-
-Uses FTP to access the archive at ftp.debian.org, under the debian
-directory, and uses only the unstable/contrib area. If this line appears as
-well as the one in the previous example in bf(sources.list),
-a single FTP session will be used for both resource lines.
-quote("deb ftp://ftp.debian.org/debian unstable contrib")
-
-Uses HTTP to access the archive at nonus.debian.org, under the debian-non-US
-directory.
-quote("deb http://nonus.debian.org/debian-non-US stable/non-US main contrib non-free")
-
-Uses HTTP to access the archive at nonus.debian.org, under the
-debian-non-US directory, and uses only files found under
-unstable/binary-i386 on i386 machines, unstable/binary-m68k on m68k, and so
-forth for other supported architectures. [Note this example only illistrates
-how to use the substitation variable non-us is no longer structured like this]
-quote("deb http://ftp.de.debian.org/debian-non-US unstable/binary-$(ARCH)/")
-
-manpageseealso()
-apt-cache (8),
-apt.conf (5)
-
-manpagebugs()
-See http://bugs.debian.org/apt. If you wish to report a
-bug in bf(apt-get), please see bf(/usr/doc/debian/bug-reporting.txt)
-or the bf(bug(1)) command.
-
-manpageauthor()
-apt-get was written by the APT team <apt@packages.debian.org>.
diff --git a/doc/style.txt b/doc/style.txt
new file mode 100644
index 000000000..8d0778b4a
--- /dev/null
+++ b/doc/style.txt
@@ -0,0 +1,75 @@
+Acronyms
+~~~~~~~~
+* dpkg is a 'word' the first d may be upper case - Dpkg
+* APT is a proper Acronym, all upper case please.
+
+Pkg - A Package
+Ver - A version
+
+Indenting, Comments, Etc
+~~~~~~~~~~~~~~~~~~~~~~~~
+Would make Linus cry :P However, it is what I prefer: 3 space indent,
+8 space tab, all braces on separate lines, function return type on the same
+line as the function, cases aligned with their code. The 'indent' options for
+this style are:
+ indent -bl -bli0 -di1 -i3 -nsc -ts8 -npcs -npsl
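+
+A small, purely illustrative fragment in this style (pkgExample, DoFirst and
+DoRest are made-up names):
+
+   bool pkgExample::Run(int Count)
+   {
+      for (int I = 0; I != Count; I++)
+      {
+         switch (I)
+         {
+            case 0:
+            DoFirst();
+            break;
+
+            default:
+            DoRest(I);
+            break;
+         }
+      }
+      return true;
+   }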
+
+Each file gets a block at the top that should describe what the file does,
+basically a summary of purpose along with any special notes and
+attributions. The }}} and {{{ are folding marks if you have a folding
+editor such as jed; the function separators are intended to give
+a visual separation between functions for easier browsing of the larger files,
+or indexed folding if you have such an editor.
+
+Each file should have 1 or 0 primary include files; that include
+ file must always be the first include file included by the .cc. G++
+#pragma interface/implementation is used, as well as anti-include-twice
+#ifdefs.
+
+Include files, since there are so many, get their own subdirectory off
+the include search path; this is used consistently throughout all the code.
+#include "" should never be used for a global exported header file, only
+local ones.
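+
+A minimal, hypothetical header following these conventions (the class and the
+apt-pkg/example.h path are invented for illustration; the matching .cc would
+include this header first and use #pragma implementation):
+
+   // -*- mode: cpp; mode: fold -*-
+   #ifndef PKGLIB_EXAMPLE_H
+   #define PKGLIB_EXAMPLE_H
+
+   #ifdef __GNUG__
+   #pragma interface "apt-pkg/example.h"
+   #endif
+
+   class pkgExample
+   {
+      public:
+
+      bool Run(int Count);
+   };
+
+   #endif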
+
+C++ Features
+~~~~~~~~~~~~
+Due to the legacy compiler heritage, exceptions, RTTI and namespaces are
+not used. Templates are used *sparingly* since G++ has traditionally had
+very weak support for them; this includes STL templates.
+
+Namespaces will probably be put in the code sometime after G++ 3, which will
+be another huge re-org to restore sanity; the majority of all nested things
+will go away.
+
+The C++ standard library's non-parameterized types (string is included in
+this) are used freely when appropriate.
+
+The new C++ #include <iostream> (note the lack of a .h) is used for the
+standard library, but not for my code.
+
+Arguments and Ownership
+~~~~~~~~~~~~~~~~~~~~~~~
+[much of the code follows this now]
+These guidelines should be followed except in two cases: the first
+is where it makes no sense, such as in a casting operator, and the second is to
+retain API compatibility (this should be rare, since a change in the input
+almost always designates a change in ownership rules).
+
+ * Pass by value or pass by reference should borrow the object from the
+ caller
+ * Pass by non-const reference may be used to indicate an OUT type variable
+ * Pass by pointer (except in the case where the pointer is really an array)
+ should be used when the object will be retained or ownership will be
+ transferred. Ownership transference should be rare and noted by a comment.
+ * Standard C things (FILE * etc) should be left as is.
+
+ * Return by reference should indicate a borrowed object
+ * Return by pointer (except arrays) should indicate ownership is
+ transferred. Return by pointer should not be used unless ownership is
+ transferred.
+ * Return by pointer to variable indicates ownership transfer unless the
+ pointer is an 'input' parameter (designated generally by an =0,
+ indicating a default of 'none')
+
+Non-ownership-transferring arrays/lists should probably return an iterator
+typedef or references.
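+
+A short, purely hypothetical sketch of these rules (CheckCache, LookupName,
+pkgExampleParser and CreateParser are invented names; pkgCache and FileFd are
+existing types):
+
+   // Pass by reference: Cache is only borrowed from the caller
+   bool CheckCache(pkgCache &Cache);
+
+   // Non-const reference used as an OUT variable: Result is filled in
+   bool LookupName(unsigned long ID, string &Result);
+
+   // File is passed by pointer because the parser retains it; the returned
+   // parser is handed to the caller, who now owns it and must delete it
+   pkgExampleParser *CreateParser(FileFd *File);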
diff --git a/dselect/install b/dselect/install
index 8ac3523bd..f93414dc4 100755
--- a/dselect/install
+++ b/dselect/install
@@ -5,11 +5,15 @@ CLEAN="prompt"
OPTS="-f"
APTGET="/usr/bin/apt-get"
DPKG="/usr/bin/dpkg"
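+# $1 is the dpkg database directory handed over by dselect; point dpkg and
+# apt-get at it instead of the default /var/lib/dpkg.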
+DPKG_OPTS="--admindir=$1"
+APT_OPT0="-oDir::State::status=$1/status"
+APT_OPT1="-oDPkg::Options::=$DPKG_OPTS"
set -e
RES=`apt-config shell CLEAN DSelect::Clean OPTS DSelect::Options \
- DPKG Dir::Bin::dpkg APTGET Dir::Bin::apt-get \
- ARCHIVES Dir::Cache::Archives/ \
- WAIT DSelect::WaitAfterDownload`
+ DPKG Dir::Bin::dpkg/f APTGET Dir::Bin::apt-get/f \
+ ARCHIVES Dir::Cache::Archives/d \
+ WAIT DSelect::WaitAfterDownload/b \
+ CHECKDIR DSelect::CheckDir/b`
eval $RES
set +e
@@ -39,14 +43,13 @@ yesno() {
echo $ans | tr YN yn
}
-OLDLS=`ls -ld $ARCHIVES`
-if [ x$WAIT = "xyes" ]; then
- $APTGET $OPTS -d dselect-upgrade
+if [ x$WAIT = "xtrue" ]; then
+ $APTGET $OPTS "$APT_OPT0" "$APT_OPT1" -d dselect-upgrade
echo "Press enter to continue." && read RES
- $APTGET $OPTS dselect-upgrade
+ $APTGET $OPTS "$APT_OPT0" "$APT_OPT1" dselect-upgrade
RES=$?
else
- $APTGET $OPTS dselect-upgrade
+ $APTGET $OPTS "$APT_OPT0" "$APT_OPT1" dselect-upgrade
RES=$?
fi
@@ -64,22 +67,27 @@ if [ $RES -eq 0 ]; then
fi
NEWLS=`ls -ld $ARCHIVES`
- if [ "x$OLDLS" = "x$NEWLS" ]; then
- exit 0
+ if [ x$CHECKDIR = "xtrue" ]; then
+ if [ "x$OLDLS" = "x$NEWLS" ]; then
+ exit 0
+ fi
fi
# Check the cleaning mode
case `echo $CLEAN | tr '[:upper:]' '[:lower:]'` in
auto)
- $APTGET autoclean && echo "Press enter to continue." && read RES && exit 0;
+ $APTGET "$APT_OPT0" "$APT_OPT1" autoclean &&
+ echo "Press enter to continue." && read RES && exit 0;
;;
always)
- $APTGET clean && echo "Press enter to continue." && read RES && exit 0;
+ $APTGET "$APT_OPT0" "$APT_OPT1" clean &&
+ echo "Press enter to continue." && read RES && exit 0;
;;
prompt)
exec 3>&1
- if [ `yesno "Do you want to erase the downloaded .deb files?" y` = y ]; then
- $APTGET clean && echo "Press enter to continue." && read RES && exit 0;
+ if [ `yesno "Do you want to erase any previously downloaded .deb files?" y` = y ]; then
+ $APTGET "$APT_OPT0" "$APT_OPT1" clean &&
+ echo "Press enter to continue." && read RES && exit 0;
fi
;;
*)
@@ -91,7 +99,7 @@ else
echo "or errors caused by missing dependencies. This is OK, only the errors"
echo "above this message are important. Please fix them and run [I]nstall again"
echo "Press enter to continue."
- read RES && $DPKG --configure -a
+ read RES && $DPKG "$DPKG_OPTS" --configure -a
exit 100
fi
diff --git a/dselect/update b/dselect/update
index 9195912ea..a61086d79 100755
--- a/dselect/update
+++ b/dselect/update
@@ -6,12 +6,15 @@ OPTS="-f"
APTGET="/usr/bin/apt-get"
APTCACHE="/usr/bin/apt-cache"
DPKG="/usr/bin/dpkg"
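+# As in the install script, $1 is the dpkg database directory supplied by
+# dselect; route it to both dpkg and apt-get.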
+DPKG_OPTS="--admindir=$1"
+APT_OPT0="-oDir::State::status=$1/status"
+APT_OPT1="-oDPkg::Options::=$DPKG_OPTS"
CACHEDIR="/var/cache/apt"
-PROMPT="no"
+PROMPT="false"
RES=`apt-config shell OPTS DSelect::UpdateOptions \
- DPKG Dir::Bin::dpkg APTGET Dir::Bin::apt-get \
- APTCACHE Dir::Bin::apt-cache CACHEDIR Dir::Cache \
- PROMPT DSelect::PromptAfterUpdate`
+ DPKG Dir::Bin::dpkg/f APTGET Dir::Bin::apt-get/f \
+ APTCACHE Dir::Bin::apt-cache/f CACHEDIR Dir::Cache/d \
+ PROMPT DSelect::PromptAfterUpdate/b`
eval $RES
# It looks slightly ugly to have a double / in the dpkg output
@@ -19,16 +22,16 @@ CACHEDIR=`echo $CACHEDIR | sed -e "s|/$||"`
set +e
FAILED=0
-$APTGET $OPTS update || FAILED=1
+$APTGET $OPTS "$APT_OPT0" "$APT_OPT1" update || FAILED=1
set -e
echo "Merging Available information"
rm -f $CACHEDIR/available
$APTCACHE dumpavail > $CACHEDIR/available
-$DPKG --update-avail $CACHEDIR/available
+$DPKG "$DPKG_OPTS" --update-avail $CACHEDIR/available
rm -f $CACHEDIR/available
-if [ $PROMPT = "yes" ]; then
+if [ x$PROMPT = "xtrue" ]; then
echo "Press enter to continue." && read RES;
fi
diff --git a/ftparchive/apt-ftparchive.cc b/ftparchive/apt-ftparchive.cc
new file mode 100644
index 000000000..055d876d0
--- /dev/null
+++ b/ftparchive/apt-ftparchive.cc
@@ -0,0 +1,919 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: apt-ftparchive.cc,v 1.2 2001/02/20 07:03:18 jgg Exp $
+/* ######################################################################
+
+ apt-ftparchive - Efficient work-alike for dpkg-scanpackages
+
+ Let contents be disabled from the conf
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#ifdef __GNUG__
+#pragma implementation "apt-ftparchive.h"
+#endif
+
+#include "apt-ftparchive.h"
+
+#include <apt-pkg/error.h>
+#include <apt-pkg/configuration.h>
+#include <apt-pkg/cmndline.h>
+#include <apt-pkg/strutl.h>
+#include <config.h>
+#include <apti18n.h>
+#include <algorithm>
+
+#include <sys/time.h>
+#include <regex.h>
+
+#include "contents.h"
+#include "multicompress.h"
+#include "writer.h"
+ /*}}}*/
+
+ostream c0out;
+ostream c1out;
+ostream c2out;
+ofstream devnull("/dev/null");
+unsigned Quiet = 0;
+
+// struct PackageMap - List of all package files in the config file /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+struct PackageMap
+{
+ // General Stuff
+ string BaseDir;
+ string InternalPrefix;
+ string FLFile;
+ string PkgExt;
+ string SrcExt;
+
+ // Stuff for the Package File
+ string PkgFile;
+ string BinCacheDB;
+ string BinOverride;
+
+ // Stuff for the Source File
+ string SrcFile;
+ string SrcOverride;
+
+ // Contents
+ string Contents;
+ string ContentsHead;
+
+ // Random things
+ string Tag;
+ string PkgCompress;
+ string CntCompress;
+ string SrcCompress;
+ string PathPrefix;
+ unsigned int DeLinkLimit;
+ mode_t Permissions;
+
+ bool ContentsDone;
+ bool PkgDone;
+ bool SrcDone;
+ time_t ContentsMTime;
+
+ struct ContentsCompare : public binary_function<PackageMap,PackageMap,bool>
+ {
+ inline bool operator() (const PackageMap &x,const PackageMap &y)
+ {return x.ContentsMTime < y.ContentsMTime;};
+ };
+
+ struct DBCompare : public binary_function<PackageMap,PackageMap,bool>
+ {
+ inline bool operator() (const PackageMap &x,const PackageMap &y)
+ {return x.BinCacheDB < y.BinCacheDB;};
+ };
+
+ void GetGeneral(Configuration &Setup,Configuration &Block);
+ bool GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats);
+ bool GenSources(Configuration &Setup,struct CacheDB::Stats &Stats);
+ bool GenContents(Configuration &Setup,
+ PackageMap *Begin,PackageMap *End,
+ unsigned long &Left);
+
+ PackageMap() : DeLinkLimit(0), Permissions(1), ContentsDone(false),
+ PkgDone(false), SrcDone(false), ContentsMTime(0) {};
+};
+ /*}}}*/
+
+// PackageMap::GetGeneral - Common per-section definitions /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+void PackageMap::GetGeneral(Configuration &Setup,Configuration &Block)
+{
+ PathPrefix = Block.Find("PathPrefix");
+
+ if (Block.FindB("External-Links",true) == false)
+ DeLinkLimit = Setup.FindI("Default::DeLinkLimit",UINT_MAX);
+ else
+ DeLinkLimit = 0;
+
+ PkgCompress = Block.Find("Packages::Compress",
+ Setup.Find("Default::Packages::Compress",". gzip").c_str());
+ CntCompress = Block.Find("Contents::Compress",
+ Setup.Find("Default::Contents::Compress",". gzip").c_str());
+ SrcCompress = Block.Find("Sources::Compress",
+ Setup.Find("Default::Sources::Compress",". gzip").c_str());
+
+ SrcExt = Block.Find("Sources::Extensions",
+ Setup.Find("Default::Sources::Extensions",".dsc").c_str());
+ PkgExt = Block.Find("Packages::Extensions",
+ Setup.Find("Default::Packages::Extensions",".deb").c_str());
+
+ Permissions = Setup.FindI("Default::FileMode",0644);
+
+ if (FLFile.empty() == false)
+ FLFile = flCombine(Setup.Find("Dir::FileListDir"),FLFile);
+
+ if (Contents == " ")
+ Contents= string();
+}
+ /*}}}*/
+// PackageMap::GenPackages - Actually generate a Package file /*{{{*/
+// ---------------------------------------------------------------------
+/* This generates the Package File described by this object. */
+bool PackageMap::GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats)
+{
+ if (PkgFile.empty() == true)
+ return true;
+
+ string ArchiveDir = Setup.FindDir("Dir::ArchiveDir");
+ string OverrideDir = Setup.FindDir("Dir::OverrideDir");
+ string CacheDir = Setup.FindDir("Dir::CacheDir");
+
+ struct timeval StartTime;
+ gettimeofday(&StartTime,0);
+
+ PkgDone = true;
+
+ // Create a package writer object.
+ PackagesWriter Packages(flCombine(CacheDir,BinCacheDB),
+ flCombine(OverrideDir,BinOverride));
+ if (PkgExt.empty() == false && Packages.SetExts(PkgExt) == false)
+ return _error->Error("Package extension list is too long");
+ if (_error->PendingError() == true)
+ return _error->Error("Error Processing directory %s",BaseDir.c_str());
+
+ Packages.PathPrefix = PathPrefix;
+ Packages.DirStrip = ArchiveDir;
+ Packages.InternalPrefix = flCombine(ArchiveDir,InternalPrefix);
+
+ Packages.Stats.DeLinkBytes = Stats.DeLinkBytes;
+ Packages.DeLinkLimit = DeLinkLimit;
+
+ // Create a compressor object
+ MultiCompress Comp(flCombine(ArchiveDir,PkgFile),
+ PkgCompress,Permissions);
+ Packages.Output = Comp.Input;
+ if (_error->PendingError() == true)
+ return _error->Error("Error Processing directory %s",BaseDir.c_str());
+
+ c0out << ' ' << BaseDir << ":" << flush;
+
+ // Do recursive directory searching
+ if (FLFile.empty() == true)
+ {
+ if (Packages.RecursiveScan(flCombine(ArchiveDir,BaseDir)) == false)
+ return false;
+ }
+ else
+ {
+ if (Packages.LoadFileList(ArchiveDir,FLFile) == false)
+ return false;
+ }
+
+ Packages.Output = 0; // Just in case
+
+ // Finish compressing
+ unsigned long Size;
+ if (Comp.Finalize(Size) == false)
+ {
+ c0out << endl;
+ return _error->Error("Error Processing directory %s",BaseDir.c_str());
+ }
+
+ if (Size != 0)
+ c0out << " New "
+ << SizeToStr(Size) << "B ";
+ else
+ c0out << ' ';
+
+ struct timeval NewTime;
+ gettimeofday(&NewTime,0);
+ double Delta = NewTime.tv_sec - StartTime.tv_sec +
+ (NewTime.tv_usec - StartTime.tv_usec)/1000000.0;
+
+ c0out << Packages.Stats.Packages << " files " <<
+/* SizeToStr(Packages.Stats.MD5Bytes) << "B/" << */
+ SizeToStr(Packages.Stats.Bytes) << "B " <<
+ TimeToStr((long)Delta) << endl;
+
+ Stats.Add(Packages.Stats);
+ Stats.DeLinkBytes = Packages.Stats.DeLinkBytes;
+
+ return !_error->PendingError();
+}
+ /*}}}*/
+// PackageMap::GenSources - Actually generate a Package file /*{{{*/
+// ---------------------------------------------------------------------
+/* This generates the Sources File described by this object. */
+bool PackageMap::GenSources(Configuration &Setup,struct CacheDB::Stats &Stats)
+{
+ if (SrcFile.empty() == true)
+ return true;
+
+ string ArchiveDir = Setup.FindDir("Dir::ArchiveDir");
+ string OverrideDir = Setup.FindDir("Dir::OverrideDir");
+ string CacheDir = Setup.FindDir("Dir::CacheDir");
+
+ struct timeval StartTime;
+ gettimeofday(&StartTime,0);
+
+ SrcDone = true;
+
+ // Create a package writer object.
+ SourcesWriter Sources(flCombine(OverrideDir,BinOverride),
+ flCombine(OverrideDir,SrcOverride));
+ if (SrcExt.empty() == false && Sources.SetExts(SrcExt) == false)
+ return _error->Error("Source extension list is too long");
+ if (_error->PendingError() == true)
+ return _error->Error("Error Processing directory %s",BaseDir.c_str());
+
+ Sources.PathPrefix = PathPrefix;
+ Sources.DirStrip = ArchiveDir;
+ Sources.InternalPrefix = flCombine(ArchiveDir,InternalPrefix);
+
+ Sources.DeLinkLimit = DeLinkLimit;
+ Sources.Stats.DeLinkBytes = Stats.DeLinkBytes;
+
+ // Create a compressor object
+ MultiCompress Comp(flCombine(ArchiveDir,SrcFile),
+ SrcCompress,Permissions);
+ Sources.Output = Comp.Input;
+ if (_error->PendingError() == true)
+ return _error->Error("Error Processing directory %s",BaseDir.c_str());
+
+ c0out << ' ' << BaseDir << ":" << flush;
+
+ // Do recursive directory searching
+ if (FLFile.empty() == true)
+ {
+ if (Sources.RecursiveScan(flCombine(ArchiveDir,BaseDir))== false)
+ return false;
+ }
+ else
+ {
+ if (Sources.LoadFileList(ArchiveDir,FLFile) == false)
+ return false;
+ }
+ Sources.Output = 0; // Just in case
+
+ // Finish compressing
+ unsigned long Size;
+ if (Comp.Finalize(Size) == false)
+ {
+ c0out << endl;
+ return _error->Error("Error Processing directory %s",BaseDir.c_str());
+ }
+
+ if (Size != 0)
+ c0out << " New "
+ << SizeToStr(Size) << "B ";
+ else
+ c0out << ' ';
+
+ struct timeval NewTime;
+ gettimeofday(&NewTime,0);
+ double Delta = NewTime.tv_sec - StartTime.tv_sec +
+ (NewTime.tv_usec - StartTime.tv_usec)/1000000.0;
+
+ c0out << Sources.Stats.Packages << " pkgs in " <<
+ TimeToStr((long)Delta) << endl;
+
+ Stats.Add(Sources.Stats);
+ Stats.DeLinkBytes = Sources.Stats.DeLinkBytes;
+
+ return !_error->PendingError();
+}
+ /*}}}*/
+// PackageMap::GenContents - Actually generate a Contents file /*{{{*/
+// ---------------------------------------------------------------------
+/* This generates the contents file partially described by this object.
+ It searches the given iterator range for other package files that map
+ into this contents file and includes their data as well when building. */
+bool PackageMap::GenContents(Configuration &Setup,
+ PackageMap *Begin,PackageMap *End,
+ unsigned long &Left)
+{
+ if (Contents.empty() == true)
+ return true;
+
+ if (Left == 0)
+ return true;
+
+ string ArchiveDir = Setup.FindDir("Dir::ArchiveDir");
+ string CacheDir = Setup.FindDir("Dir::CacheDir");
+ string OverrideDir = Setup.FindDir("Dir::OverrideDir");
+
+ struct timeval StartTime;
+ gettimeofday(&StartTime,0);
+
+ // Create a package writer object.
+ ContentsWriter Contents("");
+ if (PkgExt.empty() == false && Contents.SetExts(PkgExt) == false)
+ return _error->Error("Package extension list is too long");
+ if (_error->PendingError() == true)
+ return false;
+
+ MultiCompress Comp(flCombine(ArchiveDir,this->Contents),
+ CntCompress,Permissions);
+ Comp.UpdateMTime = Setup.FindI("Default::ContentsAge",10)*24*60*60;
+ Contents.Output = Comp.Input;
+ if (_error->PendingError() == true)
+ return false;
+
+ // Write the header out.
+ if (ContentsHead.empty() == false)
+ {
+ FileFd Head(flCombine(OverrideDir,ContentsHead),FileFd::ReadOnly);
+ if (_error->PendingError() == true)
+ return false;
+
+ unsigned long Size = Head.Size();
+ unsigned char Buf[4096];
+ while (Size != 0)
+ {
+ unsigned long ToRead = Size;
+ if (Size > sizeof(Buf))
+ ToRead = sizeof(Buf);
+
+ if (Head.Read(Buf,ToRead) == false)
+ return false;
+
+ if (fwrite(Buf,1,ToRead,Comp.Input) != ToRead)
+ return _error->Errno("fwrite","Error writing header to contents file");
+
+ Size -= ToRead;
+ }
+ }
+
+ /* Go over all the package file records and parse all the package
+ files associated with this contents file into one great big honking
+ memory structure, then dump the sorted version */
+ c0out << ' ' << this->Contents << ":" << flush;
+ for (PackageMap *I = Begin; I != End; I++)
+ {
+ if (I->Contents != this->Contents)
+ continue;
+
+ Contents.Prefix = ArchiveDir;
+ Contents.ReadyDB(flCombine(CacheDir,I->BinCacheDB));
+ Contents.ReadFromPkgs(flCombine(ArchiveDir,I->PkgFile),
+ I->PkgCompress);
+
+ I->ContentsDone = true;
+ }
+
+ Contents.Finish();
+
+ // Finish compressing
+ unsigned long Size;
+ if (Comp.Finalize(Size) == false || _error->PendingError() == true)
+ {
+ c0out << endl;
+ return _error->Error("Error Processing Contents %s",
+ this->Contents.c_str());
+ }
+
+ if (Size != 0)
+ {
+ c0out << " New " << SizeToStr(Size) << "B ";
+ if (Left > Size)
+ Left -= Size;
+ else
+ Left = 0;
+ }
+ else
+ c0out << ' ';
+
+ struct timeval NewTime;
+ gettimeofday(&NewTime,0);
+ double Delta = NewTime.tv_sec - StartTime.tv_sec +
+ (NewTime.tv_usec - StartTime.tv_usec)/1000000.0;
+
+ c0out << Contents.Stats.Packages << " files " <<
+ SizeToStr(Contents.Stats.Bytes) << "B " <<
+ TimeToStr((long)Delta) << endl;
+
+ return true;
+}
+ /*}}}*/
+
+// LoadTree - Load a 'tree' section from the Generate Config /*{{{*/
+// ---------------------------------------------------------------------
+/* This populates the PkgList with all the possible permutations of the
+ section/arch lists. */
+void LoadTree(vector<PackageMap> &PkgList,Configuration &Setup)
+{
+ // Load the defaults
+ string DDir = Setup.Find("TreeDefault::Directory",
+ "$(DIST)/$(SECTION)/binary-$(ARCH)/");
+ string DSDir = Setup.Find("TreeDefault::SrcDirectory",
+ "$(DIST)/$(SECTION)/source/");
+ string DPkg = Setup.Find("TreeDefault::Packages",
+ "$(DIST)/$(SECTION)/binary-$(ARCH)/Packages");
+ string DIPrfx = Setup.Find("TreeDefault::InternalPrefix",
+ "$(DIST)/$(SECTION)/");
+ string DContents = Setup.Find("TreeDefault::Contents",
+ "$(DIST)/Contents-$(ARCH)");
+ string DContentsH = Setup.Find("TreeDefault::Contents::Header","");
+ string DBCache = Setup.Find("TreeDefault::BinCacheDB",
+ "packages-$(ARCH).db");
+ string DSources = Setup.Find("TreeDefault::Sources",
+ "$(DIST)/$(SECTION)/source/Sources");
+ string DFLFile = Setup.Find("TreeDefault::FileList", "");
+ string DSFLFile = Setup.Find("TreeDefault::SourceFileList", "");
+
+ // Process 'tree' type sections
+ const Configuration::Item *Top = Setup.Tree("tree");
+ for (Top = (Top == 0?0:Top->Child); Top != 0;)
+ {
+ Configuration Block(Top);
+ string Dist = Top->Tag;
+
+ // Parse the sections
+ const char *Sections = Block.Find("Sections").c_str();
+ string Section;
+ while (ParseQuoteWord(Sections,Section) == true)
+ {
+ const char *Archs = Block.Find("Architectures").c_str();
+ string Arch;
+ while (ParseQuoteWord(Archs,Arch) == true)
+ {
+ struct SubstVar Vars[] = {{"$(DIST)",&Dist},
+ {"$(SECTION)",&Section},
+ {"$(ARCH)",&Arch},
+ {}};
+ PackageMap Itm;
+
+ Itm.BinOverride = SubstVar(Block.Find("BinOverride"),Vars);
+ Itm.InternalPrefix = SubstVar(Block.Find("InternalPrefix",DIPrfx.c_str()),Vars);
+
+ if (stringcasecmp(Arch,"source") == 0)
+ {
+ Itm.SrcOverride = SubstVar(Block.Find("SrcOverride"),Vars);
+ Itm.BaseDir = SubstVar(Block.Find("SrcDirectory",DSDir.c_str()),Vars);
+ Itm.SrcFile = SubstVar(Block.Find("Sources",DSources.c_str()),Vars);
+ Itm.Tag = SubstVar("$(DIST)/$(SECTION)/source",Vars);
+ Itm.FLFile = SubstVar(Block.Find("SourceFileList",DSFLFile.c_str()),Vars);
+ }
+ else
+ {
+ Itm.BinCacheDB = SubstVar(Block.Find("BinCacheDB",DBCache.c_str()),Vars);
+ Itm.BaseDir = SubstVar(Block.Find("Directory",DDir.c_str()),Vars);
+ Itm.PkgFile = SubstVar(Block.Find("Packages",DPkg.c_str()),Vars);
+ Itm.Tag = SubstVar("$(DIST)/$(SECTION)/$(ARCH)",Vars);
+ Itm.Contents = SubstVar(Block.Find("Contents",DContents.c_str()),Vars);
+ Itm.ContentsHead = SubstVar(Block.Find("Contents::Header",DContentsH.c_str()),Vars);
+ Itm.FLFile = SubstVar(Block.Find("FileList",DFLFile.c_str()),Vars);
+ }
+
+ Itm.GetGeneral(Setup,Block);
+ PkgList.push_back(Itm);
+ }
+ }
+
+ Top = Top->Next;
+ }
+}
+ /*}}}*/
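
For reference, a standalone sketch (not part of this patch) of the permutation LoadTree builds: every Section/Architecture pair from a hypothetical 'tree' stanza is expanded through $(DIST)/$(SECTION)/$(ARCH) substitution. The Subst helper and the sample dist/section/arch values below are illustrative stand-ins for SubstVar and the generate config, not APT API.

// Illustrative stand-in for the expansion done by LoadTree(); compile with
// -std=c++11 or newer. Subst is a simplified substitute for APT's SubstVar.
#include <cstdio>
#include <string>
#include <vector>

static std::string Subst(std::string Tmpl,const std::string &Dist,
                         const std::string &Sect,const std::string &Arch)
{
   const char *Keys[] = {"$(DIST)","$(SECTION)","$(ARCH)"};
   const std::string *Vals[] = {&Dist,&Sect,&Arch};
   for (int I = 0; I != 3; I++)
      for (std::string::size_type P; (P = Tmpl.find(Keys[I])) != std::string::npos;)
         Tmpl.replace(P,std::string(Keys[I]).size(),*Vals[I]);
   return Tmpl;
}

int main()
{
   // Hypothetical values for a tree "potato" section.
   std::string Dist = "potato";
   std::vector<std::string> Sections = {"main","contrib"};
   std::vector<std::string> Archs = {"i386","source"};

   // One PackageMap entry is produced per Section x Arch pair.
   for (const auto &S : Sections)
      for (const auto &A : Archs)
         printf("%s\n",Subst("$(DIST)/$(SECTION)/binary-$(ARCH)/Packages",
                             Dist,S,A).c_str());
   return 0;
}

The real LoadTree additionally special-cases the pseudo-architecture "source", switching to the SrcDirectory/Sources defaults instead of the binary Packages path, as the code above shows.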
+// LoadBinDir - Load a 'bindirectory' section from the Generate Config /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+void LoadBinDir(vector<PackageMap> &PkgList,Configuration &Setup)
+{
+ // Process 'bindirectory' type sections
+ const Configuration::Item *Top = Setup.Tree("bindirectory");
+ for (Top = (Top == 0?0:Top->Child); Top != 0;)
+ {
+ Configuration Block(Top);
+
+ PackageMap Itm;
+ Itm.PkgFile = Block.Find("Packages");
+ Itm.SrcFile = Block.Find("Sources");
+ Itm.BinCacheDB = Block.Find("BinCacheDB");
+ Itm.BinOverride = Block.Find("BinOverride");
+ Itm.SrcOverride = Block.Find("SrcOverride");
+ Itm.BaseDir = Top->Tag;
+ Itm.FLFile = Block.Find("FileList");
+ Itm.InternalPrefix = Block.Find("InternalPrefix",Top->Tag.c_str());
+ Itm.Contents = Block.Find("Contents");
+ Itm.ContentsHead = Block.Find("Contents::Header");
+
+ Itm.GetGeneral(Setup,Block);
+ PkgList.push_back(Itm);
+
+ Top = Top->Next;
+ }
+}
+ /*}}}*/
+
+// ShowHelp - Show the help text /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool ShowHelp(CommandLine &CmdL)
+{
+ ioprintf(cout,_("%s %s for %s %s compiled on %s %s\n"),PACKAGE,VERSION,
+ COMMON_OS,COMMON_CPU,__DATE__,__TIME__);
+ if (_config->FindB("version") == true)
+ return true;
+
+ cout <<
+ "Usage: apt-ftparchive [options] command\n"
+ "Commands: packages binarypath [overridefile [pathprefix]]\n"
+ " sources srcpath [overridefile [pathprefix]]\n"
+ " contents path\n"
+ " generate config [groups]\n"
+ " clean config\n"
+ "\n"
+ "apt-ftparchive generates index files for Debian archives. It supports\n"
+ "many styles of generation from fully automated to functional replacements\n"
+ "for dpkg-scanpackages and dpkg-scansources\n"
+ "\n"
+ "apt-ftparchive generates Package files from a tree of .debs. The\n"
+ "Package file contains the contents of all the control fields from\n"
+ "each package as well as the MD5 hash and filesize. An override file\n"
+ "is supported to force the value of Priority and Section.\n"
+ "\n"
+ "Similarly apt-ftparchive generates Sources files from a tree of .dscs.\n"
+ "The --source-override option can be used to specify a src override file\n"
+ "\n"
+ "The 'packages' and 'sources' commands should be run in the root of the\n"
+ "tree. BinaryPath should point to the base of the recursive search and\n"
+ "the override file should contain the override flags. Pathprefix is\n"
+ "appended to the filename fields if present. Example usage from the \n"
+ "debian archive:\n"
+ " apt-ftparchive packages dists/potato/main/binary-i386/ > \\\n"
+ " dists/potato/main/binary-i386/Packages\n"
+ "\n"
+ "Options:\n"
+ " -h This help text\n"
+ " --md5 Control MD5 generation\n"
+ " -s=? Source override file\n"
+ " -q Quiet\n"
+ " -d=? Select the optional caching database\n"
+ " --no-delink Enable delinking debug mode\n"
+ " --contents Control contents file generation\n"
+ " -c=? Read this configuration file\n"
+ " -o=?  Set an arbitrary configuration option" << endl;
+
+ return true;
+}
+ /*}}}*/
+// SimpleGenPackages - Generate a Packages file for a directory tree /*{{{*/
+// ---------------------------------------------------------------------
+/* This emulates dpkg-scanpackages's command line interface. 'mostly' */
+bool SimpleGenPackages(CommandLine &CmdL)
+{
+ if (CmdL.FileSize() < 2)
+ return ShowHelp(CmdL);
+
+ string Override;
+ if (CmdL.FileSize() >= 3)
+ Override = CmdL.FileList[2];
+
+ // Create a package writer object.
+ PackagesWriter Packages(_config->Find("APT::FTPArchive::DB"),
+ Override);
+ if (_error->PendingError() == true)
+ return false;
+
+ if (CmdL.FileSize() >= 4)
+ Packages.PathPrefix = CmdL.FileList[3];
+
+ // Do recursive directory searching
+ if (Packages.RecursiveScan(CmdL.FileList[1]) == false)
+ return false;
+
+ return true;
+}
+ /*}}}*/
+// SimpleGenContents - Generate a Contents listing /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool SimpleGenContents(CommandLine &CmdL)
+{
+ if (CmdL.FileSize() < 2)
+ return ShowHelp(CmdL);
+
+ // Create a package writer object.
+ ContentsWriter Contents(_config->Find("APT::FTPArchive::DB"));
+ if (_error->PendingError() == true)
+ return false;
+
+ // Do recursive directory searching
+ if (Contents.RecursiveScan(CmdL.FileList[1]) == false)
+ return false;
+
+ Contents.Finish();
+
+ return true;
+}
+ /*}}}*/
+// SimpleGenSources - Generate a Sources file for a directory tree /*{{{*/
+// ---------------------------------------------------------------------
+/* This emulates dpkg-scansources's command line interface, 'mostly'. */
+bool SimpleGenSources(CommandLine &CmdL)
+{
+ if (CmdL.FileSize() < 2)
+ return ShowHelp(CmdL);
+
+ string Override;
+ if (CmdL.FileSize() >= 3)
+ Override = CmdL.FileList[2];
+
+ string SOverride;
+ if (Override.empty() == false)
+ SOverride = Override + ".src";
+
+ SOverride = _config->Find("APT::FTPArchive::SourceOverride",
+ SOverride.c_str());
+
+ // Create a package writer object.
+ SourcesWriter Sources(Override,SOverride);
+ if (_error->PendingError() == true)
+ return false;
+
+ if (CmdL.FileSize() >= 4)
+ Sources.PathPrefix = CmdL.FileList[3];
+
+ // Do recursive directory searching
+ if (Sources.RecursiveScan(CmdL.FileList[1]) == false)
+ return false;
+
+ return true;
+}
+ /*}}}*/
+// Generate - Full generate, using a config file /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool Generate(CommandLine &CmdL)
+{
+ struct CacheDB::Stats SrcStats;
+ if (CmdL.FileSize() < 2)
+ return ShowHelp(CmdL);
+
+ struct timeval StartTime;
+ gettimeofday(&StartTime,0);
+ struct CacheDB::Stats Stats;
+
+ // Read the configuration file.
+ Configuration Setup;
+ if (ReadConfigFile(Setup,CmdL.FileList[1],true) == false)
+ return false;
+
+ vector<PackageMap> PkgList;
+ LoadTree(PkgList,Setup);
+ LoadBinDir(PkgList,Setup);
+
+ // Sort by cache DB to improve IO locality.
+ stable_sort(PkgList.begin(),PkgList.end(),PackageMap::DBCompare());
+
+ // Generate packages
+ if (CmdL.FileSize() <= 2)
+ {
+ for (PackageMap *I = PkgList.begin(); I != PkgList.end(); I++)
+ if (I->GenPackages(Setup,Stats) == false)
+ _error->DumpErrors();
+ for (PackageMap *I = PkgList.begin(); I != PkgList.end(); I++)
+ if (I->GenSources(Setup,SrcStats) == false)
+ _error->DumpErrors();
+ }
+ else
+ {
+ // Make a choice list out of the package list..
+ RxChoiceList *List = new RxChoiceList[2*PkgList.size()+1];
+ RxChoiceList *End = List;
+ for (PackageMap *I = PkgList.begin(); I != PkgList.end(); I++)
+ {
+ End->UserData = I;
+ End->Str = I->BaseDir.c_str();
+ End++;
+
+ End->UserData = I;
+ End->Str = I->Tag.c_str();
+ End++;
+ }
+ End->Str = 0;
+
+ // Regex it
+ if (RegexChoice(List,CmdL.FileList + 2,CmdL.FileList + CmdL.FileSize()) == 0)
+ {
+ delete [] List;
+ return _error->Error("No selections matched");
+ }
+ _error->DumpErrors();
+
+ // Do the generation for Packages
+ for (End = List; End->Str != 0; End++)
+ {
+ if (End->Hit == false)
+ continue;
+
+ PackageMap *I = (PackageMap *)End->UserData;
+ if (I->PkgDone == true)
+ continue;
+ if (I->GenPackages(Setup,Stats) == false)
+ _error->DumpErrors();
+ }
+
+ // Do the generation for Sources
+ for (End = List; End->Str != 0; End++)
+ {
+ if (End->Hit == false)
+ continue;
+
+ PackageMap *I = (PackageMap *)End->UserData;
+ if (I->SrcDone == true)
+ continue;
+ if (I->GenSources(Setup,SrcStats) == false)
+ _error->DumpErrors();
+ }
+
+ delete [] List;
+ }
+
+ if (_config->FindB("APT::FTPArchive::Contents",true) == false)
+ return true;
+
+ c1out << "Done Packages, Starting contents." << endl;
+
+ // Sort the contents file list by date
+ string ArchiveDir = Setup.FindDir("Dir::ArchiveDir");
+ for (PackageMap *I = PkgList.begin(); I != PkgList.end(); I++)
+ {
+ struct stat A;
+ if (MultiCompress::GetStat(flCombine(ArchiveDir,I->Contents),
+ I->CntCompress,A) == false)
+ time(&I->ContentsMTime);
+ else
+ I->ContentsMTime = A.st_mtime;
+ }
+ stable_sort(PkgList.begin(),PkgList.end(),PackageMap::ContentsCompare());
+
+ /* Now for Contents.. The process here is to do a make-like dependency
+ check. Each contents file is verified to be newer than the package files
+ that describe the debs it indexes. Since the package files contain
+ hashes of the .debs this means they have not changed either so the
+ contents must be up to date. */
+ unsigned long MaxContentsChange = Setup.FindI("Default::MaxContentsChange",UINT_MAX)*1024;
+ for (PackageMap *I = PkgList.begin(); I != PkgList.end(); I++)
+ {
+ // This record is not relevant
+ if (I->ContentsDone == true ||
+ I->Contents.empty() == true)
+ continue;
+
+ // Do not do everything if the user specified sections.
+ if (CmdL.FileSize() > 2 && I->PkgDone == false)
+ continue;
+
+ struct stat A,B;
+ if (MultiCompress::GetStat(flCombine(ArchiveDir,I->Contents),I->CntCompress,A) == true)
+ {
+ if (MultiCompress::GetStat(flCombine(ArchiveDir,I->PkgFile),I->PkgCompress,B) == false)
+ {
+ _error->Warning("Some files are missing in the package file group `%s'",I->PkgFile.c_str());
+ continue;
+ }
+
+ if (A.st_mtime > B.st_mtime)
+ continue;
+ }
+
+ if (I->GenContents(Setup,PkgList.begin(),PkgList.end(),
+ MaxContentsChange) == false)
+ _error->DumpErrors();
+
+ // Hit the limit?
+ if (MaxContentsChange == 0)
+ {
+ c1out << "Hit contents update byte limit" << endl;
+ break;
+ }
+ }
+
+ struct timeval NewTime;
+ gettimeofday(&NewTime,0);
+ double Delta = NewTime.tv_sec - StartTime.tv_sec +
+ (NewTime.tv_usec - StartTime.tv_usec)/1000000.0;
+ c1out << "Done. " << SizeToStr(Stats.Bytes) << "B in " << Stats.Packages
+ << " archives. Took " << TimeToStr((long)Delta) << endl;
+
+ return true;
+}
+ /*}}}*/
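
For reference, a standalone sketch (file names hypothetical, not APT API) of the make-style freshness test used above: a Contents file is only rebuilt when it is missing or not newer than the Packages file that indexes the same .debs. The patch stats whichever compressed variant exists via MultiCompress::GetStat; plain stat() is used here for brevity.

// Illustrative only: the same mtime comparison Generate() uses to skip
// Contents files that are already up to date.
#include <sys/stat.h>
#include <cstdio>

// Returns true when the Contents file should be regenerated.
static bool ContentsStale(const char *ContentsFile,const char *PackagesFile)
{
   struct stat C,P;
   if (stat(ContentsFile,&C) != 0)
      return true;                    // no Contents file yet -> rebuild
   if (stat(PackagesFile,&P) != 0)
      return false;                   // the real code warns and skips this group
   return C.st_mtime <= P.st_mtime;   // Packages is newer -> Contents is stale
}

int main()
{
   if (ContentsStale("dists/potato/Contents-i386",
                     "dists/potato/main/binary-i386/Packages"))
      printf("would regenerate Contents\n");
   else
      printf("Contents is up to date\n");
   return 0;
}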
+// Clean - Clean out the databases /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool Clean(CommandLine &CmdL)
+{
+ if (CmdL.FileSize() != 2)
+ return ShowHelp(CmdL);
+
+ // Read the configuration file.
+ Configuration Setup;
+ if (ReadConfigFile(Setup,CmdL.FileList[1],true) == false)
+ return false;
+
+ vector<PackageMap> PkgList;
+ LoadTree(PkgList,Setup);
+ LoadBinDir(PkgList,Setup);
+
+ // Sort by cache DB to improve IO locality.
+ stable_sort(PkgList.begin(),PkgList.end(),PackageMap::DBCompare());
+
+ string CacheDir = Setup.FindDir("Dir::CacheDir");
+
+ for (PackageMap *I = PkgList.begin(); I != PkgList.end(); )
+ {
+ c0out << I->BinCacheDB << endl;
+ CacheDB DB(flCombine(CacheDir,I->BinCacheDB));
+ if (DB.Clean() == false)
+ _error->DumpErrors();
+
+ string CacheDB = I->BinCacheDB;
+ for (; I != PkgList.end() && I->BinCacheDB == CacheDB; I++);
+ }
+
+ return true;
+}
+ /*}}}*/
+
+int main(int argc, const char *argv[])
+{
+ CommandLine::Args Args[] = {
+ {'h',"help","help",0},
+ {0,"md5","APT::FTPArchive::MD5",0},
+ {'v',"version","version",0},
+ {'d',"db","APT::FTPArchive::DB",CommandLine::HasArg},
+ {'s',"source-override","APT::FTPArchive::SourceOverride",CommandLine::HasArg},
+ {'q',"quiet","quiet",CommandLine::IntLevel},
+ {'q',"silent","quiet",CommandLine::IntLevel},
+ {0,"delink","APT::FTPArchive::DeLinkAct",0},
+ {0,"readonly","APT::FTPArchive::ReadOnlyDB",0},
+ {0,"contents","APT::FTPArchive::Contents",0},
+ {'c',"config-file",0,CommandLine::ConfigFile},
+ {'o',"option",0,CommandLine::ArbItem},
+ {0,0,0,0}};
+ CommandLine::Dispatch Cmds[] = {{"packages",&SimpleGenPackages},
+ {"contents",&SimpleGenContents},
+ {"sources",&SimpleGenSources},
+ {"generate",&Generate},
+ {"clean",&Clean},
+ {"help",&ShowHelp},
+ {0,0}};
+
+ // Parse the command line and initialize the package library
+ CommandLine CmdL(Args,_config);
+ if (CmdL.Parse(argc,argv) == false)
+ {
+ _error->DumpErrors();
+ return 100;
+ }
+
+ // See if the help should be shown
+ if (_config->FindB("help") == true ||
+ _config->FindB("version") == true ||
+ CmdL.FileSize() == 0)
+ {
+ ShowHelp(CmdL);
+ return 0;
+ }
+
+ // Setup the output streams
+ c0out.rdbuf(cout.rdbuf());
+ c1out.rdbuf(cout.rdbuf());
+ c2out.rdbuf(cout.rdbuf());
+ Quiet = _config->FindI("quiet",0);
+ if (Quiet > 0)
+ c0out.rdbuf(devnull.rdbuf());
+ if (Quiet > 1)
+ c1out.rdbuf(devnull.rdbuf());
+
+ // Match the operation
+ CmdL.DispatchArg(Cmds);
+
+ if (_error->empty() == false)
+ {
+ bool Errors = _error->PendingError();
+ _error->DumpErrors();
+ return Errors == true?100:0;
+ }
+ return 0;
+}
diff --git a/ftparchive/apt-ftparchive.h b/ftparchive/apt-ftparchive.h
new file mode 100644
index 000000000..c228903ba
--- /dev/null
+++ b/ftparchive/apt-ftparchive.h
@@ -0,0 +1,28 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: apt-ftparchive.h,v 1.2 2001/02/20 07:03:18 jgg Exp $
+/* ######################################################################
+
+ Writer
+
+ The file writer classes. These write various types of output, sources,
+ packages and contents.
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef APT_FTPARCHIVE_H
+#define APT_FTPARCHIVE_H
+
+#ifdef __GNUG__
+#pragma interface "apt-ftparchive.h"
+#endif
+
+#include <fstream>
+
+extern ostream c0out;
+extern ostream c1out;
+extern ostream c2out;
+extern ofstream devnull;
+extern unsigned Quiet;
+
+#endif
diff --git a/ftparchive/cachedb.cc b/ftparchive/cachedb.cc
new file mode 100644
index 000000000..dd63f215a
--- /dev/null
+++ b/ftparchive/cachedb.cc
@@ -0,0 +1,284 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: cachedb.cc,v 1.2 2001/02/20 07:03:18 jgg Exp $
+/* ######################################################################
+
+ CacheDB
+
+ Simple uniform interface to a cache database.
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#ifdef __GNUG__
+#pragma implementation "cachedb.h"
+#endif
+
+#include "cachedb.h"
+
+#include <apt-pkg/error.h>
+#include <apt-pkg/md5.h>
+#include <apt-pkg/strutl.h>
+#include <apt-pkg/configuration.h>
+
+#include <netinet/in.h> // htonl, etc
+ /*}}}*/
+
+// CacheDB::ReadyDB - Ready the DB2 /*{{{*/
+// ---------------------------------------------------------------------
+/* This opens the DB2 file for caching package information */
+bool CacheDB::ReadyDB(string DB)
+{
+ ReadOnly = _config->FindB("APT::FTPArchive::ReadOnlyDB",false);
+
+ // Close the old DB
+ if (Dbp != 0)
+ Dbp->close(Dbp,0);
+
+ /* Check if the DB was disabled while running and deal with a
+ corrupted DB */
+ if (DBFailed() == true)
+ {
+ _error->Warning("DB was corrupted, file renamed to %s.old",DBFile.c_str());
+ rename(DBFile.c_str(),(DBFile+".old").c_str());
+ }
+
+ DBLoaded = false;
+ Dbp = 0;
+ DBFile = string();
+
+ if (DB.empty())
+ return true;
+
+ if ((errno = db_open(DB.c_str(),DB_HASH,
+ (ReadOnly?DB_RDONLY:DB_CREATE),
+ 0644,0,0,&Dbp)) != 0)
+ {
+ Dbp = 0;
+ return _error->Errno("db_open","Unable to open DB2 file %s",DB.c_str());
+ }
+
+ DBFile = DB;
+ DBLoaded = true;
+ return true;
+}
+ /*}}}*/
+// CacheDB::SetFile - Select a file to be working with /*{{{*/
+// ---------------------------------------------------------------------
+/* All future actions will be performed against this file */
+bool CacheDB::SetFile(string FileName,struct stat St,FileFd *Fd)
+{
+ delete DebFile;
+ DebFile = 0;
+ this->FileName = FileName;
+ this->Fd = Fd;
+ this->FileStat = St;
+ memset(&CurStat,0,sizeof(CurStat));
+
+ Stats.Bytes += St.st_size;
+ Stats.Packages++;
+
+ if (DBLoaded == false)
+ return true;
+
+ InitQuery("st");
+
+ // Ensure alignment of the returned structure
+ Data.data = &CurStat;
+ Data.ulen = sizeof(CurStat);
+ Data.flags = DB_DBT_USERMEM;
+ // Lookup the stat info and confirm the file is unchanged
+ if (Get() == true)
+ {
+ if (CurStat.st_mtime != htonl(St.st_mtime))
+ {
+ CurStat.st_mtime = htonl(St.st_mtime);
+ CurStat.Flags = 0;
+ _error->Warning("File date has changed %s",FileName.c_str());
+ }
+ }
+ else
+ {
+ CurStat.st_mtime = htonl(St.st_mtime);
+ CurStat.Flags = 0;
+ }
+ CurStat.Flags = ntohl(CurStat.Flags);
+ OldStat = CurStat;
+ return true;
+}
+ /*}}}*/
+// CacheDB::LoadControl - Load Control information /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool CacheDB::LoadControl()
+{
+ // Try to read the control information out of the DB.
+ if ((CurStat.Flags & FlControl) == FlControl)
+ {
+ // Lookup the control information
+ InitQuery("cl");
+ if (Get() == true && Control.TakeControl(Data.data,Data.size) == true)
+ return true;
+ CurStat.Flags &= ~FlControl;
+ }
+
+ // Create a deb instance to read the archive
+ if (DebFile == 0)
+ {
+ DebFile = new debDebFile(*Fd);
+ if (_error->PendingError() == true)
+ return false;
+ }
+
+ Stats.Misses++;
+ if (Control.Read(*DebFile) == false)
+ return false;
+
+ if (Control.Control == 0)
+ return _error->Error("Archive has no control record");
+
+ // Write back the control information
+ InitQuery("cl");
+ if (Put(Control.Control,Control.Length) == true)
+ CurStat.Flags |= FlControl;
+ return true;
+}
+ /*}}}*/
+// CacheDB::LoadContents - Load the File Listing /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool CacheDB::LoadContents(bool GenOnly)
+{
+ // Try to read the contents information out of the DB.
+ if ((CurStat.Flags & FlContents) == FlContents)
+ {
+ if (GenOnly == true)
+ return true;
+
+ // Lookup the contents information
+ InitQuery("cn");
+ if (Get() == true)
+ {
+ if (Contents.TakeContents(Data.data,Data.size) == true)
+ return true;
+ }
+
+ CurStat.Flags &= ~FlContents;
+ }
+
+ // Create a deb instance to read the archive
+ if (DebFile == 0)
+ {
+ DebFile = new debDebFile(*Fd);
+ if (_error->PendingError() == true)
+ return false;
+ }
+
+ if (Contents.Read(*DebFile) == false)
+ return false;
+
+ // Write back the contents information
+ InitQuery("cn");
+ if (Put(Contents.Data,Contents.CurSize) == true)
+ CurStat.Flags |= FlContents;
+ return true;
+}
+ /*}}}*/
+// CacheDB::GetMD5 - Get the MD5 hash /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool CacheDB::GetMD5(string &MD5Res,bool GenOnly)
+{
+ // Try to read the MD5 hash out of the DB.
+ if ((CurStat.Flags & FlMD5) == FlMD5)
+ {
+ if (GenOnly == true)
+ return true;
+
+ InitQuery("m5");
+ if (Get() == true)
+ {
+ MD5Res = string((char *)Data.data,Data.size);
+ return true;
+ }
+ CurStat.Flags &= ~FlMD5;
+ }
+
+ Stats.MD5Bytes += FileStat.st_size;
+
+ MD5Summation MD5;
+ if (Fd->Seek(0) == false || MD5.AddFD(Fd->Fd(),FileStat.st_size) == false)
+ return false;
+
+ MD5Res = MD5.Result();
+ InitQuery("m5");
+ if (Put(MD5Res.begin(),MD5Res.length()) == true)
+ CurStat.Flags |= FlMD5;
+ return true;
+}
+ /*}}}*/
+// CacheDB::Finish - Write back the cache structure /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool CacheDB::Finish()
+{
+ // Optimize away some writes.
+ if (CurStat.Flags == OldStat.Flags &&
+ CurStat.st_mtime == OldStat.st_mtime)
+ return true;
+
+ // Write the stat information
+ CurStat.Flags = htonl(CurStat.Flags);
+ InitQuery("st");
+ Put(&CurStat,sizeof(CurStat));
+ CurStat.Flags = ntohl(CurStat.Flags);
+ return true;
+}
+ /*}}}*/
+// CacheDB::Clean - Clean the Database /*{{{*/
+// ---------------------------------------------------------------------
+/* Tidy the database by removing files that no longer exist at all. */
+bool CacheDB::Clean()
+{
+ if (DBLoaded == false)
+ return true;
+
+ /* I'm not sure what VERSION_MINOR should be here.. 2.4.14 certainly
+ needs the lower one and 2.7.7 needs the upper.. */
+#if DB_VERSION_MAJOR >= 2 && DB_VERSION_MINOR >= 7
+ DBC *Cursor;
+ if ((errno = Dbp->cursor(Dbp,0,&Cursor,0)) != 0)
+ return _error->Error("Unable to get a cursor");
+#else
+ DBC *Cursor;
+ if ((errno = Dbp->cursor(Dbp,0,&Cursor)) != 0)
+ return _error->Error("Unable to get a cursor");
+#endif
+
+ DBT Key;
+ DBT Data;
+ memset(&Key,0,sizeof(Key));
+ memset(&Data,0,sizeof(Data));
+ while ((errno = Cursor->c_get(Cursor,&Key,&Data,DB_NEXT)) == 0)
+ {
+ const char *Colon = (char *)Key.data;
+ for (; Colon != (char *)Key.data+Key.size && *Colon != ':'; Colon++);
+ if ((char *)Key.data+Key.size - Colon > 2)
+ {
+ if (stringcmp((char *)Key.data,Colon,"st") == 0 ||
+ stringcmp((char *)Key.data,Colon,"cn") == 0 ||
+ stringcmp((char *)Key.data,Colon,"m5") == 0 ||
+ stringcmp((char *)Key.data,Colon,"cl") == 0)
+ {
+ if (FileExists(string(Colon+1,(const char *)Key.data+Key.size)) == true)
+ continue;
+ }
+ }
+
+ Cursor->c_del(Cursor,0);
+ }
+
+ return true;
+}
+ /*}}}*/
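
For reference, a short sketch of the record layout CacheDB uses: every cached item is stored under a "<type>:<filename>" key, where the two-character type is "st" (stat), "cl" (control), "cn" (contents) or "m5" (MD5), the same prefixes InitQuery() and Clean() work with. The snippet only formats keys; the file name is hypothetical and no Berkeley DB calls are made.

// Illustration of CacheDB's key scheme; snprintf mirrors InitQuery().
#include <cstdio>

int main()
{
   const char *Types[] = {"st","cl","cn","m5"};
   const char *File = "pool/main/a/apt_0.5.0_i386.deb";   // hypothetical

   char Key[600];                                         // same size as TmpKey
   for (const char *T : Types)
   {
      snprintf(Key,sizeof(Key),"%s:%s",T,File);
      printf("%s\n",Key);
   }
   return 0;
}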
diff --git a/ftparchive/cachedb.h b/ftparchive/cachedb.h
new file mode 100644
index 000000000..89b1a2320
--- /dev/null
+++ b/ftparchive/cachedb.h
@@ -0,0 +1,119 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: cachedb.h,v 1.2 2001/02/20 07:03:18 jgg Exp $
+/* ######################################################################
+
+ CacheDB
+
+ Simple uniform interface to a cache database.
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef CACHEDB_H
+#define CACHEDB_H
+
+#ifdef __GNUG__
+#pragma interface "cachedb.h"
+#endif
+
+#include <db2/db.h>
+#include <string>
+#include <apt-pkg/debfile.h>
+#include <inttypes.h>
+#include <sys/stat.h>
+#include <errno.h>
+
+#include "contents.h"
+
+class CacheDB
+{
+ protected:
+
+ // Database state/access
+ DBT Key;
+ DBT Data;
+ char TmpKey[600];
+ DB *Dbp;
+ bool DBLoaded;
+ bool ReadOnly;
+ string DBFile;
+
+ // Generate a key for the DB of a given type
+ inline void InitQuery(const char *Type)
+ {
+ memset(&Key,0,sizeof(Key));
+ memset(&Data,0,sizeof(Data));
+ Key.data = TmpKey;
+ Key.size = snprintf(TmpKey,sizeof(TmpKey),"%s:%s",Type,FileName.c_str());
+ }
+
+ inline bool Get()
+ {
+ return Dbp->get(Dbp,0,&Key,&Data,0) == 0;
+ };
+ inline bool Put(const void *In,unsigned long Length)
+ {
+ if (ReadOnly == true)
+ return true;
+ Data.size = Length;
+ Data.data = (void *)In;
+ if (DBLoaded == true && (errno = Dbp->put(Dbp,0,&Key,&Data,0)) != 0)
+ {
+ DBLoaded = false;
+ return false;
+ }
+ return true;
+ }
+
+ // Stat info stored in the DB, Fixed types since it is written to disk.
+ enum FlagList {FlControl = (1<<0),FlMD5=(1<<1),FlContents=(1<<2)};
+ struct StatStore
+ {
+ uint32_t st_mtime;
+ uint32_t Flags;
+ } CurStat;
+ struct StatStore OldStat;
+
+ // 'set' state
+ string FileName;
+ struct stat FileStat;
+ FileFd *Fd;
+ debDebFile *DebFile;
+
+ public:
+
+ // Data collection helpers
+ debDebFile::MemControlExtract Control;
+ ContentsExtract Contents;
+
+ // Runtime statistics
+ struct Stats
+ {
+ double Bytes;
+ double MD5Bytes;
+ unsigned long Packages;
+ unsigned long Misses;
+ unsigned long DeLinkBytes;
+
+ inline void Add(const Stats &S) {Bytes += S.Bytes; MD5Bytes += S.MD5Bytes;
+ Packages += S.Packages; Misses += S.Misses; DeLinkBytes += S.DeLinkBytes;};
+ Stats() : Bytes(0), MD5Bytes(0), Packages(0), Misses(0), DeLinkBytes(0) {};
+ } Stats;
+
+ bool ReadyDB(string DB);
+ inline bool DBFailed() {return Dbp != 0 && DBLoaded == false;};
+ inline bool Loaded() {return DBLoaded == true;};
+
+ bool SetFile(string FileName,struct stat St,FileFd *Fd);
+ bool LoadControl();
+ bool LoadContents(bool GenOnly);
+ bool GetMD5(string &MD5Res,bool GenOnly);
+ bool Finish();
+
+ bool Clean();
+
+ CacheDB(string DB) : Dbp(0), DebFile(0) {ReadyDB(DB);};
+ ~CacheDB() {ReadyDB(string()); delete DebFile;};
+};
+
+#endif
diff --git a/ftparchive/contents.cc b/ftparchive/contents.cc
new file mode 100644
index 000000000..145a68781
--- /dev/null
+++ b/ftparchive/contents.cc
@@ -0,0 +1,401 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: contents.cc,v 1.2 2001/02/20 07:03:18 jgg Exp $
+/* ######################################################################
+
+ contents - Archive contents generator
+
+ The GenContents class is a back end for an archive contents generator.
+ It takes a list of per-deb file name and merges it into a memory
+ It takes a list of per-deb file names and merges it into a memory
+ of binary trees linked across directories to form a tree of all files+dirs
+ given to it. The tree will also be sorted as it is built up thus
+ removing the massive sort time overhead.
+
+ By breaking all the pathnames into components and storing them
+ separately a space savings is realized by not duplicating the string
+ over and over again. Ultimately this saving is sacrificed to storage of
+ the tree structure itself but the tree structure yields a speed gain
+ in the sorting and processing. Ultimately it takes about 5 seconds to
+ do 141000 nodes and about 5 meg of ram.
+
+ The tree looks something like:
+
+      usr/
+     /    \              / libslang
+   bin/   lib/  -->  libc6
+   /  \      \           \ libfoo
+ games/ sbin/
+
+ The ---> is the DirDown link
+
+
+ ##################################################################### */
+ /*}}}*/
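
For reference, a minimal usage sketch of GenContents as declared in contents.h below, assuming the program is linked against contents.cc: paths are fed in with Add() and Print() walks the sorted tree, writing one path line with the owning package(s). The paths and package names here are hypothetical.

// Illustrative only: build a tiny contents tree and dump it.
#include <cstdio>
#include "contents.h"

int main()
{
   GenContents Tree;

   // The real callers pass package names obtained from Mystrdup() so the
   // duplicate check can compare pointers cheaply; literals work the same way.
   Tree.Add("usr/bin/apt-get","admin/apt");
   Tree.Add("usr/bin/dpkg","admin/dpkg");
   Tree.Add("usr/share/doc/apt/README","admin/apt");

   Tree.Print(stdout);
   return 0;
}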
+// Include Files /*{{{*/
+#include "contents.h"
+
+#include <apt-pkg/extracttar.h>
+#include <apt-pkg/error.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <malloc.h>
+ /*}}}*/
+
+// GenContents::~GenContents - Free allocated memory /*{{{*/
+// ---------------------------------------------------------------------
+/* Since all our allocations are static big-block allocations all that is
+ needed is to free all of them. */
+GenContents::~GenContents()
+{
+ while (BlockList != 0)
+ {
+ BigBlock *Old = BlockList;
+ BlockList = Old->Next;
+ free(Old->Block);
+ delete Old;
+ }
+}
+ /*}}}*/
+// GenContents::Mystrdup - Custom strdup /*{{{*/
+// ---------------------------------------------------------------------
+/* This strdup also uses a large block allocator to eliminate glibc
+ overhead */
+char *GenContents::Mystrdup(const char *From)
+{
+ unsigned int Len = strlen(From) + 1;
+ if (StrLeft <= Len)
+ {
+ StrLeft = 4096*10;
+ StrPool = (char *)malloc(StrLeft);
+
+ BigBlock *Block = new BigBlock;
+ Block->Block = StrPool;
+ Block->Next = BlockList;
+ BlockList = Block;
+ }
+
+ memcpy(StrPool,From,Len);
+ StrLeft -= Len;
+
+ char *Res = StrPool;
+ StrPool += Len;
+ return Res;
+}
+ /*}}}*/
+// GenContents::Node::operator new - Big block allocator /*{{{*/
+// ---------------------------------------------------------------------
+/* This eliminates glibc's malloc overhead by allocating large blocks and
+ having a contiguous set of Nodes. This takes about 8 bytes off each node's
+ space needs. Freeing is not supported. */
+void *GenContents::Node::operator new(size_t Amount,GenContents *Owner)
+{
+ if (Owner->NodeLeft == 0)
+ {
+ Owner->NodeLeft = 10000;
+ Owner->NodePool = (Node *)malloc(Amount*Owner->NodeLeft);
+ BigBlock *Block = new BigBlock;
+ Block->Block = Owner->NodePool;
+ Block->Next = Owner->BlockList;
+ Owner->BlockList = Block;
+ }
+
+ Owner->NodeLeft--;
+ return Owner->NodePool++;
+}
+ /*}}}*/
+// GenContents::Grab - Grab a new node representing Name under Top /*{{{*/
+// ---------------------------------------------------------------------
+/* This grabs a new node representing the pathname component Name under
+ the node Top. The node is given the name Package. It is assumed that Name
+ is inside of Top. If an already-entered duplicate name is found then
+ a note is made on the Dup list and the previous in-tree node is returned. */
+GenContents::Node *GenContents::Grab(GenContents::Node *Top,const char *Name,
+ const char *Package)
+{
+ /* We drop down to the next dir level each call. This simplifies
+ the calling routine */
+ if (Top->DirDown == 0)
+ {
+ Node *Item = new(this) Node;
+ Item->Path = Mystrdup(Name);
+ Item->Package = Package;
+ Top->DirDown = Item;
+ return Item;
+ }
+ Top = Top->DirDown;
+
+ int Res;
+ while (1)
+ {
+ Res = strcmp(Name,Top->Path);
+
+ // Collision!
+ if (Res == 0)
+ {
+ // See if this is the same package (multi-version dup)
+ if (Top->Package == Package ||
+ strcasecmp(Top->Package,Package) == 0)
+ return Top;
+
+ // Look for an already existing Dup
+ for (Node *I = Top->Dups; I != 0; I = I->Dups)
+ if (I->Package == Package ||
+ strcasecmp(I->Package,Package) == 0)
+ return Top;
+
+ // Add the dup in
+ Node *Item = new(this) Node;
+ Item->Path = Top->Path;
+ Item->Package = Package;
+ Item->Dups = Top->Dups;
+ Top->Dups = Item;
+ return Top;
+ }
+
+ // Continue to traverse the tree
+ if (Res < 0)
+ {
+ if (Top->BTreeLeft == 0)
+ break;
+ Top = Top->BTreeLeft;
+ }
+ else
+ {
+ if (Top->BTreeRight == 0)
+ break;
+ Top = Top->BTreeRight;
+ }
+ }
+
+ // The item was not found in the tree
+ Node *Item = new(this) Node;
+ Item->Path = Mystrdup(Name);
+ Item->Package = Package;
+
+ // Link it into the tree
+ if (Res < 0)
+ {
+ Item->BTreeLeft = Top->BTreeLeft;
+ Top->BTreeLeft = Item;
+ }
+ else
+ {
+ Item->BTreeRight = Top->BTreeRight;
+ Top->BTreeRight = Item;
+ }
+
+ return Item;
+}
+ /*}}}*/
+// GenContents::Add - Add a path to the tree /*{{{*/
+// ---------------------------------------------------------------------
+/* This takes a full pathname and adds it into the tree. We split the
+ pathname into directory fragments adding each one as we go. Technically
+ in output from tar this should result in hitting previous items. */
+void GenContents::Add(const char *Dir,const char *Package)
+{
+ Node *Root = &this->Root;
+
+ // Drop leading slashes
+ while (*Dir == '/' && *Dir != 0)
+ Dir++;
+
+ // Run over the string and grab out each bit up to and including a /
+ const char *Start = Dir;
+ const char *I = Dir;
+ while (*I != 0)
+ {
+ if (*I != '/' || I - Start <= 1)
+ {
+ I++;
+ continue;
+ }
+ I++;
+
+ // Copy the path fragment over
+ char Tmp[1024];
+ strncpy(Tmp,Start,I - Start);
+ Tmp[I - Start] = 0;
+
+ // Grab a node for it
+ Root = Grab(Root,Tmp,Package);
+
+ Start = I;
+ }
+
+ // The final component if it does not have a trailing /
+ if (I - Start >= 1)
+ Root = Grab(Root,Start,Package);
+}
+ /*}}}*/
+// GenContents::WriteSpace - Write a given number of white space chars /*{{{*/
+// ---------------------------------------------------------------------
+/* We mod 8 it and write tabs where possible. */
+void GenContents::WriteSpace(FILE *Out,unsigned int Current,unsigned int Target)
+{
+ if (Target <= Current)
+ Target = Current + 1;
+
+ /* Now we write tabs so long as the next tab stop would not pass
+ the target */
+ for (; (Current/8 + 1)*8 < Target; Current = (Current/8 + 1)*8)
+ fputc('\t',Out);
+
+ // Fill the last bit with spaces
+ for (; Current < Target; Current++)
+ fputc(' ',Out);
+}
+ /*}}}*/
+// GenContents::Print - Display the tree /*{{{*/
+// ---------------------------------------------------------------------
+/* This is the final result function. It takes the tree and recursively
+ calls itself and runs over each section of the tree printing out
+ the pathname and the hit packages. We use Buf to build the pathname
+ summed over all the directory parents of this node. */
+void GenContents::Print(FILE *Out)
+{
+ char Buffer[1024];
+ DoPrint(Out,&Root,Buffer);
+}
+void GenContents::DoPrint(FILE *Out,GenContents::Node *Top, char *Buf)
+{
+ if (Top == 0)
+ return;
+
+ // Go left
+ DoPrint(Out,Top->BTreeLeft,Buf);
+
+ // Print the current dir location and then descend to lower dirs
+ char *OldEnd = Buf + strlen(Buf);
+ if (Top->Path != 0)
+ {
+ strcat(Buf,Top->Path);
+
+ // Do not show the item if it is a directory with dups
+ if (Top->Path[strlen(Top->Path)-1] != '/' /*|| Top->Dups == 0*/)
+ {
+ fputs(Buf,Out);
+ WriteSpace(Out,strlen(Buf),60);
+ for (Node *I = Top; I != 0; I = I->Dups)
+ {
+ if (I != Top)
+ fputc(',',Out);
+ fputs(I->Package,Out);
+ }
+ fputc('\n',Out);
+ }
+ }
+
+ // Go along the directory link
+ DoPrint(Out,Top->DirDown,Buf);
+ *OldEnd = 0;
+
+ // Go right
+ DoPrint(Out,Top->BTreeRight,Buf);
+}
+ /*}}}*/
+
+// ContentsExtract::Read - Read the archive /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool ContentsExtract::Read(debDebFile &Deb)
+{
+ Reset();
+
+ // Get the archive member and position the file
+ const ARArchive::Member *Member = Deb.GotoMember("data.tar.gz");
+ if (Member == 0)
+ return false;
+
+ // Extract it.
+ ExtractTar Tar(Deb.GetFile(),Member->Size);
+ if (Tar.Go(*this) == false)
+ return false;
+ return true;
+}
+ /*}}}*/
+// ContentsExtract::DoItem - Extract an item /*{{{*/
+// ---------------------------------------------------------------------
+/* This just tacks the name onto the end of our memory buffer */
+bool ContentsExtract::DoItem(Item &Itm,int &Fd)
+{
+ unsigned long Len = strlen(Itm.Name);
+
+ // Strip leading ./'s
+ if (Itm.Name[0] == '.' && Itm.Name[1] == '/')
+ {
+ // == './'
+ if (Len == 2)
+ return true;
+
+ Len -= 2;
+ Itm.Name += 2;
+ }
+
+ // Allocate more storage for the string list
+ if (CurSize + Len + 2 >= MaxSize || Data == 0)
+ {
+ if (MaxSize == 0)
+ MaxSize = 512*1024/2;
+ char *NewData = (char *)realloc(Data,MaxSize*2);
+ if (NewData == 0)
+ return _error->Error("realloc - Failed to allocate memory");
+ Data = NewData;
+ MaxSize *= 2;
+ }
+
+ strcpy(Data+CurSize,Itm.Name);
+ CurSize += Len + 1;
+ return true;
+}
+ /*}}}*/
+// ContentsExtract::TakeContents - Load the contents data /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool ContentsExtract::TakeContents(const void *NewData,unsigned long Length)
+{
+ if (Length == 0)
+ {
+ CurSize = 0;
+ return true;
+ }
+
+ // Allocate more storage for the string list
+ if (Length + 2 >= MaxSize || Data == 0)
+ {
+ if (MaxSize == 0)
+ MaxSize = 512*1024/2;
+ while (MaxSize*2 <= Length)
+ MaxSize *= 2;
+
+ char *NewData = (char *)realloc(Data,MaxSize*2);
+ if (NewData == 0)
+ return _error->Error("realloc - Failed to allocate memory");
+ Data = NewData;
+ MaxSize *= 2;
+ }
+ memcpy(Data,NewData,Length);
+ CurSize = Length;
+
+ return Data[CurSize-1] == 0;
+}
+ /*}}}*/
+// ContentsExtract::Add - Read the contents data into the sorter /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+void ContentsExtract::Add(GenContents &Contents,string Package)
+{
+ const char *Start = Data;
+ char *Pkg = Contents.Mystrdup(Package.c_str());
+ for (const char *I = Data; I < Data + CurSize; I++)
+ {
+ if (*I == 0)
+ {
+ Contents.Add(Start,Pkg);
+ Start = ++I;
+ }
+ }
+}
+ /*}}}*/
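
For reference, a standalone sketch of the buffer layout ContentsExtract accumulates: file names are appended back to back, each terminated by a NUL, and Add() above scans the block in the same way. The sample names are hypothetical and nothing outside the standard library is used.

// Illustrative only: walk a NUL-separated name list the way
// ContentsExtract::Add() scans its Data buffer.
#include <cstdio>

int main()
{
   // Two entries packed the way DoItem() stores them.
   const char Data[] = "usr/bin/apt-get\0usr/lib/apt/methods/http\0";
   const unsigned long CurSize = sizeof(Data) - 1;   // ignore the literal's own terminator

   const char *Start = Data;
   for (const char *I = Data; I < Data + CurSize; I++)
   {
      if (*I == 0)
      {
         printf("entry: %s\n",Start);
         Start = I + 1;
      }
   }
   return 0;
}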
diff --git a/ftparchive/contents.h b/ftparchive/contents.h
new file mode 100644
index 000000000..d8457cd45
--- /dev/null
+++ b/ftparchive/contents.h
@@ -0,0 +1,89 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: contents.h,v 1.2 2001/02/20 07:03:18 jgg Exp $
+/* ######################################################################
+
+ contents - Contents of archive things.
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef CONTENTS_H
+#define CONTENTS_H
+
+#include <stdlib.h>
+#include <stdio.h>
+#include <apt-pkg/debfile.h>
+#include <apt-pkg/dirstream.h>
+
+class GenContents
+{
+ struct Node
+ {
+ // Binary Tree links
+ Node *BTreeLeft;
+ Node *BTreeRight;
+ Node *DirDown;
+ Node *Dups;
+ const char *Path;
+ const char *Package;
+
+ void *operator new(size_t Amount,GenContents *Owner);
+ void operator delete(void *) {};
+
+ Node() : BTreeLeft(0), BTreeRight(0), DirDown(0), Dups(0),
+ Path(0), Package(0) {};
+ };
+ friend struct Node;
+
+ struct BigBlock
+ {
+ void *Block;
+ BigBlock *Next;
+ };
+
+ Node Root;
+
+ // Big block allocation pools
+ BigBlock *BlockList;
+ char *StrPool;
+ unsigned long StrLeft;
+ Node *NodePool;
+ unsigned long NodeLeft;
+
+ Node *Grab(Node *Top,const char *Name,const char *Package);
+ void WriteSpace(FILE *Out,unsigned int Current,unsigned int Target);
+ void DoPrint(FILE *Out,Node *Top, char *Buf);
+
+ public:
+
+ char *Mystrdup(const char *From);
+ void Add(const char *Dir,const char *Package);
+ void Print(FILE *Out);
+
+ GenContents() : BlockList(0), StrPool(0), StrLeft(0),
+ NodePool(0), NodeLeft(0) {};
+ ~GenContents();
+};
+
+class ContentsExtract : public pkgDirStream
+{
+ public:
+
+ // The Data Block
+ char *Data;
+ unsigned long MaxSize;
+ unsigned long CurSize;
+ void AddData(const char *Text);
+
+ bool Read(debDebFile &Deb);
+
+ virtual bool DoItem(Item &Itm,int &Fd);
+ void Reset() {CurSize = 0;};
+ bool TakeContents(const void *Data,unsigned long Length);
+ void Add(GenContents &Contents,string Package);
+
+ ContentsExtract() : Data(0), MaxSize(0), CurSize(0) {};
+ virtual ~ContentsExtract() {delete [] Data;};
+};
+
+#endif
diff --git a/ftparchive/makefile b/ftparchive/makefile
new file mode 100644
index 000000000..ca6d8f9fe
--- /dev/null
+++ b/ftparchive/makefile
@@ -0,0 +1,20 @@
+# -*- make -*-
+BASE=..
+SUBDIR=ftparchive
+
+# Bring in the default rules
+include ../buildlib/defaults.mak
+
+# The apt-ftparchive program
+ifdef DB2LIB
+PROGRAM=apt-ftparchive
+SLIBS = -lapt-pkg -lapt-inst $(DB2LIB)
+LIB_MAKES = apt-pkg/makefile apt-inst/makefile
+SOURCE = apt-ftparchive.cc cachedb.cc writer.cc contents.cc override.cc \
+ multicompress.cc
+include $(PROGRAM_H)
+else
+PROGRAM=apt-ftparchive
+MESSAGE="Must have db2 to build apt-ftparchive"
+include $(FAIL_H)
+endif # ifdef DB2LIB
diff --git a/ftparchive/multicompress.cc b/ftparchive/multicompress.cc
new file mode 100644
index 000000000..932ad6758
--- /dev/null
+++ b/ftparchive/multicompress.cc
@@ -0,0 +1,494 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: multicompress.cc,v 1.2 2001/02/20 07:03:18 jgg Exp $
+/* ######################################################################
+
+ MultiCompressor
+
+ This class is very complicated in order to optimize for the common
+ case of its use, writing a large set of compressed files that are
+ different from the old set. It spawns off compressors in parallel
+ to maximize compression throughput and has a separate task managing
+ the data going into the compressors.
+
+ ##################################################################### */
+ /*}}}*/
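
For reference, a minimal sketch of how the writers drive MultiCompress (as GenPackages does via Packages.Output = Comp.Input), assuming the program is linked against multicompress.cc and libapt-pkg; the output name and stanza are hypothetical. The ". gzip" compression set produces both an uncompressed file and a .gz variant, and Finalize() reports a non-zero size only when the output actually changed.

// Illustrative only: stream one stanza through MultiCompress.
#include <cstdio>
#include <apt-pkg/error.h>
#include "multicompress.h"

int main()
{
   // Writes ./Packages and ./Packages.gz with mode 0644.
   MultiCompress Comp("Packages",". gzip",0644,true);
   if (_error->PendingError() == true)
   {
      _error->DumpErrors();
      return 1;
   }

   fprintf(Comp.Input,"Package: example\nVersion: 1.0\n\n");

   unsigned long Size = 0;
   if (Comp.Finalize(Size) == false)
      _error->DumpErrors();
   else
      printf("wrote %lu bytes of total output\n",Size);
   return 0;
}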
+// Include Files /*{{{*/
+#ifdef __GNUG__
+#pragma implementation "multicompress.h"
+#endif
+
+#include "multicompress.h"
+
+#include <apt-pkg/strutl.h>
+#include <apt-pkg/error.h>
+#include <apt-pkg/md5.h>
+
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <utime.h>
+#include <unistd.h>
+ /*}}}*/
+
+const MultiCompress::CompType MultiCompress::Compressors[] =
+ {{".","",0,0,0,1},
+ {"gzip",".gz","gzip","-9n","-d",2},
+ {"bzip2",".bz2","bzip2","-9","-d",3},
+ {}};
+
+// MultiCompress::MultiCompress - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* Setup the file outputs, compression modes and fork the writer child */
+MultiCompress::MultiCompress(string Output,string Compress,
+ mode_t Permissions,bool Write)
+{
+ Outputs = 0;
+ Outputter = -1;
+ Input = 0;
+ UpdateMTime = 0;
+ this->Permissions = Permissions;
+
+ /* Parse the compression string, a space-separated list of compression
+ types */
+ string::const_iterator I = Compress.begin();
+ for (; I != Compress.end();)
+ {
+ for (; I != Compress.end() && isspace(*I); I++);
+
+ // Grab a word
+ string::const_iterator Start = I;
+ for (; I != Compress.end() && !isspace(*I); I++);
+
+ // Find the matching compressor
+ const CompType *Comp = Compressors;
+ for (; Comp->Name != 0; Comp++)
+ if (stringcmp(Start,I,Comp->Name) == 0)
+ break;
+
+ // Hmm.. unknown.
+ if (Comp->Name == 0)
+ {
+ _error->Warning("Unknown Compression Algorithm '%s'",string(Start,I).c_str());
+ continue;
+ }
+
+ // Create and link in a new output
+ Files *NewOut = new Files;
+ NewOut->Next = Outputs;
+ Outputs = NewOut;
+ NewOut->CompressProg = Comp;
+ NewOut->Output = Output+Comp->Extension;
+
+ struct stat St;
+ if (stat(NewOut->Output.c_str(),&St) == 0)
+ NewOut->OldMTime = St.st_mtime;
+ else
+ NewOut->OldMTime = 0;
+ }
+
+ if (Write == false)
+ return;
+
+ /* Open all the temp files now so we can report any errors. The files are
+ made unreadable to prevent people from touching them during creation. */
+ for (Files *I = Outputs; I != 0; I = I->Next)
+ I->TmpFile.Open(I->Output + ".new",FileFd::WriteEmpty,0600);
+ if (_error->PendingError() == true)
+ return;
+
+ if (Outputs == 0)
+ {
+ _error->Error("Compressed output %s needs a compression set",Output.c_str());
+ return;
+ }
+
+ Start();
+}
+ /*}}}*/
+// MultiCompress::~MultiCompress - Destructor /*{{{*/
+// ---------------------------------------------------------------------
+/* Just erase the file linked list. */
+MultiCompress::~MultiCompress()
+{
+ Die();
+
+ for (; Outputs != 0;)
+ {
+ Files *Tmp = Outputs->Next;
+ delete Outputs;
+ Outputs = Tmp;
+ }
+}
+ /*}}}*/
+// MultiCompress::GetStat - Get stat information for compressed files /*{{{*/
+// ---------------------------------------------------------------------
+/* This checks each compressed file to make sure it exists and returns
+ stat information for a random file from the collection. False means
+ one or more of the files is missing. */
+bool MultiCompress::GetStat(string Output,string Compress,struct stat &St)
+{
+ /* Parse the compression string, a space-separated list of compression
+ types */
+ string::const_iterator I = Compress.begin();
+ bool DidStat = false;
+ for (; I != Compress.end();)
+ {
+ for (; I != Compress.end() && isspace(*I); I++);
+
+ // Grab a word
+ string::const_iterator Start = I;
+ for (; I != Compress.end() && !isspace(*I); I++);
+
+ // Find the matching compressor
+ const CompType *Comp = Compressors;
+ for (; Comp->Name != 0; Comp++)
+ if (stringcmp(Start,I,Comp->Name) == 0)
+ break;
+
+ // Hmm.. unknown.
+ if (Comp->Name == 0)
+ continue;
+
+ string Name = Output+Comp->Extension;
+ if (stat(Name.c_str(),&St) != 0)
+ return false;
+ DidStat = true;
+ }
+ return DidStat;
+}
+ /*}}}*/
+// MultiCompress::Start - Start up the writer child /*{{{*/
+// ---------------------------------------------------------------------
+/* Fork a child and setup the communication pipe. */
+bool MultiCompress::Start()
+{
+ // Create a data pipe
+ int Pipe[2] = {-1,-1};
+ if (pipe(Pipe) != 0)
+ return _error->Errno("pipe","Failed to create IPC pipe to subprocess");
+ for (int I = 0; I != 2; I++)
+ SetCloseExec(Pipe[I],true);
+
+ // The child..
+ Outputter = fork();
+ if (Outputter == 0)
+ {
+ close(Pipe[1]);
+ Child(Pipe[0]);
+ if (_error->PendingError() == true)
+ {
+ _error->DumpErrors();
+ _exit(100);
+ }
+ _exit(0);
+ };
+
+ /* Tidy up the temp files, we open them in the constructor so as to
+ get proper error reporting. Close them now. */
+ for (Files *I = Outputs; I != 0; I = I->Next)
+ I->TmpFile.Close();
+
+ close(Pipe[0]);
+ Input = fdopen(Pipe[1],"w");
+ if (Input == 0)
+ return _error->Errno("fdopen","Failed to create FILE*");
+
+ if (Outputter == -1)
+ return _error->Errno("fork","Failed to fork");
+ return true;
+}
+ /*}}}*/
+// MultiCompress::Die - Clean up the writer /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool MultiCompress::Die()
+{
+ if (Input == 0)
+ return true;
+
+ fclose(Input);
+ Input = 0;
+ bool Res = ExecWait(Outputter,"Compress Child",false);
+ Outputter = -1;
+ return Res;
+}
+ /*}}}*/
+// MultiCompress::Finalize - Finish up writing /*{{{*/
+// ---------------------------------------------------------------------
+/* This is only necessary for statistics reporting. */
+bool MultiCompress::Finalize(unsigned long &OutSize)
+{
+ OutSize = 0;
+ if (Input == 0 || Die() == false)
+ return false;
+
+ time_t Now;
+ time(&Now);
+
+ // Check the mtimes to see if the files were replaced.
+ bool Changed = false;
+ for (Files *I = Outputs; I != 0; I = I->Next)
+ {
+ struct stat St;
+ if (stat(I->Output.c_str(),&St) != 0)
+ return _error->Error("Internal Error, Failed to create %s",
+ I->Output.c_str());
+
+ if (I->OldMTime != St.st_mtime)
+ Changed = true;
+ else
+ {
+ // Update the mtime if necessary
+ if (UpdateMTime > 0 &&
+ (Now - St.st_mtime > (signed)UpdateMTime || St.st_mtime > Now))
+ {
+ struct utimbuf Buf;
+ Buf.actime = Buf.modtime = Now;
+ utime(I->Output.c_str(),&Buf);
+ Changed = true;
+ }
+ }
+
+ // Force the file permissions
+ if (St.st_mode != Permissions)
+ chmod(I->Output.c_str(),Permissions);
+
+ OutSize += St.st_size;
+ }
+
+ if (Changed == false)
+ OutSize = 0;
+
+ return true;
+}
+ /*}}}*/
+// MultiCompress::OpenCompress - Open the compressor /*{{{*/
+// ---------------------------------------------------------------------
+/* This opens the compressor, either in compress mode or decompress
+   mode. FileFd is always the file the compressor reads from or writes to,
+   and OutFd is the created pipe end handed back to the caller: the
+   compressor's input when compressing and its output when decompressing. */
+bool MultiCompress::OpenCompress(const CompType *Prog,int &Pid,int FileFd,
+ int &OutFd,bool Comp)
+{
+ Pid = -1;
+
+ // No compression
+ if (Prog->Binary == 0)
+ {
+ OutFd = dup(FileFd);
+ return true;
+ }
+
+ // Create a data pipe
+ int Pipe[2] = {-1,-1};
+ if (pipe(Pipe) != 0)
+ return _error->Errno("pipe","Failed to create subprocess IPC");
+ for (int J = 0; J != 2; J++)
+ SetCloseExec(Pipe[J],true);
+
+ if (Comp == true)
+ OutFd = Pipe[1];
+ else
+ OutFd = Pipe[0];
+
+ // The child..
+ Pid = ExecFork();
+ if (Pid == 0)
+ {
+ if (Comp == true)
+ {
+ dup2(FileFd,STDOUT_FILENO);
+ dup2(Pipe[0],STDIN_FILENO);
+ }
+ else
+ {
+ dup2(FileFd,STDIN_FILENO);
+ dup2(Pipe[1],STDOUT_FILENO);
+ }
+
+ SetCloseExec(STDOUT_FILENO,false);
+ SetCloseExec(STDIN_FILENO,false);
+
+ const char *Args[3];
+ Args[0] = Prog->Binary;
+ if (Comp == true)
+ Args[1] = Prog->CompArgs;
+ else
+ Args[1] = Prog->UnCompArgs;
+ Args[2] = 0;
+ execvp(Args[0],(char **)Args);
+ cerr << "Failed to exec compressor " << Args[0] << endl;
+ _exit(100);
+ };
+ if (Comp == true)
+ close(Pipe[0]);
+ else
+ close(Pipe[1]);
+ return true;
+}
+ /*}}}*/
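OpenCompress above is the usual pipe/fork/dup2/execvp pattern: the parent keeps
one pipe end and the child wires the other end plus the target file onto the
compressor's stdin and stdout. A self-contained sketch of the compression
direction only, assuming a gzip binary on PATH and illustrative file names:

    // Sketch: run "gzip -c" with stdout sent to a file and stdin fed from a
    // pipe held by the parent.
    #include <fcntl.h>
    #include <stdio.h>
    #include <string.h>
    #include <sys/types.h>
    #include <sys/wait.h>
    #include <unistd.h>

    int main()
    {
       int Out = open("data.gz",O_WRONLY|O_CREAT|O_TRUNC,0644);
       int Pipe[2];
       if (Out < 0 || pipe(Pipe) != 0)
          return 1;

       pid_t Pid = fork();
       if (Pid == 0)
       {
          dup2(Out,STDOUT_FILENO);          // compressor writes the .gz file
          dup2(Pipe[0],STDIN_FILENO);       // compressor reads from the pipe
          close(Pipe[1]);
          execlp("gzip","gzip","-c",(char *)0);
          _exit(100);                       // exec failed
       }

       close(Pipe[0]);
       const char *Msg = "hello multicompress\n";
       write(Pipe[1],Msg,strlen(Msg));      // parent feeds the raw data
       close(Pipe[1]);                      // EOF lets gzip finish

       int Status;
       waitpid(Pid,&Status,0);
       close(Out);
       return 0;
    }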
+// MultiCompress::OpenOld - Open an old file /*{{{*/
+// ---------------------------------------------------------------------
+/* This opens one of the original output files, possibly decompressing it. */
+bool MultiCompress::OpenOld(int &Fd,int &Proc)
+{
+ Files *Best = Outputs;
+ for (Files *I = Outputs; I != 0; I = I->Next)
+ if (Best->CompressProg->Cost > I->CompressProg->Cost)
+ Best = I;
+
+ // Open the file
+ FileFd F(Best->Output,FileFd::ReadOnly);
+ if (_error->PendingError() == true)
+ return false;
+
+ // Decompress the file so we can read it
+ if (OpenCompress(Best->CompressProg,Proc,F.Fd(),Fd,false) == false)
+ return false;
+
+ return true;
+}
+ /*}}}*/
+// MultiCompress::CloseOld - Close the old file /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool MultiCompress::CloseOld(int Fd,int Proc)
+{
+ close(Fd);
+ if (Proc != -1)
+ if (ExecWait(Proc,"decompressor",false) == false)
+ return false;
+ return true;
+}
+ /*}}}*/
+// MultiCompress::Child - The writer child /*{{{*/
+// ---------------------------------------------------------------------
+/* The child process forks a bunch of compression children and takes
+   input on FD and passes it to all the compressor children. On the way it
+ computes the MD5 of the raw data. After this the raw data in the
+ original files is compared to see if this data is new. If the data
+ is new then the temp files are renamed, otherwise they are erased. */
+bool MultiCompress::Child(int FD)
+{
+ // Start the compression children.
+ for (Files *I = Outputs; I != 0; I = I->Next)
+ {
+ if (OpenCompress(I->CompressProg,I->CompressProc,I->TmpFile.Fd(),
+ I->Fd,true) == false)
+ return false;
+ }
+
+ /* Okay, now we just feed data from FD to all the other FDs. Also
+ stash a hash of the data to use later. */
+ SetNonBlock(FD,false);
+ unsigned char Buffer[32*1024];
+ unsigned long FileSize = 0;
+ MD5Summation MD5;
+ while (1)
+ {
+ WaitFd(FD,false);
+ int Res = read(FD,Buffer,sizeof(Buffer));
+ if (Res == 0)
+ break;
+ if (Res < 0)
+ continue;
+
+ MD5.Add(Buffer,Res);
+ FileSize += Res;
+ for (Files *I = Outputs; I != 0; I = I->Next)
+ {
+ if (write(I->Fd,Buffer,Res) != Res)
+ {
+ _error->Errno("write","IO to subprocess/file failed");
+ break;
+ }
+ }
+ }
+
+ // Close all the writers
+ for (Files *I = Outputs; I != 0; I = I->Next)
+ close(I->Fd);
+
+ // Wait for the compressors to exit
+ for (Files *I = Outputs; I != 0; I = I->Next)
+ {
+ if (I->CompressProc != -1)
+ ExecWait(I->CompressProc,I->CompressProg->Binary,false);
+ }
+
+ if (_error->PendingError() == true)
+ return false;
+
+ /* Now we have to copy the files over, or erase them if they
+ have not changed. First find the cheapest decompressor */
+ bool Missing = false;
+ for (Files *I = Outputs; I != 0; I = I->Next)
+ {
+ if (I->OldMTime == 0)
+ {
+ Missing = true;
+ break;
+ }
+ }
+
+ // Check the MD5 of the lowest cost entity.
+ while (Missing == false)
+ {
+ int CompFd = -1;
+ int Proc = -1;
+ if (OpenOld(CompFd,Proc) == false)
+ {
+ _error->Discard();
+ break;
+ }
+
+ // Compute the hash
+ MD5Summation OldMD5;
+ unsigned long NewFileSize = 0;
+ while (1)
+ {
+ int Res = read(CompFd,Buffer,sizeof(Buffer));
+ if (Res == 0)
+ break;
+ if (Res < 0)
+ return _error->Errno("read","Failed to read while computing MD5");
+ NewFileSize += Res;
+ OldMD5.Add(Buffer,Res);
+ }
+
+ // Tidy the compressor
+ if (CloseOld(CompFd,Proc) == false)
+ return false;
+
+ // Check the hash
+ if (OldMD5.Result() == MD5.Result() &&
+ FileSize == NewFileSize)
+ {
+ for (Files *I = Outputs; I != 0; I = I->Next)
+ {
+ I->TmpFile.Close();
+ if (unlink(I->TmpFile.Name().c_str()) != 0)
+ _error->Errno("unlink","Problem unlinking %s",
+ I->TmpFile.Name().c_str());
+ }
+ return !_error->PendingError();
+ }
+ break;
+ }
+
+ // Finalize
+ for (Files *I = Outputs; I != 0; I = I->Next)
+ {
+ // Set the correct file modes
+ fchmod(I->TmpFile.Fd(),Permissions);
+
+ if (rename(I->TmpFile.Name().c_str(),I->Output.c_str()) != 0)
+ _error->Errno("rename","Failed to rename %s to %s",
+ I->TmpFile.Name().c_str(),I->Output.c_str());
+ I->TmpFile.Close();
+ }
+
+ return !_error->PendingError();
+}
+ /*}}}*/
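Child above is essentially tee with a checksum: one read loop fans every buffer
out to all the compressor pipes while an MD5 is accumulated, and the digest
later decides whether the temporary files replace the old ones. A reduced
sketch of just the fan-out loop, assuming the descriptors are already open and
leaving the hashing and error reporting out:

    // Sketch: copy everything from InFd to each descriptor in Fds,
    // retrying on short writes.
    #include <unistd.h>

    bool FanOut(int InFd,const int *Fds,unsigned Count)
    {
       unsigned char Buffer[32*1024];
       while (true)
       {
          ssize_t Res = read(InFd,Buffer,sizeof(Buffer));
          if (Res == 0)
             return true;                   // EOF, all data forwarded
          if (Res < 0)
             return false;
          for (unsigned I = 0; I != Count; I++)
          {
             ssize_t Done = 0;
             while (Done < Res)
             {
                ssize_t W = write(Fds[I],Buffer + Done,Res - Done);
                if (W <= 0)
                   return false;
                Done += W;
             }
          }
       }
    }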
+
diff --git a/ftparchive/multicompress.h b/ftparchive/multicompress.h
new file mode 100644
index 000000000..212dec63d
--- /dev/null
+++ b/ftparchive/multicompress.h
@@ -0,0 +1,80 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: multicompress.h,v 1.2 2001/02/20 07:03:18 jgg Exp $
+/* ######################################################################
+
+ MultiCompressor
+
+ Multiple output class. Takes a single FILE* and writes it simultaneously
+ to many compressed files. Then checks if the resulting output is
+ different from any previous output and overwrites the old files. Care is
+ taken to ensure that the new files are not generally readable while they
+ are being written.
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef MULTICOMPRESS_H
+#define MULTICOMPRESS_H
+
+#ifdef __GNUG__
+#pragma interface "multicompress.h"
+#endif
+
+#include <string>
+#include <apt-pkg/fileutl.h>
+#include <stdio.h>
+#include <sys/types.h>
+
+class MultiCompress
+{
+ // Enumeration of all supported compressors
+ struct CompType
+ {
+ const char *Name;
+ const char *Extension;
+ const char *Binary;
+ const char *CompArgs;
+ const char *UnCompArgs;
+ unsigned char Cost;
+ };
+
+ // An output file
+ struct Files
+ {
+ string Output;
+ const CompType *CompressProg;
+ Files *Next;
+ FileFd TmpFile;
+ pid_t CompressProc;
+ time_t OldMTime;
+ int Fd;
+ };
+
+ Files *Outputs;
+ pid_t Outputter;
+ mode_t Permissions;
+ static const CompType Compressors[];
+
+ bool OpenCompress(const CompType *Prog,int &Pid,int FileFd,
+ int &OutFd,bool Comp);
+ bool Child(int Fd);
+ bool Start();
+ bool Die();
+
+ public:
+
+ // The FD to write to for compression.
+ FILE *Input;
+ unsigned long UpdateMTime;
+
+ bool Finalize(unsigned long &OutSize);
+ bool OpenOld(int &Fd,int &Proc);
+ bool CloseOld(int Fd,int Proc);
+ static bool GetStat(string Output,string Compress,struct stat &St);
+
+ MultiCompress(string Output,string Compress,mode_t Permissions,
+ bool Write = true);
+ ~MultiCompress();
+};
+
+#endif
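A hedged usage sketch of the class declared above: write one logical file and
let MultiCompress maintain the plain and gzip variants, replacing them only
when the content changed. The output name and compression set are illustrative
and the snippet assumes it is built inside the apt-ftparchive tree:

    // Sketch: "Packages" plus ". gzip" yields ./Packages and ./Packages.gz
    #include "multicompress.h"
    #include <stdio.h>

    int main()
    {
       MultiCompress Comp("Packages",". gzip",0644);
       if (Comp.Input == 0)
          return 1;

       fprintf(Comp.Input,"Package: example\nVersion: 1.0\n\n");

       unsigned long Size = 0;
       return Comp.Finalize(Size) == true ? 0 : 1;
    }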
diff --git a/ftparchive/override.cc b/ftparchive/override.cc
new file mode 100644
index 000000000..93cc34e85
--- /dev/null
+++ b/ftparchive/override.cc
@@ -0,0 +1,180 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: override.cc,v 1.2 2001/02/20 07:03:18 jgg Exp $
+/* ######################################################################
+
+ Override
+
+ Store the override file.
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#ifdef __GNUG__
+#pragma implementation "override.h"
+#endif
+
+#include "override.h"
+
+#include <apt-pkg/strutl.h>
+#include <apt-pkg/error.h>
+
+#include <stdio.h>
+
+ /*}}}*/
+
+// Override::ReadOverride - Read the override file /*{{{*/
+// ---------------------------------------------------------------------
+/* This parses the override file and reads it into the map */
+bool Override::ReadOverride(string File,bool Source)
+{
+ if (File.empty() == true)
+ return true;
+
+ FILE *F = fopen(File.c_str(),"r");
+ if (F == 0)
+ return _error->Errno("fopen","Unable to open %s",File.c_str());
+
+ char Line[500];
+ unsigned long Counter = 0;
+ while (fgets(Line,sizeof(Line),F) != 0)
+ {
+ Counter++;
+ Item Itm;
+
+      // Strip out comments
+ for (char *I = Line; *I != 0; I++)
+ if (*I == '#')
+ *I = 0;
+
+ // Strip space leading up to the package name, skip blank lines
+ char *Pkg = Line;
+ for (; isspace(*Pkg) && *Pkg != 0;Pkg++);
+      if (*Pkg == 0)
+ continue;
+
+ // Find the package and zero..
+ char *Start = Pkg;
+ char *End = Pkg;
+ for (; isspace(*End) == 0 && *End != 0; End++);
+ if (*End == 0)
+ {
+ _error->Warning("Malformed override %s line %lu #1",File.c_str(),
+ Counter);
+ continue;
+ }
+ *End = 0;
+
+ // Find the priority
+ if (Source == false)
+ {
+ for (End++; isspace(*End) != 0 && *End != 0; End++);
+ Start = End;
+ for (; isspace(*End) == 0 && *End != 0; End++);
+ if (*End == 0)
+ {
+ _error->Warning("Malformed override %s line %lu #2",File.c_str(),
+ Counter);
+ continue;
+ }
+ *End = 0;
+ Itm.Priority = Start;
+ }
+
+ // Find the Section
+ for (End++; isspace(*End) != 0 && *End != 0; End++);
+ Start = End;
+ for (; isspace(*End) == 0 && *End != 0; End++);
+ if (*End == 0)
+ {
+ _error->Warning("Malformed override %s line %lu #3",File.c_str(),
+ Counter);
+ continue;
+ }
+ *End = 0;
+ Itm.Section = Start;
+
+ // Source override files only have the two columns
+ if (Source == true)
+ {
+ Mapping[Pkg] = Itm;
+ continue;
+ }
+
+ // Find the =>
+ for (End++; isspace(*End) != 0 && *End != 0; End++);
+ if (*End != 0)
+ {
+ Start = End;
+ for (; *End != 0 && (End[0] != '=' || End[1] != '>'); End++);
+ if (*End == 0 || strlen(End) < 4)
+ {
+ Itm.OldMaint = "*";
+ Itm.NewMaint = _strstrip(Start);
+ }
+ else
+ {
+ *End = 0;
+ Itm.OldMaint = _strstrip(Start);
+
+ End += 3;
+ Itm.NewMaint = _strstrip(End);
+ }
+ }
+
+ Mapping[Pkg] = Itm;
+ }
+
+ if (ferror(F))
+ _error->Errno("fgets","Failed to read the override file %s",File.c_str());
+ fclose(F);
+ return true;
+}
+ /*}}}*/
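The binary override lines parsed above have the shape
'package priority section [old maintainer => new maintainer]', with the
maintainer rewrite rule optional. A standalone sketch of splitting one such
line; the sample values are invented and whitespace trimming is omitted:

    // Sketch: split one binary override line into its columns.
    #include <iostream>
    #include <sstream>
    #include <string>

    int main()
    {
       std::string Line = "apt optional admin Old Maint <old@example.org> => New Maint <new@example.org>";

       std::istringstream S(Line);
       std::string Package, Priority, Section, Rest;
       S >> Package >> Priority >> Section;
       std::getline(S,Rest);                          // remainder, may be empty

       std::string OldMaint = "*", NewMaint;
       std::string::size_type Arrow = Rest.find("=>");
       if (Arrow == std::string::npos)
          NewMaint = Rest;                            // bare replacement
       else
       {
          OldMaint = Rest.substr(0,Arrow);
          NewMaint = Rest.substr(Arrow + 2);
       }

       std::cout << Package << " / " << Priority << " / " << Section << '\n'
                 << "old: [" << OldMaint << "] new: [" << NewMaint << "]\n";
       return 0;
    }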
+// Override::Item::SwapMaint - Swap the maintainer field if necessary /*{{{*/
+// ---------------------------------------------------------------------
+/* Returns the new maintainer string after evaluating the rewriting rule. If
+ there is a rule but it does not match then the empty string is returned,
+ also if there was no rewrite rule the empty string is returned. Failed
+ indicates if there was some kind of problem while rewriting. */
+string Override::Item::SwapMaint(string Orig,bool &Failed)
+{
+ Failed = false;
+
+ // Degenerate case..
+ if (NewMaint.empty() == true)
+ return OldMaint;
+
+ if (OldMaint == "*")
+ return NewMaint;
+
+   /* James: this is ancient and should be eliminated, but it is still being
+      used in the main override file, so it persists. */
+#if 1
+ // Break OldMaint up into little bits on double slash boundaries.
+ string::iterator End = OldMaint.begin();
+ while (1)
+ {
+ string::iterator Start = End;
+ for (; End < OldMaint.end() &&
+ (End + 3 >= OldMaint.end() || End[0] != ' ' ||
+ End[1] != '/' || End[2] != '/'); End++);
+ if (stringcasecmp(Start,End,Orig.begin(),Orig.end()) == 0)
+ return NewMaint;
+
+ if (End >= OldMaint.end())
+ break;
+
+ // Skip the divider and white space
+ for (; End < OldMaint.end() && (*End == '/' || *End == ' '); End++);
+ }
+#else
+ if (stringcasecmp(OldMaint.begin(),OldMaint.end(),Orig.begin(),Orig.end()) == 0)
+ return NewMaint;
+#endif
+
+ Failed = true;
+ return string();
+}
+ /*}}}*/
diff --git a/ftparchive/override.h b/ftparchive/override.h
new file mode 100644
index 000000000..63f123c42
--- /dev/null
+++ b/ftparchive/override.h
@@ -0,0 +1,50 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: override.h,v 1.2 2001/02/20 07:03:18 jgg Exp $
+/* ######################################################################
+
+ Override
+
+ Store the override file.
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef OVERRIDE_H
+#define OVERRIDE_H
+
+#ifdef __GNUG__
+#pragma interface "override.h"
+#endif
+
+#include <map>
+#include <string>
+
+class Override
+{
+ public:
+
+ struct Item
+ {
+ string Priority;
+ string Section;
+ string OldMaint;
+ string NewMaint;
+
+ string SwapMaint(string Orig,bool &Failed);
+ };
+
+ map<string,Item> Mapping;
+
+ inline Item *GetItem(string Package)
+ {
+ map<string,Item>::iterator I = Mapping.find(Package);
+ if (I == Mapping.end())
+ return 0;
+ return &I->second;
+ };
+
+ bool ReadOverride(string File,bool Source = false);
+};
+
+#endif
+
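A short usage sketch of the map declared above, assuming the apt-ftparchive
tree and an illustrative override file path:

    #include "override.h"
    #include <iostream>

    int main()
    {
       Override Over;
       if (Over.ReadOverride("indices/override.main") == false)   // path is illustrative
          return 1;

       Override::Item *Itm = Over.GetItem("apt");
       if (Itm != 0)
          std::cout << "apt -> " << Itm->Priority << " / " << Itm->Section << std::endl;
       return 0;
    }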
diff --git a/ftparchive/writer.cc b/ftparchive/writer.cc
new file mode 100644
index 000000000..7aea89317
--- /dev/null
+++ b/ftparchive/writer.cc
@@ -0,0 +1,756 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: writer.cc,v 1.2 2001/02/20 07:03:18 jgg Exp $
+/* ######################################################################
+
+ Writer
+
+ The file writer classes. These write various types of output, sources,
+ packages and contents.
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#ifdef __GNUG__
+#pragma implementation "writer.h"
+#endif
+
+#include "writer.h"
+
+#include <apt-pkg/strutl.h>
+#include <apt-pkg/error.h>
+#include <apt-pkg/configuration.h>
+#include <apt-pkg/md5.h>
+#include <apt-pkg/deblistparser.h>
+
+#include <sys/types.h>
+#include <unistd.h>
+#include <ftw.h>
+
+#include "cachedb.h"
+#include "apt-ftparchive.h"
+#include "multicompress.h"
+ /*}}}*/
+
+FTWScanner *FTWScanner::Owner;
+
+// FTWScanner::FTWScanner - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+FTWScanner::FTWScanner()
+{
+ ErrorPrinted = false;
+ NoLinkAct = !_config->FindB("APT::FTPArchive::DeLinkAct",true);
+ TmpExt = 0;
+ Ext[0] = 0;
+ RealPath = 0;
+ long PMax = pathconf(".",_PC_PATH_MAX);
+ if (PMax > 0)
+ RealPath = new char[PMax];
+}
+ /*}}}*/
+// FTWScanner::Scanner - FTW Scanner /*{{{*/
+// ---------------------------------------------------------------------
+/* This is the FTW scanner, it processes each directory element in the
+ directory tree. */
+int FTWScanner::Scanner(const char *File,const struct stat *sb,int Flag)
+{
+ if (Flag == FTW_DNR)
+ {
+ Owner->NewLine(1);
+ c1out << "W: Unable to read directory " << File << endl;
+ }
+ if (Flag == FTW_NS)
+ {
+ Owner->NewLine(1);
+ c1out << "W: Unable to stat " << File << endl;
+ }
+ if (Flag != FTW_F)
+ return 0;
+
+ // See if it is a .deb
+ if (strlen(File) < 4)
+ return 0;
+
+ unsigned CurExt = 0;
+ for (; Owner->Ext[CurExt] != 0; CurExt++)
+ if (strcmp(File+strlen(File)-strlen(Owner->Ext[CurExt]),
+ Owner->Ext[CurExt]) == 0)
+ break;
+ if (Owner->Ext[CurExt] == 0)
+ return 0;
+
+   /* Process it. If the file is a link then resolve it into an absolute
+      name. This works best if the directory components given to the
+      scanner are not themselves links. */
+ char Jnk[2];
+ Owner->OriginalPath = File;
+ if (Owner->RealPath != 0 && readlink(File,Jnk,sizeof(Jnk)) != -1 &&
+ realpath(File,Owner->RealPath) != 0)
+ Owner->DoPackage(Owner->RealPath);
+ else
+ Owner->DoPackage(File);
+
+ if (_error->empty() == false)
+ {
+ // Print any errors or warnings found
+ string Err;
+ bool SeenPath = false;
+ while (_error->empty() == false)
+ {
+ Owner->NewLine(1);
+
+ bool Type = _error->PopMessage(Err);
+ if (Type == true)
+ c1out << "E: " << Err << endl;
+ else
+ c1out << "W: " << Err << endl;
+
+ if (Err.find(File) != string::npos)
+ SeenPath = true;
+ }
+
+ if (SeenPath == false)
+ cerr << "E: Errors apply to file '" << File << "'" << endl;
+ return 0;
+ }
+
+ return 0;
+}
+ /*}}}*/
+// FTWScanner::RecursiveScan - Just scan a directory tree /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool FTWScanner::RecursiveScan(string Dir)
+{
+ /* If noprefix is set then jam the scan root in, so we don't generate
+ link followed paths out of control */
+ if (InternalPrefix.empty() == true)
+ {
+ if (realpath(Dir.c_str(),RealPath) == 0)
+ return _error->Errno("realpath","Failed to resolve %s",Dir.c_str());
+ InternalPrefix = RealPath;
+ }
+
+ // Do recursive directory searching
+ Owner = this;
+ int Res = ftw(Dir.c_str(),Scanner,30);
+
+ // Error treewalking?
+ if (Res != 0)
+ {
+ if (_error->PendingError() == false)
+ _error->Errno("ftw","Tree walking failed");
+ return false;
+ }
+
+ return true;
+}
+ /*}}}*/
+// FTWScanner::LoadFileList - Load the file list from a file /*{{{*/
+// ---------------------------------------------------------------------
+/* This is an alternative to using FTW to locate files; it reads the list
+   of files from another file instead. */
+bool FTWScanner::LoadFileList(string Dir,string File)
+{
+ /* If noprefix is set then jam the scan root in, so we don't generate
+ link followed paths out of control */
+ if (InternalPrefix.empty() == true)
+ {
+ if (realpath(Dir.c_str(),RealPath) == 0)
+ return _error->Errno("realpath","Failed to resolve %s",Dir.c_str());
+ InternalPrefix = RealPath;
+ }
+
+ Owner = this;
+ FILE *List = fopen(File.c_str(),"r");
+ if (List == 0)
+ return _error->Errno("fopen","Failed to open %s",File.c_str());
+
+   /* We are a tad tricky here: we prefix the buffer with the directory
+      name, that way if we need a full path we can just use Line. Sneaky
+      and fully evil. */
+ char Line[1000];
+ char *FileStart;
+ if (Dir.empty() == true || Dir.end()[-1] != '/')
+ FileStart = Line + snprintf(Line,sizeof(Line),"%s/",Dir.c_str());
+ else
+ FileStart = Line + snprintf(Line,sizeof(Line),"%s",Dir.c_str());
+ while (fgets(FileStart,sizeof(Line) - (FileStart - Line),List) != 0)
+ {
+ char *FileName = _strstrip(FileStart);
+ if (FileName[0] == 0)
+ continue;
+
+ if (FileName[0] != '/')
+ {
+ if (FileName != FileStart)
+ memmove(FileStart,FileName,strlen(FileStart));
+ FileName = Line;
+ }
+
+ struct stat St;
+ int Flag = FTW_F;
+ if (stat(FileName,&St) != 0)
+ Flag = FTW_NS;
+
+ if (Scanner(FileName,&St,Flag) != 0)
+ break;
+ }
+
+ fclose(List);
+ return true;
+}
+ /*}}}*/
+// FTWScanner::Delink - Delink symlinks /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool FTWScanner::Delink(string &FileName,const char *OriginalPath,
+ unsigned long &DeLinkBytes,
+ struct stat &St)
+{
+   // See if this is not an internally prefixed file name.
+ if (InternalPrefix.empty() == false &&
+ InternalPrefix.length() < FileName.length() &&
+ stringcmp(FileName.begin(),FileName.begin() + InternalPrefix.length(),
+ InternalPrefix.begin(),InternalPrefix.end()) != 0)
+ {
+ if (DeLinkLimit != 0 && DeLinkBytes/1024 < DeLinkLimit)
+ {
+ // Tidy up the display
+ if (DeLinkBytes == 0)
+ cout << endl;
+
+ NewLine(1);
+ c1out << " DeLink " << (OriginalPath + InternalPrefix.length())
+ << " [" << SizeToStr(St.st_size) << "B]" << endl << flush;
+
+ if (NoLinkAct == false)
+ {
+            char OldLink[400];
+            int LinkLen = readlink(OriginalPath,OldLink,sizeof(OldLink)-1);
+            if (LinkLen == -1)
+               _error->Errno("readlink","Failed to readlink %s",OriginalPath);
+            else
+            {
+               // readlink does not null terminate, do it before reuse
+               OldLink[LinkLen] = 0;
+               if (unlink(OriginalPath) != 0)
+ _error->Errno("unlink","Failed to unlink %s",OriginalPath);
+ else
+ {
+ if (link(FileName.c_str(),OriginalPath) != 0)
+ {
+ // Panic! Restore the symlink
+ symlink(OldLink,OriginalPath);
+ return _error->Errno("link","*** Failed to link %s to %s",
+ FileName.c_str(),
+ OriginalPath);
+ }
+ }
+ }
+ }
+
+ DeLinkBytes += St.st_size;
+ if (DeLinkBytes/1024 >= DeLinkLimit)
+ c1out << " DeLink limit of " << SizeToStr(DeLinkBytes) << "B hit." << endl;
+ }
+
+ FileName = OriginalPath;
+ }
+
+ return true;
+}
+ /*}}}*/
+// FTWScanner::SetExts - Set extensions to support /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool FTWScanner::SetExts(string Vals)
+{
+ delete [] TmpExt;
+ TmpExt = new char[Vals.length()+1];
+ strcpy(TmpExt,Vals.c_str());
+ return TokSplitString(' ',TmpExt,(char **)Ext,sizeof(Ext)/sizeof(Ext[0]));
+}
+ /*}}}*/
+
+// PackagesWriter::PackagesWriter - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+PackagesWriter::PackagesWriter(string DB,string Overrides) :
+ Db(DB),Stats(Db.Stats)
+{
+ Output = stdout;
+ Ext[0] = ".deb";
+ Ext[1] = 0;
+ DeLinkLimit = 0;
+
+ // Process the command line options
+ DoMD5 = _config->FindB("APT::FTPArchive::MD5",true);
+ DoContents = _config->FindB("APT::FTPArchive::Contents",true);
+ NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false);
+
+ if (Db.Loaded() == false)
+ DoContents = false;
+
+ // Read the override file
+   if (Overrides.empty() == false)
+   {
+      if (Over.ReadOverride(Overrides) == false)
+         return;
+   }
+   else
+      NoOverride = true;
+ _error->DumpErrors();
+}
+ /*}}}*/
+// PackagesWriter::DoPackage - Process a single package /*{{{*/
+// ---------------------------------------------------------------------
+/* This method takes a package and gets its control information and
+ MD5 then writes out a control record with the proper fields rewritten
+ and the path/size/hash appended. */
+bool PackagesWriter::DoPackage(string FileName)
+{
+ // Open the archive
+ FileFd F(FileName,FileFd::ReadOnly);
+ if (_error->PendingError() == true)
+ return false;
+
+ // Stat the file for later
+ struct stat St;
+ if (fstat(F.Fd(),&St) != 0)
+ return _error->Errno("fstat","Failed to stat %s",FileName.c_str());
+
+   // Pull all the data we need from the DB
+ string MD5Res;
+ if (Db.SetFile(FileName,St,&F) == false ||
+ Db.LoadControl() == false ||
+ (DoContents == true && Db.LoadContents(true) == false) ||
+ (DoMD5 == true && Db.GetMD5(MD5Res,false) == false))
+ return false;
+
+ if (Delink(FileName,OriginalPath,Stats.DeLinkBytes,St) == false)
+ return false;
+
+   // Look up the override information
+ pkgTagSection &Tags = Db.Control.Section;
+ string Package = Tags.FindS("Package");
+ Override::Item Tmp;
+ Override::Item *OverItem = Over.GetItem(Package);
+
+ if (Package.empty() == true)
+ return _error->Error("Archive had no package field");
+
+ // If we need to do any rewriting of the header do it now..
+ if (OverItem == 0)
+ {
+ if (NoOverride == false)
+ {
+ NewLine(1);
+ c1out << " " << Package << " has no override entry" << endl;
+ }
+
+ OverItem = &Tmp;
+ Tmp.Section = Tags.FindS("Section");
+ Tmp.Priority = Tags.FindS("Priority");
+ }
+
+ char Size[40];
+ sprintf(Size,"%lu",St.st_size);
+
+ // Strip the DirStrip prefix from the FileName and add the PathPrefix
+ string NewFileName;
+ if (DirStrip.empty() == false &&
+ FileName.length() > DirStrip.length() &&
+ stringcmp(FileName.begin(),FileName.begin() + DirStrip.length(),
+ DirStrip.begin(),DirStrip.end()) == 0)
+ NewFileName = string(FileName.begin() + DirStrip.length(),FileName.end());
+ else
+ NewFileName = FileName;
+ if (PathPrefix.empty() == false)
+ NewFileName = flCombine(PathPrefix,NewFileName);
+
+ // This lists all the changes to the fields we are going to make.
+ TFRewriteData Changes[] = {{"Size",Size},
+ {"MD5sum",MD5Res.c_str()},
+ {"Filename",NewFileName.c_str()},
+ {"Section",OverItem->Section.c_str()},
+ {"Priority",OverItem->Priority.c_str()},
+ {"Status",0},
+ {"Optional",0},
+ {}, // For maintainer
+ {}, // For Suggests
+ {}};
+ unsigned int End = 0;
+ for (End = 0; Changes[End].Tag != 0; End++);
+
+ // Rewrite the maintainer field if necessary
+ bool MaintFailed;
+ string NewMaint = OverItem->SwapMaint(Tags.FindS("Maintainer"),MaintFailed);
+ if (MaintFailed == true)
+ {
+ if (NoOverride == false)
+ {
+ NewLine(1);
+ c1out << " " << Package << " maintainer is " <<
+ Tags.FindS("Maintainer") << " not " <<
+ OverItem->OldMaint << endl;
+ }
+ }
+
+ if (NewMaint.empty() == false)
+ {
+ Changes[End].Rewrite = NewMaint.c_str();
+ Changes[End++].Tag = "Maintainer";
+ }
+
+   /* Get rid of the Optional tag. This is an ugly hack inherited from
+      dpkg-scanpackages. Well, sort of: dpkg-scanpackages just renames the
+      field, while dpkg appends it to Suggests, so we do the append too so
+      that the status file and the Packages file remain similar. There are
+      other legacy transforms, but Optional is the only one still in use. */
+ string OptionalStr = Tags.FindS("Optional");
+ if (OptionalStr.empty() == false)
+ {
+ if (Tags.FindS("Suggests").empty() == false)
+ OptionalStr = Tags.FindS("Suggests") + ", " + OptionalStr;
+ Changes[End].Rewrite = OptionalStr.c_str();
+ Changes[End++].Tag = "Suggests";
+ }
+
+ // Rewrite and store the fields.
+ if (TFRewrite(Output,Tags,TFRewritePackageOrder,Changes) == false)
+ return false;
+ fprintf(Output,"\n");
+
+ return Db.Finish();
+}
+ /*}}}*/
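The Filename rewriting in DoPackage boils down to stripping DirStrip from the
front of the path and gluing PathPrefix on. A standalone sketch with invented
paths; the real code uses flCombine, which is more careful about slashes:

    #include <iostream>
    #include <string>

    std::string RewritePath(const std::string &FileName,
                            const std::string &DirStrip,
                            const std::string &PathPrefix)
    {
       std::string New = FileName;
       if (DirStrip.empty() == false &&
           FileName.compare(0,DirStrip.length(),DirStrip) == 0)
          New = FileName.substr(DirStrip.length());
       if (PathPrefix.empty() == false)
          New = PathPrefix + "/" + New;
       return New;
    }

    int main()
    {
       std::cout << RewritePath("/srv/ftp/pool/main/a/apt/apt_0.5.0_i386.deb",
                                "/srv/ftp/","dists") << '\n';
       return 0;
    }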
+
+// SourcesWriter::SourcesWriter - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+SourcesWriter::SourcesWriter(string BOverrides,string SOverrides)
+{
+ Output = stdout;
+ Ext[0] = ".dsc";
+ Ext[1] = 0;
+ DeLinkLimit = 0;
+ Buffer = 0;
+ BufSize = 0;
+
+ // Process the command line options
+ NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false);
+
+ // Read the override file
+   if (BOverrides.empty() == false)
+   {
+      if (BOver.ReadOverride(BOverrides) == false)
+         return;
+   }
+   else
+      NoOverride = true;
+
+ if (SOverrides.empty() == false && FileExists(SOverrides) == true &&
+ SOver.ReadOverride(SOverrides,true) == false)
+ return;
+// _error->DumpErrors();
+}
+ /*}}}*/
+// SourcesWriter::DoPackage - Process a single package /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool SourcesWriter::DoPackage(string FileName)
+{
+ // Open the archive
+ FileFd F(FileName,FileFd::ReadOnly);
+ if (_error->PendingError() == true)
+ return false;
+
+ // Stat the file for later
+ struct stat St;
+ if (fstat(F.Fd(),&St) != 0)
+ return _error->Errno("fstat","Failed to stat %s",FileName.c_str());
+
+ if (St.st_size > 128*1024)
+ return _error->Error("DSC file '%s' is too large!",FileName.c_str());
+
+ if (BufSize < (unsigned)St.st_size+1)
+ {
+ BufSize = St.st_size+1;
+ Buffer = (char *)realloc(Buffer,St.st_size+1);
+ }
+
+ if (F.Read(Buffer,St.st_size) == false)
+ return false;
+
+ // Hash the file
+ char *Start = Buffer;
+ char *BlkEnd = Buffer + St.st_size;
+ MD5Summation MD5;
+ MD5.Add((unsigned char *)Start,BlkEnd - Start);
+
+ // Add an extra \n to the end, just in case
+ *BlkEnd++ = '\n';
+
+ /* Remove the PGP trailer. Some .dsc's have this without a blank line
+ before */
+ const char *Key = "-----BEGIN PGP SIGNATURE-----";
+ for (char *MsgEnd = Start; MsgEnd < BlkEnd - strlen(Key) -1; MsgEnd++)
+ {
+ if (*MsgEnd == '\n' && strncmp(MsgEnd+1,Key,strlen(Key)) == 0)
+ {
+ MsgEnd[1] = '\n';
+ break;
+ }
+ }
+
+   /* Read records until we locate the Source record. This neatly skips the
+      GPG header (which is RFC 822 formatted) without any trouble. */
+ pkgTagSection Tags;
+ do
+ {
+ unsigned Pos;
+ if (Tags.Scan(Start,BlkEnd - Start) == false)
+ return _error->Error("Could not find a record in the DSC '%s'",FileName.c_str());
+ if (Tags.Find("Source",Pos) == true)
+ break;
+ Start += Tags.size();
+ }
+ while (1);
+ Tags.Trim();
+
+   // Look up the override information, finding the best priority first.
+ string BestPrio;
+ char Buffer[1000];
+ string Bins = Tags.FindS("Binary");
+ Override::Item *OverItem = 0;
+ if (Bins.empty() == false && Bins.length() < sizeof(Buffer))
+ {
+ strcpy(Buffer,Bins.c_str());
+
+ // Ignore too-long errors.
+ char *BinList[400];
+ TokSplitString(',',Buffer,BinList,sizeof(BinList)/sizeof(BinList[0]));
+
+ // Look at all the binaries
+ unsigned char BestPrioV = pkgCache::State::Extra;
+ for (unsigned I = 0; BinList[I] != 0; I++)
+ {
+ Override::Item *Itm = BOver.GetItem(BinList[I]);
+ if (Itm == 0)
+ continue;
+ if (OverItem == 0)
+ OverItem = Itm;
+
+ unsigned char NewPrioV = debListParser::GetPrio(Itm->Priority);
+ if (NewPrioV < BestPrioV || BestPrio.empty() == true)
+ {
+ BestPrioV = NewPrioV;
+ BestPrio = Itm->Priority;
+ }
+ }
+ }
+
+ // If we need to do any rewriting of the header do it now..
+ Override::Item Tmp;
+ if (OverItem == 0)
+ {
+ if (NoOverride == false)
+ {
+ NewLine(1);
+ c1out << " " << Tags.FindS("Source") << " has no override entry" << endl;
+ }
+
+ OverItem = &Tmp;
+ }
+
+ Override::Item *SOverItem = SOver.GetItem(Tags.FindS("Source"));
+ if (SOverItem == 0)
+ {
+ SOverItem = BOver.GetItem(Tags.FindS("Source"));
+ if (SOverItem == 0)
+ SOverItem = OverItem;
+ }
+
+ // Add the dsc to the files hash list
+ char Files[1000];
+ snprintf(Files,sizeof(Files),"\n %s %lu %s\n %s",
+ string(MD5.Result()).c_str(),St.st_size,
+ flNotDir(FileName).c_str(),
+ Tags.FindS("Files").c_str());
+
+ // Strip the DirStrip prefix from the FileName and add the PathPrefix
+ string NewFileName;
+ if (DirStrip.empty() == false &&
+ FileName.length() > DirStrip.length() &&
+ stringcmp(OriginalPath,OriginalPath + DirStrip.length(),
+ DirStrip.begin(),DirStrip.end()) == 0)
+ NewFileName = string(OriginalPath + DirStrip.length());
+ else
+ NewFileName = OriginalPath;
+ if (PathPrefix.empty() == false)
+ NewFileName = flCombine(PathPrefix,NewFileName);
+
+ string Directory = flNotFile(OriginalPath);
+ string Package = Tags.FindS("Source");
+
+ // Perform the delinking operation over all of the files
+ string ParseJnk;
+ const char *C = Files;
+ for (;isspace(*C); C++);
+ while (*C != 0)
+ {
+ // Parse each of the elements
+ if (ParseQuoteWord(C,ParseJnk) == false ||
+ ParseQuoteWord(C,ParseJnk) == false ||
+ ParseQuoteWord(C,ParseJnk) == false)
+ return _error->Error("Error parsing file record");
+
+ char Jnk[2];
+ string OriginalPath = Directory + ParseJnk;
+ if (RealPath != 0 && readlink(OriginalPath.c_str(),Jnk,sizeof(Jnk)) != -1 &&
+ realpath(OriginalPath.c_str(),RealPath) != 0)
+ {
+ string RP = RealPath;
+ if (Delink(RP,OriginalPath.c_str(),Stats.DeLinkBytes,St) == false)
+ return false;
+ }
+ }
+
+ Directory = flNotFile(NewFileName);
+ if (Directory.length() > 2)
+ Directory.erase(Directory.end()-1);
+
+ // This lists all the changes to the fields we are going to make.
+ TFRewriteData Changes[] = {{"Source",Package.c_str(),"Package"},
+ {"Files",Files},
+ {"Directory",Directory.c_str()},
+ {"Section",SOverItem->Section.c_str()},
+ {"Priority",BestPrio.c_str()},
+ {"Status",0},
+ {}, // For maintainer
+ {}};
+ unsigned int End = 0;
+ for (End = 0; Changes[End].Tag != 0; End++);
+
+ // Rewrite the maintainer field if necessary
+ bool MaintFailed;
+ string NewMaint = OverItem->SwapMaint(Tags.FindS("Maintainer"),MaintFailed);
+ if (MaintFailed == true)
+ {
+ if (NoOverride == false)
+ {
+ NewLine(1);
+ c1out << " " << Package << " maintainer is " <<
+ Tags.FindS("Maintainer") << " not " <<
+ OverItem->OldMaint << endl;
+ }
+ }
+ if (NewMaint.empty() == false)
+ {
+ Changes[End].Rewrite = NewMaint.c_str();
+ Changes[End++].Tag = "Maintainer";
+ }
+
+ // Rewrite and store the fields.
+ if (TFRewrite(Output,Tags,TFRewriteSourceOrder,Changes) == false)
+ return false;
+ fprintf(Output,"\n");
+
+ Stats.Packages++;
+
+ return true;
+}
+ /*}}}*/
+
+// ContentsWriter::ContentsWriter - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+ContentsWriter::ContentsWriter(string DB) :
+ Db(DB), Stats(Db.Stats)
+
+{
+ Ext[0] = ".deb";
+ Ext[1] = 0;
+ Output = stdout;
+}
+ /*}}}*/
+// ContentsWriter::DoPackage - Process a single package /*{{{*/
+// ---------------------------------------------------------------------
+/* If Package is the empty string the control record will be parsed to
+ determine what the package name is. */
+bool ContentsWriter::DoPackage(string FileName,string Package)
+{
+ // Open the archive
+ FileFd F(FileName,FileFd::ReadOnly);
+ if (_error->PendingError() == true)
+ return false;
+
+ // Stat the file for later
+ struct stat St;
+ if (fstat(F.Fd(),&St) != 0)
+      return _error->Errno("fstat","Failed to stat %s",FileName.c_str());
+
+ // Ready the DB
+ if (Db.SetFile(FileName,St,&F) == false ||
+ Db.LoadContents(false) == false)
+ return false;
+
+ // Parse the package name
+ if (Package.empty() == true)
+ {
+ if (Db.LoadControl() == false)
+ return false;
+ Package = Db.Control.Section.FindS("Package");
+ }
+
+ Db.Contents.Add(Gen,Package);
+
+ return Db.Finish();
+}
+ /*}}}*/
+// ContentsWriter::ReadFromPkgs - Read from a packages file /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool ContentsWriter::ReadFromPkgs(string PkgFile,string PkgCompress)
+{
+ MultiCompress Pkgs(PkgFile,PkgCompress,0,false);
+ if (_error->PendingError() == true)
+ return false;
+
+ // Open the package file
+ int CompFd = -1;
+ int Proc = -1;
+ if (Pkgs.OpenOld(CompFd,Proc) == false)
+ return false;
+
+ // No auto-close FD
+ FileFd Fd(CompFd,false);
+ pkgTagFile Tags(&Fd);
+ if (_error->PendingError() == true)
+ {
+ Pkgs.CloseOld(CompFd,Proc);
+ return false;
+ }
+
+ // Parse.
+ pkgTagSection Section;
+ while (Tags.Step(Section) == true)
+ {
+ string File = flCombine(Prefix,Section.FindS("FileName"));
+ string Package = Section.FindS("Section");
+ if (Package.empty() == false && Package.end()[-1] != '/')
+ {
+ Package += '/';
+ Package += Section.FindS("Package");
+ }
+ else
+ Package += Section.FindS("Package");
+
+ DoPackage(File,Package);
+ if (_error->empty() == false)
+ {
+ _error->Error("Errors apply to file '%s'",File.c_str());
+ _error->DumpErrors();
+ }
+ }
+
+ // Tidy the compressor
+ if (Pkgs.CloseOld(CompFd,Proc) == false)
+ return false;
+
+ return true;
+}
+ /*}}}*/
diff --git a/ftparchive/writer.h b/ftparchive/writer.h
new file mode 100644
index 000000000..a5fb6f52b
--- /dev/null
+++ b/ftparchive/writer.h
@@ -0,0 +1,145 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: writer.h,v 1.2 2001/02/20 07:03:18 jgg Exp $
+/* ######################################################################
+
+ Writer
+
+ The file writer classes. These write various types of output, sources,
+ packages and contents.
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef WRITER_H
+#define WRITER_H
+
+#ifdef __GNUG__
+#pragma interface "writer.h"
+#endif
+
+#include <string>
+#include <stdio.h>
+
+#include "cachedb.h"
+#include "override.h"
+#include "apt-ftparchive.h"
+
+class FTWScanner
+{
+ protected:
+
+ char *TmpExt;
+ const char *Ext[10];
+ const char *OriginalPath;
+ char *RealPath;
+ bool ErrorPrinted;
+
+ // Stuff for the delinker
+ bool NoLinkAct;
+
+ static FTWScanner *Owner;
+ static int Scanner(const char *File,const struct stat *sb,int Flag);
+
+ bool Delink(string &FileName,const char *OriginalPath,
+ unsigned long &Bytes,struct stat &St);
+
+ inline void NewLine(unsigned Priority)
+ {
+ if (ErrorPrinted == false && Quiet <= Priority)
+ {
+ cout << endl;
+ ErrorPrinted = true;
+ }
+ }
+
+ public:
+
+ unsigned long DeLinkLimit;
+ string InternalPrefix;
+
+ virtual bool DoPackage(string FileName) = 0;
+ bool RecursiveScan(string Dir);
+ bool LoadFileList(string BaseDir,string File);
+ bool SetExts(string Vals);
+
+ FTWScanner();
+ virtual ~FTWScanner() {delete [] RealPath; delete [] TmpExt;};
+};
+
+class PackagesWriter : public FTWScanner
+{
+ Override Over;
+ CacheDB Db;
+
+ public:
+
+ // Some flags
+ bool DoMD5;
+ bool NoOverride;
+ bool DoContents;
+
+ // General options
+ string PathPrefix;
+ string DirStrip;
+ FILE *Output;
+ struct CacheDB::Stats &Stats;
+
+ inline bool ReadOverride(string File) {return Over.ReadOverride(File);};
+ virtual bool DoPackage(string FileName);
+
+ PackagesWriter(string DB,string Overrides);
+ virtual ~PackagesWriter() {};
+};
+
+class ContentsWriter : public FTWScanner
+{
+ CacheDB Db;
+
+ GenContents Gen;
+
+ public:
+
+ // General options
+ FILE *Output;
+ struct CacheDB::Stats &Stats;
+ string Prefix;
+
+ bool DoPackage(string FileName,string Package);
+ virtual bool DoPackage(string FileName)
+ {return DoPackage(FileName,string());};
+ bool ReadFromPkgs(string PkgFile,string PkgCompress);
+
+ void Finish() {Gen.Print(Output);};
+ inline bool ReadyDB(string DB) {return Db.ReadyDB(DB);};
+
+ ContentsWriter(string DB);
+ virtual ~ContentsWriter() {};
+};
+
+class SourcesWriter : public FTWScanner
+{
+ Override BOver;
+ Override SOver;
+ char *Buffer;
+ unsigned long BufSize;
+
+ public:
+
+ bool NoOverride;
+
+ // General options
+ string PathPrefix;
+ string DirStrip;
+ FILE *Output;
+ struct CacheDB::Stats Stats;
+
+/* inline bool ReadBinOverride(string File) {return BOver.ReadOverride(File);};
+ bool ReadSrcOverride(string File); // {return BOver.ReadOverride(File);};*/
+ virtual bool DoPackage(string FileName);
+
+ SourcesWriter(string BOverrides,string SOverrides);
+ virtual ~SourcesWriter() {free(Buffer);};
+};
+
+
+#endif
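A hedged usage sketch of the writers declared above: scan a tree of .deb files
and emit a Packages stream on stdout. The cache DB, override and pool paths
are all illustrative, and the snippet only links inside the apt-ftparchive
build:

    #include "writer.h"

    int main()
    {
       PackagesWriter Packages("packages.db","override.main");   // paths illustrative
       Packages.PathPrefix = "";
       Packages.DirStrip = "";

       if (Packages.RecursiveScan("pool/") == false)             // directory illustrative
          return 1;
       return 0;
    }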
diff --git a/methods/connect.cc b/methods/connect.cc
index d0dcde698..f3470af46 100644
--- a/methods/connect.cc
+++ b/methods/connect.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: connect.cc,v 1.6 2000/05/28 04:34:44 jgg Exp $
+// $Id: connect.cc,v 1.7 2001/02/20 07:03:18 jgg Exp $
/* ######################################################################
Connect - Replacement connect call
@@ -30,6 +30,18 @@ static int LastPort = 0;
static struct addrinfo *LastHostAddr = 0;
static struct addrinfo *LastUsed = 0;
+// RotateDNS - Select a new server from a DNS rotation /*{{{*/
+// ---------------------------------------------------------------------
+/* This is called during certain errors in order to recover by selecting a
+ new server */
+void RotateDNS()
+{
+ if (LastUsed != 0 && LastUsed->ai_next != 0)
+ LastUsed = LastUsed->ai_next;
+ else
+ LastUsed = LastHostAddr;
+}
+ /*}}}*/
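RotateDNS simply advances a cursor through the addrinfo list returned by
getaddrinfo, wrapping back to the head when it runs off the end. A
self-contained sketch of the same rotation over a resolved host; the host name
is only an example:

    #include <netdb.h>
    #include <stdio.h>
    #include <string.h>
    #include <sys/socket.h>
    #include <sys/types.h>

    int main()
    {
       struct addrinfo Hints, *List = 0;
       memset(&Hints,0,sizeof(Hints));
       Hints.ai_socktype = SOCK_STREAM;
       if (getaddrinfo("ftp.debian.org","ftp",&Hints,&List) != 0)
          return 1;

       struct addrinfo *Used = List;
       for (int I = 0; I != 5; I++)          // five "connection attempts"
       {
          char Name[NI_MAXHOST];
          if (getnameinfo(Used->ai_addr,Used->ai_addrlen,Name,sizeof(Name),
                          0,0,NI_NUMERICHOST) == 0)
             printf("attempt %d uses %s\n",I,Name);

          // The same rotation RotateDNS performs
          Used = (Used->ai_next != 0) ? Used->ai_next : List;
       }

       freeaddrinfo(List);
       return 0;
    }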
// DoConnect - Attempt a connect operation /*{{{*/
// ---------------------------------------------------------------------
/* This helper function attempts a connection to a single address. */
@@ -39,17 +51,30 @@ static bool DoConnect(struct addrinfo *Addr,string Host,
// Show a status indicator
char Name[NI_MAXHOST];
char Service[NI_MAXSERV];
- Name[0] = 0;
+
+ Name[0] = 0;
Service[0] = 0;
getnameinfo(Addr->ai_addr,Addr->ai_addrlen,
Name,sizeof(Name),Service,sizeof(Service),
NI_NUMERICHOST|NI_NUMERICSERV);
Owner->Status("Connecting to %s (%s)",Host.c_str(),Name);
-
+
+ /* If this is an IP rotation store the IP we are using.. If something goes
+ wrong this will get tacked onto the end of the error message */
+ if (LastHostAddr->ai_next != 0)
+ {
+ char Name2[NI_MAXHOST + NI_MAXSERV + 10];
+ snprintf(Name2,sizeof(Name2),"[IP: %s %s]",Name,Service);
+ Owner->SetFailExtraMsg(string(Name2));
+ }
+ else
+ Owner->SetFailExtraMsg("");
+
// Get a socket
if ((Fd = socket(Addr->ai_family,Addr->ai_socktype,
Addr->ai_protocol)) < 0)
- return _error->Errno("socket","Could not create a socket");
+ return _error->Errno("socket","Could not create a socket for %s (f=%u t=%u p=%u)",
+ Name,Addr->ai_family,Addr->ai_socktype,Addr->ai_protocol);
SetNonBlock(Fd,true);
if (connect(Fd,Addr->ai_addr,Addr->ai_addrlen) < 0 &&
@@ -62,7 +87,7 @@ static bool DoConnect(struct addrinfo *Addr,string Host,
if (WaitFd(Fd,true,TimeOut) == false)
return _error->Error("Could not connect to %s:%s (%s), "
"connection timed out",Host.c_str(),Service,Name);
-
+
// Check the socket for an error condition
unsigned int Err;
unsigned int Len = sizeof(Err);
@@ -134,8 +159,8 @@ bool Connect(string Host,int Port,const char *Service,int DefPort,int &Fd,
return _error->Error("Could not resolve '%s'",Host.c_str());
}
- return _error->Error("Something wicked happend resolving '%s:%s'",
- Host.c_str(),ServStr);
+ return _error->Error("Something wicked happened resolving '%s:%s' (%i)",
+ Host.c_str(),ServStr,Res);
}
break;
}
@@ -165,14 +190,22 @@ bool Connect(string Host,int Port,const char *Service,int DefPort,int &Fd,
CurHost = CurHost->ai_next;
}
while (CurHost != 0 && CurHost->ai_family == AF_UNIX);
-
- LastUsed = 0;
+
+ /* If we reached the end of the search list then wrap around to the
+ start */
+ if (CurHost == 0 && LastUsed != 0)
+ CurHost = LastHostAddr;
+
+ // Reached the end of the search cycle
+ if (CurHost == LastUsed)
+ break;
+
if (CurHost != 0)
_error->Discard();
- }
+ }
if (_error->PendingError() == true)
- return false;
- return _error->Error("Unable to connect to %s:",Host.c_str(),ServStr);
+ return false;
+ return _error->Error("Unable to connect to %s %s:",Host.c_str(),ServStr);
}
/*}}}*/
diff --git a/methods/connect.h b/methods/connect.h
index 1786a2480..6f208e31d 100644
--- a/methods/connect.h
+++ b/methods/connect.h
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: connect.h,v 1.2 1999/07/18 23:06:56 jgg Exp $
+// $Id: connect.h,v 1.3 2001/02/20 07:03:18 jgg Exp $
/* ######################################################################
Connect - Replacement connect call
@@ -15,5 +15,6 @@
bool Connect(string To,int Port,const char *Service,int DefPort,
int &Fd,unsigned long TimeOut,pkgAcqMethod *Owner);
+void RotateDNS();
#endif
diff --git a/methods/ftp.cc b/methods/ftp.cc
index 57095c4bf..0d617dd8f 100644
--- a/methods/ftp.cc
+++ b/methods/ftp.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: ftp.cc,v 1.20 2000/06/18 04:19:39 jgg Exp $
+// $Id: ftp.cc,v 1.21 2001/02/20 07:03:18 jgg Exp $
/* ######################################################################
HTTP Aquire Method - This is the FTP aquire method for APT.
@@ -40,6 +40,20 @@
#include "ftp.h"
/*}}}*/
+/* This table is for the EPRT and EPSV commands; it maps the OS address
+   family to the IETF address families. */
+struct AFMap
+{
+ unsigned long Family;
+ unsigned long IETFFamily;
+};
+
+#ifndef AF_INET6
+struct AFMap AFMap[] = {{AF_INET,1},{}};
+#else
+struct AFMap AFMap[] = {{AF_INET,1},{AF_INET6,2},{}};
+#endif
+
unsigned long TimeOut = 120;
URI Proxy;
string FtpMethod::FailFile;
@@ -53,7 +67,7 @@ FTPConn::FTPConn(URI Srv) : Len(0), ServerFd(-1), DataFd(-1),
DataListenFd(-1), ServerName(Srv)
{
Debug = _config->FindB("Debug::Acquire::Ftp",false);
- memset(&PasvAddr,0,sizeof(PasvAddr));
+ PasvAddr = 0;
}
/*}}}*/
// FTPConn::~FTPConn - Destructor /*{{{*/
@@ -75,7 +89,10 @@ void FTPConn::Close()
DataFd = -1;
close(DataListenFd);
DataListenFd = -1;
- memset(&PasvAddr,0,sizeof(PasvAddr));
+
+ if (PasvAddr != 0)
+ freeaddrinfo(PasvAddr);
+ PasvAddr = 0;
}
/*}}}*/
// FTPConn::Open - Open a new connection /*{{{*/
@@ -89,7 +106,7 @@ bool FTPConn::Open(pkgAcqMethod *Owner)
return true;
Close();
-
+
// Determine the proxy setting
if (getenv("ftp_proxy") == 0)
{
@@ -124,13 +141,22 @@ bool FTPConn::Open(pkgAcqMethod *Owner)
Host = Proxy.Host;
}
- // Connect to the remote server
+ /* Connect to the remote server. Since FTP is connection oriented we
+ want to make sure we get a new server every time we reconnect */
+ RotateDNS();
if (Connect(Host,Port,"ftp",21,ServerFd,TimeOut,Owner) == false)
return false;
- socklen_t Len = sizeof(Peer);
- if (getpeername(ServerFd,(sockaddr *)&Peer,&Len) != 0)
+
+ // Get the remote server's address
+ PeerAddrLen = sizeof(PeerAddr);
+ if (getpeername(ServerFd,(sockaddr *)&PeerAddr,&PeerAddrLen) != 0)
return _error->Errno("getpeername","Unable to determine the peer name");
+ // Get the local machine's address
+ ServerAddrLen = sizeof(ServerAddr);
+ if (getsockname(ServerFd,(sockaddr *)&ServerAddr,&ServerAddrLen) != 0)
+ return _error->Errno("getsockname","Unable to determine the local name");
+
Owner->Status("Logging in");
return Login();
}
@@ -179,7 +205,7 @@ bool FTPConn::Login()
if (_config->Exists("Acquire::FTP::Passive::" + ServerName.Host) == true)
TryPassive = _config->FindB("Acquire::FTP::Passive::" + ServerName.Host,true);
else
- TryPassive = _config->FindB("Acquire::FTP::Passive",true);
+ TryPassive = _config->FindB("Acquire::FTP::Passive",true);
}
else
{
@@ -236,6 +262,12 @@ bool FTPConn::Login()
}
}
+ // Force the use of extended commands
+ if (_config->Exists("Acquire::FTP::ForceExtended::" + ServerName.Host) == true)
+ ForceExtended = _config->FindB("Acquire::FTP::ForceExtended::" + ServerName.Host,true);
+ else
+ ForceExtended = _config->FindB("Acquire::FTP::ForceExtended",false);
+
// Binary mode
if (WriteMsg(Tag,Msg,"TYPE I") == false)
return false;
@@ -283,6 +315,8 @@ bool FTPConn::ReadLine(string &Text)
// Suck it back
int Res = read(ServerFd,Buffer + Len,sizeof(Buffer) - Len);
+ if (Res == 0)
+ _error->Error("Server closed the connection");
if (Res <= 0)
{
_error->Errno("read","Read error");
@@ -409,10 +443,19 @@ bool FTPConn::WriteMsg(unsigned int &Ret,string &Text,const char *Fmt,...)
// ---------------------------------------------------------------------
/* Try to enter passive mode, the return code does not indicate if passive
mode could or could not be established, only if there was a fatal error.
- Borrowed mostly from lftp. We have to enter passive mode every time
- we make a data connection :| */
+ We have to enter passive mode every time we make a data connection :| */
bool FTPConn::GoPasv()
{
+   /* The PASV command only works on IPv4 sockets, even though it could
+      in theory support IPv6 via an all zeros reply */
+ if (((struct sockaddr *)&PeerAddr)->sa_family != AF_INET ||
+ ForceExtended == true)
+ return ExtGoPasv();
+
+ if (PasvAddr != 0)
+ freeaddrinfo(PasvAddr);
+ PasvAddr = 0;
+
// Try to enable pasv mode
unsigned int Tag;
string Msg;
@@ -422,41 +465,139 @@ bool FTPConn::GoPasv()
// Unsupported function
string::size_type Pos = Msg.find('(');
if (Tag >= 400 || Pos == string::npos)
- {
- memset(&PasvAddr,0,sizeof(PasvAddr));
return true;
- }
// Scan it
unsigned a0,a1,a2,a3,p0,p1;
if (sscanf(Msg.c_str() + Pos,"(%u,%u,%u,%u,%u,%u)",&a0,&a1,&a2,&a3,&p0,&p1) != 6)
+ return true;
+
+ /* Some evil servers return 0 to mean their addr. We can actually speak
+ to these servers natively using IPv6 */
+ if (a0 == 0 && a1 == 0 && a2 == 0 && a3 == 0)
{
- memset(&PasvAddr,0,sizeof(PasvAddr));
+ // Get the IP in text form
+ char Name[NI_MAXHOST];
+ char Service[NI_MAXSERV];
+ getnameinfo((struct sockaddr *)&PeerAddr,PeerAddrLen,
+ Name,sizeof(Name),Service,sizeof(Service),
+ NI_NUMERICHOST|NI_NUMERICSERV);
+
+ struct addrinfo Hints;
+ memset(&Hints,0,sizeof(Hints));
+ Hints.ai_socktype = SOCK_STREAM;
+ Hints.ai_family = ((struct sockaddr *)&PeerAddr)->sa_family;
+ Hints.ai_flags |= AI_NUMERICHOST;
+
+ // Get a new passive address.
+ char Port[100];
+ snprintf(Port,sizeof(Port),"%u",(p0 << 8) + p1);
+ if (getaddrinfo(Name,Port,&Hints,&PasvAddr) != 0)
+ return true;
return true;
}
- // lftp used this horrid byte order manipulation.. Ik.
- PasvAddr.sin_family = AF_INET;
- unsigned char *a;
- unsigned char *p;
- a = (unsigned char *)&PasvAddr.sin_addr;
- p = (unsigned char *)&PasvAddr.sin_port;
+ struct addrinfo Hints;
+ memset(&Hints,0,sizeof(Hints));
+ Hints.ai_socktype = SOCK_STREAM;
+ Hints.ai_family = AF_INET;
+ Hints.ai_flags |= AI_NUMERICHOST;
- // Some evil servers return 0 to mean their addr
- if (a0 == 0 && a1 == 0 && a2 == 0 && a3 == 0)
+ // Get a new passive address.
+ char Port[100];
+ snprintf(Port,sizeof(Port),"%u",(p0 << 8) + p1);
+ char Name[100];
+ snprintf(Name,sizeof(Name),"%u.%u.%u.%u",a0,a1,a2,a3);
+ if (getaddrinfo(Name,Port,&Hints,&PasvAddr) != 0)
+ return true;
+ return true;
+}
+ /*}}}*/
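The PASV reply scanned above has the form
'227 Entering Passive Mode (h1,h2,h3,h4,p1,p2)': the data address is
h1.h2.h3.h4 and the port is p1*256 + p2. A standalone sketch of that parse
over a sample reply string:

    #include <stdio.h>
    #include <string.h>

    int main()
    {
       const char *Msg = "227 Entering Passive Mode (192,168,1,10,78,52)";
       const char *Pos = strchr(Msg,'(');
       if (Pos == 0)
          return 1;

       unsigned a0,a1,a2,a3,p0,p1;
       if (sscanf(Pos,"(%u,%u,%u,%u,%u,%u)",&a0,&a1,&a2,&a3,&p0,&p1) != 6)
          return 1;

       char Host[64], Port[16];
       snprintf(Host,sizeof(Host),"%u.%u.%u.%u",a0,a1,a2,a3);
       snprintf(Port,sizeof(Port),"%u",(p0 << 8) + p1);
       printf("data connection to %s port %s\n",Host,Port);
       return 0;
    }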
+// FTPConn::ExtGoPasv - Enter Extended Passive mode /*{{{*/
+// ---------------------------------------------------------------------
+/* Try to enter extended passive mode. See GoPasv above and RFC 2428 */
+bool FTPConn::ExtGoPasv()
+{
+ if (PasvAddr != 0)
+ freeaddrinfo(PasvAddr);
+ PasvAddr = 0;
+
+ // Try to enable pasv mode
+ unsigned int Tag;
+ string Msg;
+ if (WriteMsg(Tag,Msg,"EPSV") == false)
+ return false;
+
+ // Unsupported function
+ string::size_type Pos = Msg.find('(');
+ if (Tag >= 400 || Pos == string::npos)
+ return true;
+
+ // Scan it
+ string::const_iterator List[4];
+ unsigned Count = 0;
+ Pos++;
+ for (string::const_iterator I = Msg.begin() + Pos; I < Msg.end(); I++)
+ {
+ if (*I != Msg[Pos])
+ continue;
+ if (Count >= 4)
+ return true;
+ List[Count++] = I;
+ }
+ if (Count != 4)
+ return true;
+
+ // Break it up ..
+ unsigned long Proto = 0;
+ unsigned long Port = 0;
+ string IP;
+ IP = string(List[1]+1,List[2]);
+ Port = atoi(string(List[2]+1,List[3]).c_str());
+ if (IP.empty() == false)
+ Proto = atoi(string(List[0]+1,List[1]).c_str());
+
+ if (Port == 0)
+ return false;
+
+ // String version of the port
+ char PStr[100];
+ snprintf(PStr,sizeof(PStr),"%lu",Port);
+
+ // Get the IP in text form
+ struct addrinfo Hints;
+ memset(&Hints,0,sizeof(Hints));
+ Hints.ai_socktype = SOCK_STREAM;
+ Hints.ai_flags |= AI_NUMERICHOST;
+
+ /* The RFC defined case, connect to the old IP/protocol using the
+ new port. */
+ if (IP.empty() == true)
{
- PasvAddr.sin_addr = Peer.sin_addr;
+ // Get the IP in text form
+ char Name[NI_MAXHOST];
+ char Service[NI_MAXSERV];
+ getnameinfo((struct sockaddr *)&PeerAddr,PeerAddrLen,
+ Name,sizeof(Name),Service,sizeof(Service),
+ NI_NUMERICHOST|NI_NUMERICSERV);
+ IP = Name;
+ Hints.ai_family = ((struct sockaddr *)&PeerAddr)->sa_family;
}
else
{
- a[0] = a0;
- a[1] = a1;
- a[2] = a2;
- a[3] = a3;
+ // Get the family..
+ Hints.ai_family = 0;
+ for (unsigned J = 0; AFMap[J].Family != 0; J++)
+ if (AFMap[J].IETFFamily == Proto)
+ Hints.ai_family = AFMap[J].Family;
+ if (Hints.ai_family == 0)
+ return true;
}
- p[0] = p0;
- p[1] = p1;
+ // Get a new passive address.
+ int Res;
+ if ((Res = getaddrinfo(IP.c_str(),PStr,&Hints,&PasvAddr)) != 0)
+ return true;
return true;
}
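ExtGoPasv parses an RFC 2428 EPSV reply such as
'229 Entering Extended Passive Mode (|||6446|)': the character after '(' is
the delimiter and only the port field is mandatory. A standalone sketch over a
sample reply:

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    int main()
    {
       const char *Msg = "229 Entering Extended Passive Mode (|||6446|)";
       const char *Open = strchr(Msg,'(');
       if (Open == 0)
          return 1;

       char Delim = Open[1];                 // first char after '(' is the delimiter
       const char *Field[4];
       unsigned Count = 0;
       for (const char *I = Open + 1; *I != 0 && Count < 4; I++)
          if (*I == Delim)
             Field[Count++] = I;
       if (Count != 4)
          return 1;

       // The fields are <proto> <addr> <port>; only the port is required.
       unsigned long Port = strtoul(Field[2] + 1,0,10);
       printf("data port %lu\n",Port);
       return 0;
    }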
@@ -517,20 +658,21 @@ bool FTPConn::CreateDataFd()
return false;
// Oops, didn't work out, don't bother trying again.
- if (PasvAddr.sin_port == 0)
+ if (PasvAddr == 0)
TryPassive = false;
}
// Passive mode?
- if (PasvAddr.sin_port != 0)
+ if (PasvAddr != 0)
{
// Get a socket
- if ((DataFd = socket(AF_INET,SOCK_STREAM,0)) < 0)
+ if ((DataFd = socket(PasvAddr->ai_family,PasvAddr->ai_socktype,
+ PasvAddr->ai_protocol)) < 0)
return _error->Errno("socket","Could not create a socket");
// Connect to the server
SetNonBlock(DataFd,true);
- if (connect(DataFd,(sockaddr *)&PasvAddr,sizeof(PasvAddr)) < 0 &&
+ if (connect(DataFd,PasvAddr->ai_addr,PasvAddr->ai_addrlen) < 0 &&
errno != EINPROGRESS)
return _error->Errno("socket","Could not create a socket");
@@ -543,8 +685,8 @@ bool FTPConn::CreateDataFd()
if (getsockopt(DataFd,SOL_SOCKET,SO_ERROR,&Err,&Len) != 0)
return _error->Errno("getsockopt","Failed");
if (Err != 0)
- return _error->Error("Could not connect.");
-
+ return _error->Error("Could not connect passive socket.");
+
return true;
}
@@ -552,43 +694,91 @@ bool FTPConn::CreateDataFd()
close(DataListenFd);
DataListenFd = -1;
- // Get a socket
- if ((DataListenFd = socket(AF_INET,SOCK_STREAM,0)) < 0)
+ // Get the information for a listening socket.
+ struct addrinfo *BindAddr = 0;
+ struct addrinfo Hints;
+ memset(&Hints,0,sizeof(Hints));
+ Hints.ai_socktype = SOCK_STREAM;
+ Hints.ai_flags |= AI_PASSIVE;
+ Hints.ai_family = ((struct sockaddr *)&ServerAddr)->sa_family;
+ int Res;
+ if ((Res = getaddrinfo(0,"0",&Hints,&BindAddr)) != 0)
+ return _error->Error("getaddrinfo was unable to get a listening socket");
+
+ // Construct the socket
+ if ((DataListenFd = socket(BindAddr->ai_family,BindAddr->ai_socktype,
+ BindAddr->ai_protocol)) < 0)
+ {
+ freeaddrinfo(BindAddr);
return _error->Errno("socket","Could not create a socket");
+ }
// Bind and listen
- sockaddr_in Addr;
- memset(&Addr,0,sizeof(Addr));
- if (bind(DataListenFd,(sockaddr *)&Addr,sizeof(Addr)) < 0)
+ if (bind(DataListenFd,BindAddr->ai_addr,BindAddr->ai_addrlen) < 0)
+ {
+ freeaddrinfo(BindAddr);
return _error->Errno("bind","Could not bind a socket");
+ }
+ freeaddrinfo(BindAddr);
if (listen(DataListenFd,1) < 0)
return _error->Errno("listen","Could not listen on the socket");
SetNonBlock(DataListenFd,true);
// Determine the name to send to the remote
- sockaddr_in Addr2;
- socklen_t Jnk = sizeof(Addr);
- if (getsockname(DataListenFd,(sockaddr *)&Addr,&Jnk) < 0)
+ struct sockaddr_storage Addr;
+ socklen_t AddrLen = sizeof(Addr);
+ if (getsockname(DataListenFd,(sockaddr *)&Addr,&AddrLen) < 0)
return _error->Errno("getsockname","Could not determine the socket's name");
- Jnk = sizeof(Addr2);
- if (getsockname(ServerFd,(sockaddr *)&Addr2,&Jnk) < 0)
- return _error->Errno("getsockname","Could not determine the socket's name");
-
- // This bit ripped from qftp
- unsigned long badr = ntohl(*(unsigned long *)&Addr2.sin_addr);
- unsigned long bp = ntohs(Addr.sin_port);
- // Send the port command
+ // Reverse the address. We need the server address and the data port.
+ char Name[NI_MAXHOST];
+ char Service[NI_MAXSERV];
+ char Service2[NI_MAXSERV];
+ getnameinfo((struct sockaddr *)&Addr,AddrLen,
+ Name,sizeof(Name),Service,sizeof(Service),
+ NI_NUMERICHOST|NI_NUMERICSERV);
+ getnameinfo((struct sockaddr *)&ServerAddr,ServerAddrLen,
+ Name,sizeof(Name),Service2,sizeof(Service2),
+ NI_NUMERICHOST|NI_NUMERICSERV);
+
+ // Send off an IPv4 address in the old port format
+ if (((struct sockaddr *)&Addr)->sa_family == AF_INET &&
+ ForceExtended == false)
+ {
+ // Convert the dots in the quad into commas
+ for (char *I = Name; *I != 0; I++)
+ if (*I == '.')
+ *I = ',';
+ unsigned long Port = atoi(Service);
+
+ // Send the port command
+ unsigned int Tag;
+ string Msg;
+ if (WriteMsg(Tag,Msg,"PORT %s,%d,%d",
+ Name,
+ (int)(Port >> 8) & 0xff, (int)(Port & 0xff)) == false)
+ return false;
+ if (Tag >= 400)
+ return _error->Error("Unable to send PORT command");
+ return true;
+ }
+
+ // Construct an EPRT command
+ unsigned Proto = 0;
+ for (unsigned J = 0; AFMap[J].Family != 0; J++)
+ if (AFMap[J].Family == ((struct sockaddr *)&Addr)->sa_family)
+ Proto = AFMap[J].IETFFamily;
+ if (Proto == 0)
+      return _error->Error("Unknown address family %u (AF_*)",
+ ((struct sockaddr *)&Addr)->sa_family);
+
+ // Send the EPRT command
unsigned int Tag;
string Msg;
- if (WriteMsg(Tag,Msg,"PORT %d,%d,%d,%d,%d,%d",
- (int) (badr >> 24) & 0xff, (int) (badr >> 16) & 0xff,
- (int) (badr >> 8) & 0xff, (int) badr & 0xff,
- (int) (bp >> 8) & 0xff, (int) bp & 0xff) == false)
+ if (WriteMsg(Tag,Msg,"EPRT |%u|%s|%s|",Proto,Name,Service) == false)
return false;
if (Tag >= 400)
- return _error->Error("Unable to send port command");
-
+ return _error->Error("EPRT failed, server said: %s",Msg.c_str());
return true;
}
/*}}}*/
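The EPRT command sent above follows the RFC 2428 form
'EPRT |<af>|<address>|<port>|', where <af> is 1 for IPv4 and 2 for IPv6. A
standalone sketch that merely formats the command for an invented listening
address:

    #include <stdio.h>

    int main()
    {
       unsigned Proto = 2;                   // 1 = IPv4, 2 = IPv6
       const char *Addr = "2001:db8::1";     // sample listening address
       const char *Port = "6275";            // sample listening port

       char Cmd[128];
       snprintf(Cmd,sizeof(Cmd),"EPRT |%u|%s|%s|",Proto,Addr,Port);
       printf("%s\r\n",Cmd);
       return 0;
    }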
@@ -599,7 +789,7 @@ bool FTPConn::CreateDataFd()
bool FTPConn::Finalize()
{
// Passive mode? Do nothing
- if (PasvAddr.sin_port != 0)
+ if (PasvAddr != 0)
return true;
// Close any old socket..
diff --git a/methods/ftp.h b/methods/ftp.h
index 7416589a0..f791195b3 100644
--- a/methods/ftp.h
+++ b/methods/ftp.h
@@ -1,5 +1,6 @@
// -*- mode: cpp; mode: fold -*-
-// Description /*{{{*/// $Id: ftp.h,v 1.2 1999/03/15 07:20:41 jgg Exp $
+// Description /*{{{*/// $Id: ftp.h,v 1.3 2001/02/20 07:03:18 jgg Exp $
+// $Id: ftp.h,v 1.3 2001/02/20 07:03:18 jgg Exp $
/* ######################################################################
   FTP Acquire Method - This is the FTP acquire method for APT.
@@ -17,12 +18,20 @@ class FTPConn
int DataFd;
int DataListenFd;
URI ServerName;
+ bool ForceExtended;
bool TryPassive;
bool Debug;
-
- struct sockaddr_in PasvAddr;
- struct sockaddr_in Peer;
+ struct addrinfo *PasvAddr;
+
+ // Generic Peer Address
+ struct sockaddr_storage PeerAddr;
+ socklen_t PeerAddrLen;
+
+ // Generic Server Address (us)
+ struct sockaddr_storage ServerAddr;
+ socklen_t ServerAddrLen;
+
// Private helper functions
bool ReadLine(string &Text);
bool Login();
@@ -41,6 +50,7 @@ class FTPConn
bool Open(pkgAcqMethod *Owner);
void Close();
bool GoPasv();
+ bool ExtGoPasv();
// Query
bool Size(const char *Path,unsigned long &Size);
diff --git a/methods/http.cc b/methods/http.cc
index f52459377..7347e8349 100644
--- a/methods/http.cc
+++ b/methods/http.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: http.cc,v 1.46 2000/05/28 04:33:59 jgg Exp $
+// $Id: http.cc,v 1.47 2001/02/20 07:03:18 jgg Exp $
/* ######################################################################
   HTTP Acquire Method - This is the HTTP acquire method for APT.
@@ -285,7 +285,7 @@ bool ServerState::Open()
else
Proxy = getenv("http_proxy");
- // Parse no_proxy, a , seperated list of hosts
+   // Parse no_proxy, a comma-separated list of hosts
if (getenv("no_proxy") != 0)
{
const char *Start = getenv("no_proxy");
@@ -376,6 +376,10 @@ int ServerState::RunHeaders()
I = J;
}
+ // 100 Continue is a Nop...
+ if (Result == 100)
+ continue;
+
   // Tidy up the connection persistence state.
if (Encoding == Closes && HaveContent == true)
Persistent = false;
@@ -537,7 +541,7 @@ bool ServerState::HeaderLine(string Line)
else
Persistent = true;
}
-
+
return true;
}
@@ -676,6 +680,10 @@ void HttpMethod::SendReq(FetchItem *Itm,CircleBuf &Out)
Req += string("Proxy-Authorization: Basic ") +
Base64Encode(Proxy.User + ":" + Proxy.Password) + "\r\n";
+ if (Uri.User.empty() == false || Uri.Password.empty() == false)
+ Req += string("Authorization: Basic ") +
+ Base64Encode(Uri.User + ":" + Uri.Password) + "\r\n";
+
Req += "User-Agent: Debian APT-HTTP/1.2\r\n\r\n";
if (Debug == true)
@@ -1058,6 +1066,7 @@ int HttpMethod::Loop()
{
_error->Error("Bad header Data");
Fail(true);
+ RotateDNS();
continue;
}
@@ -1076,6 +1085,7 @@ int HttpMethod::Loop()
FailCounter = 0;
}
+ RotateDNS();
continue;
}
};
@@ -1093,6 +1103,11 @@ int HttpMethod::Loop()
// Run the data
bool Result = Server->RunData();
+ /* If the server is sending back sizeless responses then fill in
+ the size now */
+ if (Res.Size == 0)
+ Res.Size = File->Size();
+
// Close the file, destroy the FD object and timestamp it
FailFd = -1;
delete File;
@@ -1108,7 +1123,7 @@ int HttpMethod::Loop()
// Send status to APT
if (Result == true)
{
- Res.MD5Sum = Server->In.MD5->Result();
+ Res.MD5Sum = Server->In.MD5->Result();
URIDone(Res);
}
else
@@ -1135,6 +1150,7 @@ int HttpMethod::Loop()
case 5:
{
Fail();
+ RotateDNS();
Server->Close();
break;
}
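
The Authorization header added to SendReq() above is plain HTTP Basic authentication: the user and password taken from the URI are joined with a colon and Base64-encoded, exactly as already done for the proxy. A minimal sketch of the same construction with apt-pkg's Base64Encode and hypothetical credentials (not from the patch):

   #include <apt-pkg/strutl.h>
   #include <iostream>

   int main()
   {
      std::string User = "apt";           // hypothetical
      std::string Password = "secret";    // hypothetical
      std::string Req = std::string("Authorization: Basic ") +
         Base64Encode(User + ":" + Password) + "\r\n";
      std::cout << Req;   // Authorization: Basic YXB0OnNlY3JldA==
      return 0;
   }
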
diff --git a/methods/http.h b/methods/http.h
index 0c916707b..2569c2921 100644
--- a/methods/http.h
+++ b/methods/http.h
@@ -1,5 +1,6 @@
// -*- mode: cpp; mode: fold -*-
-// Description /*{{{*/// $Id: http.h,v 1.8 2000/05/28 04:33:59 jgg Exp $
+// Description /*{{{*/// $Id: http.h,v 1.9 2001/02/20 07:03:18 jgg Exp $
+// $Id: http.h,v 1.9 2001/02/20 07:03:18 jgg Exp $
/* ######################################################################
   HTTP Acquire Method - This is the HTTP acquire method for APT.
@@ -133,7 +134,7 @@ class HttpMethod : public pkgAcqMethod
static void SigTerm(int);
public:
- friend ServerState;
+ friend class ServerState;
FileFd *File;
ServerState *Server;
diff --git a/methods/makefile b/methods/makefile
index e8eaec230..dfab23ab2 100644
--- a/methods/makefile
+++ b/methods/makefile
@@ -47,3 +47,19 @@ SLIBS = -lapt-pkg $(SOCKETLIBS)
LIB_MAKES = apt-pkg/makefile
SOURCE = ftp.cc rfc2553emu.cc connect.cc
include $(PROGRAM_H)
+
+# The rsh method
+PROGRAM=rsh
+SLIBS = -lapt-pkg
+LIB_MAKES = apt-pkg/makefile
+SOURCE = rsh.cc
+include $(PROGRAM_H)
+
+# SSH method symlink
+all: $(BIN)/ssh
+veryclean: clean-$(BIN)/ssh
+$(BIN)/ssh:
+ echo "Installing ssh method link"
+ ln -fs rsh $(BIN)/ssh
+clean-$(BIN)/ssh:
+ rm $(BIN)/ssh
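
The ssh link needs no separate source because the rsh method (added below) derives the program it will exec from the name it was invoked under; a small illustration of that argv[0] trick (with a null check added here that rsh.cc itself omits):

   #include <cstring>
   #include <cstdio>

   int main(int argc,const char *argv[])
   {
      // Same idea as main() in methods/rsh.cc below.
      const char *Prog = strrchr(argv[0],'/');
      Prog = (Prog == 0) ? argv[0] : Prog + 1;
      printf("would exec: %s\n",Prog);   // "rsh" or "ssh", depending on the link
      return 0;
   }
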
diff --git a/methods/rfc2553emu.cc b/methods/rfc2553emu.cc
index 22daa2231..66bc906e9 100644
--- a/methods/rfc2553emu.cc
+++ b/methods/rfc2553emu.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: rfc2553emu.cc,v 1.7 2000/06/18 06:04:45 jgg Exp $
+// $Id: rfc2553emu.cc,v 1.8 2001/02/20 07:03:18 jgg Exp $
/* ######################################################################
RFC 2553 Emulation - Provides emulation for RFC 2553 getaddrinfo,
@@ -36,20 +36,6 @@ int getaddrinfo(const char *nodename, const char *servname,
const char *End;
char **CurAddr;
- Addr = gethostbyname(nodename);
- if (Addr == 0)
- {
- if (h_errno == TRY_AGAIN)
- return EAI_AGAIN;
- if (h_errno == NO_RECOVERY)
- return EAI_FAIL;
- return EAI_NONAME;
- }
-
- // No A records
- if (Addr->h_addr_list[0] == 0)
- return EAI_NONAME;
-
// Try to convert the service as a number
Port = htons(strtol(servname,(char **)&End,0));
Proto = SOCK_STREAM;
@@ -86,10 +72,32 @@ int getaddrinfo(const char *nodename, const char *servname,
hints->ai_socktype != 0)
return EAI_SERVICE;
}
+
+ // Hostname lookup, only if this is not a listening socket
+ if (hints != 0 && (hints->ai_flags & AI_PASSIVE) != AI_PASSIVE)
+ {
+ Addr = gethostbyname(nodename);
+ if (Addr == 0)
+ {
+ if (h_errno == TRY_AGAIN)
+ return EAI_AGAIN;
+ if (h_errno == NO_RECOVERY)
+ return EAI_FAIL;
+ return EAI_NONAME;
+ }
+
+ // No A records
+ if (Addr->h_addr_list[0] == 0)
+ return EAI_NONAME;
+
+ CurAddr = Addr->h_addr_list;
+ }
+ else
+ CurAddr = (char **)&End; // Fake!
// Start constructing the linked list
*res = 0;
- for (CurAddr = Addr->h_addr_list; *CurAddr != 0; CurAddr++)
+ for (; *CurAddr != 0; CurAddr++)
{
// New result structure
*Result = (struct addrinfo *)calloc(sizeof(**Result),1);
@@ -124,8 +132,15 @@ int getaddrinfo(const char *nodename, const char *servname,
// Set the address
((struct sockaddr_in *)(*Result)->ai_addr)->sin_family = AF_INET;
((struct sockaddr_in *)(*Result)->ai_addr)->sin_port = Port;
- ((struct sockaddr_in *)(*Result)->ai_addr)->sin_addr = *(in_addr *)(*CurAddr);
-
+
+ if (hints != 0 && (hints->ai_flags & AI_PASSIVE) != AI_PASSIVE)
+ ((struct sockaddr_in *)(*Result)->ai_addr)->sin_addr = *(in_addr *)(*CurAddr);
+ else
+ {
+      // Already zeroed by calloc.
+ break;
+ }
+
Result = &(*Result)->ai_next;
}
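
This change matters for the listening-socket path in ftp.cc above: with AI_PASSIVE set and no node name, a getaddrinfo() caller expects a wildcard address it can hand straight to socket() and bind(), so the emulation now skips the gethostbyname() step and returns a single zeroed sockaddr_in (family and port filled in, address left as INADDR_ANY). A minimal sketch of that calling pattern, using only the standard sockets API and a hypothetical service string:

   #include <sys/types.h>
   #include <sys/socket.h>
   #include <netdb.h>
   #include <cstring>
   #include <cstdio>
   #include <unistd.h>

   int main()
   {
      struct addrinfo Hints, *BindAddr = 0;
      memset(&Hints,0,sizeof(Hints));
      Hints.ai_socktype = SOCK_STREAM;
      Hints.ai_flags = AI_PASSIVE;                    // listening socket, no lookup

      if (getaddrinfo(0,"0",&Hints,&BindAddr) != 0)   // "0" = any free port
         return 1;

      int Fd = socket(BindAddr->ai_family,BindAddr->ai_socktype,
                      BindAddr->ai_protocol);
      if (Fd >= 0 && bind(Fd,BindAddr->ai_addr,BindAddr->ai_addrlen) == 0)
         printf("bound to a wildcard address\n");
      if (Fd >= 0)
         close(Fd);
      freeaddrinfo(BindAddr);
      return 0;
   }
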
diff --git a/methods/rsh.cc b/methods/rsh.cc
new file mode 100644
index 000000000..9e521edec
--- /dev/null
+++ b/methods/rsh.cc
@@ -0,0 +1,486 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+// $Id: rsh.cc,v 1.2 2001/02/20 07:03:18 jgg Exp $
+/* ######################################################################
+
+   RSH method - Transfer files via an rsh-compatible program
+
+ Written by Ben Collins <bcollins@debian.org>, Copyright (c) 2000
+ Licensed under the GNU General Public License v2 [no exception clauses]
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files							/*{{{*/
+#include "rsh.h"
+#include <apt-pkg/error.h>
+
+#include <sys/stat.h>
+#include <sys/time.h>
+#include <utime.h>
+#include <unistd.h>
+#include <signal.h>
+#include <stdio.h>
+#include <errno.h>
+#include <stdarg.h>
+ /*}}}*/
+
+const char *Prog;
+unsigned long TimeOut = 120;
+time_t RSHMethod::FailTime = 0;
+string RSHMethod::FailFile;
+int RSHMethod::FailFd = -1;
+
+// RSHConn::RSHConn - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+RSHConn::RSHConn(URI Srv) : Len(0), WriteFd(-1), ReadFd(-1),
+ ServerName(Srv), Process(-1) {}
+ /*}}}*/
+// RSHConn::RSHConn - Destructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+RSHConn::~RSHConn()
+{
+ Close();
+}
+ /*}}}*/
+// RSHConn::Close - Forcibly terminate the connection /*{{{*/
+// ---------------------------------------------------------------------
+/* Often this is called when things have gone wrong to indicate that the
+ connection is no longer usable. */
+void RSHConn::Close()
+{
+ if (Process == -1)
+ return;
+
+ close(WriteFd);
+ close(ReadFd);
+ kill(Process,SIGINT);
+ ExecWait(Process,"",true);
+ WriteFd = -1;
+ ReadFd = -1;
+ Process = -1;
+}
+ /*}}}*/
+// RSHConn::Open - Connect to a host /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool RSHConn::Open()
+{
+ // Use the already open connection if possible.
+ if (Process != -1)
+ return true;
+
+ if (Connect(ServerName.Host,ServerName.User) == false)
+ return false;
+
+ return true;
+}
+ /*}}}*/
+// RSHConn::Connect - Fire up rsh and connect /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool RSHConn::Connect(string Host, string User)
+{
+ // Create the pipes
+ int Pipes[4] = {-1,-1,-1,-1};
+ if (pipe(Pipes) != 0 || pipe(Pipes+2) != 0)
+ {
+ _error->Errno("pipe","Failed to create IPC pipe to subprocess");
+ for (int I = 0; I != 4; I++)
+ close(Pipes[I]);
+ return false;
+ }
+ for (int I = 0; I != 4; I++)
+ SetCloseExec(Pipes[I],true);
+
+ Process = ExecFork();
+
+ // The child
+ if (Process == 0)
+ {
+ const char *Args[6];
+ int i = 0;
+
+ dup2(Pipes[1],STDOUT_FILENO);
+ dup2(Pipes[2],STDIN_FILENO);
+
+ // Probably should do
+ // dup2(open("/dev/null",O_RDONLY),STDERR_FILENO);
+
+ Args[i++] = Prog;
+ if (User.empty() == false) {
+ Args[i++] = "-l";
+ Args[i++] = User.c_str();
+ }
+ if (Host.empty() == false) {
+ Args[i++] = Host.c_str();
+ }
+ Args[i++] = "/bin/sh";
+ Args[i] = 0;
+ execvp(Args[0],(char **)Args);
+ exit(100);
+ }
+
+ ReadFd = Pipes[0];
+ WriteFd = Pipes[3];
+ SetNonBlock(Pipes[0],true);
+ SetNonBlock(Pipes[3],true);
+ close(Pipes[1]);
+ close(Pipes[2]);
+
+ return true;
+}
+ /*}}}*/
+// RSHConn::ReadLine - Very simple buffered read with timeout /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool RSHConn::ReadLine(string &Text)
+{
+ if (Process == -1 || ReadFd == -1)
+ return false;
+
+ // Suck in a line
+ while (Len < sizeof(Buffer))
+ {
+ // Scan the buffer for a new line
+ for (unsigned int I = 0; I != Len; I++)
+ {
+ // Escape some special chars
+ if (Buffer[I] == 0)
+ Buffer[I] = '?';
+
+ // End of line?
+ if (Buffer[I] != '\n')
+ continue;
+
+ I++;
+ Text = string(Buffer,I);
+ memmove(Buffer,Buffer+I,Len - I);
+ Len -= I;
+ return true;
+ }
+
+ // Wait for some data..
+ if (WaitFd(ReadFd,false,TimeOut) == false)
+ {
+ Close();
+ return _error->Error("Connection timeout");
+ }
+
+ // Suck it back
+ int Res = read(ReadFd,Buffer + Len,sizeof(Buffer) - Len);
+ if (Res <= 0)
+ {
+ _error->Errno("read","Read error");
+ Close();
+ return false;
+ }
+ Len += Res;
+ }
+
+ return _error->Error("A response overflowed the buffer.");
+}
+ /*}}}*/
+// RSHConn::WriteMsg - Send a message with optional remote sync. /*{{{*/
+// ---------------------------------------------------------------------
+/* The remote sync flag appends a || echo which will insert a blank line
+ once the command completes. */
+bool RSHConn::WriteMsg(string &Text,bool Sync,const char *Fmt,...)
+{
+ va_list args;
+ va_start(args,Fmt);
+
+ // sprintf the description
+ char S[512];
+ vsnprintf(S,sizeof(S) - 4,Fmt,args);
+ if (Sync == true)
+ strcat(S," 2> /dev/null || echo\n");
+ else
+ strcat(S," 2> /dev/null\n");
+
+ // Send it off
+ unsigned long Len = strlen(S);
+ unsigned long Start = 0;
+ while (Len != 0)
+ {
+ if (WaitFd(WriteFd,true,TimeOut) == false)
+ {
+
+ Close();
+ return _error->Error("Connection timeout");
+ }
+
+ int Res = write(WriteFd,S + Start,Len);
+ if (Res <= 0)
+ {
+ _error->Errno("write","Write Error");
+ Close();
+ return false;
+ }
+
+ Len -= Res;
+ Start += Res;
+ }
+
+ if (Sync == true)
+ return ReadLine(Text);
+ return true;
+}
+ /*}}}*/
+// RSHConn::Size - Return the size of the file /*{{{*/
+// ---------------------------------------------------------------------
+/* Right now for a successful transfer the file size must be known in
+ advance. */
+bool RSHConn::Size(const char *Path,unsigned long &Size)
+{
+ // Query the size
+ string Msg;
+ Size = 0;
+
+ if (WriteMsg(Msg,true,"find %s -follow -printf '%%s\\n'",Path) == false)
+ return false;
+
+ // FIXME: Sense if the bad reply is due to a File Not Found.
+
+ char *End;
+ Size = strtoul(Msg.c_str(),&End,10);
+ if (End == Msg.c_str())
+ return _error->Error("File Not Found");
+ return true;
+}
+ /*}}}*/
+// RSHConn::ModTime - Get the modification time in UTC /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool RSHConn::ModTime(const char *Path, time_t &Time)
+{
+ Time = time(&Time);
+ // Query the mod time
+ string Msg;
+
+ if (WriteMsg(Msg,true,"TZ=UTC find %s -follow -printf '%%TY%%Tm%%Td%%TH%%TM%%TS\\n'",Path) == false)
+ return false;
+
+ // Parse it
+ StrToTime(Msg,Time);
+ return true;
+}
+ /*}}}*/
+// RSHConn::Get - Get a file /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool RSHConn::Get(const char *Path,FileFd &To,unsigned long Resume,
+ MD5Summation &MD5,bool &Missing, unsigned long Size)
+{
+ Missing = false;
+
+ // Round to a 2048 byte block
+ Resume = Resume - (Resume % 2048);
+
+ if (To.Truncate(Resume) == false)
+ return false;
+ if (To.Seek(0) == false)
+ return false;
+
+ if (Resume != 0) {
+ if (MD5.AddFD(To.Fd(),Resume) == false) {
+ _error->Errno("read","Problem hashing file");
+ return false;
+ }
+ }
+
+ // FIXME: Detect file-not openable type errors.
+ string Jnk;
+ if (WriteMsg(Jnk,false,"dd if=%s bs=2048 skip=%u", Path, Resume / 2048) == false)
+ return false;
+
+ // Copy loop
+ unsigned int MyLen = Resume;
+ unsigned char Buffer[4096];
+ while (MyLen < Size)
+ {
+ // Wait for some data..
+ if (WaitFd(ReadFd,false,TimeOut) == false)
+ {
+ Close();
+ return _error->Error("Data socket timed out");
+ }
+
+ // Read the data..
+ int Res = read(ReadFd,Buffer,sizeof(Buffer));
+ if (Res == 0)
+ {
+ Close();
+ return _error->Error("Connection closed prematurely");
+ }
+
+ if (Res < 0)
+ {
+ if (errno == EAGAIN)
+ continue;
+ break;
+ }
+ MyLen += Res;
+
+ MD5.Add(Buffer,Res);
+ if (To.Write(Buffer,Res) == false)
+ {
+ Close();
+ return false;
+ }
+ }
+
+ return true;
+}
+ /*}}}*/
+
+// RSHMethod::RSHMethod - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+RSHMethod::RSHMethod() : pkgAcqMethod("1.0")
+{
+ signal(SIGTERM,SigTerm);
+ signal(SIGINT,SigTerm);
+ Server = 0;
+ FailFd = -1;
+};
+ /*}}}*/
+// RSHMethod::SigTerm - Clean up and timestamp the files on exit /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+void RSHMethod::SigTerm(int sig)
+{
+ if (FailFd == -1)
+ _exit(100);
+ close(FailFd);
+
+ // Timestamp
+ struct utimbuf UBuf;
+ UBuf.actime = FailTime;
+ UBuf.modtime = FailTime;
+ utime(FailFile.c_str(),&UBuf);
+
+ _exit(100);
+}
+ /*}}}*/
+// RSHMethod::Fetch - Fetch a URI /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool RSHMethod::Fetch(FetchItem *Itm)
+{
+ URI Get = Itm->Uri;
+ const char *File = Get.Path.c_str();
+ FetchResult Res;
+ Res.Filename = Itm->DestFile;
+ Res.IMSHit = false;
+
+ // Connect to the server
+ if (Server == 0 || Server->Comp(Get) == false) {
+ delete Server;
+ Server = new RSHConn(Get);
+ }
+
+ // Could not connect is a transient error..
+ if (Server->Open() == false) {
+ Server->Close();
+ Fail(true);
+ return true;
+ }
+
+   // We report this mainly because the pause here is for the
+   // rsh/ssh connection that is still being set up
+ Status("Connecting to %s", Get.Host.c_str());
+
+ // Get the files information
+ unsigned long Size;
+ if (Server->Size(File,Size) == false ||
+ Server->ModTime(File,FailTime) == false)
+ {
+ //Fail(true);
+ //_error->Error("File Not Found"); // Will be handled by Size
+ return false;
+ }
+ Res.Size = Size;
+
+ // See if it is an IMS hit
+ if (Itm->LastModified == FailTime) {
+ Res.Size = 0;
+ Res.IMSHit = true;
+ URIDone(Res);
+ return true;
+ }
+
+ // See if the file exists
+ struct stat Buf;
+ if (stat(Itm->DestFile.c_str(),&Buf) == 0) {
+ if (Size == (unsigned)Buf.st_size && FailTime == Buf.st_mtime) {
+ Res.Size = Buf.st_size;
+ Res.LastModified = Buf.st_mtime;
+ Res.ResumePoint = Buf.st_size;
+ URIDone(Res);
+ return true;
+ }
+
+ // Resume?
+ if (FailTime == Buf.st_mtime && Size > (unsigned)Buf.st_size)
+ Res.ResumePoint = Buf.st_size;
+ }
+
+ // Open the file
+ MD5Summation MD5;
+ {
+ FileFd Fd(Itm->DestFile,FileFd::WriteAny);
+ if (_error->PendingError() == true)
+ return false;
+
+ URIStart(Res);
+
+ FailFile = Itm->DestFile;
+      FailFile.c_str();   // Make sure we don't do a malloc in the signal handler
+ FailFd = Fd.Fd();
+
+ bool Missing;
+ if (Server->Get(File,Fd,Res.ResumePoint,MD5,Missing,Res.Size) == false)
+ {
+ Fd.Close();
+
+ // Timestamp
+ struct utimbuf UBuf;
+ UBuf.actime = FailTime;
+ UBuf.modtime = FailTime;
+ utime(FailFile.c_str(),&UBuf);
+
+ // If the file is missing we hard fail otherwise transient fail
+ if (Missing == true)
+ return false;
+ Fail(true);
+ return true;
+ }
+
+ Res.Size = Fd.Size();
+ }
+
+ Res.LastModified = FailTime;
+ Res.MD5Sum = MD5.Result();
+
+ // Timestamp
+ struct utimbuf UBuf;
+ UBuf.actime = FailTime;
+ UBuf.modtime = FailTime;
+ utime(Queue->DestFile.c_str(),&UBuf);
+ FailFd = -1;
+
+ URIDone(Res);
+
+ return true;
+}
+ /*}}}*/
+
+int main(int argc, const char *argv[])
+{
+ RSHMethod Mth;
+ Prog = strrchr(argv[0],'/');
+ Prog++;
+ return Mth.Run();
+}
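
Because RSHConn talks to a plain remote shell, each operation above is just a command line written down the pipe: Size() asks find for a byte count, ModTime() asks for a UTC timestamp, and Get() streams the file with dd from a 2048-byte-aligned resume offset. A sketch of the strings WriteMsg() ends up formatting, for a hypothetical path and resume point (WriteMsg() itself appends the trailing " 2> /dev/null || echo\n" or plain newline):

   #include <cstdio>

   int main()
   {
      const char *Path = "/debian/dists/stable/Release";   // hypothetical
      unsigned long Resume = 4096;                         // hypothetical

      char SizeCmd[300], DataCmd[300];
      snprintf(SizeCmd,sizeof(SizeCmd),
               "find %s -follow -printf '%%s\\n'",Path);
      snprintf(DataCmd,sizeof(DataCmd),
               "dd if=%s bs=2048 skip=%lu",Path,Resume / 2048);

      printf("%s\n%s\n",SizeCmd,DataCmd);
      return 0;
   }
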
diff --git a/methods/rsh.h b/methods/rsh.h
new file mode 100644
index 000000000..c3f3258e9
--- /dev/null
+++ b/methods/rsh.h
@@ -0,0 +1,69 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/// $Id: rsh.h,v 1.2 2001/02/20 07:03:18 jgg Exp $
+// $Id: rsh.h,v 1.2 2001/02/20 07:03:18 jgg Exp $
+/* ######################################################################
+
+   RSH method - Transfer files via an rsh-compatible program
+
+ ##################################################################### */
+ /*}}}*/
+#ifndef APT_RSH_H
+#define APT_RSH_H
+
+#include <string>
+#include <apt-pkg/strutl.h>
+#include <apt-pkg/md5.h>
+#include <apt-pkg/acquire-method.h>
+#include <apt-pkg/fileutl.h>
+
+class RSHConn
+{
+ char Buffer[1024*10];
+ unsigned long Len;
+ int WriteFd;
+ int ReadFd;
+ URI ServerName;
+
+ // Private helper functions
+ bool ReadLine(string &Text);
+
+ public:
+
+ int Process;
+
+ // Raw connection IO
+ bool WriteMsg(string &Text,bool Sync,const char *Fmt,...);
+ bool Connect(string Host, string User);
+ bool Comp(URI Other) {return Other.Host == ServerName.Host && Other.Port == ServerName.Port;};
+
+ // Connection control
+ bool Open();
+ void Close();
+
+ // Query
+ bool Size(const char *Path,unsigned long &Size);
+ bool ModTime(const char *Path, time_t &Time);
+ bool Get(const char *Path,FileFd &To,unsigned long Resume,
+ MD5Summation &MD5,bool &Missing, unsigned long Size);
+
+ RSHConn(URI Srv);
+ ~RSHConn();
+};
+
+class RSHMethod : public pkgAcqMethod
+{
+ virtual bool Fetch(FetchItem *Itm);
+
+ RSHConn *Server;
+
+ static string FailFile;
+ static int FailFd;
+ static time_t FailTime;
+ static void SigTerm(int);
+
+ public:
+
+ RSHMethod();
+};
+
+#endif
diff --git a/test/conf.cc b/test/conf.cc
new file mode 100644
index 000000000..c44161426
--- /dev/null
+++ b/test/conf.cc
@@ -0,0 +1,35 @@
+#include <apt-pkg/configuration.h>
+#include <apt-pkg/error.h>
+
+int main(int argc,const char *argv[])
+{
+ Configuration Cnf;
+
+ ReadConfigFile(Cnf,argv[1],true);
+
+ // Process 'simple-key' type sections
+ const Configuration::Item *Top = Cnf.Tree("simple-key");
+ for (Top = (Top == 0?0:Top->Child); Top != 0; Top = Top->Next)
+ {
+ Configuration Block(Top);
+
+ string VendorID = Top->Tag;
+ string FingerPrint = Block.Find("Fingerprint");
+ string Name = Block.Find("Name"); // Description?
+
+ if (FingerPrint.empty() == true || Name.empty() == true)
+ _error->Error("Block %s is invalid",VendorID.c_str());
+
+ cout << VendorID << ' ' << FingerPrint << ' ' << Name << endl;
+ }
+
+ // Print any errors or warnings found during parsing
+ if (_error->empty() == false)
+ {
+ bool Errors = _error->PendingError();
+ _error->DumpErrors();
+ return Errors == true?100:0;
+ }
+
+ return 0;
+}
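
conf.cc expects its input to be sectional: blocks tagged simple-key, each carrying at least a Fingerprint and a Name. A hypothetical input in APT's configuration syntax (the sample file is not part of this patch), written out and parsed the same way the test does:

   #include <apt-pkg/configuration.h>
   #include <fstream>

   int main()
   {
      // Hypothetical vendors-style data matching the keys conf.cc reads.
      std::ofstream Out("sample.conf");
      Out << "simple-key \"joe@example.org\"\n"
             "{\n"
             "   Fingerprint \"0101010101010101\";\n"
             "   Name \"Joe Example\";\n"
             "};\n";
      Out.close();

      Configuration Cnf;
      return ReadConfigFile(Cnf,"sample.conf",true) == true ? 0 : 1;
   }
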
diff --git a/test/extract-control.cc b/test/extract-control.cc
new file mode 100644
index 000000000..125088896
--- /dev/null
+++ b/test/extract-control.cc
@@ -0,0 +1,40 @@
+#include <apt-pkg/debfile.h>
+#include <apt-pkg/error.h>
+
+#include <iostream>
+#include <unistd.h>
+
+bool ExtractMember(const char *File,const char *Member)
+{
+ FileFd Fd(File,FileFd::ReadOnly);
+ debDebFile Deb(Fd);
+ if(_error->PendingError() == true)
+ return false;
+
+ debDebFile::MemControlExtract Extract(Member);
+ if (Extract.Read(Deb) == false)
+ return false;
+
+ if (Extract.Control == 0)
+ return true;
+
+ write(STDOUT_FILENO,Extract.Control,Extract.Length);
+ return true;
+}
+
+int main(int argc, const char *argv[])
+{
+   if (argc < 3)
+ {
+ cerr << "Need two arguments, a .deb and the control member" << endl;
+ return 100;
+ }
+
+ if (ExtractMember(argv[1],argv[2]) == false)
+ {
+ _error->DumpErrors();
+ return 100;
+ }
+
+ return 0;
+}
diff --git a/test/makefile b/test/makefile
index e2f4c048f..e3b2ac524 100644
--- a/test/makefile
+++ b/test/makefile
@@ -30,3 +30,29 @@ SLIBS = -lapt-pkg
LIB_MAKES = apt-pkg/makefile
SOURCE = versiontest.cc
include $(PROGRAM_H)
+
+# Version compare tester
+PROGRAM=testextract
+SLIBS = -lapt-pkg -lapt-inst
+LIB_MAKES = apt-pkg/makefile apt-inst/makefile
+SOURCE = testextract.cc
+include $(PROGRAM_H)
+
+# Program for testing the config file parser
+PROGRAM=conftest
+SLIBS = -lapt-pkg
+SOURCE = conf.cc
+include $(PROGRAM_H)
+
+# Program for testing the tar/deb extractor
+PROGRAM=testdeb
+SLIBS = -lapt-pkg -lapt-inst
+SOURCE = testdeb.cc
+include $(PROGRAM_H)
+
+# Program for testing tar extraction
+PROGRAM=extract-control
+SLIBS = -lapt-pkg -lapt-inst
+SOURCE = extract-control.cc
+include $(PROGRAM_H)
+
diff --git a/test/scratch.cc b/test/scratch.cc
index c8888a160..b52608150 100644
--- a/test/scratch.cc
+++ b/test/scratch.cc
@@ -1,3 +1,4 @@
+#define APT_COMPATIBILITY 1
#include <apt-pkg/dpkgdb.h>
#include <apt-pkg/debfile.h>
#include <apt-pkg/error.h>
diff --git a/test/testdeb.cc b/test/testdeb.cc
new file mode 100644
index 000000000..5986621bb
--- /dev/null
+++ b/test/testdeb.cc
@@ -0,0 +1,39 @@
+#include <apt-pkg/dirstream.h>
+#include <apt-pkg/debfile.h>
+#include <apt-pkg/error.h>
+#include <apt-pkg/extracttar.h>
+
+class NullStream : public pkgDirStream
+{
+ public:
+ virtual bool DoItem(Item &Itm,int &Fd) {return true;};
+};
+
+bool Test(const char *File)
+{
+ FileFd Fd(File,FileFd::ReadOnly);
+ debDebFile Deb(Fd);
+
+ if (_error->PendingError() == true)
+ return false;
+
+   // Get the archive member and position the file
+ const ARArchive::Member *Member = Deb.GotoMember("data.tar.gz");
+ if (Member == 0)
+ return false;
+
+ // Extract it.
+ ExtractTar Tar(Deb.GetFile(),Member->Size);
+ NullStream Dir;
+ if (Tar.Go(Dir) == false)
+ return false;
+
+ return true;
+}
+
+int main(int argc, const char *argv[])
+{
+ Test(argv[1]);
+ _error->DumpErrors();
+ return 0;
+}
diff --git a/test/testextract.cc b/test/testextract.cc
new file mode 100644
index 000000000..41a197068
--- /dev/null
+++ b/test/testextract.cc
@@ -0,0 +1,96 @@
+#define APT_COMPATIBILITY 1
+#include <apt-pkg/dpkgdb.h>
+#include <apt-pkg/debfile.h>
+#include <apt-pkg/error.h>
+#include <apt-pkg/configuration.h>
+#include <apt-pkg/progress.h>
+#include <apt-pkg/extract.h>
+#include <apt-pkg/init.h>
+#include <apt-pkg/strutl.h>
+
+#include <stdio.h>
+#include <stdlib.h>
+
+bool Go(int argc,char *argv[])
+{
+ // Init the database
+ debDpkgDB Db;
+ {
+ OpTextProgress Prog;
+
+ if (Db.ReadyPkgCache(Prog) == false)
+ return false;
+ Prog.Done();
+
+ if (Db.ReadyFileList(Prog) == false)
+ return false;
+ }
+
+ for (int I = 1; I < argc; I++)
+ {
+ const char *Fake = 0;
+ for (unsigned J = 0; argv[I][J] != 0; J++)
+ {
+ if (argv[I][J] != ',')
+ continue;
+ Fake = argv[I] + J + 1;
+ argv[I][J] = 0;
+ }
+
+ FileFd F(argv[I],FileFd::ReadOnly);
+ debDebFile Deb(F);
+
+ if (_error->PendingError() == true)
+ return false;
+
+ if (Deb.ExtractControl(Db) == false)
+ return false;
+ cout << argv[I] << endl;
+
+ pkgCache::VerIterator Ver = Deb.MergeControl(Db);
+ if (Ver.end() == true)
+ return false;
+
+ cout << Ver.ParentPkg().Name() << ' ' << Ver.VerStr() << endl;
+
+ pkgExtract Extract(Db.GetFLCache(),Ver);
+
+ if (Fake != 0)
+ {
+ pkgExtract::Item Itm;
+ memset(&Itm,0,sizeof(Itm));
+ FILE *F = fopen(Fake,"r");
+ while (feof(F) == 0)
+ {
+ char Line[300];
+ fgets(Line,sizeof(Line),F);
+ Itm.Name = _strstrip(Line);
+ Itm.Type = pkgDirStream::Item::File;
+ if (Line[strlen(Line)-1] == '/')
+ Itm.Type = pkgDirStream::Item::Directory;
+
+ int Fd;
+ if (Extract.DoItem(Itm,Fd) == false)
+ return false;
+ }
+ }
+ else
+ if (Deb.ExtractArchive(Extract) == false)
+ return false;
+ }
+ return true;
+}
+
+int main(int argc,char *argv[])
+{
+ pkgInitialize(*_config);
+ _config->Set("Dir::State::status","/tmp/testing/status");
+
+ Go(argc,argv);
+
+ if (_error->PendingError() == true)
+ {
+ _error->DumpErrors();
+ return 0;
+ }
+}
diff --git a/test/versiontest.cc b/test/versiontest.cc
index d3bfe18fe..20da49b72 100644
--- a/test/versiontest.cc
+++ b/test/versiontest.cc
@@ -1,6 +1,6 @@
// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: versiontest.cc,v 1.1 1998/11/26 23:29:20 jgg Exp $
+// $Id: versiontest.cc,v 1.2 2001/02/20 07:03:18 jgg Exp $
/* ######################################################################
   Version Test - Simple program to run through a file and compare versions.
@@ -14,13 +14,14 @@
##################################################################### */
/*}}}*/
+#define APT_COMPATIBILITY 1
#include <system.h>
#include <apt-pkg/error.h>
#include <apt-pkg/version.h>
#include <iostream.h>
#include <fstream.h>
-static int verrevcmp(const char *val, const char *ref)
+ static int verrevcmp(const char *val, const char *ref)
{
int vc, rc;
long vl, rl;